diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e64138f27..64518abd4 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -29,9 +29,8 @@ jobs: rustup component add clippy rustup component add rustfmt - # Travis doesn't enforce this yet. - # - name: check formatting - # run: cargo fmt -- --check + - name: check formatting + run: cargo fmt -- --check - name: build and test run: cargo test --locked --all-targets diff --git a/.travis.yml b/.travis.yml index b007d94e9..ce62e6f50 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,7 +4,10 @@ matrix: include: # OS compat - os: linux + # As of this writing, stable Rust (1.44.0) and the OS X version on TravisCI + # do not work well together. - os: osx + rust: 1.43.1 # rustc version compat - rust: 1.41.1 # oldest supported version, keep in sync with README.md @@ -29,6 +32,7 @@ matrix: before_script: - export EXTRA_FEATURES= + - if [[ "${TRAVIS_RUST_VERSION}" = "stable" ]]; then rustup component add rustfmt && cargo fmt -- --check; fi - if [[ "${TRAVIS_RUST_VERSION}" = "nightly" ]]; then export EXTRA_FEATURES="$EXTRA_FEATURES unstable"; fi - if [[ "${DIST_SCCACHE}" = "1" ]]; then export EXTRA_FEATURES="$EXTRA_FEATURES dist-client dist-server"; fi diff --git a/Cargo.lock b/Cargo.lock index 751848ce2..8a87db111 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -65,14 +65,15 @@ checksum = "eab1c04a571841102f5345a8fc0f6bb3d31c315dec879b5c6e42e40ce7ffa34e" [[package]] name = "assert_cmd" -version = "0.9.1" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5b60c276f334145cf2cec09c5bb6f63523f078c0c850909f66bca8f933cf809" +checksum = "c88b9ca26f9c16ec830350d309397e74ee9abdfd8eb1f71cb6ecc71a3fc818da" dependencies = [ - "escargot", + "doc-comment", "predicates", "predicates-core", "predicates-tree", + "wait-timeout", ] [[package]] @@ -549,6 +550,12 @@ dependencies = [ "winapi 0.3.8", ] +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + [[package]] name = "dtoa" version = "0.4.5" @@ -601,16 +608,6 @@ dependencies = [ "version_check 0.9.1", ] -[[package]] -name = "escargot" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19db1f7e74438642a5018cdf263bb1325b2e792f02dd0a3ca6d6c0f0d7b1d5a5" -dependencies = [ - "serde", - "serde_json", -] - [[package]] name = "failure" version = "0.1.7" @@ -677,9 +674,9 @@ dependencies = [ [[package]] name = "float-cmp" -version = "0.4.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134a8fa843d80a51a5b77d36d42bc2def9edcb0262c914861d08129fd1926600" +checksum = "da62c4f1b81918835a8c6a484a397775fff5953fe83529afd51b05f5c6a6617d" dependencies = [ "num-traits 0.2.11", ] @@ -733,6 +730,31 @@ version = "0.1.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b980f2816d6ee8673b6517b52cb0e808a180efc92e5c19d02cdda79066703ef" +[[package]] +name = "futures" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c329ae8753502fb44ae4fc2b622fa2a94652c41e795143765ba0927f92ab780" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + "futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "f0c77d04ce8edd9cb903932b608268b3fffec4163dc053b3b402bf47eac1f1a8" +dependencies = [ + "futures-core", + "futures-sink", +] + [[package]] name = "futures-core" version = "0.3.4" @@ -745,10 +767,39 @@ version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab90cde24b3319636588d0c35fe03b1333857621051837ed769faefb4c2162e4" dependencies = [ - "futures", + "futures 0.1.29", "num_cpus", ] +[[package]] +name = "futures-executor" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f674f3e1bcb15b37284a90cedf55afdba482ab061c407a9c0ebbd0f3109741ba" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a638959aa96152c7a4cddf50fcb1e3fede0583b27157c26e67d6f99904090dc6" + +[[package]] +name = "futures-macro" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a5081aa3de1f7542a794a397cde100ed903b0630152d0973479018fd85423a7" +dependencies = [ + "proc-macro-hack", + "proc-macro2", + "quote 1.0.3", + "syn 1.0.18", +] + [[package]] name = "futures-sink" version = "0.3.4" @@ -767,11 +818,17 @@ version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22766cf25d64306bedf0384da004d05c9974ab104fcc4528f1236181c18004c5" dependencies = [ - "futures", + "futures 0.1.29", + "futures-channel", "futures-core", + "futures-io", + "futures-macro", "futures-sink", "futures-task", + "memchr 2.3.3", "pin-utils", + "proc-macro-hack", + "proc-macro-nested", "slab", ] @@ -813,7 +870,7 @@ dependencies = [ "byteorder", "bytes 0.4.12", "fnv", - "futures", + "futures 0.1.29", "http", "indexmap", "log 0.4.8", @@ -859,7 +916,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6741c859c1b2463a423a1dbce98d418e6c3c3fc720fb0d45528657320920292d" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "http", "tokio-buf", ] @@ -886,7 +943,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9dbe6ed1438e1f8ad955a4701e9a944938e9519f6888d12d8558b645e247d5f6" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "futures-cpupool", "h2", "http", @@ -916,7 +973,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3a800d6aa50af4b5850b2b0f659625ce9504df908e9733b635720483be26174f" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "hyper", "native-tls", "tokio-io", @@ -1303,16 +1360,6 @@ dependencies = [ "winapi 0.3.8", ] -[[package]] -name = "msdos_time" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aad9dfe950c057b1bfe9c1f2aa51583a8468ef2a5baba2ebbe06d775efeb7729" -dependencies = [ - "time", - "winapi 0.3.8", -] - [[package]] name = "multipart" version = "0.13.6" @@ -1387,9 +1434,9 @@ dependencies = [ [[package]] name = "normalize-line-endings" -version = "0.2.2" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e0a1a39eab95caf4f5556da9289b9e68f0aafac901b2ce80daaf020d3b733a8" +checksum = "61807f77802ff30975e01f4f071c8ba10c022052f98b3294119f3e615d13e5be" [[package]] name = "num-integer" @@ -1586,9 +1633,9 @@ checksum = "74490b50b9fbe561ac330df47c08f3f33073d2d00c150f719147d7c54522fa1b" [[package]] name = "predicates" -version = "0.9.1" +version = "1.0.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31e7977fc111984fdac76b6ae3a4cb598008fc6fd02dfdca189bf180bd7be20" +checksum = "347a1b6f0b21e636bc9872fb60b83b8e185f6f5516298b8238699f7f9a531030" dependencies = [ "difference", "float-cmp", @@ -1599,20 +1646,32 @@ dependencies = [ [[package]] name = "predicates-core" -version = "0.9.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85f80bc390d1c02a4cdaa63f27f05c3c426679eb65433d8dd65d392147e4e5c5" +checksum = "06075c3a3e92559ff8929e7a280684489ea27fe44805174c3ebd9328dcb37178" [[package]] name = "predicates-tree" -version = "0.9.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e86df9b81bdcb0a5141aca9d2b9c5e0c558ef6626d3ae2c12912f5c9df740bd" +checksum = "8e63c4859013b38a76eca2414c64911fba30def9e3202ac461a2d22831220124" dependencies = [ "predicates-core", "treeline", ] +[[package]] +name = "proc-macro-hack" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d659fe7c6d27f25e9d80a1a094c223f5246f6a6596453e09d7229bf42750b63" + +[[package]] +name = "proc-macro-nested" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e946095f9d3ed29ec38de908c22f95d9ac008e424c7bcae54c75a79c527c694" + [[package]] name = "proc-macro2" version = "1.0.10" @@ -1865,7 +1924,7 @@ checksum = "f0c747d743d48233f9bc3ed3fb00cb84c1d98d8c7f54ed2d4cca9adf461a7ef3" dependencies = [ "bytes 0.4.12", "combine", - "futures", + "futures 0.1.29", "sha1", "tokio-codec", "tokio-executor", @@ -1930,7 +1989,7 @@ dependencies = [ "cookie_store", "encoding_rs", "flate2", - "futures", + "futures 0.1.29", "http", "hyper", "hyper-tls", @@ -2074,10 +2133,10 @@ dependencies = [ "directories", "env_logger", "error-chain 0.12.2", - "escargot", "filetime 0.2.9", "flate2", - "futures", + "futures 0.1.29", + "futures 0.3.4", "futures-cpupool", "hmac", "http", @@ -2542,7 +2601,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a09c0b5bb588872ab2f09afa13ee6e9dac11e10a0ec9e8e3ba39a5a5d530af6" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "mio", "num_cpus", "tokio-codec", @@ -2583,7 +2642,7 @@ checksum = "8fb220f46c53859a4b7ec083e41dec9778ff0b1851c0942b211edb89e0ccdc46" dependencies = [ "bytes 0.4.12", "either", - "futures", + "futures 0.1.29", ] [[package]] @@ -2593,7 +2652,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25b2998660ba0e70d18684de5d06b70b70a3a747469af9dea7618cc59e75976b" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "tokio-io", ] @@ -2603,7 +2662,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "107b625135aa7b9297dd2d99ccd6ca6ab124a5d1230778e159b9095adca4c722" dependencies = [ - "futures", + "futures 0.1.29", "futures-core", "futures-util", "pin-project-lite", @@ -2620,7 +2679,7 @@ version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b1de0e32a83f131e002238d7ccde18211c0a5397f60cbfffcb112868c2e0e20e" dependencies = [ - "futures", + "futures 0.1.29", "tokio-executor", ] @@ -2631,7 +2690,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb2d1b8f4548dbf5e1f7818512e9c406860678f29c300cdf0ebac72d1a3a1671" dependencies = [ "crossbeam-utils 0.7.2", - "futures", + "futures 0.1.29", ] [[package]] @@ -2640,7 +2699,7 @@ version = "0.1.7" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "297a1206e0ca6302a0eed35b700d292b275256f596e2f3fea7729d5e629b6ff4" dependencies = [ - "futures", + "futures 0.1.29", "tokio-io", "tokio-threadpool", ] @@ -2652,7 +2711,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57fc868aae093479e3131e3d165c93b1c7474109d13c90ec0dda2a1bbfff0674" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "log 0.4.8", ] @@ -2663,7 +2722,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d282d483052288b2308ba5ee795f5673b159c9bdf63c385a05609da782a5eae" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "mio", "mio-named-pipes", "tokio 0.1.22", @@ -2676,7 +2735,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "382d90f43fa31caebe5d3bc6cfd854963394fff3b8cb59d5146607aaae7e7e43" dependencies = [ "crossbeam-queue 0.1.2", - "futures", + "futures 0.1.29", "lazy_static", "libc", "log 0.4.8", @@ -2695,7 +2754,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09bc590ec4ba8ba87652da2068d150dcada2cfa2e07faae270a5e0409aa51351" dependencies = [ "crossbeam-utils 0.7.2", - "futures", + "futures 0.1.29", "lazy_static", "log 0.4.8", "mio", @@ -2714,7 +2773,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "894168193c4f80862a2244ff953b69145a9961a9efba39500e0970b083d0649c" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", ] [[package]] @@ -2726,7 +2785,7 @@ dependencies = [ "bincode 0.8.0", "bytes 0.4.12", "derive-error", - "futures", + "futures 0.1.29", "serde", "tokio-serde", ] @@ -2737,7 +2796,7 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0c34c6e548f101053321cba3da7cbb87a610b85555884c41b07da2eb91aff12" dependencies = [ - "futures", + "futures 0.1.29", "libc", "mio", "mio-uds", @@ -2755,7 +2814,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edfe50152bc8164fcc456dab7891fa9bf8beaf01c5ee7e1dd43a397c3cf87dee" dependencies = [ "fnv", - "futures", + "futures 0.1.29", ] [[package]] @@ -2765,7 +2824,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "98df18ed66e3b72e742f185882a9e201892407957e45fbff8da17ae7a7c51f72" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "iovec", "mio", "tokio-io", @@ -2781,7 +2840,7 @@ dependencies = [ "crossbeam-deque", "crossbeam-queue 0.2.1", "crossbeam-utils 0.7.2", - "futures", + "futures 0.1.29", "lazy_static", "log 0.4.8", "num_cpus", @@ -2796,7 +2855,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93044f2d313c95ff1cb7809ce9a7a05735b012288a888b62d4434fd58c94f296" dependencies = [ "crossbeam-utils 0.7.2", - "futures", + "futures 0.1.29", "slab", "tokio-executor", ] @@ -2808,7 +2867,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2a0b10e610b39c38b031a2fcab08e4b82f16ece36504988dcbd81dbba650d82" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "log 0.4.8", "mio", "tokio-codec", @@ -2823,7 +2882,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5076db410d6fdc6523df7595447629099a1fdc47b3d9f896220780fa48faf798" dependencies = [ "bytes 0.4.12", - "futures", + "futures 0.1.29", "iovec", "libc", "log 0.4.8", @@ -2849,7 +2908,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dc72f33b6a72c75c9df0037afce313018bae845f0ec7fdb9201b8768427a917f" dependencies = [ - "futures", + "futures 0.1.29", "tower-buffer", "tower-discover", "tower-layer", @@ -2867,7 +2926,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c7b83e1ccf5b23dd109dd6ae2c07b8e2beec7a51a21f29da2dba576317370e0" dependencies = [ - "futures", + "futures 0.1.29", "tokio-executor", "tokio-sync", "tower-layer", @@ -2881,7 +2940,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73a7632286f78164d65d18fd0e570307acde9362489aa5c8c53e6315cc2bde47" dependencies = [ - "futures", + "futures 0.1.29", "tower-service", ] @@ -2891,7 +2950,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ddf07e10c07dcc8f41da6de036dc66def1a85b70eb8a385159e3908bb258328" dependencies = [ - "futures", + "futures 0.1.29", "tower-service", ] @@ -2901,7 +2960,7 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7b2807e2531b621e23e18693d49d0663bc617240ac0da8ed9b0c64cacd5c67fb" dependencies = [ - "futures", + "futures 0.1.29", "tokio-sync", "tokio-timer", "tower-layer", @@ -2915,7 +2974,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04fbaf5bfb63d84204db87b9b2aeec61549613f2bbb8706dcc36f5f3ea8cd769" dependencies = [ - "futures", + "futures 0.1.29", "tower-layer", "tower-service", ] @@ -2926,7 +2985,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09e80588125061f276ed2a7b0939988b411e570a2dbb2965b1382ef4f71036f7" dependencies = [ - "futures", + "futures 0.1.29", "tokio-timer", "tower-layer", "tower-service", @@ -2938,7 +2997,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2cc0c98637d23732f8de6dfd16494c9f1559c3b9e20b4a46462c8f9b9e827bfa" dependencies = [ - "futures", + "futures 0.1.29", ] [[package]] @@ -2947,7 +3006,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c06bbc2fbd056f810940a8c6f0cc194557d36da3c22999a755a7a6612447da9" dependencies = [ - "futures", + "futures 0.1.29", "tokio-timer", "tower-layer", "tower-service", @@ -2959,7 +3018,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4792342fac093db5d2558655055a89a04ca909663467a4310c7739d9f8b64698" dependencies = [ - "futures", + "futures 0.1.29", "tokio-io", "tower-layer", "tower-service", @@ -3193,6 +3252,15 @@ dependencies = [ "utf8parse", ] +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "walkdir" version = "1.0.7" @@ -3210,7 +3278,7 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6395efa4784b027708f7451087e647ec73cc74f5d9bc2e418404248d679a230" dependencies = [ - "futures", + "futures 0.1.29", "log 0.4.8", "try-lock", ] @@ -3303,12 +3371,11 @@ dependencies = [ [[package]] name = "zip" -version = "0.4.2" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36b9e08fb518a65cf7e08a1e482573eb87a2f4f8c6619316612a3c1f162fe822" +checksum = "6df134e83b8f0f8153a094c7b0fd79dfebe437f1d76e7715afa18ed95ebe2fd7" dependencies = [ + "crc32fast", "flate2", - 
"msdos_time", "podio", - "time", ] diff --git a/Cargo.toml b/Cargo.toml index 0af730eaa..e2f595838 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -38,6 +38,7 @@ error-chain = { version = "0.12.1", default-features = false } filetime = "0.2" flate2 = { version = "1.0", optional = true, default-features = false, features = ["rust_backend"] } futures = "0.1.11" +futures_03 = { package = "futures", version = "0.3", features = ["compat"] } futures-cpupool = "0.1" hmac = { version = "0.7", optional = true } http = "0.1" @@ -84,7 +85,7 @@ uuid = { version = "0.7", features = ["v4"] } walkdir = "1.0.7" # by default which pulls in an outdated failure version which = { version = "3", default-features = false } -zip = { version = "0.4", default-features = false, features = ["deflate"] } +zip = { version = "0.5", default-features = false, features = ["deflate"] } # dist-server only crossbeam-utils = { version = "0.5", optional = true } @@ -101,12 +102,11 @@ quote = "1.0.2" tiny_http = { git = "https://github.com/tiny-http/tiny-http.git", rev = "619680de" } [dev-dependencies] -assert_cmd = "0.9" +assert_cmd = "1" cc = "1.0" chrono = "0.4" -escargot = "0.3" itertools = "0.7" -predicates = "0.9.0" +predicates = "1" # Waiting for #15 to make it into a release selenium-rs = { git = "https://github.com/saresend/selenium-rs.git", rev = "0314a2420da78cce7454a980d862995750771722" } diff --git a/README.md b/README.md index b23edf635..c5538771d 100644 --- a/README.md +++ b/README.md @@ -57,22 +57,20 @@ sccache sccache - Shared Compilation Cache ================================== -Sccache is a [ccache](https://ccache.dev/)-like tool. It is used as a compiler wrapper and avoids compilation when possible, storing a cache in a remote storage using the Amazon Simple Cloud Storage Service (S3) API, the Google Cloud Storage (GCS) API, or Redis. +sccache is a [ccache](https://ccache.dev/)-like compiler caching tool. It is used as a compiler wrapper and avoids compilation when possible, storing cached results either on [local disk](#local) or in one of [several cloud storage backends](#storage-options). -Sccache now includes [experimental Rust support](docs/Rust.md). +sccache includes support for caching the compilation of C/C++ code, [Rust](docs/Rust.md), as well as NVIDIA's CUDA using [nvcc](https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html). -It works as a client-server. The client spawns a server if one is not running already, and sends the wrapped command line as a request to the server, which then does the work and returns stdout/stderr for the job. The client-server model allows the server to be more efficient in its handling of the remote storage. - -Sccache can also be used with local storage instead of remote. +sccache also provides [icecream](https://github.com/icecc/icecream)-style distributed compilation (automatic packaging of local toolchains) for all supported compilers (including Rust). The distributed compilation system includes several security features that icecream lacks such as authentication, transport layer encryption, and sandboxed compiler execution on build servers. See [the distributed quickstart](docs/DistributedQuickstart.md) guide for more information. 
--- Table of Contents (ToC) ====================== +* [Installation](#installation) * [Build Requirements](#build-requirements) * [Build](#build) -* [Installation](#installation) * [Usage](#usage) * [Storage Options](#storage-options) * [Local](#local) @@ -87,20 +85,93 @@ Table of Contents (ToC) --- +## Installation + +There are prebuilt x86-64 binaries available for Windows, Linux (a portable binary compiled against musl), and macOS [on the releases page](https://github.com/mozilla/sccache/releases/latest). Several package managers also include sccache packages, you can install the latest release from source using cargo, or build directly from a source checkout. + +### macOS + +On macOS sccache can be installed via [Homebrew](https://brew.sh/): + +```bash +brew install sccache +``` + +### Windows + +On Windows, sccache can be installed via [scoop](https://scoop.sh/): + +``` +scoop install sccache +``` + +### Via cargo + +If you have a Rust toolchain installed you can install sccache using cargo. **Note that this will compile sccache from source which is fairly resource-intensive. For CI purposes you should use prebuilt binary packages.** + + +```bash +cargo install sccache +``` + +--- + +Usage +----- + +Running sccache is like running ccache: prefix your compilation commands with it, like so: + +```bash +sccache gcc -o foo.o -c foo.c +``` + +If you want to use sccache for caching Rust builds you can define `build.rustc-wrapper` in the +[cargo configuration file](https://doc.rust-lang.org/cargo/reference/config.html). For example, you can set it globally +in `$HOME/.cargo/config` by adding: + +```toml +[build] +rustc-wrapper = "/path/to/sccache" +``` + +Note that you need to use cargo 1.40 or newer for this to work. + +Alternatively you can use the environment variable `RUSTC_WRAPPER`: + +```bash +RUSTC_WRAPPER=/path/to/sccache cargo build +``` + +sccache supports gcc, clang, MSVC, rustc, NVCC, and [Wind River's diab compiler](https://www.windriver.com/products/development-tools/#diab_compiler). + +If you don't [specify otherwise](#storage-options), sccache will use a local disk cache. + +sccache works using a client-server model, where the server runs locally on the same machine as the client. The client-server model allows the server to be more efficient by keeping some state in memory. The sccache command will spawn a server process if one is not already running, or you can run `sccache --start-server` to start the background server process without performing any compilation. + +You can run `sccache --stop-server` to terminate the server. It will also terminate after (by default) 10 minutes of inactivity. + +Running `sccache --show-stats` will print a summary of cache statistics. + +Some notes about using `sccache` with [Jenkins](https://jenkins.io) are [here](docs/Jenkins.md). + +--- + Build Requirements ------------------ -Sccache is a [Rust](https://www.rust-lang.org/) program. Building it requires `cargo` (and thus `rustc`). sccache currently requires **Rust 1.41.1**. - -We recommend you install Rust via [Rustup](https://rustup.rs/). The generated binaries can be built so that they are very [portable](#building-portable-binaries). By default `sccache` supports a local disk cache. To build `sccache` with support for `S3` and/or `Redis` cache backends, add `--features=all` or select a specific feature by passing `s3`, `gcs`, and/or `redis`. Refer the [Cargo Documentation](http://doc.crates.io/manifest.html#the-features-section) for details. 
+sccache is a [Rust](https://www.rust-lang.org/) program. Building it requires `cargo` (and thus `rustc`). sccache currently requires **Rust 1.41.1**. We recommend you install Rust via [Rustup](https://rustup.rs/). Build ----- +If you are building sccache for non-development purposes make sure you use `cargo build --release` to get optimized binaries: + ```bash -cargo build [--features=all|redis|s3|gcs] [--release] +cargo build --release [--features=all|s3|redis|gcs|memcached|azure] ``` +By default, `sccache` supports a local disk cache and S3. Use the `--features` flag to build `sccache` with support for other storage options. Refer the [Cargo Documentation](http://doc.crates.io/manifest.html#the-features-section) for details on how to select features with Cargo. + ### Building portable binaries When building with the `gcs` feature, `sccache` will depend on OpenSSL, which can be an annoyance if you want to distribute portable binaries. It is possible to statically link against OpenSSL using the steps below before building with `cargo`. @@ -132,7 +203,7 @@ Build with `cargo` and use `otool -L` to check that the resulting binary does no #### Windows -On Windows it is fairly straight forward to just ship the required `libcrypto` and `libssl` DLLs with `sccache.exe`, but the binary might also depend on a few MSVC CRT DLLs that are not available on older Windows versions. +On Windows it is fairly straightforward to just ship the required `libcrypto` and `libssl` DLLs with `sccache.exe`, but the binary might also depend on a few MSVC CRT DLLs that are not available on older Windows versions. It is possible to statically link against the CRT using a `.cargo/config` file with the following contents. @@ -157,75 +228,13 @@ set OPENSSL_LIBS=libcrypto64MT:libssl64MT --- -## Installation - -### With Rust - -```bash -cargo install sccache -``` - -### macOS - -sccache can also be installed via [Homebrew](https://brew.sh/) - -``` -brew install sccache -``` - -### Windows - -sccache can also be installed via [scoop](https://scoop.sh/) - -``` -scoop install sccache -``` - ---- - -Usage ------ - -Running sccache is like running ccache: wrap your compilation commands with it, like so: - -```bash -sccache gcc -o foo.o -c foo.c -``` - -If you want to use sccache for your rust builds you can define `build.rustc-wrapper` in the -[cargo configuration file](https://doc.rust-lang.org/cargo/reference/config.html). For example, you can set it globally -in `$HOME/.cargo/config` by adding: - -```toml -[build] -rustc-wrapper = "/path/to/sccache" -``` - -Note that you need to use cargo 1.40 or newer for this to work. - -Alternatively you can use the environment variable `RUSTC_WRAPPER`: - -```bash -RUSTC_WRAPPER=/path/to/sccache cargo build -``` - -Sccache (tries to) support gcc, clang, [diab](https://www.windriver.com/products/development-tools/#diab_compiler) and MSVC. If you don't [specify otherwise](#storage-options), sccache will use a local disk cache. - -You can run `sccache --start-server` to start the background server process without performing any compilation. - -You can run `sccache --stop-server` to terminate the server. It will terminate after 10 minutes of inactivity. - -Running `sccache --show-stats` will print a summary of cache statistics. - -Some notes about using `sccache` with [Jenkins](https://jenkins.io) are [here](docs/Jenkins.md). - ---- - Storage Options --------------- ### Local -Sccache defaults to using local disk storage. 
You can set the `SCCACHE_DIR` environment variable to change the disk cache location. By default it will use a sensible location for the current platform: `~/.cache/sccache` on Linux, `%LOCALAPPDATA%\Mozilla\sccache` on Windows, and `~/Library/Caches/Mozilla.sccache` on MacOS. To limit the cache size set `SCCACHE_CACHE_SIZE`, for example `SCCACHE_CACHE_SIZE="1G"`. The default value is 10 Gigabytes. +sccache defaults to using local disk storage. You can set the `SCCACHE_DIR` environment variable to change the disk cache location. By default it will use a sensible location for the current platform: `~/.cache/sccache` on Linux, `%LOCALAPPDATA%\Mozilla\sccache` on Windows, and `~/Library/Caches/Mozilla.sccache` on MacOS. + +The default cache size is 10 gigabytes. To change this, set `SCCACHE_CACHE_SIZE`, for example `SCCACHE_CACHE_SIZE="1G"`. ### S3 If you want to use S3 storage for the sccache cache, you need to set the `SCCACHE_BUCKET` environment variable to the name of the S3 bucket to use. @@ -252,7 +261,7 @@ To use Azure Blob Storage, you'll need your Azure connection string and an _exis environment variable to your connection string, and `SCCACHE_AZURE_BLOB_CONTAINER` to the name of the container to use. Note that sccache will not create the container for you - you'll need to do that yourself. -**Important:** The environment variables are only taken into account when the server starts, so only on the first run. +**Important:** The environment variables are only taken into account when the server starts, i.e. only on the first run. --- @@ -268,18 +277,6 @@ Alternately, you can set the `SCCACHE_ERROR_LOG` environment variable to a path Interaction with GNU `make` jobserver ------------------------------------- -Sccache provides support for a [GNU make jobserver](https://www.gnu.org/software/make/manual/html_node/Job-Slots.html). When the server is started from a process that provides a jobserver, sccache will use that jobserver and provide it to any processes it spawns. (If you are running sccache from a GNU make recipe, you will need to prefix the command with `+` to get this behavior.) If the sccache server is started without a jobserver present it will create its own with the number of slots equal to the number of available CPU cores. +sccache provides support for a [GNU make jobserver](https://www.gnu.org/software/make/manual/html_node/Job-Slots.html). When the server is started from a process that provides a jobserver, sccache will use that jobserver and provide it to any processes it spawns. (If you are running sccache from a GNU make recipe, you will need to prefix the command with `+` to get this behavior.) If the sccache server is started without a jobserver present it will create its own with the number of slots equal to the number of available CPU cores. This is most useful when using sccache for Rust compilation, as rustc supports using a jobserver for parallel codegen, so this ensures that rustc will not overwhelm the system with codegen tasks. Cargo implements its own jobserver ([see the information on `NUM_JOBS` in the cargo documentation](https://doc.rust-lang.org/stable/cargo/reference/environment-variables.html#environment-variables-cargo-sets-for-build-scripts)) for rustc to use, so using sccache for Rust compilation in cargo via `RUSTC_WRAPPER` should do the right thing automatically. 
- ---- - -Known caveats -------------- - -(and possible future improvements) - -* Sccache doesn't try to be smart about the command line arguments it uses when computing a key for a given compilation result (like skipping preprocessor-specific arguments) -* It doesn't support all kinds of compiler flags, and is certainly broken with a few of them. Really only the flags used during Firefox builds have been tested. -* It doesn't support ccache's direct mode. -* [It doesn't support an option like `CCACHE_BASEDIR`](https://github.com/mozilla/sccache/issues/35). diff --git a/docs/Rust.md b/docs/Rust.md index 996e61cfb..1dd649f5f 100644 --- a/docs/Rust.md +++ b/docs/Rust.md @@ -1,4 +1,4 @@ -sccache now includes experimental support for caching Rust compilation. This includes many caveats, and is primarily focused on caching rustc invocations as produced by cargo. A (possibly-incomplete) list follows: +sccache includes support for caching Rust compilation. This includes many caveats, and is primarily focused on caching rustc invocations as produced by cargo. A (possibly-incomplete) list follows: * `--emit` is required. * `--crate-name` is required. * Only `link` and `dep-info` are supported as `--emit` values, and `link` must be present. @@ -9,4 +9,4 @@ sccache now includes experimental support for caching Rust compilation. This inc * Procedural macros that read files from the filesystem may not be cached properly * Target specs aren't hashed (e.g. custom target specs) -If you are using Rust 1.18 or later, you can ask cargo to wrap all compilation with sccache by setting `RUSTC_WRAPPER=sccache` in your build environment. \ No newline at end of file +If you are using Rust 1.18 or later, you can ask cargo to wrap all compilation with sccache by setting `RUSTC_WRAPPER=sccache` in your build environment. 
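As a concrete form of the `RUSTC_WRAPPER` setup that docs/Rust.md describes above (this mirrors the command already shown in the README's Usage section; the sccache path is a placeholder):

```bash
# Ask cargo to wrap every rustc invocation with sccache for this build.
RUSTC_WRAPPER=/path/to/sccache cargo build
```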
diff --git a/src/bin/sccache-dist/main.rs b/src/bin/sccache-dist/main.rs index a3d392881..4196a300e 100644 --- a/src/bin/sccache-dist/main.rs +++ b/src/bin/sccache-dist/main.rs @@ -101,18 +101,18 @@ fn main() { Ok(s) => s, Err(e) => { let stderr = &mut std::io::stderr(); - writeln!(stderr, "error: {}", e).unwrap(); + writeln!(stderr, "sccache-dist: error: {}", e).unwrap(); for e in e.iter().skip(1) { - writeln!(stderr, "caused by: {}", e).unwrap(); + writeln!(stderr, "sccache-dist: caused by: {}", e).unwrap(); } 2 } }, Err(e) => { - println!("sccache: {}", e); + println!("sccache-dist: {}", e); for e in e.iter().skip(1) { - println!("caused by: {}", e); + println!("sccache-dist: caused by: {}", e); } get_app().print_help().unwrap(); println!(""); @@ -812,8 +812,7 @@ impl SchedulerIncoming for Scheduler { let mut servers = self.servers.lock().unwrap(); if let btree_map::Entry::Occupied(mut entry) = jobs.entry(job_id) { - // TODO: nll should mean not needing to copy this out - let job_detail = *entry.get(); + let job_detail = entry.get(); if job_detail.server_id != server_id { bail!( "Job id {} is not registed on server {:?}", diff --git a/src/cache/cache.rs b/src/cache/cache.rs index 431440314..6c906e977 100644 --- a/src/cache/cache.rs +++ b/src/cache/cache.rs @@ -26,16 +26,44 @@ use crate::cache::s3::S3Cache; use crate::config::{self, CacheType, Config}; use futures_cpupool::CpuPool; use std::fmt; +use std::fs; #[cfg(feature = "gcs")] use std::fs::File; -use std::io::{self, Read, Seek, Write}; +use std::io::{self, Cursor, Read, Seek, Write}; +use std::path::{Path, PathBuf}; use std::sync::Arc; use std::time::Duration; +use tempfile::NamedTempFile; use zip::write::FileOptions; use zip::{CompressionMethod, ZipArchive, ZipWriter}; use crate::errors::*; +#[cfg(unix)] +fn get_file_mode(file: &fs::File) -> Result> { + use std::os::unix::fs::MetadataExt; + Ok(Some(file.metadata()?.mode())) +} + +#[cfg(windows)] +fn get_file_mode(_file: &fs::File) -> Result> { + Ok(None) +} + +#[cfg(unix)] +fn set_file_mode(path: &Path, mode: u32) -> Result<()> { + use std::fs::Permissions; + use std::os::unix::fs::PermissionsExt; + let p = Permissions::from_mode(mode); + fs::set_permissions(path, p)?; + Ok(()) +} + +#[cfg(windows)] +fn set_file_mode(_path: &Path, _mode: u32) -> Result<()> { + Ok(()) +} + /// Result of a cache lookup. pub enum Cache { /// Result was found in cache. @@ -90,6 +118,46 @@ impl CacheRead { io::copy(&mut file, to)?; Ok(file.unix_mode()) } + + /// Get the stdout from this cache entry, if it exists. + pub fn get_stdout(&mut self) -> Vec { + self.get_bytes("stdout") + } + + /// Get the stderr from this cache entry, if it exists. + pub fn get_stderr(&mut self) -> Vec { + self.get_bytes("stderr") + } + + fn get_bytes(&mut self, name: &str) -> Vec { + let mut bytes = Vec::new(); + drop(self.get_object(name, &mut bytes)); + bytes + } + + pub fn extract_objects(mut self, objects: T, pool: &CpuPool) -> SFuture<()> + where + T: IntoIterator + Send + Sync + 'static, + { + Box::new(pool.spawn_fn(move || { + for (key, path) in objects { + let dir = match path.parent() { + Some(d) => d, + None => bail!("Output file without a parent directory!"), + }; + // Write the cache entry to a tempfile and then atomically + // move it to its final location so that other rustc invocations + // happening in parallel don't see a partially-written file. 
+ let mut tmp = NamedTempFile::new_in(dir)?; + let mode = self.get_object(&key, &mut tmp)?; + tmp.persist(&path)?; + if let Some(mode) = mode { + set_file_mode(&path, mode)?; + } + } + Ok(()) + })) + } } /// Data to be stored in the compiler cache. @@ -105,6 +173,24 @@ impl CacheWrite { } } + /// Create a new cache entry populated with the contents of `objects`. + pub fn from_objects(objects: T, pool: &CpuPool) -> SFuture + where + T: IntoIterator + Send + Sync + 'static, + { + Box::new(pool.spawn_fn(move || -> Result<_> { + let mut entry = CacheWrite::new(); + for (key, path) in objects { + let mut f = fs::File::open(&path)?; + let mode = get_file_mode(&f)?; + entry + .put_object(&key, &mut f, mode) + .chain_err(|| format!("failed to put object `{:?}` in cache entry", path))?; + } + Ok(entry) + })) + } + /// Add an object containing the contents of `from` to this cache entry at `name`. /// If `mode` is `Some`, store the file entry with that mode. pub fn put_object(&mut self, name: &str, from: &mut T, mode: Option) -> Result<()> @@ -124,6 +210,22 @@ impl CacheWrite { Ok(()) } + pub fn put_stdout(&mut self, bytes: &[u8]) -> Result<()> { + self.put_bytes("stdout", bytes) + } + + pub fn put_stderr(&mut self, bytes: &[u8]) -> Result<()> { + self.put_bytes("stderr", bytes) + } + + fn put_bytes(&mut self, name: &str, bytes: &[u8]) -> Result<()> { + if !bytes.is_empty() { + let mut cursor = Cursor::new(bytes); + return self.put_object(name, &mut cursor, None); + } + Ok(()) + } + /// Finish writing data to the cache entry writer, and return the data. pub fn finish(self) -> Result> { let CacheWrite { mut zip } = self; diff --git a/src/cmdline.rs b/src/cmdline.rs index 490055e6f..efbb35063 100644 --- a/src/cmdline.rs +++ b/src/cmdline.rs @@ -107,7 +107,7 @@ pub fn parse() -> Result { let mut args: Vec<_> = env::args_os().collect(); if cfg!(windows) { if args.iter().any(|x| x == "/?") { - return Ok(Command::NoOp) + return Ok(Command::NoOp); } } if !internal_start_server { diff --git a/src/commands.rs b/src/commands.rs index b10d1bbc6..852452619 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -150,8 +150,8 @@ fn run_server_process() -> Result { use std::os::windows::ffi::OsStrExt; use std::ptr; use std::time::Duration; - use tokio_reactor::Handle; use tokio_named_pipes::NamedPipe; + use tokio_reactor::Handle; use uuid::Uuid; use winapi::shared::minwindef::{DWORD, FALSE, LPVOID, TRUE}; use winapi::um::handleapi::CloseHandle; @@ -166,7 +166,11 @@ fn run_server_process() -> Result { let mut runtime = Runtime::new()?; let pipe_name = format!(r"\\.\pipe\{}", Uuid::new_v4().to_simple_ref()); let server = runtime.block_on(future::lazy(|| { - NamedPipe::new(&pipe_name, #[allow(deprecated)] &Handle::current()) + NamedPipe::new( + &pipe_name, + #[allow(deprecated)] + &Handle::current(), + ) }))?; // Connect a client to our server, and we'll wait below if it's still in @@ -425,10 +429,10 @@ fn handle_compile_finished( trace!("compiler exited with status {}", ret); Ok(ret) } else if let Some(signal) = response.signal { - println!("Compiler killed by signal {}", signal); + println!("sccache: Compiler killed by signal {}", signal); Ok(-2) } else { - println!("Missing compiler exit status!"); + println!("sccache: Missing compiler exit status!"); Ok(-3) } } @@ -466,7 +470,7 @@ where Ok(_) => bail!("unexpected response from server"), Err(Error(ErrorKind::Io(ref e), _)) if e.kind() == io::ErrorKind::UnexpectedEof => { eprintln!( - "warning: sccache server looks like it shut down \ + "sccache: warning: The server 
looks like it shut down \ unexpectedly, compiling locally instead" ); } @@ -499,7 +503,7 @@ where Ok(status.code().unwrap_or_else(|| { if let Some(sig) = status_signal(status) { - println!("Compile terminated by signal {}", sig); + println!("sccache: Compile terminated by signal {}", sig); } // Arbitrary. 2 @@ -560,12 +564,12 @@ pub fn run_command(cmd: Command) -> Result { } Command::StartServer => { trace!("Command::StartServer"); - println!("Starting sccache server..."); + println!("sccache: Starting the server..."); let startup = run_server_process().chain_err(|| "failed to start server process")?; match startup { ServerStartup::Ok { port } => { if port != DEFAULT_PORT { - println!("Listening on port {}", port); + println!("sccache: Listening on port {}", port); } } ServerStartup::TimedOut => bail!("Timed out waiting for server startup"), @@ -663,7 +667,8 @@ pub fn run_command(cmd: Command) -> Result { let out_file = File::create(out)?; let cwd = env::current_dir().expect("A current working dir should exist"); - let compiler = compiler::get_compiler_info(creator, &executable, &cwd, &env, &pool, None); + let compiler = + compiler::get_compiler_info(creator, &executable, &cwd, &env, &pool, None); let packager = compiler.map(|c| c.0.get_toolchain_packager()); let res = packager.and_then(|p| p.write_pkg(out_file)); runtime.block_on(res)? diff --git a/src/compiler/c.rs b/src/compiler/c.rs index f63e47e56..04dc0ee95 100644 --- a/src/compiler/c.rs +++ b/src/compiler/c.rs @@ -67,6 +67,7 @@ pub enum Language { Cxx, ObjectiveC, ObjectiveCxx, + Cuda, } /// The results of parsing a compiler commandline. @@ -77,10 +78,14 @@ pub struct ParsedArguments { pub input: PathBuf, /// The type of language used in the input source file. pub language: Language, + /// The flag required to compile for the given language + pub compilation_flag: OsString, /// The file in which to generate dependencies. pub depfile: Option, /// Output files, keyed by a simple name, like "obj". pub outputs: HashMap<&'static str, PathBuf>, + /// Commandline arguments for dependency generation. + pub dependency_args: Vec, /// Commandline arguments for the preprocessor (not including common_args). pub preprocessor_args: Vec, /// Commandline arguments for the preprocessor or the compiler. @@ -109,9 +114,10 @@ impl Language { pub fn from_file_name(file: &Path) -> Option { match file.extension().and_then(|e| e.to_str()) { Some("c") => Some(Language::C), - Some("C") | Some("cc") | Some("cpp") | Some("cxx") | Some("cu") => Some(Language::Cxx), + Some("C") | Some("cc") | Some("cpp") | Some("cxx") => Some(Language::Cxx), Some("m") => Some(Language::ObjectiveC), Some("mm") => Some(Language::ObjectiveCxx), + Some("cu") => Some(Language::Cuda), e => { trace!("Unknown source extension: {}", e.unwrap_or("(None)")); None @@ -125,6 +131,7 @@ impl Language { Language::Cxx => "c++", Language::ObjectiveC => "objc", Language::ObjectiveCxx => "objc++", + Language::Cuda => "cuda", } } } @@ -154,7 +161,7 @@ pub enum CCompilerKind { /// NVCC NVCC, /// Heterogeneous Compute Compiler - HCC + HCC, } /// An interface to a specific C compiler. @@ -598,6 +605,15 @@ impl pkg::ToolchainPackager for CToolchainPackager { add_named_file(&mut package_builder, "liblto_plugin.so")?; } + CCompilerKind::NVCC => { + // Various programs called by the nvcc front end. 
+ // presumes the underlying host compiler is consistent + add_named_file(&mut package_builder, "cudafe++")?; + add_named_file(&mut package_builder, "fatbinary")?; + add_named_prog(&mut package_builder, "nvlink")?; + add_named_prog(&mut package_builder, "ptxas")?; + } + _ => unreachable!(), } diff --git a/src/compiler/clang.rs b/src/compiler/clang.rs index e1c64e622..eb7f097cb 100644 --- a/src/compiler/clang.rs +++ b/src/compiler/clang.rs @@ -149,8 +149,6 @@ mod test { assert_eq!(Some("foo.c"), a.input.to_str()); assert_eq!(Language::C, a.language); assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, a.outputs.len()); assert!(a.preprocessor_args.is_empty()); assert!(a.common_args.is_empty()); } @@ -164,8 +162,6 @@ mod test { assert_eq!(Some("foo.cxx"), a.input.to_str()); assert_eq!(Language::Cxx, a.language); assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, a.outputs.len()); assert_eq!(ovec!["-Iinclude", "-include", "file"], a.preprocessor_args); assert_eq!(ovec!["-arch", "xyz", "-fabc"], a.common_args); } @@ -272,7 +268,14 @@ mod test { "-debug-info-kind=constructor" ); assert_eq!( - ovec!["-Xclang", "-mllvm", "-Xclang", "-instcombine-lower-dbg-declare=0", "-Xclang", "-debug-info-kind=constructor"], + ovec![ + "-Xclang", + "-mllvm", + "-Xclang", + "-instcombine-lower-dbg-declare=0", + "-Xclang", + "-debug-info-kind=constructor" + ], a.common_args ); } diff --git a/src/compiler/compiler.rs b/src/compiler/compiler.rs index 6bf71ecd2..93f87289a 100644 --- a/src/compiler/compiler.rs +++ b/src/compiler/compiler.rs @@ -12,19 +12,15 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-use crate::cache::{ - Cache, - CacheWrite, - Storage, -}; -use crate::compiler::msvc; +use crate::cache::{Cache, CacheWrite, Storage}; use crate::compiler::c::{CCompiler, CCompilerKind}; use crate::compiler::clang::Clang; use crate::compiler::diab::Diab; use crate::compiler::gcc::GCC; -use crate::compiler::nvcc::NVCC; use crate::compiler::hcc::HCC; +use crate::compiler::msvc; use crate::compiler::msvc::MSVC; +use crate::compiler::nvcc::NVCC; use crate::compiler::rust::{Rust, RustupProxy}; use crate::dist; #[cfg(feature = "dist-client")] @@ -38,7 +34,7 @@ use std::borrow::Cow; use std::collections::HashMap; use std::ffi::OsString; use std::fmt; -#[cfg(any(feature = "dist-client", unix))] +#[cfg(feature = "dist-client")] use std::fs; use std::fs::File; use std::io::prelude::*; @@ -47,7 +43,7 @@ use std::process::{self, Stdio}; use std::str; use std::sync::Arc; use std::time::{Duration, Instant}; -use tempfile::{NamedTempFile, TempDir}; +use tempfile::TempDir; use tokio_timer::Timeout; use crate::errors::*; @@ -95,6 +91,7 @@ pub enum CompilerKind { impl CompilerKind { pub fn lang_kind(&self) -> String { match self { + CompilerKind::C(CCompilerKind::NVCC) => "CUDA", CompilerKind::C(_) => "C/C++", CompilerKind::Rust => "Rust", } @@ -147,7 +144,7 @@ where &self, creator: T, cwd: PathBuf, - env_vars: &[(OsString,OsString)], + env_vars: &[(OsString, OsString)], ) -> SFuture<(PathBuf, FileTime)>; /// Create a clone of `Self` and puts it in a `Box` @@ -255,28 +252,9 @@ where out_pretty, fmt_duration_as_secs(&duration) ); - let mut stdout = Vec::new(); - let mut stderr = Vec::new(); - drop(entry.get_object("stdout", &mut stdout)); - drop(entry.get_object("stderr", &mut stderr)); - let write = pool.spawn_fn(move || { - for (key, path) in &outputs { - let dir = match path.parent() { - Some(d) => d, - None => bail!("Output file without a parent directory!"), - }; - // Write the cache entry to a tempfile and then atomically - // move it to its final location so that other rustc invocations - // happening in parallel don't see a partially-written file. - let mut tmp = NamedTempFile::new_in(dir)?; - let mode = entry.get_object(&key, &mut tmp)?; - tmp.persist(path)?; - if let Some(mode) = mode { - set_file_mode(&path, mode)?; - } - } - Ok(()) - }); + let stdout = entry.get_stdout(); + let stderr = entry.get_stderr(); + let write = entry.extract_objects(outputs, &pool); let output = process::Output { status: exit_status(0), stdout, @@ -354,30 +332,14 @@ where out_pretty, fmt_duration_as_secs(&duration) ); - let write = pool.spawn_fn(move || -> Result<_> { - let mut entry = CacheWrite::new(); - for (key, path) in &outputs { - let mut f = File::open(&path)?; - let mode = get_file_mode(&f)?; - entry.put_object(key, &mut f, mode).chain_err(|| { - format!("failed to put object `{:?}` in zip", path) - })?; - } - Ok(entry) - }); + let write = CacheWrite::from_objects(outputs, &pool); let write = write.chain_err(|| "failed to zip up compiler outputs"); let o = out_pretty.clone(); Box::new( write .and_then(move |mut entry| { - if !compiler_result.stdout.is_empty() { - let mut stdout = &compiler_result.stdout[..]; - entry.put_object("stdout", &mut stdout, None)?; - } - if !compiler_result.stderr.is_empty() { - let mut stderr = &compiler_result.stderr[..]; - entry.put_object("stderr", &mut stderr, None)?; - } + entry.put_stdout(&compiler_result.stdout)?; + entry.put_stderr(&compiler_result.stderr)?; // Try to finish storing the newly-written cache // entry. We'll get the result back elsewhere. 
@@ -819,31 +781,6 @@ impl PartialEq for CompileResult { } } -#[cfg(unix)] -fn get_file_mode(file: &File) -> Result> { - use std::os::unix::fs::MetadataExt; - Ok(Some(file.metadata()?.mode())) -} - -#[cfg(windows)] -fn get_file_mode(_file: &File) -> Result> { - Ok(None) -} - -#[cfg(unix)] -fn set_file_mode(path: &Path, mode: u32) -> Result<()> { - use std::fs::Permissions; - use std::os::unix::fs::PermissionsExt; - let p = Permissions::from_mode(mode); - fs::set_permissions(path, p)?; - Ok(()) -} - -#[cfg(windows)] -fn set_file_mode(_path: &Path, _mode: u32) -> Result<()> { - Ok(()) -} - /// Can this result be stored in cache? #[derive(Copy, Clone, Debug, PartialEq)] pub enum Cacheable { @@ -885,7 +822,6 @@ pub fn write_temp_file( .chain_err(|| "failed to write temporary file") } - /// If `executable` is a known compiler, return `Some(Box)`. fn detect_compiler( creator: T, @@ -905,7 +841,9 @@ where None => return f_err("could not determine compiler kind"), Some(f) => f, }; - let rustc_vv = if filename.to_string_lossy().to_lowercase() == "rustc" { + let filename = filename.to_string_lossy().to_lowercase(); + + let rustc_vv = if filename == "rustc" || filename == "clippy-driver" { // Sanity check that it's really rustc. let executable = executable.to_path_buf(); let mut child = creator.clone().new_command_sync(executable); @@ -923,7 +861,6 @@ where f_ok(None) }; - let creator1 = creator.clone(); let creator2 = creator.clone(); let executable = executable.to_owned(); @@ -1029,13 +966,17 @@ where // The detection script doesn't work with NVCC, have to assume NVCC executable name // ends with "nvcc" or "nvcc.exe" instead. - let executable_str = executable.clone().into_os_string().into_string().unwrap().to_lowercase(); + let executable_str = executable + .clone() + .into_os_string() + .into_string() + .unwrap() + .to_lowercase(); debug!("executable: {}", executable_str); if executable_str.ends_with("nvcc") || executable_str.ends_with("nvcc.exe") { debug!("Found NVCC"); return Box::new( - CCompiler::new(NVCC, executable, &pool) - .map(|c| Box::new(c) as Box>), + CCompiler::new(NVCC, executable, &pool).map(|c| Box::new(c) as Box>), ); } @@ -1192,7 +1133,8 @@ mod test { ); let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None) .wait() - .unwrap().0; + .unwrap() + .0; assert_eq!(CompilerKind::C(CCompilerKind::GCC), c.kind()); } @@ -1207,7 +1149,8 @@ mod test { ); let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None) .wait() - .unwrap().0; + .unwrap() + .0; assert_eq!(CompilerKind::C(CCompilerKind::Clang), c.kind()); } @@ -1236,10 +1179,27 @@ mod test { ); let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None) .wait() - .unwrap().0; + .unwrap() + .0; assert_eq!(CompilerKind::C(CCompilerKind::MSVC), c.kind()); } + #[test] + fn test_detect_compiler_kind_nvcc() { + let f = TestFixture::new(); + let creator = new_creator(); + let pool = CpuPool::new(1); + next_command( + &creator, + Ok(MockChild::new(exit_status(0), "nvcc\nfoo", "")), + ); + let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None) + .wait() + .unwrap() + .0; + assert_eq!(CompilerKind::C(CCompilerKind::NVCC), c.kind()); + } + #[test] fn test_detect_compiler_kind_rustc() { let f = TestFixture::new(); @@ -1270,9 +1230,10 @@ LLVM version: 6.0", next_command(&creator, Ok(MockChild::new(exit_status(0), &sysroot, ""))); next_command(&creator, Ok(MockChild::new(exit_status(0), &sysroot, ""))); next_command(&creator, 
Ok(MockChild::new(exit_status(0), &sysroot, ""))); - let c = detect_compiler(creator, &rustc, f.tempdir.path(),&[], &pool, None) + let c = detect_compiler(creator, &rustc, f.tempdir.path(), &[], &pool, None) .wait() - .unwrap().0; + .unwrap() + .0; assert_eq!(CompilerKind::Rust, c.kind()); } @@ -1287,7 +1248,8 @@ LLVM version: 6.0", ); let c = detect_compiler(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None) .wait() - .unwrap().0; + .unwrap() + .0; assert_eq!(CompilerKind::C(CCompilerKind::Diab), c.kind()); } @@ -1300,11 +1262,16 @@ LLVM version: 6.0", &creator, Ok(MockChild::new(exit_status(0), "something", "")), ); - assert!( - detect_compiler(creator, "/foo/bar".as_ref(),f.tempdir.path(), &[], &pool, None) - .wait() - .is_err() - ); + assert!(detect_compiler( + creator, + "/foo/bar".as_ref(), + f.tempdir.path(), + &[], + &pool, + None + ) + .wait() + .is_err()); } #[test] @@ -1313,11 +1280,16 @@ LLVM version: 6.0", let creator = new_creator(); let pool = CpuPool::new(1); next_command(&creator, Ok(MockChild::new(exit_status(1), "", ""))); - assert!( - detect_compiler(creator, "/foo/bar".as_ref(), f.tempdir.path(), &[], &pool, None) - .wait() - .is_err() - ); + assert!(detect_compiler( + creator, + "/foo/bar".as_ref(), + f.tempdir.path(), + &[], + &pool, + None + ) + .wait() + .is_err()); } #[test] @@ -1329,7 +1301,8 @@ LLVM version: 6.0", next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", ""))); let c = get_compiler_info(creator, &f.bins[0], f.tempdir.path(), &[], &pool, None) .wait() - .unwrap().0; + .unwrap() + .0; // digest of an empty file. assert_eq!(CompilerKind::C(CCompilerKind::GCC), c.kind()); } @@ -1345,9 +1318,17 @@ LLVM version: 6.0", let storage: Arc = Arc::new(storage); // Pretend to be GCC. next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", ""))); - let c = get_compiler_info(creator.clone(), &f.bins[0], f.tempdir.path(), &[], &pool, None) - .wait() - .unwrap().0; + let c = get_compiler_info( + creator.clone(), + &f.bins[0], + f.tempdir.path(), + &[], + &pool, + None, + ) + .wait() + .unwrap() + .0; // The preprocessor invocation. next_command( &creator, @@ -1449,9 +1430,17 @@ LLVM version: 6.0", let storage: Arc = Arc::new(storage); // Pretend to be GCC. next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", ""))); - let c = get_compiler_info(creator.clone(), &f.bins[0], f.tempdir.path(), &[], &pool, None) - .wait() - .unwrap().0; + let c = get_compiler_info( + creator.clone(), + &f.bins[0], + f.tempdir.path(), + &[], + &pool, + None, + ) + .wait() + .unwrap() + .0; // The preprocessor invocation. next_command( &creator, @@ -1549,9 +1538,17 @@ LLVM version: 6.0", let storage: Arc = Arc::new(storage); // Pretend to be GCC. next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", ""))); - let c = get_compiler_info(creator.clone(), &f.bins[0], f.tempdir.path(), &[], &pool, None) - .wait() - .unwrap().0; + let c = get_compiler_info( + creator.clone(), + &f.bins[0], + f.tempdir.path(), + &[], + &pool, + None, + ) + .wait() + .unwrap() + .0; // The preprocessor invocation. next_command( &creator, @@ -1623,9 +1620,17 @@ LLVM version: 6.0", let storage: Arc = Arc::new(storage); // Pretend to be GCC. 
next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", ""))); - let c = get_compiler_info(creator.clone(), &f.bins[0], f.tempdir.path(), &[], &pool, None) - .wait() - .unwrap().0; + let c = get_compiler_info( + creator.clone(), + &f.bins[0], + f.tempdir.path(), + &[], + &pool, + None, + ) + .wait() + .unwrap() + .0; const COMPILER_STDOUT: &[u8] = b"compiler stdout"; const COMPILER_STDERR: &[u8] = b"compiler stderr"; // The compiler should be invoked twice, since we're forcing @@ -1736,9 +1741,17 @@ LLVM version: 6.0", f.write_all(b"file contents")?; Ok(MockChild::new(exit_status(0), "gcc", "")) }); - let c = get_compiler_info(creator.clone(), &f.bins[0], f.tempdir.path(), &[], &pool, None) - .wait() - .unwrap().0; + let c = get_compiler_info( + creator.clone(), + &f.bins[0], + f.tempdir.path(), + &[], + &pool, + None, + ) + .wait() + .unwrap() + .0; // We should now have a fake object file. assert_eq!(fs::metadata(&obj).is_ok(), true); // The preprocessor invocation. @@ -1797,9 +1810,17 @@ LLVM version: 6.0", let storage: Arc = Arc::new(storage); // Pretend to be GCC. next_command(&creator, Ok(MockChild::new(exit_status(0), "gcc", ""))); - let c = get_compiler_info(creator.clone(), &f.bins[0], f.tempdir.path(), &[], &pool, None) - .wait() - .unwrap().0; + let c = get_compiler_info( + creator.clone(), + &f.bins[0], + f.tempdir.path(), + &[], + &pool, + None, + ) + .wait() + .unwrap() + .0; const COMPILER_STDOUT: &[u8] = b"compiler stdout"; const COMPILER_STDERR: &[u8] = b"compiler stderr"; // The compiler should be invoked twice, since we're forcing diff --git a/src/compiler/diab.rs b/src/compiler/diab.rs index b9d7956c7..356e74b04 100644 --- a/src/compiler/diab.rs +++ b/src/compiler/diab.rs @@ -79,9 +79,11 @@ ArgData! { pub DoCompilation, Output(PathBuf), PassThrough(OsString), - PreprocessorArgumentFlag, PreprocessorArgument(OsString), PreprocessorArgumentPath(PathBuf), + DepArgumentFlag, + DepArgument(OsString), + DepArgumentPath(PathBuf), TooHardFlag, TooHard(OsString), } @@ -103,22 +105,22 @@ counted_array!(pub static ARGS: [ArgInfo; _] = [ flag!("-V", TooHardFlag), flag!("-VV", TooHardFlag), take_arg!("-W", OsString, Separated, PassThrough), - flag!("-Xmake-dependency", PreprocessorArgumentFlag), + flag!("-Xmake-dependency", DepArgumentFlag), flag!( "-Xmake-dependency-canonicalize-path-off", - PreprocessorArgumentFlag + DepArgumentFlag ), take_arg!( "-Xmake-dependency-savefile", PathBuf, Concatenated('='), - PreprocessorArgumentPath + DepArgumentPath ), take_arg!( "-Xmake-dependency-target", OsString, Concatenated('='), - PreprocessorArgument + DepArgument ), flag!("-c", DoCompilation), take_arg!( @@ -151,10 +153,12 @@ where { let mut common_args = vec![]; let mut compilation = false; + let mut compilation_flag = OsString::new(); let mut input_arg = None; let mut multiple_input = false; let mut output_arg = None; let mut preprocessor_args = vec![]; + let mut dependency_args = vec![]; // Custom iterator to expand `@` arguments which stand for reading a file // and interpreting it as a list of more arguments. 
@@ -186,10 +190,16 @@ where Some(TooHardFlag) | Some(TooHard(_)) => { cannot_cache!(arg.flag_str().expect("Can't be Argument::Raw/UnknownFlag",)) } - Some(DoCompilation) => compilation = true, + + Some(DepArgument(_)) | Some(DepArgumentFlag) | Some(DepArgumentPath(_)) => {} + + Some(DoCompilation) => { + compilation = true; + compilation_flag = + OsString::from(arg.flag_str().expect("Compilation flag expected")); + } Some(Output(p)) => output_arg = Some(p.clone()), Some(PreprocessorArgument(_)) - | Some(PreprocessorArgumentFlag) | Some(PreprocessorArgumentPath(_)) | Some(PassThrough(_)) => {} None => match arg { @@ -205,9 +215,12 @@ where } let args = match arg.get_data() { Some(PassThrough(_)) => &mut common_args, - Some(PreprocessorArgumentFlag) - | Some(PreprocessorArgument(_)) - | Some(PreprocessorArgumentPath(_)) => &mut preprocessor_args, + Some(DepArgument(_)) | Some(DepArgumentFlag) | Some(DepArgumentPath(_)) => { + &mut dependency_args + } + Some(PreprocessorArgument(_)) | Some(PreprocessorArgumentPath(_)) => { + &mut preprocessor_args + } Some(DoCompilation) | Some(Output(_)) => continue, Some(TooHardFlag) | Some(TooHard(_)) => unreachable!(), None => match arg { @@ -254,8 +267,10 @@ where CompilerArguments::Ok(ParsedArguments { input: input.into(), language, + compilation_flag, depfile: None, outputs, + dependency_args, preprocessor_args, common_args, extra_hash_files: vec![], @@ -280,6 +295,7 @@ where let mut cmd = creator.clone().new_command_sync(&executable); cmd.arg("-E") .arg(&parsed_args.input) + .args(&parsed_args.dependency_args) .args(&parsed_args.preprocessor_args) .args(&parsed_args.common_args) .env_clear() @@ -289,7 +305,7 @@ where if log_enabled!(Trace) { trace!("preprocess: {:?}", cmd); } - run_input_output(cmd, None) + Box::new(run_input_output(cmd, None)) } pub fn generate_compile_commands( @@ -307,7 +323,7 @@ pub fn generate_compile_commands( }; let mut arguments: Vec = vec![ - "-c".into(), + parsed_args.compilation_flag.clone(), parsed_args.input.clone().into(), "-o".into(), out_file.into(), @@ -430,7 +446,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert!(common_args.is_empty()); assert!(!msvc_show_includes); @@ -454,7 +469,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert!(common_args.is_empty()); assert!(!msvc_show_includes); @@ -478,7 +492,6 @@ mod test { assert_eq!(Some("foo.cc"), input.to_str()); assert_eq!(Language::Cxx, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert_eq!(ovec!["-fabc", "-mxyz"], common_args); assert!(!msvc_show_includes); @@ -504,7 +517,6 @@ mod test { assert_eq!(Some("foo.cxx"), input.to_str()); assert_eq!(Language::Cxx, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - assert_eq!(1, outputs.len()); assert_eq!(ovec!["-Iinclude", "-include", "file"], preprocessor_args); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); @@ -527,7 +539,7 @@ mod test { input, language, outputs, - preprocessor_args, + dependency_args, msvc_show_includes, common_args, .. 
@@ -538,7 +550,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - assert_eq!(1, outputs.len()); assert_eq!( ovec![ "-Xmake-dependency", @@ -546,7 +557,7 @@ mod test { "-Xmake-dependency-savefile=bar", "-Xmake-dependency-target=foo" ], - preprocessor_args + dependency_args ); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); @@ -644,7 +655,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert!(common_args.is_empty()); assert!(!msvc_show_includes); @@ -657,8 +667,10 @@ mod test { let parsed_args = ParsedArguments { input: "foo.c".into(), language: Language::C, + compilation_flag: "-c".into(), depfile: None, outputs: vec![("obj", "foo.o".into())].into_iter().collect(), + dependency_args: vec![], preprocessor_args: vec![], common_args: vec![], extra_hash_files: vec![], diff --git a/src/compiler/gcc.rs b/src/compiler/gcc.rs index b8f557af2..ec9243787 100644 --- a/src/compiler/gcc.rs +++ b/src/compiler/gcc.rs @@ -110,6 +110,7 @@ ArgData! { pub // it's not treated as a path by the compiler - it's just written wholesale // (including any funny make syntax) into the dep file. DepTarget(OsString), + DepArgumentPath(PathBuf), Language(OsString), SplitDwarf, ProfileGenerate, @@ -140,11 +141,11 @@ counted_array!(pub static ARGS: [ArgInfo; _] = [ take_arg!("-L", OsString, Separated, PassThrough), flag!("-M", TooHardFlag), flag!("-MD", NeedDepTarget), - take_arg!("-MF", PathBuf, Separated, PreprocessorArgumentPath), + take_arg!("-MF", PathBuf, Separated, DepArgumentPath), flag!("-MM", TooHardFlag), flag!("-MMD", NeedDepTarget), flag!("-MP", NeedDepTarget), - take_arg!("-MQ", OsString, Separated, PreprocessorArgument), + take_arg!("-MQ", OsString, Separated, DepTarget), take_arg!("-MT", OsString, Separated, DepTarget), flag!("-P", TooHardFlag), take_arg!("-U", OsString, CanBeSeparated, PassThrough), @@ -214,14 +215,17 @@ where let mut output_arg = None; let mut input_arg = None; let mut dep_target = None; + let mut dep_flag = OsString::from("-MT"); let mut common_args = vec![]; let mut preprocessor_args = vec![]; + let mut dependency_args = vec![]; let mut extra_hash_files = vec![]; let mut compilation = false; let mut multiple_input = false; let mut split_dwarf = false; let mut need_explicit_dep_target = false; let mut language = None; + let mut compilation_flag = OsString::new(); let mut profile_generate = false; let mut outputs_gcno = false; let mut xclangs: Vec = vec![]; @@ -258,7 +262,11 @@ where cannot_cache!(arg.flag_str().expect("Can't be Argument::Raw/UnknownFlag",)) } Some(SplitDwarf) => split_dwarf = true, - Some(DoCompilation) => compilation = true, + Some(DoCompilation) => { + compilation = true; + compilation_flag = + OsString::from(arg.flag_str().expect("Compilation flag expected")); + } Some(ProfileGenerate) => profile_generate = true, Some(TestCoverage) => outputs_gcno = true, Some(Coverage) => { @@ -276,8 +284,12 @@ where } Some(Output(p)) => output_arg = Some(p.clone()), Some(NeedDepTarget) => need_explicit_dep_target = true, - Some(DepTarget(s)) => dep_target = Some(s.clone()), - Some(ExtraHashFile(_)) + Some(DepTarget(s)) => { + dep_flag = OsString::from(arg.flag_str().expect("Dep target flag expected")); + dep_target = Some(s.clone()); + } + Some(DepArgumentPath(_)) + | 
Some(ExtraHashFile(_)) | Some(PreprocessorArgumentFlag) | Some(PreprocessorArgument(_)) | Some(PreprocessorArgumentPath(_)) @@ -289,6 +301,7 @@ where "c++" => Some(Language::Cxx), "objective-c" => Some(Language::ObjectiveC), "objective-c++" => Some(Language::ObjectiveCxx), + "cu" => Some(Language::Cuda), _ => cannot_cache!("-x"), }; } @@ -320,8 +333,8 @@ where } Some(PreprocessorArgumentFlag) | Some(PreprocessorArgument(_)) - | Some(PreprocessorArgumentPath(_)) - | Some(NeedDepTarget) => &mut preprocessor_args, + | Some(PreprocessorArgumentPath(_)) => &mut preprocessor_args, + Some(DepArgumentPath(_)) | Some(NeedDepTarget) => &mut dependency_args, Some(DoCompilation) | Some(Language(_)) | Some(Output(_)) | Some(XClang(_)) | Some(DepTarget(_)) => continue, Some(TooHardFlag) | Some(TooHard(_)) => unreachable!(), @@ -376,9 +389,10 @@ where } Some(PreprocessorArgumentFlag) | Some(PreprocessorArgument(_)) - | Some(PreprocessorArgumentPath(_)) - | Some(DepTarget(_)) - | Some(NeedDepTarget) => &mut preprocessor_args, + | Some(PreprocessorArgumentPath(_)) => &mut preprocessor_args, + Some(DepTarget(_)) | Some(DepArgumentPath(_)) | Some(NeedDepTarget) => { + &mut dependency_args + } }; // Normalize attributes such as "-I foo", "-D FOO=bar", as @@ -430,16 +444,18 @@ where profile_generate = true; } if need_explicit_dep_target { - preprocessor_args.push("-MT".into()); - preprocessor_args.push(dep_target.unwrap_or_else(|| output.clone().into_os_string())); + dependency_args.push(dep_flag); + dependency_args.push(dep_target.unwrap_or_else(|| output.clone().into_os_string())); } outputs.insert("obj", output); CompilerArguments::Ok(ParsedArguments { input: input.into(), language, + compilation_flag, depfile: None, outputs, + dependency_args, preprocessor_args, common_args, extra_hash_files, @@ -469,6 +485,7 @@ where Language::Cxx => "c++", Language::ObjectiveC => "objective-c", Language::ObjectiveCxx => "objective-c++", + Language::Cuda => "cu", }; let mut cmd = creator.clone().new_command_sync(executable); cmd.arg("-x").arg(language).arg("-E"); @@ -492,6 +509,7 @@ where } cmd.arg(&parsed_args.input) .args(&parsed_args.preprocessor_args) + .args(&parsed_args.dependency_args) .args(&parsed_args.common_args) .env_clear() .envs(env_vars.iter().map(|&(ref k, ref v)| (k, v))) @@ -500,7 +518,7 @@ where if log_enabled!(Trace) { trace!("preprocess: {:?}", cmd); } - run_input_output(cmd, None) + Box::new(run_input_output(cmd, None)) } pub fn generate_compile_commands( @@ -534,11 +552,12 @@ pub fn generate_compile_commands( Language::Cxx => "c++", Language::ObjectiveC => "objective-c", Language::ObjectiveCxx => "objective-c++", + Language::Cuda => "cu", }; let mut arguments: Vec = vec![ "-x".into(), language.into(), - "-c".into(), + parsed_args.compilation_flag.clone(), parsed_args.input.clone().into(), "-o".into(), out_file.into(), @@ -562,6 +581,7 @@ pub fn generate_compile_commands( Language::Cxx => "c++", Language::ObjectiveC => "objective-c", Language::ObjectiveCxx => "objective-c++", + Language::Cuda => "cu", } .into(); if !rewrite_includes_only { @@ -573,7 +593,7 @@ pub fn generate_compile_commands( let mut arguments: Vec = vec![ "-x".into(), language, - "-c".into(), + parsed_args.compilation_flag.clone().into_string().ok()?, path_transformer.as_dist(&parsed_args.input)?, "-o".into(), path_transformer.as_dist(out_file)?, @@ -700,6 +720,7 @@ mod test { let ParsedArguments { input, language, + compilation_flag, outputs, preprocessor_args, msvc_show_includes, @@ -711,9 +732,8 @@ mod test { }; 
assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); + assert_eq!(Some("-c"), compilation_flag.to_str()); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert!(common_args.is_empty()); assert!(!msvc_show_includes); @@ -737,8 +757,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert!(common_args.is_empty()); assert!(!msvc_show_includes); @@ -776,8 +794,6 @@ mod test { ("obj", PathBuf::from("foo.o")), ("dwo", PathBuf::from("foo.dwo")) ); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(2, outputs.len()); assert!(preprocessor_args.is_empty()); assert_eq!(ovec!["-gsplit-dwarf"], common_args); assert!(!msvc_show_includes); @@ -813,8 +829,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert_eq!(3, common_args.len()); assert!(!msvc_show_includes); @@ -843,8 +857,6 @@ mod test { ("obj", PathBuf::from("foo.o")), ("gcno", PathBuf::from("foo.gcno")) ); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(2, outputs.len()); assert!(preprocessor_args.is_empty()); assert_eq!(ovec!["--coverage"], common_args); assert!(!msvc_show_includes); @@ -874,8 +886,6 @@ mod test { ("obj", PathBuf::from("foo.o")), ("gcno", PathBuf::from("foo.gcno")) ); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(2, outputs.len()); assert!(preprocessor_args.is_empty()); assert_eq!(ovec!["-ftest-coverage"], common_args); assert!(!msvc_show_includes); @@ -901,8 +911,6 @@ mod test { assert_eq!(Some("foo.cpp"), input.to_str()); assert_eq!(Language::Cxx, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert_eq!(ovec!["-fprofile-generate"], common_args); assert!(!msvc_show_includes); @@ -927,8 +935,6 @@ mod test { assert_eq!(Some("foo.cc"), input.to_str()); assert_eq!(Language::Cxx, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert_eq!(ovec!["-fabc", "-mxyz"], common_args); assert!(!msvc_show_includes); @@ -954,8 +960,6 @@ mod test { assert_eq!(Some("foo.cxx"), input.to_str()); assert_eq!(Language::Cxx, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! 
- assert_eq!(1, outputs.len()); assert_eq!(ovec!["-Iinclude", "-include", "file"], preprocessor_args); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); @@ -963,11 +967,23 @@ mod test { #[test] fn test_parse_arguments_preprocessor_args() { - let args = stringvec!["-c", "foo.c", "-fabc", "-MF", "file", "-o", "foo.o", "-MQ", "abc"]; + let args = stringvec![ + "-c", + "foo.c", + "-fabc", + "-MF", + "file", + "-o", + "foo.o", + "-MQ", + "abc", + "-nostdinc" + ]; let ParsedArguments { input, language, outputs, + dependency_args, preprocessor_args, msvc_show_includes, common_args, @@ -979,9 +995,8 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); - assert_eq!(ovec!["-MF", "file", "-MQ", "abc"], preprocessor_args); + assert_eq!(ovec!["-MF", "file"], dependency_args); + assert_eq!(ovec!["-nostdinc"], preprocessor_args); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); } @@ -994,7 +1009,7 @@ mod test { input, language, outputs, - preprocessor_args, + dependency_args, msvc_show_includes, common_args, .. @@ -1005,9 +1020,7 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); - assert_eq!(ovec!["-MF", "file"], preprocessor_args); + assert_eq!(ovec!["-MF", "file"], dependency_args); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); } @@ -1021,6 +1034,7 @@ mod test { input, language, outputs, + dependency_args, preprocessor_args, msvc_show_includes, common_args, @@ -1032,12 +1046,41 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert_eq!( ovec!["-MF", "file", "-MD", "-MT", "depfile"], - preprocessor_args + dependency_args + ); + assert!(preprocessor_args.is_empty()); + assert_eq!(ovec!["-fabc"], common_args); + assert!(!msvc_show_includes); + } + + #[test] + fn test_parse_arguments_explicit_mq_dep_target_needed() { + let args = stringvec![ + "-c", "foo.c", "-MQ", "depfile", "-fabc", "-MF", "file", "-o", "foo.o", "-MD" + ]; + let ParsedArguments { + input, + language, + outputs, + dependency_args, + preprocessor_args, + msvc_show_includes, + common_args, + .. + } = match parse_arguments_(args) { + CompilerArguments::Ok(args) => args, + o => panic!("Got unexpected parse result: {:?}", o), + }; + assert_eq!(Some("foo.c"), input.to_str()); + assert_eq!(Language::C, language); + assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); + assert_eq!( + ovec!["-MF", "file", "-MD", "-MQ", "depfile"], + dependency_args ); + assert!(preprocessor_args.is_empty()); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); } @@ -1077,7 +1120,7 @@ mod test { input, language, outputs, - preprocessor_args, + dependency_args, msvc_show_includes, common_args, .. @@ -1088,12 +1131,7 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! 
- assert_eq!(1, outputs.len()); - assert_eq!( - ovec!["-MF", "file", "-MD", "-MT", "foo.o"], - preprocessor_args - ); + assert_eq!(ovec!["-MF", "file", "-MD", "-MT", "foo.o"], dependency_args); assert_eq!(ovec!["-fabc"], common_args); assert!(!msvc_show_includes); } @@ -1187,8 +1225,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.o"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert!(common_args.is_empty()); assert!(!msvc_show_includes); @@ -1201,8 +1237,10 @@ mod test { let parsed_args = ParsedArguments { input: "foo.c".into(), language: Language::C, + compilation_flag: "-c".into(), depfile: None, outputs: vec![("obj", "foo.o".into())].into_iter().collect(), + dependency_args: vec![], preprocessor_args: vec![], common_args: vec![], extra_hash_files: vec![], diff --git a/src/compiler/hcc.rs b/src/compiler/hcc.rs index a0f395822..5336420cf 100644 --- a/src/compiler/hcc.rs +++ b/src/compiler/hcc.rs @@ -1,32 +1,19 @@ #![allow(unused_imports,dead_code,unused_variables)] -use crate::compiler::{ - gcc, - Cacheable, - CompileCommand, - CompilerArguments, - write_temp_file, -}; use crate::compiler::args::*; use crate::compiler::c::{CCompilerImpl, CCompilerKind, Language, ParsedArguments}; use crate::compiler::gcc::ArgData::*; +use crate::compiler::{gcc, write_temp_file, Cacheable, CompileCommand, CompilerArguments}; use crate::dist; +use crate::mock_command::{CommandCreator, CommandCreatorSync, RunCommand}; +use crate::util::{run_input_output, OsStrExt}; use futures::future::{self, Future}; use futures_cpupool::CpuPool; -use crate::mock_command::{ - CommandCreator, - CommandCreatorSync, - RunCommand, -}; use std::ffi::OsString; use std::fs::File; -use std::io::{ - self, - Write, -}; +use std::io::{self, Write}; use std::path::Path; use std::process; -use crate::util::{run_input_output, OsStrExt}; use crate::errors::*; @@ -35,11 +22,14 @@ use crate::errors::*; pub struct HCC; impl CCompilerImpl for HCC { - fn kind(&self) -> CCompilerKind { CCompilerKind::HCC } - fn parse_arguments(&self, - arguments: &[OsString], - cwd: &Path) -> CompilerArguments - { + fn kind(&self) -> CCompilerKind { + CCompilerKind::HCC + } + fn parse_arguments( + &self, + arguments: &[OsString], + cwd: &Path, + ) -> CompilerArguments { gcc::parse_arguments(arguments, cwd, (&gcc::ARGS[..], &ARGS[..])) } diff --git a/src/compiler/mod.rs b/src/compiler/mod.rs index ab76ea9c1..4a639c191 100644 --- a/src/compiler/mod.rs +++ b/src/compiler/mod.rs @@ -21,9 +21,9 @@ mod clang; mod compiler; mod diab; mod gcc; -mod nvcc; mod hcc; mod msvc; +mod nvcc; mod rust; pub use crate::compiler::compiler::*; diff --git a/src/compiler/msvc.rs b/src/compiler/msvc.rs index 2b3e62345..f306e288f 100644 --- a/src/compiler/msvc.rs +++ b/src/compiler/msvc.rs @@ -19,7 +19,7 @@ use crate::compiler::{ }; use crate::dist; use crate::mock_command::{CommandCreatorSync, RunCommand}; -use crate::util::{run_input_output, OsStrExt}; +use crate::util::run_input_output; use futures::future::Future; use futures_cpupool::CpuPool; use local_encoding::{Encoder, Encoding}; @@ -216,44 +216,219 @@ ArgData! { TooHardPath(PathBuf), PreprocessorArgument(OsString), PreprocessorArgumentPath(PathBuf), + SuppressCompilation, DoCompilation, ShowIncludes, Output(PathBuf), DepFile(PathBuf), ProgramDatabase(PathBuf), DebugInfo, + PassThrough, // Miscellaneous flags that don't prevent caching. 
+ PassThroughWithPath(PathBuf), // As above, recognised by prefix. + PassThroughWithSuffix(OsString), // As above, recognised by prefix. XClang(OsString), } use self::ArgData::*; -counted_array!(static ARGS: [ArgInfo; _] = [ - take_arg!("-D", OsString, Concatenated, PreprocessorArgument), - take_arg!("-FA", OsString, Concatenated, TooHard), - take_arg!("-FI", PathBuf, CanBeSeparated, PreprocessorArgumentPath), - take_arg!("-FR", PathBuf, Concatenated, TooHardPath), - take_arg!("-Fa", PathBuf, Concatenated, TooHardPath), - take_arg!("-Fd", PathBuf, Concatenated, ProgramDatabase), - take_arg!("-Fe", PathBuf, Concatenated, TooHardPath), - take_arg!("-Fi", PathBuf, Concatenated, TooHardPath), - take_arg!("-Fm", PathBuf, Concatenated, TooHardPath), - take_arg!("-Fo", PathBuf, Concatenated, Output), - take_arg!("-Fp", PathBuf, Concatenated, TooHardPath), - take_arg!("-Fr", PathBuf, Concatenated, TooHardPath), - flag!("-Fx", TooHardFlag), - take_arg!("-I", PathBuf, CanBeSeparated, PreprocessorArgumentPath), - take_arg!("-U", OsString, Concatenated, PreprocessorArgument), - take_arg!("-Xclang", OsString, Separated, XClang), - flag!("-Zi", DebugInfo), - flag!("-ZI", DebugInfo), - flag!("-c", DoCompilation), - take_arg!("-deps", PathBuf, Concatenated, DepFile), - flag!("-fsyntax-only", TooHardFlag), - take_arg!("-o", PathBuf, Separated, Output), // Deprecated but valid - flag!("-showIncludes", ShowIncludes), +macro_rules! msvc_args { + (static ARGS: [$t:ty; _] = [$($macro:ident ! ($($v:tt)*),)*]) => { + counted_array!(static ARGS: [$t; _] = [$(msvc_args!(@one "-", $macro!($($v)*)),)*]); + counted_array!(static SLASH_ARGS: [$t; _] = [$(msvc_args!(@one "/", $macro!($($v)*)),)*]); + }; + (@one $prefix:expr, msvc_take_arg!($s:expr, $($t:tt)*)) => { + take_arg!(concat!($prefix, $s), $($t)+) + }; + (@one $prefix:expr, msvc_flag!($s:expr, $($t:tt)+)) => { + flag!(concat!($prefix, $s), $($t)+) + }; + (@one $prefix:expr, $other:expr) => { $other }; +} + +// Reference: +// https://docs.microsoft.com/en-us/cpp/build/reference/compiler-options-listed-alphabetically?view=vs-2019 +msvc_args!(static ARGS: [ArgInfo; _] = [ + msvc_flag!("?", SuppressCompilation), + msvc_flag!("C", PassThrough), // Ignored unless a preprocess-only flag is specified. + msvc_take_arg!("D", OsString, Concatenated, PreprocessorArgument), + msvc_flag!("E", SuppressCompilation), + msvc_take_arg!("EH", OsString, Concatenated, PassThroughWithSuffix), // /EH[acsr\-]+ - TODO: use a regex? + msvc_flag!("EP", SuppressCompilation), + msvc_take_arg!("F", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("FA", OsString, Concatenated, TooHard), + msvc_flag!("FC", TooHardFlag), // Use absolute paths in error messages. + msvc_take_arg!("FI", PathBuf, CanBeSeparated, PreprocessorArgumentPath), + msvc_take_arg!("FR", PathBuf, Concatenated, TooHardPath), + msvc_flag!("FS", TooHardFlag), + msvc_take_arg!("FU", PathBuf, CanBeSeparated, TooHardPath), + msvc_take_arg!("Fa", PathBuf, Concatenated, TooHardPath), + msvc_take_arg!("Fd", PathBuf, Concatenated, ProgramDatabase), + msvc_take_arg!("Fe", PathBuf, Concatenated, TooHardPath), + msvc_take_arg!("Fi", PathBuf, Concatenated, TooHardPath), + msvc_take_arg!("Fm", PathBuf, Concatenated, PassThroughWithPath), // No effect if /c is specified. 
+ msvc_take_arg!("Fo", PathBuf, Concatenated, Output), + msvc_take_arg!("Fp", PathBuf, Concatenated, TooHardPath), + msvc_take_arg!("Fr", PathBuf, Concatenated, TooHardPath), + msvc_flag!("Fx", TooHardFlag), + msvc_flag!("GA", PassThrough), + msvc_flag!("GF", PassThrough), + msvc_flag!("GH", PassThrough), + msvc_flag!("GL", PassThrough), + msvc_flag!("GL-", PassThrough), + msvc_flag!("GR", PassThrough), + msvc_flag!("GR-", PassThrough), + msvc_flag!("GS", PassThrough), + msvc_flag!("GS-", PassThrough), + msvc_flag!("GT", PassThrough), + msvc_flag!("GX", PassThrough), + msvc_flag!("GZ", PassThrough), + msvc_flag!("Gd", PassThrough), + msvc_flag!("Ge", PassThrough), + msvc_flag!("Gh", PassThrough), + msvc_flag!("Gm", TooHardFlag), + msvc_flag!("Gr", PassThrough), + msvc_take_arg!("Gs", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("Gv", PassThrough), + msvc_flag!("Gw", PassThrough), + msvc_flag!("Gw-", PassThrough), + msvc_flag!("Gy", PassThrough), + msvc_flag!("Gy-", PassThrough), + msvc_flag!("Gz", PassThrough), + msvc_take_arg!("H", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("HELP", SuppressCompilation), + msvc_take_arg!("I", PathBuf, CanBeSeparated, PreprocessorArgumentPath), + msvc_flag!("J", PassThrough), + msvc_flag!("JMC", PassThrough), + msvc_flag!("JMC-", PassThrough), + msvc_flag!("LD", PassThrough), + msvc_flag!("LDd", PassThrough), + msvc_flag!("MD", PassThrough), + msvc_flag!("MDd", PassThrough), + msvc_flag!("MP", TooHardFlag), // Multiple source files. + msvc_flag!("MT", PassThrough), + msvc_flag!("MTd", PassThrough), + msvc_flag!("O1", PassThrough), + msvc_flag!("O2", PassThrough), + msvc_flag!("Ob0", PassThrough), + msvc_flag!("Ob1", PassThrough), + msvc_flag!("Ob2", PassThrough), + msvc_flag!("Ob3", PassThrough), + msvc_flag!("Od", PassThrough), + msvc_flag!("Og", PassThrough), + msvc_flag!("Oi", PassThrough), + msvc_flag!("Oi-", PassThrough), + msvc_flag!("Os", PassThrough), + msvc_flag!("Ot", PassThrough), + msvc_flag!("Ox", PassThrough), + msvc_flag!("Oy", PassThrough), + msvc_flag!("Oy-", PassThrough), + msvc_flag!("P", SuppressCompilation), + msvc_flag!("QIfist", PassThrough), + msvc_flag!("QIntel-jcc-erratum", PassThrough), + msvc_flag!("Qfast_transcendentals", PassThrough), + msvc_flag!("Qimprecise_fwaits", PassThrough), + msvc_flag!("Qpar", PassThrough), + msvc_flag!("Qsafe_fp_loads", PassThrough), + msvc_flag!("Qspectre", PassThrough), + msvc_flag!("Qspectre-load", PassThrough), + msvc_flag!("Qspectre-load-cf", PassThrough), + msvc_flag!("Qvec-report:1", PassThrough), + msvc_flag!("Qvec-report:2", PassThrough), + msvc_take_arg!("RTC", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("TC", PassThrough), // TODO: disable explicit language check, hope for the best for now? Also, handle /Tc & /Tp. + msvc_flag!("TP", PassThrough), // As above. + msvc_take_arg!("U", OsString, Concatenated, PreprocessorArgument), + msvc_take_arg!("V", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("W0", PassThrough), + msvc_flag!("W1", PassThrough), + msvc_flag!("W2", PassThrough), + msvc_flag!("W3", PassThrough), + msvc_flag!("W4", PassThrough), + msvc_flag!("WL", PassThrough), + msvc_flag!("WX", PassThrough), + msvc_flag!("Wall", PassThrough), + msvc_take_arg!("Wv:", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("X", PassThrough), + msvc_take_arg!("Xclang", OsString, Separated, XClang), + msvc_flag!("Yd", PassThrough), + msvc_flag!("Z7", PassThrough), // Add debug info to .obj files. 
+ msvc_flag!("ZI", DebugInfo), // Implies /FC, which puts absolute paths in error messages -> TooHardFlag? + msvc_flag!("ZW", PassThrough), + msvc_flag!("Za", PassThrough), + msvc_take_arg!("Zc:", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("Ze", PassThrough), + msvc_flag!("Zi", DebugInfo), + msvc_flag!("Zo", PassThrough), + msvc_flag!("Zo-", PassThrough), + msvc_flag!("Zp1", PassThrough), + msvc_flag!("Zp16", PassThrough), + msvc_flag!("Zp2", PassThrough), + msvc_flag!("Zp4", PassThrough), + msvc_flag!("Zp8", PassThrough), + msvc_flag!("Zs", SuppressCompilation), + msvc_flag!("analyze-", PassThrough), + msvc_take_arg!("analyze:", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("arch:", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("await", PassThrough), + msvc_flag!("bigobj", PassThrough), + msvc_flag!("c", DoCompilation), + msvc_take_arg!("cgthreads", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("clr", PassThrough), + msvc_take_arg!("clr:", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("constexpr:", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("deps", PathBuf, Concatenated, DepFile), + msvc_take_arg!("diagnostics:", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("doc", PathBuf, Concatenated, TooHardPath), // Creates an .xdc file. + msvc_take_arg!("errorReport:", OsString, Concatenated, PassThroughWithSuffix), // Deprecated. + msvc_take_arg!("execution-charset:", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("experimental:module", TooHardFlag), + msvc_flag!("experimental:module-", PassThrough), // Explicitly disabled modules. + msvc_take_arg!("experimental:preprocessor", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("favor:", OsString, Separated, PassThroughWithSuffix), + msvc_take_arg!("fp:", OsString, Separated, PassThroughWithSuffix), + msvc_flag!("fsyntax-only", SuppressCompilation), + msvc_take_arg!("guard:cf", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("homeparams", PassThrough), + msvc_flag!("hotpatch", PassThrough), + msvc_flag!("kernel", PassThrough), + msvc_flag!("kernel-", PassThrough), + msvc_flag!("nologo", PassThrough), + msvc_take_arg!("o", PathBuf, Separated, Output), // Deprecated but valid + msvc_flag!("openmp", PassThrough), + msvc_flag!("openmp:experimental", PassThrough), + msvc_flag!("permissive-", PassThrough), + msvc_flag!("sdl", PassThrough), + msvc_flag!("sdl-", PassThrough), + msvc_flag!("showIncludes", ShowIncludes), + msvc_take_arg!("source-charset:", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("std:", OsString, Concatenated, PassThroughWithSuffix), + msvc_flag!("u", PassThrough), + msvc_flag!("utf-8", PassThrough), + msvc_flag!("validate-charset", PassThrough), + msvc_flag!("validate-charset-", PassThrough), + msvc_flag!("vd0", PassThrough), + msvc_flag!("vd1", PassThrough), + msvc_flag!("vd2", PassThrough), + msvc_flag!("vmb", PassThrough), + msvc_flag!("vmg", PassThrough), + msvc_flag!("vmm", PassThrough), + msvc_flag!("vms", PassThrough), + msvc_flag!("vmv", PassThrough), + msvc_flag!("volatile:iso", PassThrough), + msvc_flag!("volatile:ms", PassThrough), + msvc_flag!("w", PassThrough), + msvc_take_arg!("w1", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("w2", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("w3", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("w4", OsString, Concatenated, PassThroughWithSuffix), 
+ msvc_take_arg!("wd", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("we", OsString, Concatenated, PassThroughWithSuffix), + msvc_take_arg!("wo", OsString, Concatenated, PassThroughWithSuffix), take_arg!("@", PathBuf, Concatenated, TooHardPath), ]); +// TODO: what do do with precompiled header flags? eg: /Y-, /Yc, /YI, /Yu, /Zf, /ZH, /Zm + pub fn parse_arguments( arguments: &[OsString], cwd: &Path, @@ -263,32 +438,28 @@ pub fn parse_arguments( let mut input_arg = None; let mut common_args = vec![]; let mut preprocessor_args = vec![]; + let mut dependency_args = vec![]; let mut extra_hash_files = vec![]; let mut compilation = false; + let mut compilation_flag = OsString::new(); let mut debug_info = false; let mut pdb = None; let mut depfile = None; let mut show_includes = false; let mut xclangs: Vec = vec![]; - // First convert all `/foo` arguments to `-foo` to accept both styles - let it = arguments.iter().map(|i| { - if let Some(arg) = i.split_prefix("/") { - let mut dash = OsString::from("-"); - dash.push(&arg); - dash - } else { - i.clone() - } - }); - - for arg in ArgsIter::new(it, &ARGS[..]) { + for arg in ArgsIter::new(arguments.iter().cloned(), (&ARGS[..], &SLASH_ARGS[..])) { let arg = try_or_cannot_cache!(arg, "argument parse"); match arg.get_data() { + Some(PassThrough) | Some(PassThroughWithPath(_)) | Some(PassThroughWithSuffix(_)) => {} Some(TooHardFlag) | Some(TooHard(_)) | Some(TooHardPath(_)) => { cannot_cache!(arg.flag_str().expect("Can't be Argument::Raw/UnknownFlag",)) } - Some(DoCompilation) => compilation = true, + Some(DoCompilation) => { + compilation = true; + compilation_flag = + OsString::from(arg.flag_str().expect("Compilation flag expected")); + } Some(ShowIncludes) => show_includes = true, Some(Output(out)) => { output_arg = Some(out.clone()); @@ -302,6 +473,9 @@ pub fn parse_arguments( Some(ProgramDatabase(p)) => pdb = Some(p.clone()), Some(DebugInfo) => debug_info = true, Some(PreprocessorArgument(_)) | Some(PreprocessorArgumentPath(_)) => {} + Some(SuppressCompilation) => { + return CompilerArguments::NotCompilation; + } Some(XClang(s)) => xclangs.push(s.clone()), None => { match arg { @@ -365,9 +539,10 @@ pub fn parse_arguments( } Some(PreprocessorArgumentFlag) | Some(PreprocessorArgument(_)) - | Some(PreprocessorArgumentPath(_)) - | Some(DepTarget(_)) - | Some(NeedDepTarget) => &mut preprocessor_args, + | Some(PreprocessorArgumentPath(_)) => &mut preprocessor_args, + Some(DepArgumentPath(_)) | Some(DepTarget(_)) | Some(NeedDepTarget) => { + &mut dependency_args + } }; // Normalize attributes such as "-I foo", "-D FOO=bar", as // "-Ifoo", "-DFOO=bar", etc. 
and "-includefoo", "idirafterbar" as @@ -421,8 +596,10 @@ pub fn parse_arguments( CompilerArguments::Ok(ParsedArguments { input: input.into(), language, + compilation_flag, depfile, outputs, + dependency_args, preprocessor_args, common_args, extra_hash_files, @@ -489,6 +666,7 @@ where .arg(&parsed_args.input) .arg("-nologo") .args(&parsed_args.preprocessor_args) + .args(&parsed_args.dependency_args) .args(&parsed_args.common_args) .env_clear() .envs(env_vars.iter().map(|&(ref k, ref v)| (k, v))) @@ -599,7 +777,11 @@ fn generate_compile_commands( let mut fo = OsString::from("-Fo"); fo.push(&out_file); - let mut arguments: Vec = vec!["-c".into(), parsed_args.input.clone().into(), fo]; + let mut arguments: Vec = vec![ + parsed_args.compilation_flag.clone(), + parsed_args.input.clone().into(), + fo, + ]; arguments.extend(parsed_args.preprocessor_args.clone()); arguments.extend(parsed_args.common_args.clone()); @@ -620,7 +802,7 @@ fn generate_compile_commands( fo.push_str(&path_transformer.as_dist(out_file)?); let mut arguments: Vec = vec![ - "-c".into(), + parsed_args.compilation_flag.clone().into_string().ok()?, path_transformer.as_dist(&parsed_args.input)?, fo, ]; @@ -686,6 +868,32 @@ mod test { let ParsedArguments { input, language, + compilation_flag, + outputs, + preprocessor_args, + msvc_show_includes, + common_args, + .. + } = match parse_arguments(args) { + CompilerArguments::Ok(args) => args, + o => panic!("Got unexpected parse result: {:?}", o), + }; + assert_eq!(Some("foo.c"), input.to_str()); + assert_eq!(Language::C, language); + assert_eq!(Some("-c"), compilation_flag.to_str()); + assert_map_contains!(outputs, ("obj", PathBuf::from("foo.obj"))); + assert!(preprocessor_args.is_empty()); + assert!(common_args.is_empty()); + assert!(!msvc_show_includes); + } + + #[test] + fn test_parse_compile_flag() { + let args = ovec!["/c", "foo.c", "-Fofoo.obj"]; + let ParsedArguments { + input, + language, + compilation_flag, outputs, preprocessor_args, msvc_show_includes, @@ -697,9 +905,8 @@ mod test { }; assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); + assert_eq!(Some("/c"), compilation_flag.to_str()); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.obj"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert!(common_args.is_empty()); assert!(!msvc_show_includes); @@ -723,8 +930,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.obj"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert!(common_args.is_empty()); assert!(!msvc_show_includes); @@ -748,8 +953,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.obj"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert!(common_args.is_empty()); assert!(!msvc_show_includes); @@ -773,8 +976,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.obj"))); - //TODO: fix assert_map_contains to assert no extra keys! 
- assert_eq!(1, outputs.len()); assert!(preprocessor_args.is_empty()); assert_eq!(common_args, ovec!["-foo", "-bar"]); assert!(!msvc_show_includes); @@ -798,8 +999,6 @@ mod test { assert_eq!(Some("foo.c"), input.to_str()); assert_eq!(Language::C, language); assert_map_contains!(outputs, ("obj", PathBuf::from("foo.obj"))); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(1, outputs.len()); assert_eq!(preprocessor_args, ovec!["-FIfile"]); assert!(common_args.is_empty()); assert!(msvc_show_includes); @@ -827,8 +1026,6 @@ mod test { ("obj", PathBuf::from("foo.obj")), ("pdb", PathBuf::from("foo.pdb")) ); - //TODO: fix assert_map_contains to assert no extra keys! - assert_eq!(2, outputs.len()); assert!(preprocessor_args.is_empty()); assert_eq!(common_args, ovec!["-Zi", "-Fdfoo.pdb"]); assert!(!msvc_show_includes); @@ -904,8 +1101,10 @@ mod test { let parsed_args = ParsedArguments { input: "foo.c".into(), language: Language::C, + compilation_flag: "-c".into(), depfile: None, outputs: vec![("obj", "foo.obj".into())].into_iter().collect(), + dependency_args: vec![], preprocessor_args: vec![], common_args: vec![], extra_hash_files: vec![], @@ -943,10 +1142,12 @@ mod test { let parsed_args = ParsedArguments { input: "foo.c".into(), language: Language::C, + compilation_flag: "/c".into(), depfile: None, outputs: vec![("obj", "foo.obj".into()), ("pdb", pdb)] .into_iter() .collect(), + dependency_args: vec![], preprocessor_args: vec![], common_args: vec![], extra_hash_files: vec![], diff --git a/src/compiler/nvcc.rs b/src/compiler/nvcc.rs index 0c3d1e30a..c661786e6 100644 --- a/src/compiler/nvcc.rs +++ b/src/compiler/nvcc.rs @@ -12,36 +12,23 @@ // See the License for the specific language governing permissions and // limitations under the License. 
-#![allow(unused_imports,dead_code,unused_variables)] - -use crate::compiler::{ - gcc, - Cacheable, - CompileCommand, - CompilerArguments, - write_temp_file, -}; +#![allow(unused_imports, dead_code, unused_variables)] + use crate::compiler::args::*; use crate::compiler::c::{CCompilerImpl, CCompilerKind, Language, ParsedArguments}; use crate::compiler::gcc::ArgData::*; +use crate::compiler::{gcc, write_temp_file, Cacheable, CompileCommand, CompilerArguments}; use crate::dist; -use log::Level::Trace; +use crate::mock_command::{CommandCreator, CommandCreatorSync, RunCommand}; +use crate::util::{run_input_output, OsStrExt}; use futures::future::{self, Future}; use futures_cpupool::CpuPool; -use crate::mock_command::{ - CommandCreator, - CommandCreatorSync, - RunCommand, -}; +use log::Level::Trace; use std::ffi::OsString; use std::fs::File; -use std::io::{ - self, - Write, -}; +use std::io::{self, Write}; use std::path::{Path, PathBuf}; use std::process; -use crate::util::{run_input_output, OsStrExt}; use crate::errors::*; @@ -50,11 +37,14 @@ use crate::errors::*; pub struct NVCC; impl CCompilerImpl for NVCC { - fn kind(&self) -> CCompilerKind { CCompilerKind::NVCC } - fn parse_arguments(&self, - arguments: &[OsString], - cwd: &Path) -> CompilerArguments - { + fn kind(&self) -> CCompilerKind { + CCompilerKind::NVCC + } + fn parse_arguments( + &self, + arguments: &[OsString], + cwd: &Path, + ) -> CompilerArguments { gcc::parse_arguments(arguments, cwd, (&gcc::ARGS[..], &ARGS[..])) } @@ -71,16 +61,82 @@ impl CCompilerImpl for NVCC { where T: CommandCreatorSync, { - preprocess( - creator, - executable, - parsed_args, - cwd, - env_vars, - may_dist, - self.kind(), - rewrite_includes_only, - ) + let language = match parsed_args.language { + Language::C => "c", + Language::Cxx => "c++", + Language::ObjectiveC => "objective-c", + Language::ObjectiveCxx => "objective-c++", + Language::Cuda => "cu", + }; + + let initialize_cmd_and_args = || { + let mut command = creator.clone().new_command_sync(executable); + command.args(&parsed_args.preprocessor_args); + command.args(&parsed_args.common_args); + //We need to add "-rdc=true" if we are compiling with `-dc` + //So that the preprocessor has the correct implicit defines + if parsed_args.compilation_flag == "-dc" { + command.arg("-rdc=true"); + } + command.arg("-x").arg(language).arg(&parsed_args.input); + + return command; + }; + + let dep_before_preprocessor = || { + //NVCC doesn't support generating both the dependency information + //and the preprocessor output at the same time. So if we have + //need for both we need separate compiler invocations + let mut dep_cmd = initialize_cmd_and_args(); + let mut transformed_deps = vec![]; + for item in parsed_args.dependency_args.iter() { + if item == "-MD" { + transformed_deps.push(OsString::from("-M")); + } else if item == "-MMD" { + transformed_deps.push(OsString::from("-MM")); + } else { + transformed_deps.push(item.clone()); + } + } + dep_cmd + .args(&transformed_deps) + .env_clear() + .envs(env_vars.iter().map(|&(ref k, ref v)| (k, v))) + .current_dir(cwd); + + if log_enabled!(Trace) { + trace!("dep-gen command: {:?}", dep_cmd); + } + return dep_cmd; + }; + + trace!("preprocess"); + let mut cmd = initialize_cmd_and_args(); + + //NVCC only supports `-E` when it comes after preprocessor + //and common flags. 
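+        //The host-side flag forwarded via -Xcompiler (-EP for cl.exe, -P for
+        //gcc/clang) suppresses #line markers in the preprocessed output, which
+        //should keep absolute, machine-specific paths out of the hashed text.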
+ cmd.arg("-E"); + if cfg!(windows) { + cmd.arg("-Xcompiler=-EP"); + } else { + cmd.arg("-Xcompiler=-P"); + } + cmd.env_clear() + .envs(env_vars.iter().map(|&(ref k, ref v)| (k, v))) + .current_dir(cwd); + if log_enabled!(Trace) { + trace!("preprocess: {:?}", cmd); + } + + //Need to chain the dependency generation and the preprocessor + //to emulate a `proper` front end + if parsed_args.dependency_args.len() > 0 { + let first = run_input_output(dep_before_preprocessor(), None); + let second = run_input_output(cmd, None); + return Box::new(first.join(second).map(|(f, s)| s)); + } else { + return Box::new(run_input_output(cmd, None)); + } } fn generate_compile_commands( @@ -92,7 +148,7 @@ impl CCompilerImpl for NVCC { env_vars: &[(OsString, OsString)], rewrite_includes_only: bool, ) -> Result<(CompileCommand, Option, Cacheable)> { - generate_compile_commands( + gcc::generate_compile_commands( path_transformer, executable, parsed_args, @@ -104,106 +160,282 @@ impl CCompilerImpl for NVCC { } } -pub fn preprocess( - creator: &T, - executable: &Path, - parsed_args: &ParsedArguments, - cwd: &Path, - env_vars: &[(OsString, OsString)], - may_dist: bool, - kind: CCompilerKind, - rewrite_includes_only: bool, -) -> SFuture -where - T: CommandCreatorSync, -{ - trace!("preprocess"); - let language = match parsed_args.language { - Language::C => "c", - Language::Cxx => "c++", - Language::ObjectiveC => "objective-c", - Language::ObjectiveCxx => "objective-c++", - }; - let mut cmd = creator.clone().new_command_sync(executable); - cmd.arg("-E"); - if cfg!(windows) { - cmd.arg("-Xcompiler") - .arg("-EP"); - } - cmd.arg(&parsed_args.input) - .args(&parsed_args.preprocessor_args) - .args(&parsed_args.common_args) - .env_clear() - .envs(env_vars.iter().map(|&(ref k, ref v)| (k, v))) - .current_dir(cwd); - - if log_enabled!(Trace) { - trace!("preprocess: {:?}", cmd); - } - run_input_output(cmd, None) -} +counted_array!(pub static ARGS: [ArgInfo; _] = [ + //todo: refactor show_includes into dependency_args -pub fn generate_compile_commands( - path_transformer: &mut dist::PathTransformer, - executable: &Path, - parsed_args: &ParsedArguments, - cwd: &Path, - env_vars: &[(OsString, OsString)], - kind: CCompilerKind, - rewrite_includes_only: bool, -) -> Result<(CompileCommand, Option, Cacheable)> { - // Unused arguments - { - let _ = path_transformer; - let _ = kind; - let _ = rewrite_includes_only; - } - - trace!("compile"); - - let out_file = match parsed_args.outputs.get("obj") { - Some(obj) => obj, - None => return Err("Missing object file output".into()), - }; - - // Pass the language explicitly as we might have gotten it from the - // command line. 
- let language = match parsed_args.language { - Language::C => "c", - Language::Cxx => "c++", - Language::ObjectiveC => "objective-c", - Language::ObjectiveCxx => "objective-c++", - }; - let mut arguments: Vec = vec![ - "-c".into(), - parsed_args.input.clone().into(), - "-o".into(), - out_file.into(), - ]; - arguments.extend(parsed_args.preprocessor_args.clone()); - arguments.extend(parsed_args.common_args.clone()); - let command = CompileCommand { - executable: executable.to_owned(), - arguments, - env_vars: env_vars.to_owned(), - cwd: cwd.to_owned(), - }; - - Ok((command, None, Cacheable::Yes)) -} + take_arg!("--Werror", OsString, CanBeSeparated('='), PassThrough), + take_arg!("--archive-options options", OsString, CanBeSeparated('='), PassThrough), + take_arg!("--compiler-bindir", PathBuf, CanBeSeparated('='), PreprocessorArgumentPath), + take_arg!("--compiler-options", OsString, CanBeSeparated('='), PreprocessorArgument), + flag!("--expt-extended-lambda", PreprocessorArgumentFlag), + flag!("--expt-relaxed-constexpr", PreprocessorArgumentFlag), + flag!("--extended-lambda", PreprocessorArgumentFlag), + take_arg!("--generate-code", OsString, CanBeSeparated('='), PassThrough), + take_arg!("--gpu-architecture", OsString, CanBeSeparated('='), PassThrough), + take_arg!("--gpu-code", OsString, CanBeSeparated('='), PassThrough), + take_arg!("--include-path", PathBuf, CanBeSeparated('='), PreprocessorArgumentPath), + take_arg!("--linker-options", OsString, CanBeSeparated('='), PassThrough), + take_arg!("--maxrregcount", OsString, CanBeSeparated('='), PassThrough), + flag!("--no-host-device-initializer-list", PreprocessorArgumentFlag), + take_arg!("--nvlink-options", OsString, CanBeSeparated('='), PassThrough), + take_arg!("--ptxas-options", OsString, CanBeSeparated('='), PassThrough), + take_arg!("--relocatable-device-code", OsString, CanBeSeparated('='), PreprocessorArgument), + take_arg!("--std", OsString, CanBeSeparated('='), PassThrough), + take_arg!("--system-include", PathBuf, CanBeSeparated('='), PreprocessorArgumentPath), -counted_array!(pub static ARGS: [ArgInfo; _] = [ - take_arg!("--Werror", OsString, Separated, PassThrough), - take_arg!("--compiler-bindir", PathBuf, Separated, PassThroughPath), - take_arg!("--compiler-options", OsString, Separated, PassThrough), - take_arg!("--std", OsString, Separated, PassThrough), - take_arg!("-Xcompiler", OsString, Separated, PassThrough), - take_arg!("-Xfatbin", OsString, Separated, PassThrough), - take_arg!("-Xptxas", OsString, Separated, PassThrough), - take_arg!("-ccbin", PathBuf, Separated, PassThroughPath), + take_arg!("-Xarchive", OsString, CanBeSeparated('='), PassThrough), + take_arg!("-Xcompiler", OsString, CanBeSeparated('='), PreprocessorArgument), + take_arg!("-Xfatbin", OsString, CanBeSeparated('='), PassThrough), + take_arg!("-Xlinker", OsString, CanBeSeparated('='), PassThrough), + take_arg!("-Xnvlink", OsString, CanBeSeparated('='), PassThrough), + take_arg!("-Xptxas", OsString, CanBeSeparated('='), PassThrough), + take_arg!("-arch", OsString, CanBeSeparated('='), PassThrough), + take_arg!("-ccbin", PathBuf, CanBeSeparated('='), PreprocessorArgumentPath), + take_arg!("-code", OsString, CanBeSeparated('='), PassThrough), + flag!("-dc", DoCompilation), + flag!("-expt-extended-lambda", PreprocessorArgumentFlag), + flag!("-expt-relaxed-constexpr", PreprocessorArgumentFlag), + flag!("-extended-lambda", PreprocessorArgumentFlag), take_arg!("-gencode", OsString, CanBeSeparated('='), PassThrough), + take_arg!("-isystem", PathBuf, 
CanBeSeparated('='), PreprocessorArgumentPath), take_arg!("-maxrregcount", OsString, CanBeSeparated('='), PassThrough), - take_arg!("-std", OsString, CanBeSeparated('='), PassThrough), + flag!("-nohdinitlist", PreprocessorArgumentFlag), + flag!("-ptx", DoCompilation), + take_arg!("-rdc", OsString, CanBeSeparated('='), PreprocessorArgument), + take_arg!("-x", OsString, CanBeSeparated('='), Language), ]); -// TODO: add some unit tests +#[cfg(test)] +mod test { + use super::*; + use crate::compiler::gcc; + use crate::compiler::*; + use crate::mock_command::*; + use crate::test::utils::*; + use futures::Future; + use futures_cpupool::CpuPool; + use std::collections::HashMap; + use std::path::PathBuf; + + fn parse_arguments_(arguments: Vec) -> CompilerArguments { + let arguments = arguments.iter().map(OsString::from).collect::>(); + NVCC.parse_arguments(&arguments, ".".as_ref()) + } + + macro_rules! parses { + ( $( $s:expr ),* ) => { + match parse_arguments_(vec![ $( $s.to_string(), )* ]) { + CompilerArguments::Ok(a) => a, + o => panic!("Got unexpected parse result: {:?}", o), + } + } + } + + #[test] + fn test_parse_arguments_simple_c() { + let a = parses!("-c", "foo.c", "-o", "foo.o"); + assert_eq!(Some("foo.c"), a.input.to_str()); + assert_eq!(Language::C, a.language); + assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); + assert!(a.preprocessor_args.is_empty()); + assert!(a.common_args.is_empty()); + } + + #[test] + fn test_parse_arguments_simple_cu() { + let a = parses!("-c", "foo.cu", "-o", "foo.o"); + assert_eq!(Some("foo.cu"), a.input.to_str()); + assert_eq!(Language::Cuda, a.language); + assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); + assert!(a.preprocessor_args.is_empty()); + assert!(a.common_args.is_empty()); + } + + #[test] + fn test_parse_arguments_simple_c_as_cu() { + let a = parses!("-x", "cu", "-c", "foo.c", "-o", "foo.o"); + assert_eq!(Some("foo.c"), a.input.to_str()); + assert_eq!(Language::Cuda, a.language); + assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); + assert!(a.preprocessor_args.is_empty()); + assert!(a.common_args.is_empty()); + } + + #[test] + fn test_parse_arguments_dc_compile_flag() { + let a = parses!("-x", "cu", "-dc", "foo.c", "-o", "foo.o"); + assert_eq!(Some("foo.c"), a.input.to_str()); + assert_eq!(Language::Cuda, a.language); + assert_eq!(Some("-dc"), a.compilation_flag.to_str()); + assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); + assert!(a.preprocessor_args.is_empty()); + assert!(a.common_args.is_empty()); + } + + #[test] + fn test_parse_arguments_values() { + let a = parses!( + "-c", + "foo.cpp", + "-fabc", + "-I", + "include-file", + "-o", + "foo.o", + "--include-path", + "include-file", + "-isystem=/system/include/file" + ); + assert_eq!(Some("foo.cpp"), a.input.to_str()); + assert_eq!(Language::Cxx, a.language); + assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); + assert_eq!( + ovec![ + "-Iinclude-file", + "--include-path", + "include-file", + "-isystem", + "/system/include/file" + ], + a.preprocessor_args + ); + assert!(a.dependency_args.is_empty()); + assert_eq!(ovec!["-fabc"], a.common_args); + } + + #[test] + fn test_parse_md_mt_flags_cu() { + let a = parses!( + "-x", "cu", "-c", "foo.c", "-fabc", "-MD", "-MT", "foo.o", "-MF", "foo.o.d", "-o", + "foo.o" + ); + assert_eq!(Some("foo.c"), a.input.to_str()); + assert_eq!(Language::Cuda, a.language); + assert_eq!(Some("-c"), a.compilation_flag.to_str()); + assert_map_contains!(a.outputs, ("obj", 
PathBuf::from("foo.o"))); + assert_eq!( + ovec!["-MD", "-MF", "foo.o.d", "-MT", "foo.o"], + a.dependency_args + ); + assert_eq!(ovec!["-fabc"], a.common_args); + } + + #[test] + fn test_parse_generate_code_flags() { + let a = parses!( + "-x", + "cu", + "--generate-code=arch=compute_61,code=sm_61", + "-c", + "foo.c", + "-o", + "foo.o" + ); + assert_eq!(Some("foo.c"), a.input.to_str()); + assert_eq!(Language::Cuda, a.language); + assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); + assert!(a.preprocessor_args.is_empty()); + assert_eq!( + ovec!["--generate-code", "arch=compute_61,code=sm_61"], + a.common_args + ); + } + + #[test] + fn test_parse_pass_to_host_flags() { + let a = parses!( + "-x=cu", + "--generate-code=arch=compute_60,code=[sm_60,sm_61]", + "-Xnvlink=--suppress-stack-size-warning", + "-Xcompiler", + "-fPIC,-fno-common", + "-Xcompiler=-fvisibility=hidden", + "-Xcompiler=-Wall,-Wno-unknown-pragmas,-Wno-unused-local-typedefs", + "-Xcudafe", + "--display_error_number", + "-c", + "foo.c", + "-o", + "foo.o" + ); + assert_eq!(Some("foo.c"), a.input.to_str()); + assert_eq!(Language::Cuda, a.language); + assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); + assert_eq!( + ovec![ + "-Xcompiler", + "-fPIC,-fno-common", + "-Xcompiler", + "-fvisibility=hidden", + "-Xcompiler", + "-Wall,-Wno-unknown-pragmas,-Wno-unused-local-typedefs" + ], + a.preprocessor_args + ); + assert_eq!( + ovec![ + "--generate-code", + "arch=compute_60,code=[sm_60,sm_61]", + "-Xnvlink", + "--suppress-stack-size-warning", + "-Xcudafe", + "--display_error_number" + ], + a.common_args + ); + } + + #[test] + fn test_parse_no_capturing_of_xcompiler() { + let a = parses!( + "-x=cu", + "-forward-unknown-to-host-compiler", + "--expt-relaxed-constexpr", + "-Xcompiler", + "-pthread", + "-std=c++14", + "-c", + "foo.c", + "-o", + "foo.o" + ); + assert_eq!(Some("foo.c"), a.input.to_str()); + assert_eq!(Language::Cuda, a.language); + assert_map_contains!(a.outputs, ("obj", PathBuf::from("foo.o"))); + assert_eq!( + ovec!["--expt-relaxed-constexpr", "-Xcompiler", "-pthread"], + a.preprocessor_args + ); + assert_eq!( + ovec!["-forward-unknown-to-host-compiler", "-std=c++14"], + a.common_args + ); + } + + #[test] + fn test_parse_dlink_is_not_compilation() { + assert_eq!( + CompilerArguments::NotCompilation, + parse_arguments_(stringvec![ + "-forward-unknown-to-host-compiler", + "--generate-code=arch=compute_50,code=[compute_50,sm_50,sm_52]", + "-dlink", + "main.cu.o", + "-o", + "device_link.o" + ]) + ); + } + #[test] + fn test_parse_cant_cache_flags() { + assert_eq!( + CompilerArguments::CannotCache("-E", None), + parse_arguments_(stringvec!["-x", "cu", "-c", "foo.c", "-o", "foo.o", "-E"]) + ); + + assert_eq!( + CompilerArguments::CannotCache("-M", None), + parse_arguments_(stringvec!["-x", "cu", "-c", "foo.c", "-o", "foo.o", "-M"]) + ); + } +} diff --git a/src/compiler/rust.rs b/src/compiler/rust.rs index bc00c31dc..81b5532eb 100644 --- a/src/compiler/rust.rs +++ b/src/compiler/rust.rs @@ -112,13 +112,12 @@ pub struct RustHasher { } /// a lookup proxy for determining the actual compiler used per file or directory -#[derive(Debug,Clone)] +#[derive(Debug, Clone)] pub struct RustupProxy { proxy_executable: PathBuf, filetime: FileTime, } - #[derive(Debug, Clone, PartialEq)] pub struct ParsedArguments { /// The full commandline, with all parsed aguments @@ -495,8 +494,6 @@ where } } - - impl CompilerProxy for RustupProxy where T: CommandCreatorSync, @@ -505,42 +502,52 @@ where &self, mut creator: T, cwd: 
PathBuf, - env: &[(OsString,OsString)], + env: &[(OsString, OsString)], ) -> SFuture<(PathBuf, FileTime)> { - let proxy_executable = self.proxy_executable.clone(); let mut child = creator.new_command_sync(&proxy_executable); - child.current_dir(&cwd) + child + .current_dir(&cwd) .env_clear() .envs(ref_env(&env)) .args(&["which", "rustc"]); - let lookup = - run_input_output(child, None) - .map_err(|e| { format!("Failed to execute rustup which rustc: {}", e).into() }) - .and_then(move |output| { - String::from_utf8(output.stdout.clone()) - .map_err(|e| { format!("Failed to parse output of rustup which rustc: {}", e).into() }) - .and_then(|stdout| { - let proxied_compiler = PathBuf::from(stdout.trim()); - trace!("proxy: rustup which rustc produced: {:?}", &proxied_compiler); - let res = fs::metadata(proxied_compiler.as_path()) - .map_err(|e| { format!("Failed to obtain metadata of the resolved, true rustc: {}", e).into() }) + let lookup = run_input_output(child, None) + .map_err(|e| format!("Failed to execute rustup which rustc: {}", e).into()) + .and_then(move |output| { + String::from_utf8(output.stdout.clone()) + .map_err(|e| { + format!("Failed to parse output of rustup which rustc: {}", e).into() + }) + .and_then(|stdout| { + let proxied_compiler = PathBuf::from(stdout.trim()); + trace!( + "proxy: rustup which rustc produced: {:?}", + &proxied_compiler + ); + let res = fs::metadata(proxied_compiler.as_path()) + .map_err(|e| { + format!( + "Failed to obtain metadata of the resolved, true rustc: {}", + e + ) + .into() + }) .and_then(|attr| { if attr.is_file() { Ok(FileTime::from_last_modification_time(&attr)) } else { - Err("proxy: rustup resolved compiler is not of type file".into()) + Err("proxy: rustup resolved compiler is not of type file" + .into()) } }) - .map(|filetime| {(proxied_compiler, filetime)}); - res - }) - }); + .map(|filetime| (proxied_compiler, filetime)); + res + }) + }); Box::new(lookup) - } fn box_clone(&self) -> Box> { @@ -548,14 +555,13 @@ where } } -impl RustupProxy -{ - pub fn new
(proxy_executable : P) -> Result +impl RustupProxy { + pub fn new
(proxy_executable: P) -> Result where P: AsRef, { let filetime = fs::metadata(proxy_executable.as_ref()) - .map(|attr| { FileTime::from_last_modification_time(&attr) })?; + .map(|attr| FileTime::from_last_modification_time(&attr))?; let proxy_executable = proxy_executable.as_ref().to_owned(); Ok(Self { proxy_executable, @@ -564,15 +570,14 @@ impl RustupProxy } pub fn find_proxy_executable( - compiler_executable : &Path, - proxy_name : &str, + compiler_executable: &Path, + proxy_name: &str, mut creator: T, - env: &[(OsString,OsString)], + env: &[(OsString, OsString)], ) -> SFuture>> where T: CommandCreatorSync, { - let compiler_executable1 = compiler_executable.to_owned(); let compiler_executable2 = compiler_executable.to_owned(); let proxy_name1 = proxy_name.to_owned(); @@ -584,7 +589,7 @@ impl RustupProxy enum ProxyPath { Candidate(PathBuf), ToBeDiscovered, - None + None, } // verification if rustc is a proxy or not @@ -666,39 +671,37 @@ impl RustupProxy f_ok(state) }); - - let f = find_candidate - .and_then(move |state| { - match state { - Err(e) => f_ok(Err(e)), - Ok(ProxyPath::ToBeDiscovered) => f_ok(Err("Failed to discover a rustup executable, but rustc behaves like a proxy".into())), - Ok(ProxyPath::None) => f_ok(Ok(None)), - Ok(ProxyPath::Candidate(proxy_executable)) => { - // verify the candidate is a rustup - let mut child = creator.new_command_sync(proxy_executable.to_owned()); - child.env_clear().envs(ref_env(&env2)).args(&["--version"]); - let rustup_candidate_check = - run_input_output(child, None) - .map(move |output| { - String::from_utf8(output.stdout.clone()) - .map_err(|_e| { "Response of `rustup --version` is not valid UTF-8".into() }) - .and_then(|stdout| { - if stdout.trim().starts_with("rustup ") { - trace!("PROXY rustup --version produced: {}", &stdout); - Self::new(&proxy_executable).map(|proxy| Some(proxy)) - } else { - Err("Unexpected output or `rustup --version`".into()) - } - }) - }); - Box::new(rustup_candidate_check) - } + let f = find_candidate.and_then(move |state| { + match state { + Err(e) => f_ok(Err(e)), + Ok(ProxyPath::ToBeDiscovered) => f_ok(Err( + "Failed to discover a rustup executable, but rustc behaves like a proxy".into(), + )), + Ok(ProxyPath::None) => f_ok(Ok(None)), + Ok(ProxyPath::Candidate(proxy_executable)) => { + // verify the candidate is a rustup + let mut child = creator.new_command_sync(proxy_executable.to_owned()); + child.env_clear().envs(ref_env(&env2)).args(&["--version"]); + let rustup_candidate_check = run_input_output(child, None).map(move |output| { + String::from_utf8(output.stdout.clone()) + .map_err(|_e| { + "Response of `rustup --version` is not valid UTF-8".into() + }) + .and_then(|stdout| { + if stdout.trim().starts_with("rustup ") { + trace!("PROXY rustup --version produced: {}", &stdout); + Self::new(&proxy_executable).map(|proxy| Some(proxy)) + } else { + Err("Unexpected output or `rustup --version`".into()) + } + }) + }); + Box::new(rustup_candidate_check) } - - }); + } + }); Box::new(f) - } } @@ -2270,14 +2273,18 @@ fn parse_rustc_z_ls(stdout: &str) -> Result> { assert!(line_splits.next().is_none()); let mut libstring_splits = libstring.rsplitn(2, '-'); - // Rustc prints strict hash value (rather than extra filename as it likely should be) - // https://github.com/rust-lang/rust/pull/55555 - let _svh = libstring_splits - .next() - .ok_or_else(|| "No hash in lib string from rustc -Z ls")?; - let libname = libstring_splits - .next() - .expect("Zero strings from libstring split"); + // Most things get printed as 
${LIBNAME}-${HASH} but for some things + // (native code-only libraries?), ${LIBNAME} is all you get. + let libname = { + let maybe_hash = libstring_splits + .next() + .ok_or_else(|| "Nothing in lib string from `rustc -Z ls`")?; + if let Some(name) = libstring_splits.next() { + name + } else { + maybe_hash + } + }; assert!(libstring_splits.next().is_none()); dep_names.push(libname); @@ -2852,6 +2859,25 @@ c:/foo/bar.rs: ); } + #[cfg(feature = "dist-client")] + #[test] + fn test_parse_rustc_z_ls() { + let output = "=External Dependencies= +1 lucet_runtime +2 lucet_runtime_internals-1ff6232b6940e924 +3 lucet_runtime_macros-c18e1952b835769e + + +"; + let res = parse_rustc_z_ls(&output); + assert!(res.is_ok()); + let res = res.unwrap(); + assert_eq!(res.len(), 3); + assert_eq!(res[0], "lucet_runtime"); + assert_eq!(res[1], "lucet_runtime_internals"); + assert_eq!(res[2], "lucet_runtime_macros"); + } + fn mock_dep_info(creator: &Arc>, dep_srcs: &[&str]) { // Mock the `rustc --emit=dep-info` process by writing // a dep-info file. diff --git a/src/dist/client_auth.rs b/src/dist/client_auth.rs index 65070075f..c47fcf827 100644 --- a/src/dist/client_auth.rs +++ b/src/dist/client_auth.rs @@ -51,7 +51,7 @@ fn serve_sfuture(serve: fn(Request) -> SFutureSend>) -> imp Box::new(serve(req).or_else(move |e| { let body = e.display_chain().to_string(); eprintln!( - "Error during a request to {} on the client auth web server\n{}", + "sccache: Error during a request to {} on the client auth web server\n{}", uri, body ); let len = body.len(); @@ -308,7 +308,7 @@ mod code_grant_pkce { MIN_TOKEN_VALIDITY_WARNING ); eprintln!( - "Token retrieved expires in under {}", + "sccache: Token retrieved expires in under {}", MIN_TOKEN_VALIDITY_WARNING ); } @@ -444,7 +444,7 @@ mod implicit { MIN_TOKEN_VALIDITY_WARNING ); eprintln!( - "Token retrieved expires in under {}", + "sccache: Token retrieved expires in under {}", MIN_TOKEN_VALIDITY_WARNING ); } @@ -576,7 +576,10 @@ pub fn get_token_oauth2_code_grant_pkce( ); info!("Listening on http://localhost:{} with 1 thread.", port); - println!("Please visit http://localhost:{} in your browser", port); + println!( + "sccache: Please visit http://localhost:{} in your browser", + port + ); let (shutdown_tx, shutdown_rx) = oneshot::channel(); let (code_tx, code_rx) = mpsc::sync_channel(1); let state = code_grant_pkce::State { @@ -614,7 +617,10 @@ pub fn get_token_oauth2_implicit(client_id: &str, mut auth_url: Url) -> Result Option<&(dyn std::error::Error +'static)>{ + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { match *self { RouilleBincodeError::ParseError(ref e) => Some(e), _ => None, @@ -440,7 +440,9 @@ mod server { &self, fmt: &mut std::fmt::Formatter<'_>, ) -> std::result::Result<(), std::fmt::Error> { - write!(fmt, "{}", + write!( + fmt, + "{}", match *self { RouilleBincodeError::BodyAlreadyExtracted => { "the body of the request was already extracted" @@ -480,7 +482,7 @@ mod server { } impl ErrJson { - fn from_err(err: &E) -> ErrJson{ + fn from_err(err: &E) -> ErrJson { let cause = err.source().map(ErrJson::from_err).map(Box::new); ErrJson { description: err.to_string(), diff --git a/src/errors.rs b/src/errors.rs index 924f19da7..ab49437fd 100644 --- a/src/errors.rs +++ b/src/errors.rs @@ -12,30 +12,21 @@ // See the License for the specific language governing permissions and // limitations under the License. -// TODO error_chain needs to be upgraded, it uses deprecated APIs. 
-#![allow(deprecated)] -#![allow(renamed_and_removed_lints)] - +use futures::future; +use futures::Future; use std::boxed::Box; use std::convert; use std::error; use std::io; use std::process; -#[cfg(feature = "jsonwebtoken")] -use crate::jwt; -use futures::future; -use futures::Future; - error_chain! { foreign_links { Hyper(hyper::Error) #[cfg(feature = "hyper")]; Io(io::Error); Lru(lru_disk_cache::Error); Json(serde_json::Error); - Jwt(jwt::errors::Error) #[cfg(feature = "jsonwebtoken")]; Openssl(openssl::error::ErrorStack) #[cfg(feature = "openssl")]; - Base64Decode(base64::DecodeError); Bincode(bincode::Error); Memcached(memcached::proto::Error) #[cfg(feature = "memcached")]; Redis(redis::RedisError) #[cfg(feature = "redis")]; diff --git a/src/lib.rs b/src/lib.rs index e67367243..edd0e35e4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -13,7 +13,7 @@ // limitations under the License. #![deny(rust_2018_idioms)] -#![recursion_limit = "128"] +#![recursion_limit = "256"] #[macro_use] extern crate clap; @@ -71,10 +71,10 @@ pub fn main() { Ok(s) => s, Err(e) => { let stderr = &mut std::io::stderr(); - writeln!(stderr, "error: {}", e).unwrap(); + writeln!(stderr, "sccache: error: {}", e).unwrap(); for e in e.iter().skip(1) { - writeln!(stderr, "caused by: {}", e).unwrap(); + writeln!(stderr, "sccache: caused by: {}", e).unwrap(); } 2 } @@ -82,7 +82,7 @@ pub fn main() { Err(e) => { println!("sccache: {}", e); for e in e.iter().skip(1) { - println!("caused by: {}", e); + println!("sccache: caused by: {}", e); } cmdline::get_app().print_help().unwrap(); println!(); diff --git a/src/mock_command.rs b/src/mock_command.rs index 523925788..9c1f7b345 100644 --- a/src/mock_command.rs +++ b/src/mock_command.rs @@ -533,7 +533,6 @@ impl CommandCreator for MockCommandCreator { } } - /// To simplify life for using a `CommandCreator` across multiple threads. 
impl CommandCreatorSync for Arc> { type Cmd = T::Cmd; diff --git a/src/server.rs b/src/server.rs index e56713c23..41892eab0 100644 --- a/src/server.rs +++ b/src/server.rs @@ -30,8 +30,8 @@ use crate::protocol::{Compile, CompileFinished, CompileResponse, Request, Respon use crate::util; use filetime::FileTime; use futures::sync::mpsc; -use futures::task::{self, Task}; use futures::{future, stream, Async, AsyncSink, Future, Poll, Sink, StartSend, Stream}; +use futures_03::compat::Compat; use futures_cpupool::CpuPool; use number_prefix::{binary_prefix, Prefixed, Standalone}; use std::cell::RefCell; @@ -44,11 +44,13 @@ use std::io::{self, Write}; use std::mem; use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; use std::path::PathBuf; +use std::pin::Pin; use std::process::{ExitStatus, Output}; use std::rc::Rc; use std::sync::Arc; #[cfg(feature = "dist-client")] use std::sync::Mutex; +use std::task::{Context, Waker}; use std::time::Duration; use std::time::Instant; use std::u64; @@ -190,7 +192,6 @@ impl DistClientContainer { fn new(config: &Config, pool: &CpuPool) -> Self { let config = DistClientConfig { pool: pool.clone(), - scheduler_url: config.dist.scheduler_url.clone(), auth: config.dist.auth.clone(), cache_dir: config.dist.cache_dir.clone(), @@ -321,9 +322,8 @@ impl DistClientContainer { } }}; } - // TODO: NLL would avoid this clone - match config.scheduler_url.clone() { - Some(addr) => { + match config.scheduler_url { + Some(ref addr) => { let url = addr.to_url(); info!("Enabling distributed sccache to {}", url); let auth_token = match &config.auth { @@ -336,7 +336,6 @@ impl DistClientContainer { let auth_token = try_or_fail_with_message!(auth_token.chain_err(|| { "could not load client auth token, run |sccache --dist-auth|" })); - // TODO: NLL would let us move this inside the previous match let dist_client = dist::http::Client::new( &config.pool, url, @@ -580,7 +579,7 @@ impl SccacheServer { // Note that we cap the amount of time this can take, however, as we // don't want to wait *too* long. 
runtime - .block_on(Timeout::new(wait, Duration::new(30, 0))) + .block_on(Timeout::new(Compat::new(wait), Duration::new(30, 0))) .map_err(|e| { if e.is_inner() { e.into_inner().unwrap() @@ -595,23 +594,27 @@ impl SccacheServer { } } - -type CompilerMap = - HashMap>>; - +type CompilerMap = HashMap>>; /// entry of the compiler cache -struct CompilerCacheEntry { +struct CompilerCacheEntry { /// compiler argument trait obj - pub compiler : Box>, + pub compiler: Box>, /// modification time of the compilers executable file - pub mtime : FileTime, + pub mtime: FileTime, /// distributed compilation extra info - pub dist_info : Option<(PathBuf, FileTime)>, + pub dist_info: Option<(PathBuf, FileTime)>, } -impl CompilerCacheEntry where C: CommandCreatorSync { - fn new(compiler : Box>, mtime : FileTime, dist_info : Option<(PathBuf, FileTime)>) -> Self { +impl CompilerCacheEntry +where + C: CommandCreatorSync, +{ + fn new( + compiler: Box>, + mtime: FileTime, + dist_info: Option<(PathBuf, FileTime)>, + ) -> Self { Self { compiler, mtime, @@ -639,13 +642,7 @@ struct SccacheService { /// (usually file or current working directory) /// the associated `FileTime` is the modification time of /// the compiler proxy, in order to track updates of the proxy itself - compiler_proxies: Rc< - RefCell< - HashMap< - PathBuf, (Box>, FileTime) - > - > - >, + compiler_proxies: Rc>, FileTime)>>>, /// Thread pool to execute work in pool: CpuPool, @@ -798,7 +795,7 @@ where message, body: true, })) - .chain(body.map(|chunk| Frame::Body { chunk: Some(chunk) })) + .chain(Compat::new(body).map(|chunk| Frame::Body { chunk: Some(chunk) })) .chain(stream::once(Ok(Frame::Body { chunk: None }))), ), }; @@ -854,8 +851,6 @@ where ) } - - /// Look up compiler info from the cache for the compiler `path`. /// If not cached, determine the compiler type and cache the result. 
fn compiler_info( @@ -873,7 +868,10 @@ where let path2 = path.clone(); let path1 = path.clone(); - let env = env.into_iter().cloned().collect::>(); + let env = env + .into_iter() + .cloned() + .collect::>(); let resolve_w_proxy = { let compiler_proxies_borrow = self.compiler_proxies.borrow(); @@ -884,68 +882,68 @@ where cwd.clone(), env.as_slice(), ); - Box::new(fut.then(|res : Result<_>| { Ok(res.ok()) })) + Box::new(fut.then(|res: Result<_>| Ok(res.ok()))) } else { f_ok(None) } }; // use the supplied compiler path as fallback, lookup its modification time too - let w_fallback = resolve_w_proxy - .then(move |res: Result>| { - let opt = match res { - Ok(Some(x)) => Some(x), // TODO resolve the path right away - _ => { - // fallback to using the path directly - metadata(&path2) + let w_fallback = resolve_w_proxy.then(move |res: Result>| { + let opt = match res { + Ok(Some(x)) => Some(x), // TODO resolve the path right away + _ => { + // fallback to using the path directly + metadata(&path2) .map(|attr| FileTime::from_last_modification_time(&attr)) .ok() - .map(move |filetime| { - (path2.clone(),filetime) - }) - } - }; - f_ok(opt) - }); - - let lookup_compiler = - w_fallback.and_then(move |opt : Option<(PathBuf, FileTime)>| { - let (resolved_compiler_path, mtime) - = opt.expect("Must contain sane data, otherwise mtime is not avail"); - + .map(move |filetime| (path2.clone(), filetime)) + } + }; + f_ok(opt) + }); - let dist_info = match me1.dist_client.get_client() { - Ok(Some(ref client)) => { - if let Some(archive) = client.get_custom_toolchain(&resolved_compiler_path) { - match metadata(&archive) - .map(|attr| FileTime::from_last_modification_time(&attr)) - { - Ok(mtime) => Some((archive, mtime)), - _ => None, - } - } else { - None + let lookup_compiler = w_fallback.and_then(move |opt: Option<(PathBuf, FileTime)>| { + let (resolved_compiler_path, mtime) = + opt.expect("Must contain sane data, otherwise mtime is not avail"); + + let dist_info = match me1.dist_client.get_client() { + Ok(Some(ref client)) => { + if let Some(archive) = client.get_custom_toolchain(&resolved_compiler_path) { + match metadata(&archive) + .map(|attr| FileTime::from_last_modification_time(&attr)) + { + Ok(mtime) => Some((archive, mtime)), + _ => None, } + } else { + None } - _ => None, - }; + } + _ => None, + }; - let opt = match me1.compilers.borrow().get(&resolved_compiler_path) { - // It's a hit only if the mtime and dist archive data matches. - Some(&Some(ref entry)) => { - if entry.mtime == mtime && entry.dist_info == dist_info { - Some(entry.compiler.clone()) - } else { - None - } + let opt = match me1.compilers.borrow().get(&resolved_compiler_path) { + // It's a hit only if the mtime and dist archive data matches. 
+ Some(&Some(ref entry)) => { + if entry.mtime == mtime && entry.dist_info == dist_info { + Some(entry.compiler.clone()) + } else { + None } - _ => None, - }; - f_ok((resolved_compiler_path, mtime, opt, dist_info)) - }); + } + _ => None, + }; + f_ok((resolved_compiler_path, mtime, opt, dist_info)) + }); - let obtain = - lookup_compiler.and_then(move |(resolved_compiler_path, mtime, opt, dist_info) : (PathBuf, FileTime, Option>>, Option<(PathBuf,FileTime)> )| { + let obtain = lookup_compiler.and_then( + move |(resolved_compiler_path, mtime, opt, dist_info): ( + PathBuf, + FileTime, + Option>>, + Option<(PathBuf, FileTime)>, + )| { match opt { Some(info) => { trace!("compiler_info cache hit"); @@ -968,43 +966,59 @@ where dist_info.clone().map(|(p, _)| p), ); - Box::new( - x.then(move |info: Result<(Box>,Option>>)>| { - match info { - Ok((ref c, ref proxy)) => { - // register the proxy for this compiler, so it will be used directly from now on - // and the true/resolved compiler will create table hits in the hash map - // based on the resolved path - if let Some(proxy) = proxy { - trace!("Inserting new path proxy {:?} @ {:?} -> {:?}", &path, &cwd, resolved_compiler_path); - let proxy : Box> = proxy.box_clone(); - me.compiler_proxies.borrow_mut().insert(path, (proxy, mtime.clone())); + Box::new(x.then( + move |info: Result<( + Box>, + Option>>, + )>| { + match info { + Ok((ref c, ref proxy)) => { + // register the proxy for this compiler, so it will be used directly from now on + // and the true/resolved compiler will create table hits in the hash map + // based on the resolved path + if let Some(proxy) = proxy { + trace!( + "Inserting new path proxy {:?} @ {:?} -> {:?}", + &path, + &cwd, + resolved_compiler_path + ); + let proxy: Box> = + proxy.box_clone(); + me.compiler_proxies + .borrow_mut() + .insert(path, (proxy, mtime.clone())); + } + // TODO add some safety checks in case a proxy exists, that the initial `path` is not + // TODO the same as the resolved compiler binary + + // cache + let map_info = + CompilerCacheEntry::new(c.clone(), mtime, dist_info); + trace!( + "Inserting POSSIBLY PROXIED cache map info for {:?}", + &resolved_compiler_path + ); + me.compilers + .borrow_mut() + .insert(resolved_compiler_path, Some(map_info)); + } + Err(_) => { + trace!("Inserting PLAIN cache map info for {:?}", &path); + me.compilers.borrow_mut().insert(path, None); } - // TODO add some safety checks in case a proxy exists, that the initial `path` is not - // TODO the same as the resolved compiler binary - - // cache - let map_info = CompilerCacheEntry::new(c.clone(), mtime, dist_info); - trace!("Inserting POSSIBLY PROXIED cache map info for {:?}", &resolved_compiler_path); - me.compilers.borrow_mut().insert(resolved_compiler_path, Some(map_info)); - }, - Err(_) => { - trace!("Inserting PLAIN cache map info for {:?}", &path); - me.compilers.borrow_mut().insert(path, None); } - } - // drop the proxy information, response is compiler only - let r : Result>> = info.map(|info| info.0); - f_ok(r) - })) + // drop the proxy information, response is compiler only + let r: Result>> = info.map(|info| info.0); + f_ok(r) + }, + )) } } - - }); - + }, + ); return Box::new(obtain); - } /// Check that we can handle and cache `cmd` when run with `compiler`. 
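The compiler_info refactor above treats a cached compiler as valid only when both the executable's modification time and the distributed-toolchain archive info still match what was recorded for the resolved path. A minimal sketch of that validation idea, using simplified, hypothetical names (CacheEntry, lookup) and std::time::SystemTime in place of the FileTime type the real code uses:

    use std::collections::HashMap;
    use std::fs;
    use std::path::{Path, PathBuf};
    use std::time::SystemTime;

    struct CacheEntry {
        mtime: SystemTime,
        dist_info: Option<(PathBuf, SystemTime)>,
        // the cached compiler object would live here as well
    }

    // Reuse an entry only if the executable's mtime and the dist archive info
    // still match what was stored when the entry was created.
    fn lookup<'a>(
        cache: &'a HashMap<PathBuf, CacheEntry>,
        resolved: &Path,
        dist_info: &Option<(PathBuf, SystemTime)>,
    ) -> Option<&'a CacheEntry> {
        let mtime = fs::metadata(resolved).and_then(|m| m.modified()).ok()?;
        cache
            .get(resolved)
            .filter(|entry| entry.mtime == mtime && entry.dist_info == *dist_info)
    }

    fn main() {
        // An empty cache (or a stale mtime) is a miss, so the caller would
        // re-detect the compiler and insert a fresh entry.
        let cache: HashMap<PathBuf, CacheEntry> = HashMap::new();
        assert!(lookup(&cache, Path::new("/usr/bin/rustc"), &None).is_none());
    }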
@@ -1182,10 +1196,10 @@ where error!("[{:?}] fatal error: {}", out_pretty, err); let mut error = "sccache: encountered fatal error\n".to_string(); - let _ = writeln!(error, "sccache: error : {}", err); + let _ = writeln!(error, "sccache: error: {}", err); for e in err.iter() { error!("[{:?}] \t{}", out_pretty, e); - let _ = writeln!(error, "sccache: cause: {}", e); + let _ = writeln!(error, "sccache: caused by: {}", e); } stats.cache_errors.increment(&kind); //TODO: figure out a better way to communicate this? @@ -1235,11 +1249,8 @@ pub struct PerLanguageCount { impl PerLanguageCount { fn increment(&mut self, kind: &CompilerKind) { let key = kind.lang_kind(); - let count = match self.counts.get(&key) { - Some(v) => v + 1, - None => 1, - }; - self.counts.insert(key, count); + let count = self.counts.entry(key).or_insert(0); + *count += 1; } pub fn all(&self) -> u64 { @@ -1544,15 +1555,15 @@ impl Body { } } -impl Stream for Body { - type Item = R; - type Error = Error; - fn poll(&mut self) -> Poll, Self::Error> { - match self.receiver.poll().unwrap() { - Async::Ready(Some(Ok(item))) => Ok(Async::Ready(Some(item))), - Async::Ready(Some(Err(err))) => Err(err), - Async::Ready(None) => Ok(Async::Ready(None)), - Async::NotReady => Ok(Async::NotReady), +impl futures_03::Stream for Body { + type Item = Result; + fn poll_next( + mut self: Pin<&mut Self>, + _cx: &mut Context<'_>, + ) -> std::task::Poll> { + match Pin::new(&mut self.receiver).poll().unwrap() { + Async::Ready(item) => std::task::Poll::Ready(item), + Async::NotReady => std::task::Poll::Pending, } } } @@ -1680,14 +1691,14 @@ struct ActiveInfo { struct Info { active: usize, - blocker: Option, + waker: Option, } impl WaitUntilZero { fn new() -> (WaitUntilZero, ActiveInfo) { let info = Rc::new(RefCell::new(Info { active: 1, - blocker: None, + waker: None, })); (WaitUntilZero { info: info.clone() }, ActiveInfo { info }) @@ -1708,24 +1719,23 @@ impl Drop for ActiveInfo { let mut info = self.info.borrow_mut(); info.active -= 1; if info.active == 0 { - if let Some(task) = info.blocker.take() { - task.notify(); + if let Some(waker) = info.waker.take() { + waker.wake(); } } } } -impl Future for WaitUntilZero { - type Item = (); - type Error = io::Error; +impl std::future::Future for WaitUntilZero { + type Output = io::Result<()>; - fn poll(&mut self) -> Poll<(), io::Error> { + fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> std::task::Poll { let mut info = self.info.borrow_mut(); if info.active == 0 { - Ok(().into()) + std::task::Poll::Ready(Ok(())) } else { - info.blocker = Some(task::current()); - Ok(Async::NotReady) + info.waker = Some(cx.waker().clone()); + std::task::Poll::Pending } } } diff --git a/src/test/utils.rs b/src/test/utils.rs index dc02f19d1..89d8ff9f2 100644 --- a/src/test/utils.rs +++ b/src/test/utils.rs @@ -64,16 +64,19 @@ macro_rules! assert_neq { }}; } -/// Assert that `map` contains all of the (`key`, `val`) pairs specified. +/// Assert that `map` contains all of the (`key`, `val`) pairs specified and only those keys. macro_rules! assert_map_contains { ( $map:expr , $( ($key:expr, $val:expr) ),* ) => { + let mut nelems = 0; $( + nelems += 1; match $map.get(&$key) { Some(&ref v) => - assert!($val == *v, format!("{} key `{:?}` doesn't match expected! (expected `{:?}` != actual `{:?}`)", stringify!($map), $key, $val, v)), + assert_eq!($val, *v, "{} key `{:?}` doesn't match expected! 
(expected `{:?}` != actual `{:?}`)", stringify!($map), $key, $val, v), None => panic!("{} missing key `{:?}`", stringify!($map), $key), } )* + assert_eq!(nelems, $map.len(), "{} contains {} elements, expected {}", stringify!($map), $map.len(), nelems); } } @@ -232,6 +235,15 @@ fn test_map_contains_ok() { assert_map_contains!(m, ("a", 1), ("b", 2)); } +#[test] +#[should_panic] +fn test_map_contains_extra_key() { + let mut m = HashMap::new(); + m.insert("a", 1); + m.insert("b", 2); + assert_map_contains!(m, ("a", 1)); +} + #[test] #[should_panic] fn test_map_contains_missing_key() { diff --git a/src/util.rs b/src/util.rs index 97c962f98..7b2fe597a 100644 --- a/src/util.rs +++ b/src/util.rs @@ -22,7 +22,6 @@ use std::ffi::{OsStr, OsString}; use std::fs::File; use std::hash::Hasher; use std::io::prelude::*; -use std::io::BufReader; use std::path::{Path, PathBuf}; use std::process::{self, Stdio}; use std::time; @@ -52,13 +51,12 @@ impl Digest { } /// Calculate the BLAKE3 digest of the contents read from `reader`. - pub fn reader_sync(reader: R) -> Result { + pub fn reader_sync(mut reader: R) -> Result { let mut m = Digest::new(); - let mut reader = BufReader::new(reader); + // A buffer of 128KB should give us the best performance. + // See https://eklitzke.org/efficient-file-copying-on-linux. + let mut buffer = [0; 128 * 1024]; loop { - // A buffer of 128KB should give us the best performance. - // See https://eklitzke.org/efficient-file-copying-on-linux. - let mut buffer = [0; 128 * 1024]; let count = reader.read(&mut buffer[..])?; if count == 0 { break; @@ -180,7 +178,10 @@ where /// /// If the command returns a non-successful exit status, an error of `ErrorKind::ProcessError` /// will be returned containing the process output. -pub fn run_input_output(mut command: C, input: Option>) -> SFuture +pub fn run_input_output( + mut command: C, + input: Option>, +) -> impl Future where C: RunCommand, { @@ -195,7 +196,7 @@ where .stderr(Stdio::piped()) .spawn(); - Box::new(child.and_then(|child| { + child.and_then(|child| { wait_with_input_output(child, input).and_then(|output| { if output.status.success() { f_ok(output) @@ -203,7 +204,7 @@ where f_err(ErrorKind::ProcessError(output)) } }) - })) + }) } /// Write `data` to `writer` with bincode serialization, prefixed by a `u32` length. diff --git a/tests/harness/mod.rs b/tests/harness/mod.rs index 599876cba..0943f74d6 100644 --- a/tests/harness/mod.rs +++ b/tests/harness/mod.rs @@ -14,7 +14,6 @@ use std::thread; use std::time::{Duration, Instant}; use assert_cmd::prelude::*; -use escargot::CargoBuild; #[cfg(feature = "dist-server")] use nix::{ sys::{ @@ -110,65 +109,17 @@ pub fn write_source(path: &Path, filename: &str, contents: &str) { f.write_all(contents.as_bytes()).unwrap(); } -// Alter an sccache command to override any environment variables that could adversely -// affect test execution -fn blankslate_sccache(mut cmd: Command) -> Command { +// Override any environment variables that could adversely affect test execution. 
+pub fn sccache_command() -> Command { + let mut cmd = Command::new(assert_cmd::cargo::cargo_bin("sccache")); cmd.env("SCCACHE_CONF", "nonexistent_conf_path") .env("SCCACHE_CACHED_CONF", "nonexistent_cached_conf_path"); cmd } -#[cfg(not(feature = "dist-client"))] -pub fn sccache_command() -> Command { - blankslate_sccache( - CargoBuild::new() - .bin("sccache") - .current_release() - .current_target() - .run() - .unwrap() - .command(), - ) -} - -#[cfg(feature = "dist-client")] -pub fn sccache_command() -> Command { - // dist-server isn't available on all platforms, so only pass it here if we - // compiled with it. - let features = if cfg!(feature = "dist-server") { - "dist-client dist-server" - } else { - "dist-client" - }; - blankslate_sccache( - CargoBuild::new() - .bin("sccache") - // This should just inherit from the feature list we're compiling with to avoid recompilation - // https://github.com/assert-rs/assert_cmd/issues/44#issuecomment-418485128 - .arg("--features") - .arg(features) - .current_release() - .current_target() - .run() - .unwrap() - .command(), - ) -} - #[cfg(feature = "dist-server")] pub fn sccache_dist_path() -> PathBuf { - CargoBuild::new() - .bin("sccache-dist") - // This should just inherit from the feature list we're compiling with to avoid recompilation - // https://github.com/assert-rs/assert_cmd/issues/44#issuecomment-418485128 - .arg("--features") - .arg("dist-client dist-server") - .current_release() - .current_target() - .run() - .unwrap() - .path() - .to_owned() + assert_cmd::cargo::cargo_bin("sccache-dist") } pub fn sccache_client_cfg(tmpdir: &Path) -> sccache::config::FileConfig { diff --git a/tests/oauth.rs b/tests/oauth.rs old mode 100644 new mode 100755 index 38e81f97f..bbcb4b8bd --- a/tests/oauth.rs +++ b/tests/oauth.rs @@ -1,7 +1,6 @@ #![deny(rust_2018_idioms)] #![cfg(all(feature = "dist-client"))] -use escargot::CargoBuild; use selenium_rs::webdriver::{Browser, Selector, WebDriver}; use std::fs; use std::io::{self, Read, Write}; @@ -64,17 +63,7 @@ fn config_with_dist_auth( } fn sccache_command() -> Command { - CargoBuild::new() - .bin("sccache") - // This should just inherit from the feature list we're compiling with to avoid recompilation - // https://github.com/assert-rs/assert_cmd/issues/44#issuecomment-418485128 - .arg("--features") - .arg("dist-client dist-server") - .current_release() - .current_target() - .run() - .unwrap() - .command() + Command::new(assert_cmd::cargo::cargo_bin("sccache")) } fn retry Option, T>(interval: Duration, until: Duration, mut f: F) -> Option { diff --git a/tests/sccache_cargo.rs b/tests/sccache_cargo.rs index fc1ec1bcb..40cae42dc 100644 --- a/tests/sccache_cargo.rs +++ b/tests/sccache_cargo.rs @@ -22,7 +22,6 @@ fn test_rust_cargo() { fn test_rust_cargo_cmd(cmd: &str) { use assert_cmd::prelude::*; use chrono::Local; - use escargot::CargoBuild; use predicates::prelude::*; use std::env; use std::fs; @@ -31,13 +30,7 @@ fn test_rust_cargo_cmd(cmd: &str) { use std::process::{Command, Stdio}; fn sccache_command() -> Command { - CargoBuild::new() - .bin("sccache") - .current_release() - .current_target() - .run() - .unwrap() - .command() + Command::new(assert_cmd::cargo::cargo_bin("sccache")) } fn stop() { @@ -67,8 +60,7 @@ fn test_rust_cargo_cmd(cmd: &str) { ); let cargo = env!("CARGO"); debug!("cargo: {}", cargo); - #[allow(deprecated)] - let sccache = assert_cmd::cargo::main_binary_path().unwrap(); + let sccache = assert_cmd::cargo::cargo_bin("sccache"); debug!("sccache: {:?}", sccache); let crate_dir = 
Path::new(file!()).parent().unwrap().join("test-crate"); // Ensure there's no existing sccache server running. diff --git a/tests/system.rs b/tests/system.rs index 00a9af8b6..70ed5246d 100644 --- a/tests/system.rs +++ b/tests/system.rs @@ -24,7 +24,6 @@ use crate::harness::{ write_json_cfg, write_source, zero_stats, }; use assert_cmd::prelude::*; -use escargot::CargoBuild; use log::Level::Trace; use predicates::prelude::*; use std::collections::HashMap; @@ -161,8 +160,7 @@ fn test_noncacheable_stats(compiler: Compiler, tempdir: &Path) { copy_to_tempdir(&[INPUT], tempdir); trace!("compile"); - Command::main_binary() - .unwrap() + Command::new(assert_cmd::cargo::cargo_bin("sccache")) .arg(&exe) .arg("-E") .arg(INPUT)