From f89f0b43d3004c662f702b1f82fabf2306c13a9f Mon Sep 17 00:00:00 2001 From: Mark Dittmer Date: Thu, 14 May 2026 17:46:11 +0000 Subject: [PATCH] Introduce toml_const dependency; r/Setup/Install; bind url<->archive-format and checksum<->digest-algorithm gherrit-pr-id: G2y4sltgs7oubpor2or2wxi3pgk56i5eu --- anneal/Cargo.lock | 75 + anneal/v2/toolchain-config/Cargo.toml | 1 + .../examples/static-toml/Cargo.toml | 6 +- .../examples/static-toml/src/main.rs | 27 +- .../examples/static-toml/toolchain.tar.zst | Bin 181 -> 181 bytes anneal/v2/toolchain-config/src/lib.rs | 159 +- anneal/vendor/ascii/.cargo-checksum.json | 1 - anneal/vendor/ascii/.cargo_vcs_info.json | 6 - anneal/vendor/ascii/.github/workflows/ci.yml | 57 - anneal/vendor/ascii/Cargo.toml | 41 - anneal/vendor/ascii/Cargo.toml.orig | 22 - anneal/vendor/ascii/LICENSE-APACHE | 201 --- anneal/vendor/ascii/LICENSE-MIT | 22 - anneal/vendor/ascii/README.md | 64 - anneal/vendor/ascii/RELEASES.md | 193 -- anneal/vendor/ascii/src/ascii_char.rs | 1069 ----------- anneal/vendor/ascii/src/ascii_str.rs | 1600 ----------------- anneal/vendor/ascii/src/ascii_string.rs | 1057 ----------- anneal/vendor/ascii/src/free_functions.rs | 59 - anneal/vendor/ascii/src/lib.rs | 82 - .../ascii/src/serialization/ascii_char.rs | 89 - .../ascii/src/serialization/ascii_str.rs | 79 - .../ascii/src/serialization/ascii_string.rs | 149 -- anneal/vendor/ascii/src/serialization/mod.rs | 3 - anneal/vendor/ascii/tests.rs | 143 -- .../chunked_transfer/.cargo-checksum.json | 1 - .../chunked_transfer/.cargo_vcs_info.json | 6 - .../.github/workflows/rust.yml | 22 - anneal/vendor/chunked_transfer/Cargo.toml | 27 - .../vendor/chunked_transfer/Cargo.toml.orig | 15 - anneal/vendor/chunked_transfer/LICENSE-APACHE | 201 --- anneal/vendor/chunked_transfer/LICENSE-MIT | 22 - anneal/vendor/chunked_transfer/README.md | 59 - .../vendor/chunked_transfer/benches/encode.rs | 22 - anneal/vendor/chunked_transfer/src/decoder.rs | 300 ---- 
anneal/vendor/chunked_transfer/src/encoder.rs | 207 --- anneal/vendor/chunked_transfer/src/lib.rs | 5 - anneal/vendor/httpdate/.cargo-checksum.json | 1 - anneal/vendor/httpdate/.cargo_vcs_info.json | 6 - .../vendor/httpdate/.github/workflows/ci.yml | 24 - anneal/vendor/httpdate/Cargo.toml | 35 - anneal/vendor/httpdate/Cargo.toml.orig | 18 - anneal/vendor/httpdate/LICENSE-APACHE | 201 --- anneal/vendor/httpdate/LICENSE-MIT | 19 - anneal/vendor/httpdate/README.md | 27 - anneal/vendor/httpdate/benches/benchmarks.rs | 57 - anneal/vendor/httpdate/src/date.rs | 420 ----- anneal/vendor/httpdate/src/lib.rs | 160 -- anneal/vendor/phf/.cargo-checksum.json | 1 + anneal/vendor/phf/.cargo_vcs_info.json | 6 + anneal/vendor/phf/CHANGELOG.md | 469 +++++ anneal/vendor/phf/Cargo.lock | 136 ++ anneal/vendor/phf/Cargo.toml | 74 + anneal/vendor/phf/Cargo.toml.orig | 36 + anneal/vendor/phf/LICENSE | 20 + anneal/vendor/phf/README.md | 127 ++ anneal/vendor/phf/src/lib.rs | 153 ++ anneal/vendor/phf/src/map.rs | 334 ++++ anneal/vendor/phf/src/ordered_map.rs | 332 ++++ anneal/vendor/phf/src/ordered_set.rs | 181 ++ anneal/vendor/phf/src/set.rs | 158 ++ .../vendor/phf_generator/.cargo-checksum.json | 1 + .../vendor/phf_generator/.cargo_vcs_info.json | 6 + anneal/vendor/phf_generator/CHANGELOG.md | 255 +++ anneal/vendor/phf_generator/Cargo.lock | 649 +++++++ anneal/vendor/phf_generator/Cargo.toml | 58 + anneal/vendor/phf_generator/Cargo.toml.orig | 28 + anneal/vendor/phf_generator/LICENSE | 20 + anneal/vendor/phf_generator/README.md | 5 + .../vendor/phf_generator/benches/benches.rs | 56 + .../phf_generator/src/bin/gen_hash_test.rs | 21 + anneal/vendor/phf_generator/src/lib.rs | 156 ++ anneal/vendor/phf_macros/.cargo-checksum.json | 1 + anneal/vendor/phf_macros/.cargo_vcs_info.json | 6 + anneal/vendor/phf_macros/CHANGELOG.md | 401 +++++ anneal/vendor/phf_macros/Cargo.lock | 105 ++ anneal/vendor/phf_macros/Cargo.toml | 71 + anneal/vendor/phf_macros/Cargo.toml.orig | 28 + 
anneal/vendor/phf_macros/LICENSE | 20 + anneal/vendor/phf_macros/README.md | 127 ++ anneal/vendor/phf_macros/src/lib.rs | 358 ++++ anneal/vendor/phf_shared/.cargo-checksum.json | 1 + anneal/vendor/phf_shared/.cargo_vcs_info.json | 6 + anneal/vendor/phf_shared/CHANGELOG.md | 330 ++++ anneal/vendor/phf_shared/Cargo.lock | 39 + anneal/vendor/phf_shared/Cargo.toml | 50 + anneal/vendor/phf_shared/Cargo.toml.orig | 25 + anneal/vendor/phf_shared/LICENSE | 20 + anneal/vendor/phf_shared/README.md | 5 + anneal/vendor/phf_shared/src/lib.rs | 475 +++++ anneal/vendor/siphasher/.cargo-checksum.json | 1 + anneal/vendor/siphasher/.cargo_vcs_info.json | 6 + .../vendor/siphasher/.github/dependabot.yml | 8 + .../siphasher/.github/workflows/issues.yml | 17 + anneal/vendor/siphasher/COPYING | 7 + anneal/vendor/siphasher/Cargo.lock | 89 + anneal/vendor/siphasher/Cargo.toml | 64 + anneal/vendor/siphasher/Cargo.toml.orig | 28 + anneal/vendor/siphasher/README.md | 93 + anneal/vendor/siphasher/src/common.rs | 65 + anneal/vendor/siphasher/src/lib.rs | 30 + anneal/vendor/siphasher/src/sip.rs | 566 ++++++ anneal/vendor/siphasher/src/sip128.rs | 667 +++++++ anneal/vendor/siphasher/src/tests.rs | 332 ++++ anneal/vendor/siphasher/src/tests128.rs | 134 ++ anneal/vendor/tiny_http/.cargo-checksum.json | 1 - anneal/vendor/tiny_http/.cargo_vcs_info.json | 6 - .../tiny_http/.github/workflows/ci.yaml | 64 - anneal/vendor/tiny_http/CHANGELOG.md | 172 -- anneal/vendor/tiny_http/Cargo.lock | 395 ---- anneal/vendor/tiny_http/Cargo.toml | 82 - anneal/vendor/tiny_http/Cargo.toml.orig | 37 - anneal/vendor/tiny_http/LICENSE-APACHE | 201 --- anneal/vendor/tiny_http/LICENSE-MIT | 25 - anneal/vendor/tiny_http/README.md | 108 -- anneal/vendor/tiny_http/benches/bench.rs | 80 - .../vendor/tiny_http/examples/hello-world.rs | 26 - .../tiny_http/examples/php-cgi-example.php | 3 - anneal/vendor/tiny_http/examples/php-cgi.rs | 95 - .../tiny_http/examples/readme-example.rs | 19 - .../vendor/tiny_http/examples/serve-root.rs 
| 58 - anneal/vendor/tiny_http/examples/ssl-cert.pem | 23 - anneal/vendor/tiny_http/examples/ssl-key.pem | 28 - anneal/vendor/tiny_http/examples/ssl.rs | 42 - .../vendor/tiny_http/examples/websockets.rs | 148 -- anneal/vendor/tiny_http/src/client.rs | 309 ---- anneal/vendor/tiny_http/src/common.rs | 440 ----- anneal/vendor/tiny_http/src/connection.rs | 194 -- anneal/vendor/tiny_http/src/lib.rs | 445 ----- anneal/vendor/tiny_http/src/request.rs | 518 ------ anneal/vendor/tiny_http/src/response.rs | 574 ------ anneal/vendor/tiny_http/src/ssl.rs | 20 - anneal/vendor/tiny_http/src/ssl/openssl.rs | 110 -- anneal/vendor/tiny_http/src/ssl/rustls.rs | 120 -- anneal/vendor/tiny_http/src/test.rs | 127 -- .../tiny_http/src/util/custom_stream.rs | 39 - .../vendor/tiny_http/src/util/equal_reader.rs | 131 -- .../vendor/tiny_http/src/util/fused_reader.rs | 48 - .../tiny_http/src/util/messages_queue.rs | 96 - anneal/vendor/tiny_http/src/util/mod.rs | 64 - .../tiny_http/src/util/refined_tcp_stream.rs | 152 -- .../vendor/tiny_http/src/util/sequential.rs | 174 -- anneal/vendor/tiny_http/src/util/task_pool.rs | 137 -- anneal/vendor/tiny_http/tests/input-tests.rs | 122 -- anneal/vendor/tiny_http/tests/network.rs | 222 --- .../tiny_http/tests/non-chunked-buffering.rs | 103 -- anneal/vendor/tiny_http/tests/promptness.rs | 207 --- anneal/vendor/tiny_http/tests/simple-test.rs | 29 - anneal/vendor/tiny_http/tests/support/mod.rs | 40 - anneal/vendor/tiny_http/tests/unblock-test.rs | 34 - anneal/vendor/tiny_http/tests/unix-test.rs | 44 - anneal/vendor/toml_const/.cargo-checksum.json | 1 + anneal/vendor/toml_const/.cargo_vcs_info.json | 6 + anneal/vendor/toml_const/Cargo.lock | 223 +++ anneal/vendor/toml_const/Cargo.toml | 61 + anneal/vendor/toml_const/Cargo.toml.orig | 26 + anneal/vendor/toml_const/README.md | 202 +++ anneal/vendor/toml_const/src/lib.rs | 157 ++ .../toml_const_macros/.cargo-checksum.json | 1 + .../toml_const_macros/.cargo_vcs_info.json | 6 + 
anneal/vendor/toml_const_macros/Cargo.lock | 157 ++ anneal/vendor/toml_const_macros/Cargo.toml | 53 + .../vendor/toml_const_macros/Cargo.toml.orig | 23 + anneal/vendor/toml_const_macros/src/check.rs | 439 +++++ .../toml_const_macros/src/instantiate.rs | 404 +++++ anneal/vendor/toml_const_macros/src/lib.rs | 202 +++ .../vendor/toml_const_macros/src/normalize.rs | 820 +++++++++ anneal/vendor/toml_const_macros/src/parse.rs | 523 ++++++ 168 files changed, 10942 insertions(+), 12991 deletions(-) delete mode 100644 anneal/vendor/ascii/.cargo-checksum.json delete mode 100644 anneal/vendor/ascii/.cargo_vcs_info.json delete mode 100644 anneal/vendor/ascii/.github/workflows/ci.yml delete mode 100644 anneal/vendor/ascii/Cargo.toml delete mode 100644 anneal/vendor/ascii/Cargo.toml.orig delete mode 100644 anneal/vendor/ascii/LICENSE-APACHE delete mode 100644 anneal/vendor/ascii/LICENSE-MIT delete mode 100644 anneal/vendor/ascii/README.md delete mode 100644 anneal/vendor/ascii/RELEASES.md delete mode 100644 anneal/vendor/ascii/src/ascii_char.rs delete mode 100644 anneal/vendor/ascii/src/ascii_str.rs delete mode 100644 anneal/vendor/ascii/src/ascii_string.rs delete mode 100644 anneal/vendor/ascii/src/free_functions.rs delete mode 100644 anneal/vendor/ascii/src/lib.rs delete mode 100644 anneal/vendor/ascii/src/serialization/ascii_char.rs delete mode 100644 anneal/vendor/ascii/src/serialization/ascii_str.rs delete mode 100644 anneal/vendor/ascii/src/serialization/ascii_string.rs delete mode 100644 anneal/vendor/ascii/src/serialization/mod.rs delete mode 100644 anneal/vendor/ascii/tests.rs delete mode 100644 anneal/vendor/chunked_transfer/.cargo-checksum.json delete mode 100644 anneal/vendor/chunked_transfer/.cargo_vcs_info.json delete mode 100644 anneal/vendor/chunked_transfer/.github/workflows/rust.yml delete mode 100644 anneal/vendor/chunked_transfer/Cargo.toml delete mode 100644 anneal/vendor/chunked_transfer/Cargo.toml.orig delete mode 100644 
anneal/vendor/chunked_transfer/LICENSE-APACHE delete mode 100644 anneal/vendor/chunked_transfer/LICENSE-MIT delete mode 100644 anneal/vendor/chunked_transfer/README.md delete mode 100644 anneal/vendor/chunked_transfer/benches/encode.rs delete mode 100644 anneal/vendor/chunked_transfer/src/decoder.rs delete mode 100644 anneal/vendor/chunked_transfer/src/encoder.rs delete mode 100644 anneal/vendor/chunked_transfer/src/lib.rs delete mode 100644 anneal/vendor/httpdate/.cargo-checksum.json delete mode 100644 anneal/vendor/httpdate/.cargo_vcs_info.json delete mode 100644 anneal/vendor/httpdate/.github/workflows/ci.yml delete mode 100644 anneal/vendor/httpdate/Cargo.toml delete mode 100644 anneal/vendor/httpdate/Cargo.toml.orig delete mode 100644 anneal/vendor/httpdate/LICENSE-APACHE delete mode 100644 anneal/vendor/httpdate/LICENSE-MIT delete mode 100644 anneal/vendor/httpdate/README.md delete mode 100644 anneal/vendor/httpdate/benches/benchmarks.rs delete mode 100644 anneal/vendor/httpdate/src/date.rs delete mode 100644 anneal/vendor/httpdate/src/lib.rs create mode 100644 anneal/vendor/phf/.cargo-checksum.json create mode 100644 anneal/vendor/phf/.cargo_vcs_info.json create mode 100644 anneal/vendor/phf/CHANGELOG.md create mode 100644 anneal/vendor/phf/Cargo.lock create mode 100644 anneal/vendor/phf/Cargo.toml create mode 100644 anneal/vendor/phf/Cargo.toml.orig create mode 100644 anneal/vendor/phf/LICENSE create mode 100644 anneal/vendor/phf/README.md create mode 100644 anneal/vendor/phf/src/lib.rs create mode 100644 anneal/vendor/phf/src/map.rs create mode 100644 anneal/vendor/phf/src/ordered_map.rs create mode 100644 anneal/vendor/phf/src/ordered_set.rs create mode 100644 anneal/vendor/phf/src/set.rs create mode 100644 anneal/vendor/phf_generator/.cargo-checksum.json create mode 100644 anneal/vendor/phf_generator/.cargo_vcs_info.json create mode 100644 anneal/vendor/phf_generator/CHANGELOG.md create mode 100644 anneal/vendor/phf_generator/Cargo.lock create mode 
100644 anneal/vendor/phf_generator/Cargo.toml create mode 100644 anneal/vendor/phf_generator/Cargo.toml.orig create mode 100644 anneal/vendor/phf_generator/LICENSE create mode 100644 anneal/vendor/phf_generator/README.md create mode 100644 anneal/vendor/phf_generator/benches/benches.rs create mode 100644 anneal/vendor/phf_generator/src/bin/gen_hash_test.rs create mode 100644 anneal/vendor/phf_generator/src/lib.rs create mode 100644 anneal/vendor/phf_macros/.cargo-checksum.json create mode 100644 anneal/vendor/phf_macros/.cargo_vcs_info.json create mode 100644 anneal/vendor/phf_macros/CHANGELOG.md create mode 100644 anneal/vendor/phf_macros/Cargo.lock create mode 100644 anneal/vendor/phf_macros/Cargo.toml create mode 100644 anneal/vendor/phf_macros/Cargo.toml.orig create mode 100644 anneal/vendor/phf_macros/LICENSE create mode 100644 anneal/vendor/phf_macros/README.md create mode 100644 anneal/vendor/phf_macros/src/lib.rs create mode 100644 anneal/vendor/phf_shared/.cargo-checksum.json create mode 100644 anneal/vendor/phf_shared/.cargo_vcs_info.json create mode 100644 anneal/vendor/phf_shared/CHANGELOG.md create mode 100644 anneal/vendor/phf_shared/Cargo.lock create mode 100644 anneal/vendor/phf_shared/Cargo.toml create mode 100644 anneal/vendor/phf_shared/Cargo.toml.orig create mode 100644 anneal/vendor/phf_shared/LICENSE create mode 100644 anneal/vendor/phf_shared/README.md create mode 100644 anneal/vendor/phf_shared/src/lib.rs create mode 100644 anneal/vendor/siphasher/.cargo-checksum.json create mode 100644 anneal/vendor/siphasher/.cargo_vcs_info.json create mode 100644 anneal/vendor/siphasher/.github/dependabot.yml create mode 100644 anneal/vendor/siphasher/.github/workflows/issues.yml create mode 100644 anneal/vendor/siphasher/COPYING create mode 100644 anneal/vendor/siphasher/Cargo.lock create mode 100644 anneal/vendor/siphasher/Cargo.toml create mode 100644 anneal/vendor/siphasher/Cargo.toml.orig create mode 100644 anneal/vendor/siphasher/README.md create 
mode 100644 anneal/vendor/siphasher/src/common.rs create mode 100644 anneal/vendor/siphasher/src/lib.rs create mode 100644 anneal/vendor/siphasher/src/sip.rs create mode 100644 anneal/vendor/siphasher/src/sip128.rs create mode 100644 anneal/vendor/siphasher/src/tests.rs create mode 100644 anneal/vendor/siphasher/src/tests128.rs delete mode 100644 anneal/vendor/tiny_http/.cargo-checksum.json delete mode 100644 anneal/vendor/tiny_http/.cargo_vcs_info.json delete mode 100644 anneal/vendor/tiny_http/.github/workflows/ci.yaml delete mode 100644 anneal/vendor/tiny_http/CHANGELOG.md delete mode 100644 anneal/vendor/tiny_http/Cargo.lock delete mode 100644 anneal/vendor/tiny_http/Cargo.toml delete mode 100644 anneal/vendor/tiny_http/Cargo.toml.orig delete mode 100644 anneal/vendor/tiny_http/LICENSE-APACHE delete mode 100644 anneal/vendor/tiny_http/LICENSE-MIT delete mode 100644 anneal/vendor/tiny_http/README.md delete mode 100644 anneal/vendor/tiny_http/benches/bench.rs delete mode 100644 anneal/vendor/tiny_http/examples/hello-world.rs delete mode 100644 anneal/vendor/tiny_http/examples/php-cgi-example.php delete mode 100644 anneal/vendor/tiny_http/examples/php-cgi.rs delete mode 100644 anneal/vendor/tiny_http/examples/readme-example.rs delete mode 100644 anneal/vendor/tiny_http/examples/serve-root.rs delete mode 100644 anneal/vendor/tiny_http/examples/ssl-cert.pem delete mode 100644 anneal/vendor/tiny_http/examples/ssl-key.pem delete mode 100644 anneal/vendor/tiny_http/examples/ssl.rs delete mode 100644 anneal/vendor/tiny_http/examples/websockets.rs delete mode 100644 anneal/vendor/tiny_http/src/client.rs delete mode 100644 anneal/vendor/tiny_http/src/common.rs delete mode 100644 anneal/vendor/tiny_http/src/connection.rs delete mode 100644 anneal/vendor/tiny_http/src/lib.rs delete mode 100644 anneal/vendor/tiny_http/src/request.rs delete mode 100644 anneal/vendor/tiny_http/src/response.rs delete mode 100644 anneal/vendor/tiny_http/src/ssl.rs delete mode 100644 
anneal/vendor/tiny_http/src/ssl/openssl.rs delete mode 100644 anneal/vendor/tiny_http/src/ssl/rustls.rs delete mode 100644 anneal/vendor/tiny_http/src/test.rs delete mode 100644 anneal/vendor/tiny_http/src/util/custom_stream.rs delete mode 100644 anneal/vendor/tiny_http/src/util/equal_reader.rs delete mode 100644 anneal/vendor/tiny_http/src/util/fused_reader.rs delete mode 100644 anneal/vendor/tiny_http/src/util/messages_queue.rs delete mode 100644 anneal/vendor/tiny_http/src/util/mod.rs delete mode 100644 anneal/vendor/tiny_http/src/util/refined_tcp_stream.rs delete mode 100644 anneal/vendor/tiny_http/src/util/sequential.rs delete mode 100644 anneal/vendor/tiny_http/src/util/task_pool.rs delete mode 100644 anneal/vendor/tiny_http/tests/input-tests.rs delete mode 100644 anneal/vendor/tiny_http/tests/network.rs delete mode 100644 anneal/vendor/tiny_http/tests/non-chunked-buffering.rs delete mode 100644 anneal/vendor/tiny_http/tests/promptness.rs delete mode 100644 anneal/vendor/tiny_http/tests/simple-test.rs delete mode 100644 anneal/vendor/tiny_http/tests/support/mod.rs delete mode 100644 anneal/vendor/tiny_http/tests/unblock-test.rs delete mode 100644 anneal/vendor/tiny_http/tests/unix-test.rs create mode 100644 anneal/vendor/toml_const/.cargo-checksum.json create mode 100644 anneal/vendor/toml_const/.cargo_vcs_info.json create mode 100644 anneal/vendor/toml_const/Cargo.lock create mode 100644 anneal/vendor/toml_const/Cargo.toml create mode 100644 anneal/vendor/toml_const/Cargo.toml.orig create mode 100644 anneal/vendor/toml_const/README.md create mode 100644 anneal/vendor/toml_const/src/lib.rs create mode 100644 anneal/vendor/toml_const_macros/.cargo-checksum.json create mode 100644 anneal/vendor/toml_const_macros/.cargo_vcs_info.json create mode 100644 anneal/vendor/toml_const_macros/Cargo.lock create mode 100644 anneal/vendor/toml_const_macros/Cargo.toml create mode 100644 anneal/vendor/toml_const_macros/Cargo.toml.orig create mode 100644 
anneal/vendor/toml_const_macros/src/check.rs create mode 100644 anneal/vendor/toml_const_macros/src/instantiate.rs create mode 100644 anneal/vendor/toml_const_macros/src/lib.rs create mode 100644 anneal/vendor/toml_const_macros/src/normalize.rs create mode 100644 anneal/vendor/toml_const_macros/src/parse.rs diff --git a/anneal/Cargo.lock b/anneal/Cargo.lock index 00f68d5437..94b1b4b41f 100644 --- a/anneal/Cargo.lock +++ b/anneal/Cargo.lock @@ -1541,6 +1541,49 @@ version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" +[[package]] +name = "phf" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_macros", + "phf_shared", + "serde", +] + +[[package]] +name = "phf_generator" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cbb1126afed61dd6368748dae63b1ee7dc480191c6262a3b4ff1e29d86a6c5b" +dependencies = [ + "fastrand", + "phf_shared", +] + +[[package]] +name = "phf_macros" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d713258393a82f091ead52047ca779d37e5766226d009de21696c4e667044368" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", +] + [[package]] name = "pin-project-lite" version = "0.2.17" @@ -2146,6 +2189,12 @@ version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbbb5d9659141646ae647b42fe094daf6c6192d1620870b449d9557f748b2daa" +[[package]] +name = "siphasher" +version = "1.0.3" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ee5873ec9cce0195efcb7a4e9507a04cd49aec9c83d0389df45b1ef7ba2e649" + [[package]] name = "slab" version = "0.4.12" @@ -2462,12 +2511,37 @@ version = "0.8.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ + "indexmap", "serde", "serde_spanned", "toml_datetime", "toml_edit", ] +[[package]] +name = "toml_const" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60a93119c23cd286a0e585f25bffcd1c292eaa6b90edfd8d58c442a3d2fe57c1" +dependencies = [ + "phf", + "toml", + "toml_const_macros", +] + +[[package]] +name = "toml_const_macros" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0ca608371311e568b6f918f3cf851640c6811625f39852c188b50ce11e2201b" +dependencies = [ + "indexmap", + "proc-macro2", + "quote", + "syn", + "toml", +] + [[package]] name = "toml_datetime" version = "0.6.11" @@ -2507,6 +2581,7 @@ dependencies = [ "sha2", "tar", "tempfile", + "toml_const", "zstd", ] diff --git a/anneal/v2/toolchain-config/Cargo.toml b/anneal/v2/toolchain-config/Cargo.toml index 542f0b12ae..7c9d5d50b6 100644 --- a/anneal/v2/toolchain-config/Cargo.toml +++ b/anneal/v2/toolchain-config/Cargo.toml @@ -11,3 +11,4 @@ tempfile = "3.27.0" tar = "0.4" zstd = "0.13" digest = "0.10" +toml_const = "1.2" diff --git a/anneal/v2/toolchain-config/examples/static-toml/Cargo.toml b/anneal/v2/toolchain-config/examples/static-toml/Cargo.toml index f39a6ddb5b..f97decc5c7 100644 --- a/anneal/v2/toolchain-config/examples/static-toml/Cargo.toml +++ b/anneal/v2/toolchain-config/examples/static-toml/Cargo.toml @@ -15,12 +15,12 @@ toml = "0.8" [package.metadata.toolchain.linux.x86_64] checksum = "1111111111111111111111111111111111111111111111111111111111111111" -url = "http://example.com/archive.tar.zst" +url = 
"http://example.com/linux.x86_64.tar.zst" [package.metadata.toolchain.macos.aarch64] checksum = "2222222222222222222222222222222222222222222222222222222222222222" -url = "http://example.com/macos.tar.zst" +url = "http://example.com/macos.aarch64.tar.zst" [package.metadata.toolchain.windows.x86_64] checksum = "3333333333333333333333333333333333333333333333333333333333333333" -url = "http://example.com/windows.tar.zst" +url = "http://example.com/windows.x86_64.tar.zst" diff --git a/anneal/v2/toolchain-config/examples/static-toml/src/main.rs b/anneal/v2/toolchain-config/examples/static-toml/src/main.rs index 06d06e32b2..f5db02c425 100644 --- a/anneal/v2/toolchain-config/examples/static-toml/src/main.rs +++ b/anneal/v2/toolchain-config/examples/static-toml/src/main.rs @@ -1,6 +1,6 @@ use clap::{Parser, Subcommand}; use std::process::Command; -use toolchain_config::{Config, LocalOverride, TarZstLibraryExtractor}; +use toolchain_config::{Checksum, Config, LocalOverride, RemoteArchive, TarZstLibraryExtractor}; #[derive(Parser)] #[command(name = "toolchain-config-example-static-toml")] @@ -12,15 +12,12 @@ struct Cli { #[derive(Subcommand)] enum Commands { - Setup, + Install, Hello, } fn decode_hex(s: &str) -> Vec { - (0..s.len()) - .step_by(2) - .map(|i| u8::from_str_radix(&s[i..i + 2], 16).unwrap()) - .collect() + (0..s.len()).step_by(2).map(|i| u8::from_str_radix(&s[i..i + 2], 16).unwrap()).collect() } fn get_root_dir() -> std::path::PathBuf { @@ -34,13 +31,14 @@ fn main() { let checksum_bytes = decode_hex(env!("TOOLCHAIN_CHECKSUM")); let config = Config::::new( - env!("TOOLCHAIN_URL"), - &checksum_bytes, + RemoteArchive::new(env!("TOOLCHAIN_URL")), + Checksum::new(&checksum_bytes), ); match cli.command { - Commands::Setup => { + Commands::Install => { // TODO: Probably use a flag, not an environment variable to activate override. 
+ let archive_path; let local_override = if std::env::var("__TOOLCHAIN_EXAMPLE_STATIC_TOML").is_ok() { println!("Local testing override active. Assembling mock toolchain archive..."); let manifest_dir = std::env::var("CARGO_MANIFEST_DIR") @@ -52,14 +50,14 @@ fn main() { .expect("Failed to execute build-toolchain.sh"); assert!(status.success(), "build-toolchain.sh script failed"); - let archive_path = std::path::Path::new(&manifest_dir).join("toolchain.tar.zst"); - Some(LocalOverride::::archive(archive_path)) + archive_path = std::path::Path::new(&manifest_dir).join("toolchain.tar.zst"); + Some(LocalOverride::::archive(&archive_path)) } else { None }; println!("Provisioning toolchain environment..."); - toolchain_config::setup(&config, local_override, root_dir) + toolchain_config::install(&config, local_override, &root_dir) .expect("Setup subcommand failed"); println!("Toolchain successfully set up."); } @@ -68,7 +66,10 @@ fn main() { let hello_bin = toolchain_dir.join("bin").join("hello"); if !hello_bin.exists() { - eprintln!("Error: Toolchain executable missing at {:?}. Please run setup first.", hello_bin); + eprintln!( + "Error: Toolchain executable missing at {:?}. 
Please run install first.", + hello_bin + ); std::process::exit(1); } diff --git a/anneal/v2/toolchain-config/examples/static-toml/toolchain.tar.zst b/anneal/v2/toolchain-config/examples/static-toml/toolchain.tar.zst index fd046ad8e422dca9597f27472e7ba56efbd93643..afd5bc4c7de5f342fc71996c7aea980a4f3c54bd 100644 GIT binary patch delta 142 zcmV;90CE4d0kr{;AAid3t!Rl5YC!;slL3TLEU+Vh3L=DWU}$6rk6A zCToo?M+hI2tj15HAV)$|>_R|0r)enrIg->YhIUS>wv(bMSyQ#IeKy7GY_E88DNa~D wm(^PK{FDDryAaUMX&TCYjwCgUp`DYe?WAZ*)=cs?H6yd#R;$H w@?wiT|K$Hu9RUyklp)6h3&bIi+X4nu13G*`s?&fv0&@d|5Rsk}1c@6iNWX_ZK>z>% diff --git a/anneal/v2/toolchain-config/src/lib.rs b/anneal/v2/toolchain-config/src/lib.rs index ee99f2f59c..549a4dd71c 100644 --- a/anneal/v2/toolchain-config/src/lib.rs +++ b/anneal/v2/toolchain-config/src/lib.rs @@ -1,58 +1,72 @@ +#[derive(Debug, Clone)] +pub struct RemoteArchive<'a, E>(pub &'a str, pub std::marker::PhantomData); + +#[derive(Debug, Clone)] +pub struct Checksum<'a, D>(pub &'a [u8], pub std::marker::PhantomData); + +impl<'a, E> RemoteArchive<'a, E> { + pub fn new(url: &'a str) -> Self { + Self(url, std::marker::PhantomData) + } + pub fn url(&self) -> &'a str { + self.0 + } +} + +impl<'a, D> Checksum<'a, D> { + pub fn new(bytes: &'a [u8]) -> Self { + Self(bytes, std::marker::PhantomData) + } + pub fn bytes(&self) -> &'a [u8] { + self.0 + } +} + /// Setup configuration specifying platform, remote source, and remote checksum. #[derive(Debug, Clone)] pub struct Config<'a, E, D> { pub os: &'a str, pub arch: &'a str, - pub url: &'a str, - pub checksum: &'a [u8], - _extractor: std::marker::PhantomData, - _digest: std::marker::PhantomData, + pub remote: RemoteArchive<'a, E>, + pub checksum: Checksum<'a, D>, } /// Local toolchain definition that overrides remote specified in [`Config`]. 
-pub enum LocalOverride { - Dir(std::path::PathBuf), - Archive((std::path::PathBuf, std::marker::PhantomData)), +#[derive(Debug, Clone)] +pub enum LocalOverride<'a, E: Extractor = NoExtractor> { + Dir(&'a std::path::Path), + Archive((&'a std::path::Path, std::marker::PhantomData)), } -impl LocalOverride { - pub fn dir(path: std::path::PathBuf) -> Self { +impl<'a, E: Extractor> LocalOverride<'a, E> { + pub fn dir(path: &'a std::path::Path) -> Self { Self::Dir(path) } - pub fn archive(path: std::path::PathBuf) -> Self { + pub fn archive(path: &'a std::path::Path) -> Self { Self::Archive((path, std::marker::PhantomData)) } } impl<'a, E: Extractor, D: digest::Digest> Config<'a, E, D> { /// Instantiates static toolchain parameters auto-detecting current runtime OS and Architecture. - pub fn new(url: &'a str, checksum: &'a [u8]) -> Self { - Self { - os: std::env::consts::OS, - arch: std::env::consts::ARCH, - url, - checksum, - _extractor: std::marker::PhantomData, - _digest: std::marker::PhantomData, - } + pub fn new(remote: RemoteArchive<'a, E>, checksum: Checksum<'a, D>) -> Self { + Self { os: std::env::consts::OS, arch: std::env::consts::ARCH, remote, checksum } } /// Explicitly overrides target platform parameters for specialized configurations. - pub fn new_platform(os: &'a str, arch: &'a str, url: &'a str, checksum: &'a [u8]) -> Self { - Self { - os, - arch, - url, - checksum, - _extractor: std::marker::PhantomData, - _digest: std::marker::PhantomData, - } + pub fn new_platform( + os: &'a str, + arch: &'a str, + remote: RemoteArchive<'a, E>, + checksum: Checksum<'a, D>, + ) -> Self { + Self { os, arch, remote, checksum } } /// Resolves the deterministic subdirectory path containing the verified toolchain files. 
pub fn toolchain_dir(&self, root: &std::path::Path) -> std::path::PathBuf { - let expected_hex = encode_hex(self.checksum); + let expected_hex = encode_hex(self.checksum.bytes()); root.join(&format!("{}-{}-{}", self.os, self.arch, &expected_hex[..12])) } } @@ -64,7 +78,9 @@ pub trait Extractor { /// /// Instantiated and used for extracting archives downloaded from [`Config::url`] or archives /// designated by [`LocalOverride::Archive`]. - fn new() -> Self; + fn new() -> Self + where + Self: Sized; /// Unpacks stream bytes directly into the specified target directory synchronously on the calling thread. fn extract(&self, src: &mut dyn std::io::Read, dst: &std::path::Path) -> std::io::Result<()>; @@ -130,7 +146,7 @@ impl std::io::Read for HashReader { } } -fn setup_from_archive( +fn install_from_archive( src: impl std::io::Read, dst: &std::path::Path, extractor: &E, @@ -205,7 +221,7 @@ fn link_or_copy_dir(src: &std::path::Path, dst: &std::path::Path) -> std::io::Re } } -fn setup_from_directory(src: &std::path::Path, dst: &std::path::Path) -> Result<(), String> { +fn install_from_directory(src: &std::path::Path, dst: &std::path::Path) -> Result<(), String> { let parent = dst.parent().expect("toolchains directory has parent"); std::fs::create_dir_all(parent) .map_err(|e| format!("Failed to create toolchain parent directory: {e}"))?; @@ -229,14 +245,14 @@ fn setup_from_directory(src: &std::path::Path, dst: &std::path::Path) -> Result< Ok(()) } -fn setup_inner( +fn install_inner( config: &Config<'_, CE, D>, - local_override: Option>, - toolchain_root: std::path::PathBuf, + local_override: Option>, + toolchain_root: &std::path::Path, fetcher: impl FnOnce(&str) -> Result, String>, ) -> Result<(), String> { - let toolchain_dir = config.toolchain_dir(&toolchain_root); - let expected_hex = encode_hex(config.checksum); + let toolchain_dir = config.toolchain_dir(toolchain_root); + let expected_hex = encode_hex(config.checksum.bytes()); if let Some(override_src) = local_override 
{ match override_src { @@ -247,24 +263,24 @@ fn setup_inner( let extractor = LE::new(); let file = std::fs::File::open(path) .map_err(|e| format!("Failed to open local archive: {e}"))?; - setup_from_archive::(file, &toolchain_dir, &extractor) + install_from_archive::(file, &toolchain_dir, &extractor) .map_err(|e| format!("Failed to extract archive: {e}"))?; } LocalOverride::Dir(path) => { log::warn!( "Toolchain contents from local directory may not match expected toolchain hash/version number." ); - setup_from_directory(&path, &toolchain_dir)?; + install_from_directory(path, &toolchain_dir)?; } } } else { let extractor = CE::new(); - let response = fetcher(config.url)?; + let response = fetcher(config.remote.url())?; - let actual_hash = setup_from_archive::(response, &toolchain_dir, &extractor) + let actual_hash = install_from_archive::(response, &toolchain_dir, &extractor) .map_err(|e| format!("Failed to extract downloaded archive: {e}"))?; - if actual_hash.as_slice() != config.checksum { + if actual_hash.as_slice() != config.checksum.bytes() { let _ = std::fs::remove_dir_all(&toolchain_dir); return Err(format!( "Checksum mismatch for downloaded archive. Expected {}, got {}", @@ -277,19 +293,17 @@ fn setup_inner( Ok(()) } -/// Coordinates the provisioning and verification of the active toolchain dependency environment. -/// -/// This function processes the incoming dependency source and installs it into a toolchain -/// directory named according to the source SHA256 hash. +/// Install a toolchain packaged with all its dependencies. /// -/// When no local override is specified, the configured [`Extractor`] type `E` is instantiated -/// via [`Extractor::new`] and used to extract the downloaded toolchain archive stream. -pub fn setup( +/// The default behaviour is to install according to the remote URL and checksom in `config`. 
If +/// `local_override` is provided, however, the toolchain naming convention associated with `config` +/// will be used to install a local directory or archive. +pub fn install( config: &Config<'_, CE, D>, - local_override: Option>, - toolchain_root: std::path::PathBuf, + local_override: Option>, + toolchain_root: &std::path::Path, ) -> Result<(), String> { - setup_inner(config, local_override, toolchain_root, |url| { + install_inner(config, local_override, toolchain_root, |url| { let response = reqwest::blocking::get(url).map_err(|e| format!("Failed to download archive: {e}"))?; let response = response @@ -328,16 +342,17 @@ mod tests { std::fs::write(src.join("file.txt"), "hello").unwrap(); let dst = temp.path().join("dst"); - setup_from_directory(&src, &dst).unwrap(); + install_from_directory(&src, &dst).unwrap(); assert!(dst.join("file.txt").exists()); assert_eq!(std::fs::read_to_string(dst.join("file.txt")).unwrap(), "hello"); + // Test atomic replacement by running it again with updated contents let src2 = temp.path().join("src2"); std::fs::create_dir(&src2).unwrap(); std::fs::write(src2.join("file.txt"), "world").unwrap(); - setup_from_directory(&src2, &dst).unwrap(); + install_from_directory(&src2, &dst).unwrap(); assert_eq!(std::fs::read_to_string(dst.join("file.txt")).unwrap(), "world"); } @@ -353,7 +368,7 @@ mod tests { let dst = temp.path().join("dst"); let file = std::fs::File::open(&archive_path).unwrap(); - let hash = setup_from_archive::( + let hash = install_from_archive::( file, &dst, &TarZstLibraryExtractor, @@ -373,15 +388,15 @@ mod tests { let expected_hash = [1u8; 32]; let config = Config::::new( - "http://example.com", - &expected_hash, + RemoteArchive::new("http://example.com"), + Checksum::new(&expected_hash), ); let target_dir = config.toolchain_dir(temp.path()); - setup_inner( + install_inner( &config, - Some(LocalOverride::::Dir(src)), - temp.path().to_path_buf(), + Some(LocalOverride::::dir(&src)), + temp.path(), |_| unreachable!(), ) 
.unwrap(); @@ -401,15 +416,15 @@ mod tests { let expected_hash = [2u8; 32]; let config = Config::::new( - "http://example.com", - &expected_hash, + RemoteArchive::new("http://example.com"), + Checksum::new(&expected_hash), ); let target_dir = config.toolchain_dir(temp.path()); - setup_inner( + install_inner( &config, - Some(LocalOverride::::archive(archive_path)), - temp.path().to_path_buf(), + Some(LocalOverride::::archive(&archive_path)), + temp.path(), |_| unreachable!(), ) .unwrap(); @@ -428,12 +443,14 @@ mod tests { create_test_archive(&src, &archive_path); let actual_hash = compute_sha256(&archive_path); - let config = - Config::::new("http://example.com", &actual_hash); + let config = Config::::new( + RemoteArchive::new("http://example.com"), + Checksum::new(&actual_hash), + ); let target_dir = config.toolchain_dir(temp.path()); let archive_path_clone = archive_path.clone(); - setup_inner::<_, _, NoExtractor>(&config, None, temp.path().to_path_buf(), move |_url| { + install_inner::<_, _, NoExtractor>(&config, None, temp.path(), move |_url| { let file = std::fs::File::open(&archive_path_clone).unwrap(); Ok(Box::new(file)) }) @@ -457,13 +474,13 @@ mod tests { expected_hash[0] ^= 1; // invalidate checksum let config = Config::::new( - "http://example.com", - &expected_hash, + RemoteArchive::new("http://example.com"), + Checksum::new(&expected_hash), ); let target_dir = config.toolchain_dir(temp.path()); let archive_path_clone = archive_path.clone(); - let res = setup_inner::<_, _, NoExtractor>(&config, None, temp.path().to_path_buf(), move |_url| { + let res = install_inner::<_, _, NoExtractor>(&config, None, temp.path(), move |_url| { let file = std::fs::File::open(&archive_path_clone).unwrap(); Ok(Box::new(file)) }); diff --git a/anneal/vendor/ascii/.cargo-checksum.json b/anneal/vendor/ascii/.cargo-checksum.json deleted file mode 100644 index f60d070600..0000000000 --- a/anneal/vendor/ascii/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ 
-{"files":{".cargo_vcs_info.json":"7dc279d6cc614afd157d09eaf63f0249a590d99bd3659976740a48e19b51c783",".github/workflows/ci.yml":"26ca0180639f06c994852c2e6f6c10edbee283d9aa173de9d1a0fde1c50f0d96","Cargo.toml":"466554aebd51e2ff3da0e3b2160c9c77785701812d65f2bbeda5d2d0b3fc0c9e","Cargo.toml.orig":"5199ce15775aa1d2a706f6d2d3a45339fa6ce79fa50b263949cdd023392a3650","LICENSE-APACHE":"fabba0cb7d00a4b3211fdc13699223ab2d22f88678ccd608494cf2b332b903e9","LICENSE-MIT":"7e4b8a17b118d3d7fd7a362a4b6563e0bd98b098a24c4c7f07dd22042091a847","README.md":"c229f6bd9fb52731479c5e1d321b53ef7eb9bb33105b0052870620107add1425","RELEASES.md":"81ec9d5ed1268c499e5a7e4d9d7bbf7469f2e3ddeede45ad62ed5128f700f818","src/ascii_char.rs":"8c4bdedd3bc07387baddbea23cff6a5abd469884f2fe274c6f6a03c00110b549","src/ascii_str.rs":"464c156b2a9f8183e8f472df6b6b34ea8406c19cab85112475669585a607efff","src/ascii_string.rs":"0cc46f26623721221c0635f17951cd2c40ae1aed4a2e08c333802117ebd94640","src/free_functions.rs":"efebe83e30b05d2394f5f491254e868f8b2837029c39159756eda599199c27b8","src/lib.rs":"3f0640d3cb0fd274c7ddd60efc7ab09e633b7ce726ef366fcf468b784be451dc","src/serialization/ascii_char.rs":"09328995f6691f55be8be016e886cfb390eabe0a002057a41f2e6e029ca5588a","src/serialization/ascii_str.rs":"2dea9ba3d101fa42c09645c2889dc02f16ed2a05409f0d2212b8fc75f2e2fe8b","src/serialization/ascii_string.rs":"342f74d9f367f8450ba9f6c8b76022dc04c9af6bd038dc5e4fc99882b7e978c5","src/serialization/mod.rs":"0f92b7e47156ea3b86ef0c6131c749862fc3b3922e9d16ee22264324fed9a56f","tests.rs":"58cff9cd5607746c9334fd07f73bac9db3fd97034232b1a4751024666d3f1657"},"package":"d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16"} \ No newline at end of file diff --git a/anneal/vendor/ascii/.cargo_vcs_info.json b/anneal/vendor/ascii/.cargo_vcs_info.json deleted file mode 100644 index 2887e77a44..0000000000 --- a/anneal/vendor/ascii/.cargo_vcs_info.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "git": { - "sha1": "8605175df3a9bd1c5e36a300fa68eb5b4a181e6f" - 
}, - "path_in_vcs": "" -} \ No newline at end of file diff --git a/anneal/vendor/ascii/.github/workflows/ci.yml b/anneal/vendor/ascii/.github/workflows/ci.yml deleted file mode 100644 index e8cabbede6..0000000000 --- a/anneal/vendor/ascii/.github/workflows/ci.yml +++ /dev/null @@ -1,57 +0,0 @@ -name: CI - -on: - pull_request: - push: - branches: - - master - -jobs: - test: - name: Test with Rust ${{ matrix.rust }} - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - rust: [1.41.1, stable, beta, nightly] - steps: - - uses: actions/checkout@v2 - - uses: hecrj/setup-rust-action@v1 - with: - rust-version: ${{ matrix.rust }} - - run: cargo test --verbose --all-features - - run: cargo test --verbose --no-default-features --features alloc - - run: cargo test --verbose --no-default-features - - clippy: - name: Lint with Clippy - runs-on: ubuntu-latest - env: - RUSTFLAGS: -Dwarnings - steps: - - uses: actions/checkout@v2 - - uses: hecrj/setup-rust-action@v1 - with: - components: clippy - - run: cargo clippy --all-targets --verbose --no-default-features - - run: cargo clippy --all-targets --verbose --all-features - - test-minimal: - name: Test minimal dependency version with Rust nightly - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: hecrj/setup-rust-action@v1 - with: - rust-version: nightly - - run: cargo test -Zminimal-versions --verbose --all-features - - miri: - name: Run tests under `miri` to check for UB - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: dtolnay/rust-toolchain@nightly - with: - components: miri - - run: cargo miri test --all-features diff --git a/anneal/vendor/ascii/Cargo.toml b/anneal/vendor/ascii/Cargo.toml deleted file mode 100644 index ce17c6f73b..0000000000 --- a/anneal/vendor/ascii/Cargo.toml +++ /dev/null @@ -1,41 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal 
compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. - -[package] -name = "ascii" -version = "1.1.0" -authors = [ - "Thomas Bahn ", - "Torbjørn Birch Moltu ", - "Simon Sapin ", -] -description = "ASCII-only equivalents to `char`, `str` and `String`." -documentation = "https://docs.rs/ascii" -readme = "README.md" -license = "Apache-2.0 OR MIT" -repository = "https://github.com/tomprogrammer/rust-ascii" - -[[test]] -name = "tests" -path = "tests.rs" - -[dependencies.serde] -version = "1.0.25" -optional = true - -[dependencies.serde_test] -version = "1.0" -optional = true - -[features] -alloc = [] -default = ["std"] -std = ["alloc"] diff --git a/anneal/vendor/ascii/Cargo.toml.orig b/anneal/vendor/ascii/Cargo.toml.orig deleted file mode 100644 index 8ec25de27d..0000000000 --- a/anneal/vendor/ascii/Cargo.toml.orig +++ /dev/null @@ -1,22 +0,0 @@ -[package] -authors = ["Thomas Bahn ", "Torbjørn Birch Moltu ", "Simon Sapin "] -description = "ASCII-only equivalents to `char`, `str` and `String`." 
-documentation = "https://docs.rs/ascii" -license = "Apache-2.0 OR MIT" -name = "ascii" -readme = "README.md" -repository = "https://github.com/tomprogrammer/rust-ascii" -version = "1.1.0" - -[dependencies] -serde = { version = "1.0.25", optional = true } -serde_test = { version = "1.0", optional = true } - -[features] -default = ["std"] -std = ["alloc"] -alloc = [] - -[[test]] -name = "tests" -path = "tests.rs" diff --git a/anneal/vendor/ascii/LICENSE-APACHE b/anneal/vendor/ascii/LICENSE-APACHE deleted file mode 100644 index 211fa24663..0000000000 --- a/anneal/vendor/ascii/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/anneal/vendor/ascii/LICENSE-MIT b/anneal/vendor/ascii/LICENSE-MIT deleted file mode 100644 index 84c5b9e430..0000000000 --- a/anneal/vendor/ascii/LICENSE-MIT +++ /dev/null @@ -1,22 +0,0 @@ -MIT License - -Copyright (c) 2017 Thomas Bahn and contributors -Copyright (c) 2014 The Rust Project Developers - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/anneal/vendor/ascii/README.md b/anneal/vendor/ascii/README.md deleted file mode 100644 index 690fd59f23..0000000000 --- a/anneal/vendor/ascii/README.md +++ /dev/null @@ -1,64 +0,0 @@ -# ascii - -A library that provides ASCII-only string and character types, equivalent to the -`char`, `str` and `String` types in the standard library. - -Types and conversion traits are described in the [Documentation](https://docs.rs/ascii). - -You can include this crate in your cargo project by adding it to the -dependencies section in `Cargo.toml`: - -```toml -[dependencies] -ascii = "1.1" -``` - -## Using ascii without libstd - -Most of `AsciiChar` and `AsciiStr` can be used without `std` by disabling the -default features. The owned string type `AsciiString` and the conversion trait -`IntoAsciiString` as well as all methods referring to these types can be -re-enabled by enabling the `alloc` feature. - -Methods referring to `CStr` and `CString` are also unavailable. -The `Error` trait also only exists in `std`, but `description()` is made -available as an inherent method for `ToAsciiCharError` and `AsAsciiStrError` -in `#![no_std]`-mode. - -To use the `ascii` crate in `#![no_std]` mode in your cargo project, -just add the following dependency declaration in `Cargo.toml`: - -```toml -[dependencies] -ascii = { version = "1.1", default-features = false, features = ["alloc"] } -``` - -## Minimum supported Rust version - -The minimum Rust version for 1.1.\* releases is 1.41.1. -Later 1.y.0 releases might require newer Rust versions, but the three most -recent stable releases at the time of publishing will always be supported. -For example this means that if the current stable Rust version is 1.70 when -ascii 1.2.0 is released, then ascii 1.2.\* will not require a newer -Rust version than 1.68. 
- -## History - -This package included the Ascii types that were removed from the Rust standard -library by the 2014-12 [reform of the `std::ascii` module](https://github.com/rust-lang/rfcs/pull/486). -The API changed significantly since then. - -## License - -Licensed under either of - -* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0) -* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT) - -at your option. - -### Contribution - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any -additional terms or conditions. diff --git a/anneal/vendor/ascii/RELEASES.md b/anneal/vendor/ascii/RELEASES.md deleted file mode 100644 index aa1057890f..0000000000 --- a/anneal/vendor/ascii/RELEASES.md +++ /dev/null @@ -1,193 +0,0 @@ -Version 1.1.0 (2022-09-18) -========================== -* Add alloc feature. - This enables `AsciiString` and methods that take or return `Box<[AsciiStr]>` in `!#[no_std]`-mode. -* Add `AsciiStr::into_ascii_string()`, `AsciiString::into_boxed_ascii_str()` and `AsciiString::insert_str()`. -* Implement `From>` and `From` for `AsciiString`. -* Implement `From` for `Box`, `Rc`, `Arc` and `Vec`. -* Make `AsciiString::new()`, `AsciiStr::len()` and `AsciiStr::is_empty()` `const fn`. -* Require Rust 1.44.1. - -Version 1.0.0 (2019-08-26) -========================== - -Breaking changes: - -* Change `AsciiChar.is_whitespace()` to also return true for '\0xb' (vertical tab) and '\0xc' (form feed). -* Remove quickcheck feature. -* Remove `AsciiStr::new()`. -* Rename `AsciiChar::from()` and `AsciiChar::from_unchecked()` to `from_ascii()` and `from_ascii_unchecked()`. -* Rename several `AsciiChar.is_xxx()` methods to `is_ascii_xxx()` (for comsistency with std). -* Rename `AsciiChar::Null` to `Nul` (for consistency with eg. 
`CStr::from_bytes_with_nul()`). -* Rename `AsciiStr.trim_left()` and `AsciiStr.trim_right()` to `trim_start()` and `trim_end()`. -* Remove impls of the deprecated `std::ascii::AsciiExt` trait. -* Change iterators `Chars`, `CharsMut` and `CharsRef` from type aliases to newtypes. -* Return `impl Trait` from `AsciiStr.lines()` and `AsciiStr.split()`, and remove iterator types `Lines` and `Split`. -* Add `slice_ascii_str()`, `get_ascii()` and `unwrap_ascii()` to the `AsAsciiStr` trait. -* Add `slice_mut_ascii_str()` and `unwrap_ascii_mut()` to the `AsMutAsciiStr` trait. -* Require Rust 1.33.0 for 1.0.\*, and allow later semver-compatible 1.y.0 releases to increase it. - -Additions: - -* Add `const fn` `AsciiChar::new()` which panicks on invalid values. -* Make most `AsciiChar` methods `const fn`. -* Add multiple `AsciiChar::is_[ascii_]xxx()` methods. -* Implement `AsRef` for `AsciiChar`. -* Make `AsciiString`'s `Extend` and `FromIterator` impl generic over all `AsRef`. -* Implement inclusive range indexing for `AsciiStr` (and thereby `AsciiString`). -* Mark `AsciiStr` and `AsciiString` `#[repr(transparent)]` (to `[AsciiChar]` and `Vec` respectively). - -Version 0.9.3 (2019-08-26) -========================== - -Soundness fix: - -**Remove** [unsound](https://github.com/tomprogrammer/rust-ascii/issues/64) impls of `From<&mut AsciiStr>` for `&mut [u8]` and `&mut str`. -This is a breaking change, but theese impls can lead to undefined behavior in safe code. - -If you use this impl and know that non-ASCII values are never inserted into the `[u8]` or `str`, -you can pin ascii to 0.9.2. - -Other changes: - -* Make quickcheck `Arbitrary` impl sometimes produce `AsciiChar::DEL`. -* Implement `Clone`, `Copy` and `Eq` for `ToAsciiCharError`. -* Implement `ToAsciiChar` for `u16`, `u32` and `i8`. 
- -Version 0.9.2 (2019-07-07) -========================== -* Implement the `IntoAsciiString` trait for `std::ffi::CStr` and `std::ffi::CString` types, - and implemented the `AsAsciiStr` trait for `std::ffi::CStr` type. -* Implement the `IntoAsciiString` for `std::borrow::Cow`, where the inner types themselves - implement `IntoAsciiString`. -* Implement conversions between `AsciiString` and `Cow<'a, AsciiStr>`. -* Implement the `std::ops::AddAssign` trait for `AsciiString`. -* Implement `BorrowMut`, `AsRef<[AsciiChar]>`, `AsRef`, `AsMut<[AsciiChar]>` for `AsciiString`. -* Implement `PartialEq<[u8]>` and `PartialEq<[AsciiChar]>` for `AsciiStr`. -* Add `AsciiStr::first()`, `AsciiStr::last()` and `AsciiStr::split()` methods. -* Implement `DoubleEndedIterator` for `AsciiStr::lines()`. -* Implement `AsRef` and `AsMut for AsciiString`. - -Version 0.8.4 (2017-04-18) -========================== -* Fix the tests when running without std. - -Version 0.8.3 (2017-04-18) -========================== -* Bugfix: `::to_ascii_lowercase` did erroneously convert to uppercase. - -Version 0.8.2 (2017-04-17) -========================== -* Implement `IntoAsciiString` for `&'a str` and `&'a [u8]`. -* Implement the `quickcheck::Arbitrary` trait for `AsciiChar` and `AsciiString`. - The implementation is enabled by the `quickcheck` feature. - -Version 0.8.1 (2017-02-11) -========================== -* Add `Chars`, `CharsMut` and `Lines` iterators. -* Implement `std::fmt::Write` for `AsciiString`. - -Version 0.8.0 (2017-01-02) -========================== - -Breaking changes: - -* Return `FromAsciiError` instead of the input when `AsciiString::from_ascii()` or `into_ascii_string()` fails. -* Replace the `no_std` feature with the additive `std` feature, which is part of the default features. (Issue #29) -* `AsciiChar::is_*()` and `::as_{byte,char}()` take `self` by value instead of by reference. - -Additions: - -* Make `AsciiChar` comparable with `char` and `u8`. 
-* Add `AsciiChar::as_printable_char()` and the free functions `caret_encode()` and `caret_decode()`. -* Implement some methods from `AsciiExt` and `Error` (which are not in libcore) directly in `core` mode: - * `Ascii{Char,Str}::eq_ignore_ascii_case()` - * `AsciiChar::to_ascii_{upper,lower}case()` - * `AsciiStr::make_ascii_{upper,lower}case()` - * `{ToAsciiChar,AsAsciiStr}Error::description()` - -Version 0.7.1 (2016-08-15) -========================== -* Fix the implementation of `AsciiExt::to_ascii_lowercase()` for `AsciiChar` converting to uppercase. (introduced in 0.7.0) - -Version 0.7.0 (2016-06-25) -========================== -* Rename `Ascii` to `AsciiChar` and convert it into an enum. - (with a variant for every ASCII character) -* Replace `OwnedAsciiCast` with `IntoAsciiString`. -* Replace `AsciiCast` with `As[Mut]AsciiStr` and `IntoAsciiChar`. -* Add *from[_ascii]_unchecked* methods. -* Replace *from_bytes* with *from_ascii* in method names. -* Return `std::error::Error`-implementing types instead of `()` and `None` when - conversion to `AsciiStr` or `AsciiChar` fails. -* Implement `AsciiExt` without the `unstable` Cargo feature flag, which is removed. -* Require Rust 1.9 or later. -* Add `#[no_std]` support in a Cargo feature. -* Implement `From<{&,&mut,Box<}AsciiStr>` for `[Ascii]`, `[u8]` and `str` -* Implement `From<{&,&mut,Box<}[Ascii]>`, `As{Ref,Mut}<[Ascii]>` and Default for `AsciiStr` -* Implement `From>` for `AsciiString`. -* Implement `AsMut` for `AsciiString`. -* Stop some `Ascii::is_xxx()` methods from panicking. -* Add `Ascii::is_whitespace()`. -* Add `AsciiString::as_mut_slice()`. -* Add raw pointer methods on `AsciiString`: - * `from_raw_parts` - * `as_ptr` - * `as_mut_ptr` - -Version 0.6.0 (2015-12-30) -========================== -* Add `Ascii::from_byte()` -* Add `AsciiStr::trim[_{left,right}]()` - -Version 0.5.4 (2015-07-29) -========================== -Implement `IndexMut` for AsciiStr and AsciiString. 
- -Version 0.5.1 (2015-06-13) -========================== -* Add `Ascii::from()`. -* Implement `Index` for `AsciiStr` and `AsciiString`. -* Implement `Default`,`FromIterator`,`Extend` and `Add` for `AsciiString` -* Added inherent methods on `AsciiString`: - * `with_capacity` - * `push_str` - * `capacity` - * `reserve` - * `reserve_exact` - * `shrink_to_fit` - * `push` - * `truncate` - * `pop` - * `remove` - * `insert` - * `len` - * `is_empty` - * `clear` - -Version 0.5.0 (2015-05-05) -========================== -First release compatible with Rust 1.0.0. diff --git a/anneal/vendor/ascii/src/ascii_char.rs b/anneal/vendor/ascii/src/ascii_char.rs deleted file mode 100644 index 5011949f76..0000000000 --- a/anneal/vendor/ascii/src/ascii_char.rs +++ /dev/null @@ -1,1069 +0,0 @@ -use core::cmp::Ordering; -use core::mem; -use core::{char, fmt}; -#[cfg(feature = "std")] -use std::error::Error; - -#[allow(non_camel_case_types)] -/// An ASCII character. It wraps a `u8`, with the highest bit always zero. -#[derive(Clone, PartialEq, PartialOrd, Ord, Eq, Hash, Copy)] -#[repr(u8)] -pub enum AsciiChar { - /// `'\0'` - Null = 0, - /// [Start Of Heading](http://en.wikipedia.org/wiki/Start_of_Heading) - SOH = 1, - /// [Start Of teXt](http://en.wikipedia.org/wiki/Start_of_Text) - SOX = 2, - /// [End of TeXt](http://en.wikipedia.org/wiki/End-of-Text_character) - ETX = 3, - /// [End Of Transmission](http://en.wikipedia.org/wiki/End-of-Transmission_character) - EOT = 4, - /// [Enquiry](http://en.wikipedia.org/wiki/Enquiry_character) - ENQ = 5, - /// [Acknowledgement](http://en.wikipedia.org/wiki/Acknowledge_character) - ACK = 6, - /// [bell / alarm / audible](http://en.wikipedia.org/wiki/Bell_character) - /// - /// `'\a'` is not recognized by Rust. - Bell = 7, - /// [Backspace](http://en.wikipedia.org/wiki/Backspace) - /// - /// `'\b'` is not recognized by Rust. 
- BackSpace = 8, - /// `'\t'` - Tab = 9, - /// `'\n'` - LineFeed = 10, - /// [Vertical tab](http://en.wikipedia.org/wiki/Vertical_Tab) - /// - /// `'\v'` is not recognized by Rust. - VT = 11, - /// [Form Feed](http://en.wikipedia.org/wiki/Form_Feed) - /// - /// `'\f'` is not recognized by Rust. - FF = 12, - /// `'\r'` - CarriageReturn = 13, - /// [Shift In](http://en.wikipedia.org/wiki/Shift_Out_and_Shift_In_characters) - SI = 14, - /// [Shift Out](http://en.wikipedia.org/wiki/Shift_Out_and_Shift_In_characters) - SO = 15, - /// [Data Link Escape](http://en.wikipedia.org/wiki/Data_Link_Escape) - DLE = 16, - /// [Device control 1, often XON](http://en.wikipedia.org/wiki/Device_Control_1) - DC1 = 17, - /// Device control 2 - DC2 = 18, - /// Device control 3, Often XOFF - DC3 = 19, - /// Device control 4 - DC4 = 20, - /// [Negative AcKnowledgement](http://en.wikipedia.org/wiki/Negative-acknowledge_character) - NAK = 21, - /// [Synchronous idle](http://en.wikipedia.org/wiki/Synchronous_Idle) - SYN = 22, - /// [End of Transmission Block](http://en.wikipedia.org/wiki/End-of-Transmission-Block_character) - ETB = 23, - /// [Cancel](http://en.wikipedia.org/wiki/Cancel_character) - CAN = 24, - /// [End of Medium](http://en.wikipedia.org/wiki/End_of_Medium) - EM = 25, - /// [Substitute](http://en.wikipedia.org/wiki/Substitute_character) - SUB = 26, - /// [Escape](http://en.wikipedia.org/wiki/Escape_character) - /// - /// `'\e'` is not recognized by Rust. 
- ESC = 27, - /// [File Separator](http://en.wikipedia.org/wiki/File_separator) - FS = 28, - /// [Group Separator](http://en.wikipedia.org/wiki/Group_separator) - GS = 29, - /// [Record Separator](http://en.wikipedia.org/wiki/Record_separator) - RS = 30, - /// [Unit Separator](http://en.wikipedia.org/wiki/Unit_separator) - US = 31, - /// `' '` - Space = 32, - /// `'!'` - Exclamation = 33, - /// `'"'` - Quotation = 34, - /// `'#'` - Hash = 35, - /// `'$'` - Dollar = 36, - /// `'%'` - Percent = 37, - /// `'&'` - Ampersand = 38, - /// `'\''` - Apostrophe = 39, - /// `'('` - ParenOpen = 40, - /// `')'` - ParenClose = 41, - /// `'*'` - Asterisk = 42, - /// `'+'` - Plus = 43, - /// `','` - Comma = 44, - /// `'-'` - Minus = 45, - /// `'.'` - Dot = 46, - /// `'/'` - Slash = 47, - /// `'0'` - _0 = 48, - /// `'1'` - _1 = 49, - /// `'2'` - _2 = 50, - /// `'3'` - _3 = 51, - /// `'4'` - _4 = 52, - /// `'5'` - _5 = 53, - /// `'6'` - _6 = 54, - /// `'7'` - _7 = 55, - /// `'8'` - _8 = 56, - /// `'9'` - _9 = 57, - /// `':'` - Colon = 58, - /// `';'` - Semicolon = 59, - /// `'<'` - LessThan = 60, - /// `'='` - Equal = 61, - /// `'>'` - GreaterThan = 62, - /// `'?'` - Question = 63, - /// `'@'` - At = 64, - /// `'A'` - A = 65, - /// `'B'` - B = 66, - /// `'C'` - C = 67, - /// `'D'` - D = 68, - /// `'E'` - E = 69, - /// `'F'` - F = 70, - /// `'G'` - G = 71, - /// `'H'` - H = 72, - /// `'I'` - I = 73, - /// `'J'` - J = 74, - /// `'K'` - K = 75, - /// `'L'` - L = 76, - /// `'M'` - M = 77, - /// `'N'` - N = 78, - /// `'O'` - O = 79, - /// `'P'` - P = 80, - /// `'Q'` - Q = 81, - /// `'R'` - R = 82, - /// `'S'` - S = 83, - /// `'T'` - T = 84, - /// `'U'` - U = 85, - /// `'V'` - V = 86, - /// `'W'` - W = 87, - /// `'X'` - X = 88, - /// `'Y'` - Y = 89, - /// `'Z'` - Z = 90, - /// `'['` - BracketOpen = 91, - /// `'\'` - BackSlash = 92, - /// `']'` - BracketClose = 93, - /// `'^'` - Caret = 94, - /// `'_'` - UnderScore = 95, - /// `'`'` - Grave = 96, - /// `'a'` - a = 97, - /// `'b'` - b = 98, 
- /// `'c'` - c = 99, - /// `'d'` - d = 100, - /// `'e'` - e = 101, - /// `'f'` - f = 102, - /// `'g'` - g = 103, - /// `'h'` - h = 104, - /// `'i'` - i = 105, - /// `'j'` - j = 106, - /// `'k'` - k = 107, - /// `'l'` - l = 108, - /// `'m'` - m = 109, - /// `'n'` - n = 110, - /// `'o'` - o = 111, - /// `'p'` - p = 112, - /// `'q'` - q = 113, - /// `'r'` - r = 114, - /// `'s'` - s = 115, - /// `'t'` - t = 116, - /// `'u'` - u = 117, - /// `'v'` - v = 118, - /// `'w'` - w = 119, - /// `'x'` - x = 120, - /// `'y'` - y = 121, - /// `'z'` - z = 122, - /// `'{'` - CurlyBraceOpen = 123, - /// `'|'` - VerticalBar = 124, - /// `'}'` - CurlyBraceClose = 125, - /// `'~'` - Tilde = 126, - /// [Delete](http://en.wikipedia.org/wiki/Delete_character) - DEL = 127, -} - -impl AsciiChar { - /// Constructs an ASCII character from a `u8`, `char` or other character type. - /// - /// # Errors - /// Returns `Err(())` if the character can't be ASCII encoded. - /// - /// # Example - /// ``` - /// # use ascii::AsciiChar; - /// let a = AsciiChar::from_ascii('g').unwrap(); - /// assert_eq!(a.as_char(), 'g'); - /// ``` - #[inline] - pub fn from_ascii(ch: C) -> Result { - ch.to_ascii_char() - } - - /// Create an `AsciiChar` from a `char`, panicking if it's not ASCII. - /// - /// This function is intended for creating `AsciiChar` values from - /// hardcoded known-good character literals such as `'K'`, `'-'` or `'\0'`, - /// and for use in `const` contexts. - /// Use [`from_ascii()`](#method.from_ascii) instead when you're not - /// certain the character is ASCII. 
- /// - /// # Examples - /// - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('@'), AsciiChar::At); - /// assert_eq!(AsciiChar::new('C').as_char(), 'C'); - /// ``` - /// - /// In a constant: - /// ``` - /// # use ascii::AsciiChar; - /// const SPLIT_ON: AsciiChar = AsciiChar::new(','); - /// ``` - /// - /// This will not compile: - /// ```compile_fail - /// # use ascii::AsciiChar; - /// const BAD: AsciiChar = AsciiChar::new('Ø'); - /// ``` - /// - /// # Panics - /// - /// This function will panic if passed a non-ASCII character. - /// - /// The panic message might not be the most descriptive due to the - /// current limitations of `const fn`. - #[must_use] - pub const fn new(ch: char) -> AsciiChar { - // It's restricted to this function, and without it - // we'd need to specify `AsciiChar::` or `Self::` 128 times. - #[allow(clippy::enum_glob_use)] - use AsciiChar::*; - - #[rustfmt::skip] - const ALL: [AsciiChar; 128] = [ - Null, SOH, SOX, ETX, EOT, ENQ, ACK, Bell, - BackSpace, Tab, LineFeed, VT, FF, CarriageReturn, SI, SO, - DLE, DC1, DC2, DC3, DC4, NAK, SYN, ETB, - CAN, EM, SUB, ESC, FS, GS, RS, US, - Space, Exclamation, Quotation, Hash, Dollar, Percent, Ampersand, Apostrophe, - ParenOpen, ParenClose, Asterisk, Plus, Comma, Minus, Dot, Slash, - _0, _1, _2, _3, _4, _5, _6, _7, - _8, _9, Colon, Semicolon, LessThan, Equal, GreaterThan, Question, - At, A, B, C, D, E, F, G, - H, I, J, K, L, M, N, O, - P, Q, R, S, T, U, V, W, - X, Y, Z, BracketOpen, BackSlash, BracketClose, Caret, UnderScore, - Grave, a, b, c, d, e, f, g, - h, i, j, k, l, m, n, o, - p, q, r, s, t, u, v, w, - x, y, z, CurlyBraceOpen, VerticalBar, CurlyBraceClose, Tilde, DEL, - ]; - - // We want to slice here and detect `const_err` from rustc if the slice is invalid - #[allow(clippy::indexing_slicing)] - ALL[ch as usize] - } - - /// Constructs an ASCII character from a `u8`, `char` or other character - /// type without any checks. 
- /// - /// # Safety - /// - /// This function is very unsafe as it can create invalid enum - /// discriminants, which instantly creates undefined behavior. - /// (`let _ = AsciiChar::from_ascii_unchecked(200);` alone is UB). - /// - /// The undefined behavior is not just theoretical either: - /// For example, `[0; 128][AsciiChar::from_ascii_unchecked(255) as u8 as usize] = 0` - /// might not panic, creating a buffer overflow, - /// and `Some(AsciiChar::from_ascii_unchecked(128))` might be `None`. - #[inline] - #[must_use] - pub unsafe fn from_ascii_unchecked(ch: u8) -> Self { - // SAFETY: Caller guarantees `ch` is within bounds of ascii. - unsafe { ch.to_ascii_char_unchecked() } - } - - /// Converts an ASCII character into a `u8`. - #[inline] - #[must_use] - pub const fn as_byte(self) -> u8 { - self as u8 - } - - /// Converts an ASCII character into a `char`. - #[inline] - #[must_use] - pub const fn as_char(self) -> char { - self as u8 as char - } - - // the following methods are like ctype, and the implementation is inspired by musl. - // The ascii_ methods take self by reference for maximum compatibility - // with the corresponding methods on u8 and char. - // It is bad for both usability and performance, but marking those - // that doesn't have a non-ascii sibling #[inline] should - // make the compiler optimize away the indirection. - - /// Turns uppercase into lowercase, but also modifies '@' and '<'..='_' - #[must_use] - const fn to_not_upper(self) -> u8 { - self as u8 | 0b010_0000 - } - - /// Check if the character is a letter (a-z, A-Z) - #[inline] - #[must_use] - pub const fn is_alphabetic(self) -> bool { - (self.to_not_upper() >= b'a') & (self.to_not_upper() <= b'z') - } - - /// Check if the character is a letter (a-z, A-Z). 
- /// - /// This method is identical to [`is_alphabetic()`](#method.is_alphabetic) - #[inline] - #[must_use] - pub const fn is_ascii_alphabetic(&self) -> bool { - self.is_alphabetic() - } - - /// Check if the character is a digit in the given radix. - /// - /// If the radix is always 10 or 16, - /// [`is_ascii_digit()`](#method.is_ascii_digit) and - /// [`is_ascii_hexdigit()`](#method.is_ascii_hexdigit()) will be faster. - /// - /// # Panics - /// - /// Radixes greater than 36 are not supported and will result in a panic. - #[must_use] - pub fn is_digit(self, radix: u32) -> bool { - match (self as u8, radix) { - (b'0'..=b'9', 0..=36) => u32::from(self as u8 - b'0') < radix, - (b'a'..=b'z', 11..=36) => u32::from(self as u8 - b'a') < radix - 10, - (b'A'..=b'Z', 11..=36) => u32::from(self as u8 - b'A') < radix - 10, - (_, 0..=36) => false, - (_, _) => panic!("radixes greater than 36 are not supported"), - } - } - - /// Check if the character is a number (0-9) - /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('0').is_ascii_digit(), true); - /// assert_eq!(AsciiChar::new('9').is_ascii_digit(), true); - /// assert_eq!(AsciiChar::new('a').is_ascii_digit(), false); - /// assert_eq!(AsciiChar::new('A').is_ascii_digit(), false); - /// assert_eq!(AsciiChar::new('/').is_ascii_digit(), false); - /// ``` - #[inline] - #[must_use] - pub const fn is_ascii_digit(&self) -> bool { - (*self as u8 >= b'0') & (*self as u8 <= b'9') - } - - /// Check if the character is a letter or number - #[inline] - #[must_use] - pub const fn is_alphanumeric(self) -> bool { - self.is_alphabetic() | self.is_ascii_digit() - } - - /// Check if the character is a letter or number - /// - /// This method is identical to [`is_alphanumeric()`](#method.is_alphanumeric) - #[inline] - #[must_use] - pub const fn is_ascii_alphanumeric(&self) -> bool { - self.is_alphanumeric() - } - - /// Check if the character is a space or horizontal tab - /// - /// # Examples - /// 
``` - /// # use ascii::AsciiChar; - /// assert!(AsciiChar::Space.is_ascii_blank()); - /// assert!(AsciiChar::Tab.is_ascii_blank()); - /// assert!(!AsciiChar::VT.is_ascii_blank()); - /// assert!(!AsciiChar::LineFeed.is_ascii_blank()); - /// assert!(!AsciiChar::CarriageReturn.is_ascii_blank()); - /// assert!(!AsciiChar::FF.is_ascii_blank()); - /// ``` - #[inline] - #[must_use] - pub const fn is_ascii_blank(&self) -> bool { - (*self as u8 == b' ') | (*self as u8 == b'\t') - } - - /// Check if the character one of ' ', '\t', '\n', '\r', - /// '\0xb' (vertical tab) or '\0xc' (form feed). - #[inline] - #[must_use] - pub const fn is_whitespace(self) -> bool { - let b = self as u8; - self.is_ascii_blank() | (b == b'\n') | (b == b'\r') | (b == 0x0b) | (b == 0x0c) - } - - /// Check if the character is a ' ', '\t', '\n', '\r' or '\0xc' (form feed). - /// - /// This method is NOT identical to `is_whitespace()`. - #[inline] - #[must_use] - pub const fn is_ascii_whitespace(&self) -> bool { - self.is_ascii_blank() - | (*self as u8 == b'\n') - | (*self as u8 == b'\r') - | (*self as u8 == 0x0c/*form feed*/) - } - - /// Check if the character is a control character - /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('\0').is_ascii_control(), true); - /// assert_eq!(AsciiChar::new('n').is_ascii_control(), false); - /// assert_eq!(AsciiChar::new(' ').is_ascii_control(), false); - /// assert_eq!(AsciiChar::new('\n').is_ascii_control(), true); - /// assert_eq!(AsciiChar::new('\t').is_ascii_control(), true); - /// assert_eq!(AsciiChar::EOT.is_ascii_control(), true); - /// ``` - #[inline] - #[must_use] - pub const fn is_ascii_control(&self) -> bool { - ((*self as u8) < b' ') | (*self as u8 == 127) - } - - /// Checks if the character is printable (except space) - /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('n').is_ascii_graphic(), true); - /// assert_eq!(AsciiChar::new(' ').is_ascii_graphic(), 
false); - /// assert_eq!(AsciiChar::new('\n').is_ascii_graphic(), false); - /// ``` - #[inline] - #[must_use] - pub const fn is_ascii_graphic(&self) -> bool { - self.as_byte().wrapping_sub(b' ' + 1) < 0x5E - } - - /// Checks if the character is printable (including space) - /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('n').is_ascii_printable(), true); - /// assert_eq!(AsciiChar::new(' ').is_ascii_printable(), true); - /// assert_eq!(AsciiChar::new('\n').is_ascii_printable(), false); - /// ``` - #[inline] - #[must_use] - pub const fn is_ascii_printable(&self) -> bool { - self.as_byte().wrapping_sub(b' ') < 0x5F - } - - /// Checks if the character is alphabetic and lowercase (a-z). - /// - /// # Examples - /// ``` - /// use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('a').is_lowercase(), true); - /// assert_eq!(AsciiChar::new('A').is_lowercase(), false); - /// assert_eq!(AsciiChar::new('@').is_lowercase(), false); - /// ``` - #[inline] - #[must_use] - pub const fn is_lowercase(self) -> bool { - self.as_byte().wrapping_sub(b'a') < 26 - } - - /// Checks if the character is alphabetic and lowercase (a-z). - /// - /// This method is identical to [`is_lowercase()`](#method.is_lowercase) - #[inline] - #[must_use] - pub const fn is_ascii_lowercase(&self) -> bool { - self.is_lowercase() - } - - /// Checks if the character is alphabetic and uppercase (A-Z). - /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('A').is_uppercase(), true); - /// assert_eq!(AsciiChar::new('a').is_uppercase(), false); - /// assert_eq!(AsciiChar::new('@').is_uppercase(), false); - /// ``` - #[inline] - #[must_use] - pub const fn is_uppercase(self) -> bool { - self.as_byte().wrapping_sub(b'A') < 26 - } - - /// Checks if the character is alphabetic and uppercase (A-Z). 
- /// - /// This method is identical to [`is_uppercase()`](#method.is_uppercase) - #[inline] - #[must_use] - pub const fn is_ascii_uppercase(&self) -> bool { - self.is_uppercase() - } - - /// Checks if the character is punctuation - /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('n').is_ascii_punctuation(), false); - /// assert_eq!(AsciiChar::new(' ').is_ascii_punctuation(), false); - /// assert_eq!(AsciiChar::new('_').is_ascii_punctuation(), true); - /// assert_eq!(AsciiChar::new('~').is_ascii_punctuation(), true); - /// ``` - #[inline] - #[must_use] - pub const fn is_ascii_punctuation(&self) -> bool { - self.is_ascii_graphic() & !self.is_alphanumeric() - } - - /// Checks if the character is a valid hex digit - /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('5').is_ascii_hexdigit(), true); - /// assert_eq!(AsciiChar::new('a').is_ascii_hexdigit(), true); - /// assert_eq!(AsciiChar::new('F').is_ascii_hexdigit(), true); - /// assert_eq!(AsciiChar::new('G').is_ascii_hexdigit(), false); - /// assert_eq!(AsciiChar::new(' ').is_ascii_hexdigit(), false); - /// ``` - #[inline] - #[must_use] - pub const fn is_ascii_hexdigit(&self) -> bool { - self.is_ascii_digit() | ((*self as u8 | 0x20_u8).wrapping_sub(b'a') < 6) - } - - /// Unicode has printable versions of the ASCII control codes, like '␛'. - /// - /// This function is identical with `.as_char()` - /// for all values `.is_printable()` returns true for, - /// but replaces the control codes with those unicodes printable versions. 
- /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('\0').as_printable_char(), '␀'); - /// assert_eq!(AsciiChar::new('\n').as_printable_char(), '␊'); - /// assert_eq!(AsciiChar::new(' ').as_printable_char(), ' '); - /// assert_eq!(AsciiChar::new('p').as_printable_char(), 'p'); - /// ``` - #[must_use] - pub fn as_printable_char(self) -> char { - match self as u8 { - // Non printable characters - // SAFETY: From codepoint 0x2400 ('␀') to 0x241f (`␟`), there are characters representing - // the unprintable characters from 0x0 to 0x1f, ordered correctly. - // As `b` is guaranteed to be within 0x0 to 0x1f, the conversion represents a - // valid character. - b @ 0x0..=0x1f => unsafe { char::from_u32_unchecked(u32::from('␀') + u32::from(b)) }, - - // 0x7f (delete) has it's own character at codepoint 0x2420, not 0x247f, so it is special - // cased to return it's character - 0x7f => '␡', - - // All other characters are printable, and per function contract use `Self::as_char` - _ => self.as_char(), - } - } - - /// Replaces letters `a` to `z` with `A` to `Z` - pub fn make_ascii_uppercase(&mut self) { - *self = self.to_ascii_uppercase(); - } - - /// Replaces letters `A` to `Z` with `a` to `z` - pub fn make_ascii_lowercase(&mut self) { - *self = self.to_ascii_lowercase(); - } - - /// Maps letters a-z to A-Z and returns any other character unchanged. 
- /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('u').to_ascii_uppercase().as_char(), 'U'); - /// assert_eq!(AsciiChar::new('U').to_ascii_uppercase().as_char(), 'U'); - /// assert_eq!(AsciiChar::new('2').to_ascii_uppercase().as_char(), '2'); - /// assert_eq!(AsciiChar::new('=').to_ascii_uppercase().as_char(), '='); - /// assert_eq!(AsciiChar::new('[').to_ascii_uppercase().as_char(), '['); - /// ``` - #[inline] - #[must_use] - #[allow(clippy::indexing_slicing)] // We're sure it'll either access one or the other, as `bool` is either `0` or `1` - pub const fn to_ascii_uppercase(&self) -> Self { - [*self, AsciiChar::new((*self as u8 & 0b101_1111) as char)][self.is_lowercase() as usize] - } - - /// Maps letters A-Z to a-z and returns any other character unchanged. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiChar; - /// assert_eq!(AsciiChar::new('U').to_ascii_lowercase().as_char(), 'u'); - /// assert_eq!(AsciiChar::new('u').to_ascii_lowercase().as_char(), 'u'); - /// assert_eq!(AsciiChar::new('2').to_ascii_lowercase().as_char(), '2'); - /// assert_eq!(AsciiChar::new('^').to_ascii_lowercase().as_char(), '^'); - /// assert_eq!(AsciiChar::new('\x7f').to_ascii_lowercase().as_char(), '\x7f'); - /// ``` - #[inline] - #[must_use] - #[allow(clippy::indexing_slicing)] // We're sure it'll either access one or the other, as `bool` is either `0` or `1` - pub const fn to_ascii_lowercase(&self) -> Self { - [*self, AsciiChar::new(self.to_not_upper() as char)][self.is_uppercase() as usize] - } - - /// Compares two characters case-insensitively. 
- #[inline] - #[must_use] - pub const fn eq_ignore_ascii_case(&self, other: &Self) -> bool { - (self.as_byte() == other.as_byte()) - | (self.is_alphabetic() & (self.to_not_upper() == other.to_not_upper())) - } -} - -impl fmt::Display for AsciiChar { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.as_char().fmt(f) - } -} - -impl fmt::Debug for AsciiChar { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.as_char().fmt(f) - } -} - -impl Default for AsciiChar { - fn default() -> AsciiChar { - AsciiChar::Null - } -} - -macro_rules! impl_into_partial_eq_ord { - ($wider:ty, $to_wider:expr) => { - impl From for $wider { - #[inline] - fn from(a: AsciiChar) -> $wider { - $to_wider(a) - } - } - impl PartialEq<$wider> for AsciiChar { - #[inline] - fn eq(&self, rhs: &$wider) -> bool { - $to_wider(*self) == *rhs - } - } - impl PartialEq for $wider { - #[inline] - fn eq(&self, rhs: &AsciiChar) -> bool { - *self == $to_wider(*rhs) - } - } - impl PartialOrd<$wider> for AsciiChar { - #[inline] - fn partial_cmp(&self, rhs: &$wider) -> Option { - $to_wider(*self).partial_cmp(rhs) - } - } - impl PartialOrd for $wider { - #[inline] - fn partial_cmp(&self, rhs: &AsciiChar) -> Option { - self.partial_cmp(&$to_wider(*rhs)) - } - } - }; -} -impl_into_partial_eq_ord! {u8, AsciiChar::as_byte} -impl_into_partial_eq_ord! {char, AsciiChar::as_char} - -/// Error returned by `ToAsciiChar`. -#[derive(Clone, Copy, PartialEq, Eq)] -pub struct ToAsciiCharError(()); - -const ERRORMSG_CHAR: &str = "not an ASCII character"; - -#[cfg(not(feature = "std"))] -impl ToAsciiCharError { - /// Returns a description for this error, like `std::error::Error::description`. 
- #[inline] - #[must_use] - #[allow(clippy::unused_self)] - pub const fn description(&self) -> &'static str { - ERRORMSG_CHAR - } -} - -impl fmt::Debug for ToAsciiCharError { - fn fmt(&self, fmtr: &mut fmt::Formatter) -> fmt::Result { - write!(fmtr, "{}", ERRORMSG_CHAR) - } -} - -impl fmt::Display for ToAsciiCharError { - fn fmt(&self, fmtr: &mut fmt::Formatter) -> fmt::Result { - write!(fmtr, "{}", ERRORMSG_CHAR) - } -} - -#[cfg(feature = "std")] -impl Error for ToAsciiCharError { - #[inline] - fn description(&self) -> &'static str { - ERRORMSG_CHAR - } -} - -/// Convert `char`, `u8` and other character types to `AsciiChar`. -pub trait ToAsciiChar { - /// Convert to `AsciiChar`. - /// - /// # Errors - /// If `self` is outside the valid ascii range, this returns `Err` - fn to_ascii_char(self) -> Result; - - /// Convert to `AsciiChar` without checking that it is an ASCII character. - /// - /// # Safety - /// Calling this function with a value outside of the ascii range, `0x0` to `0x7f` inclusive, - /// is undefined behavior. - // TODO: Make sure this is the contract we want to express in this function. - // It is ambigous if numbers such as `0xffffff20_u32` are valid ascii characters, - // as this function returns `Ascii::Space` due to the cast to `u8`, even though - // `to_ascii_char` returns `Err()`. 
- unsafe fn to_ascii_char_unchecked(self) -> AsciiChar; -} - -impl ToAsciiChar for AsciiChar { - #[inline] - fn to_ascii_char(self) -> Result { - Ok(self) - } - - #[inline] - unsafe fn to_ascii_char_unchecked(self) -> AsciiChar { - self - } -} - -impl ToAsciiChar for u8 { - #[inline] - fn to_ascii_char(self) -> Result { - u32::from(self).to_ascii_char() - } - #[inline] - unsafe fn to_ascii_char_unchecked(self) -> AsciiChar { - // SAFETY: Caller guarantees `self` is within bounds of the enum - // variants, so this cast successfully produces a valid ascii - // variant - unsafe { mem::transmute::(self) } - } -} - -// Note: Casts to `u8` here does not cause problems, as the negative -// range is mapped outside of ascii bounds and we don't mind losing -// the sign, as long as negative numbers are mapped outside ascii range. -#[allow(clippy::cast_sign_loss)] -impl ToAsciiChar for i8 { - #[inline] - fn to_ascii_char(self) -> Result { - u32::from(self as u8).to_ascii_char() - } - #[inline] - unsafe fn to_ascii_char_unchecked(self) -> AsciiChar { - // SAFETY: Caller guarantees `self` is within bounds of the enum - // variants, so this cast successfully produces a valid ascii - // variant - unsafe { mem::transmute::(self as u8) } - } -} - -impl ToAsciiChar for char { - #[inline] - fn to_ascii_char(self) -> Result { - u32::from(self).to_ascii_char() - } - #[inline] - unsafe fn to_ascii_char_unchecked(self) -> AsciiChar { - // SAFETY: Caller guarantees we're within ascii range. - unsafe { u32::from(self).to_ascii_char_unchecked() } - } -} - -impl ToAsciiChar for u32 { - fn to_ascii_char(self) -> Result { - match self { - // SAFETY: We're within the valid ascii range in this branch. 
- 0x0..=0x7f => Ok(unsafe { self.to_ascii_char_unchecked() }), - _ => Err(ToAsciiCharError(())), - } - } - - #[inline] - unsafe fn to_ascii_char_unchecked(self) -> AsciiChar { - // Note: This cast discards the top bytes, this may cause problems, see - // the TODO on this method's documentation in the trait. - // SAFETY: Caller guarantees we're within ascii range. - #[allow(clippy::cast_possible_truncation)] // We want to truncate it - unsafe { - (self as u8).to_ascii_char_unchecked() - } - } -} - -impl ToAsciiChar for u16 { - fn to_ascii_char(self) -> Result { - u32::from(self).to_ascii_char() - } - #[inline] - unsafe fn to_ascii_char_unchecked(self) -> AsciiChar { - // Note: This cast discards the top bytes, this may cause problems, see - // the TODO on this method's documentation in the trait. - // SAFETY: Caller guarantees we're within ascii range. - #[allow(clippy::cast_possible_truncation)] // We want to truncate it - unsafe { - (self as u8).to_ascii_char_unchecked() - } - } -} - -#[cfg(test)] -mod tests { - use super::{AsciiChar, ToAsciiChar, ToAsciiCharError}; - - #[test] - fn to_ascii_char() { - fn generic(ch: C) -> Result { - ch.to_ascii_char() - } - assert_eq!(generic(AsciiChar::A), Ok(AsciiChar::A)); - assert_eq!(generic(b'A'), Ok(AsciiChar::A)); - assert_eq!(generic('A'), Ok(AsciiChar::A)); - assert!(generic(200_u16).is_err()); - assert!(generic('λ').is_err()); - } - - #[test] - fn as_byte_and_char() { - assert_eq!(AsciiChar::A.as_byte(), b'A'); - assert_eq!(AsciiChar::A.as_char(), 'A'); - } - - #[test] - fn new_array_is_correct() { - for byte in 0..128_u8 { - assert_eq!(AsciiChar::new(byte as char).as_byte(), byte); - } - } - - #[test] - fn is_all() { - #![allow(clippy::is_digit_ascii_radix)] // testing it - for byte in 0..128_u8 { - let ch = byte as char; - let ascii = AsciiChar::new(ch); - assert_eq!(ascii.is_alphabetic(), ch.is_alphabetic()); - assert_eq!(ascii.is_ascii_alphabetic(), ch.is_ascii_alphabetic()); - assert_eq!(ascii.is_alphanumeric(), 
ch.is_alphanumeric()); - assert_eq!(ascii.is_ascii_alphanumeric(), ch.is_ascii_alphanumeric()); - assert_eq!(ascii.is_digit(8), ch.is_digit(8), "is_digit(8) {:?}", ch); - assert_eq!(ascii.is_digit(10), ch.is_digit(10), "is_digit(10) {:?}", ch); - assert_eq!(ascii.is_digit(16), ch.is_digit(16), "is_digit(16) {:?}", ch); - assert_eq!(ascii.is_digit(36), ch.is_digit(36), "is_digit(36) {:?}", ch); - assert_eq!(ascii.is_ascii_digit(), ch.is_ascii_digit()); - assert_eq!(ascii.is_ascii_hexdigit(), ch.is_ascii_hexdigit()); - assert_eq!(ascii.is_ascii_control(), ch.is_ascii_control()); - assert_eq!(ascii.is_ascii_graphic(), ch.is_ascii_graphic()); - assert_eq!(ascii.is_ascii_punctuation(), ch.is_ascii_punctuation()); - assert_eq!( - ascii.is_whitespace(), - ch.is_whitespace(), - "{:?} ({:#04x})", - ch, - byte - ); - assert_eq!( - ascii.is_ascii_whitespace(), - ch.is_ascii_whitespace(), - "{:?} ({:#04x})", - ch, - byte - ); - assert_eq!(ascii.is_uppercase(), ch.is_uppercase()); - assert_eq!(ascii.is_ascii_uppercase(), ch.is_ascii_uppercase()); - assert_eq!(ascii.is_lowercase(), ch.is_lowercase()); - assert_eq!(ascii.is_ascii_lowercase(), ch.is_ascii_lowercase()); - assert_eq!(ascii.to_ascii_uppercase(), ch.to_ascii_uppercase()); - assert_eq!(ascii.to_ascii_lowercase(), ch.to_ascii_lowercase()); - } - } - - #[test] - fn is_digit_strange_radixes() { - assert_eq!(AsciiChar::_0.is_digit(0), '0'.is_digit(0)); - assert_eq!(AsciiChar::_0.is_digit(1), '0'.is_digit(1)); - assert_eq!(AsciiChar::_5.is_digit(5), '5'.is_digit(5)); - assert_eq!(AsciiChar::z.is_digit(35), 'z'.is_digit(35)); - } - - #[test] - #[should_panic] - fn is_digit_bad_radix() { - let _ = AsciiChar::_7.is_digit(37); - } - - #[test] - fn cmp_wider() { - assert_eq!(AsciiChar::A, 'A'); - assert_eq!(b'b', AsciiChar::b); - assert!(AsciiChar::a < 'z'); - } - - #[test] - fn ascii_case() { - assert_eq!(AsciiChar::At.to_ascii_lowercase(), AsciiChar::At); - assert_eq!(AsciiChar::At.to_ascii_uppercase(), AsciiChar::At); - 
assert_eq!(AsciiChar::A.to_ascii_lowercase(), AsciiChar::a); - assert_eq!(AsciiChar::A.to_ascii_uppercase(), AsciiChar::A); - assert_eq!(AsciiChar::a.to_ascii_lowercase(), AsciiChar::a); - assert_eq!(AsciiChar::a.to_ascii_uppercase(), AsciiChar::A); - - let mut mutable = (AsciiChar::A, AsciiChar::a); - mutable.0.make_ascii_lowercase(); - mutable.1.make_ascii_uppercase(); - assert_eq!(mutable.0, AsciiChar::a); - assert_eq!(mutable.1, AsciiChar::A); - - assert!(AsciiChar::LineFeed.eq_ignore_ascii_case(&AsciiChar::LineFeed)); - assert!(!AsciiChar::LineFeed.eq_ignore_ascii_case(&AsciiChar::CarriageReturn)); - assert!(AsciiChar::z.eq_ignore_ascii_case(&AsciiChar::Z)); - assert!(AsciiChar::Z.eq_ignore_ascii_case(&AsciiChar::z)); - assert!(AsciiChar::A.eq_ignore_ascii_case(&AsciiChar::a)); - assert!(!AsciiChar::K.eq_ignore_ascii_case(&AsciiChar::C)); - assert!(!AsciiChar::Z.eq_ignore_ascii_case(&AsciiChar::DEL)); - assert!(!AsciiChar::BracketOpen.eq_ignore_ascii_case(&AsciiChar::CurlyBraceOpen)); - assert!(!AsciiChar::Grave.eq_ignore_ascii_case(&AsciiChar::At)); - assert!(!AsciiChar::Grave.eq_ignore_ascii_case(&AsciiChar::DEL)); - } - - #[test] - #[cfg(feature = "std")] - fn fmt_ascii() { - assert_eq!(format!("{}", AsciiChar::t), "t"); - assert_eq!(format!("{:?}", AsciiChar::t), "'t'"); - assert_eq!(format!("{}", AsciiChar::LineFeed), "\n"); - assert_eq!(format!("{:?}", AsciiChar::LineFeed), "'\\n'"); - } -} diff --git a/anneal/vendor/ascii/src/ascii_str.rs b/anneal/vendor/ascii/src/ascii_str.rs deleted file mode 100644 index e8a6e12550..0000000000 --- a/anneal/vendor/ascii/src/ascii_str.rs +++ /dev/null @@ -1,1600 +0,0 @@ -#[cfg(feature = "alloc")] -use alloc::borrow::ToOwned; -#[cfg(feature = "alloc")] -use alloc::boxed::Box; -use core::fmt; -use core::ops::{Index, IndexMut}; -use core::ops::{Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}; -use core::slice::{self, Iter, IterMut, SliceIndex}; -#[cfg(feature = "std")] -use std::error::Error; 
-#[cfg(feature = "std")] -use std::ffi::CStr; - -use ascii_char::AsciiChar; -#[cfg(feature = "alloc")] -use ascii_string::AsciiString; - -/// [`AsciiStr`] represents a byte or string slice that only contains ASCII characters. -/// -/// It wraps an `[AsciiChar]` and implements many of `str`s methods and traits. -/// -/// It can be created by a checked conversion from a `str` or `[u8]`, or borrowed from an -/// `AsciiString`. -#[derive(PartialEq, Eq, PartialOrd, Ord, Hash)] -#[repr(transparent)] -pub struct AsciiStr { - slice: [AsciiChar], -} - -impl AsciiStr { - /// Converts `&self` to a `&str` slice. - #[inline] - #[must_use] - pub fn as_str(&self) -> &str { - // SAFETY: All variants of `AsciiChar` are valid bytes for a `str`. - unsafe { &*(self as *const AsciiStr as *const str) } - } - - /// Converts `&self` into a byte slice. - #[inline] - #[must_use] - pub fn as_bytes(&self) -> &[u8] { - // SAFETY: All variants of `AsciiChar` are valid `u8`, given they're `repr(u8)`. - unsafe { &*(self as *const AsciiStr as *const [u8]) } - } - - /// Returns the entire string as slice of `AsciiChar`s. - #[inline] - #[must_use] - pub const fn as_slice(&self) -> &[AsciiChar] { - &self.slice - } - - /// Returns the entire string as mutable slice of `AsciiChar`s. - #[inline] - #[must_use] - pub fn as_mut_slice(&mut self) -> &mut [AsciiChar] { - &mut self.slice - } - - /// Returns a raw pointer to the `AsciiStr`'s buffer. - /// - /// The caller must ensure that the slice outlives the pointer this function returns, or else it - /// will end up pointing to garbage. Modifying the `AsciiStr` may cause it's buffer to be - /// reallocated, which would also make any pointers to it invalid. - #[inline] - #[must_use] - pub const fn as_ptr(&self) -> *const AsciiChar { - self.as_slice().as_ptr() - } - - /// Returns an unsafe mutable pointer to the `AsciiStr`'s buffer. 
- /// - /// The caller must ensure that the slice outlives the pointer this function returns, or else it - /// will end up pointing to garbage. Modifying the `AsciiStr` may cause it's buffer to be - /// reallocated, which would also make any pointers to it invalid. - #[inline] - #[must_use] - pub fn as_mut_ptr(&mut self) -> *mut AsciiChar { - self.as_mut_slice().as_mut_ptr() - } - - /// Copies the content of this `AsciiStr` into an owned `AsciiString`. - #[cfg(feature = "alloc")] - #[must_use] - pub fn to_ascii_string(&self) -> AsciiString { - AsciiString::from(self.slice.to_vec()) - } - - /// Converts anything that can represent a byte slice into an `AsciiStr`. - /// - /// # Errors - /// If `bytes` contains a non-ascii byte, `Err` will be returned - /// - /// # Examples - /// ``` - /// # use ascii::AsciiStr; - /// let foo = AsciiStr::from_ascii(b"foo"); - /// let err = AsciiStr::from_ascii("Ŋ"); - /// assert_eq!(foo.unwrap().as_str(), "foo"); - /// assert_eq!(err.unwrap_err().valid_up_to(), 0); - /// ``` - #[inline] - pub fn from_ascii(bytes: &B) -> Result<&AsciiStr, AsAsciiStrError> - where - B: AsRef<[u8]> + ?Sized, - { - bytes.as_ref().as_ascii_str() - } - - /// Converts anything that can be represented as a byte slice to an `AsciiStr` without checking - /// for non-ASCII characters.. - /// - /// # Safety - /// If any of the bytes in `bytes` do not represent valid ascii characters, calling - /// this function is undefined behavior. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiStr; - /// let foo = unsafe { AsciiStr::from_ascii_unchecked(&b"foo"[..]) }; - /// assert_eq!(foo.as_str(), "foo"); - /// ``` - #[inline] - #[must_use] - pub unsafe fn from_ascii_unchecked(bytes: &[u8]) -> &AsciiStr { - // SAFETY: Caller guarantees all bytes in `bytes` are valid - // ascii characters. - unsafe { bytes.as_ascii_str_unchecked() } - } - - /// Returns the number of characters / bytes in this ASCII sequence. 
- /// - /// # Examples - /// ``` - /// # use ascii::AsciiStr; - /// let s = AsciiStr::from_ascii("foo").unwrap(); - /// assert_eq!(s.len(), 3); - /// ``` - #[inline] - #[must_use] - pub const fn len(&self) -> usize { - self.slice.len() - } - - /// Returns true if the ASCII slice contains zero bytes. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiStr; - /// let mut empty = AsciiStr::from_ascii("").unwrap(); - /// let mut full = AsciiStr::from_ascii("foo").unwrap(); - /// assert!(empty.is_empty()); - /// assert!(!full.is_empty()); - /// ``` - #[inline] - #[must_use] - pub const fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Returns an iterator over the characters of the `AsciiStr`. - #[inline] - #[must_use] - pub fn chars(&self) -> Chars { - Chars(self.slice.iter()) - } - - /// Returns an iterator over the characters of the `AsciiStr` which allows you to modify the - /// value of each `AsciiChar`. - #[inline] - #[must_use] - pub fn chars_mut(&mut self) -> CharsMut { - CharsMut(self.slice.iter_mut()) - } - - /// Returns an iterator over parts of the `AsciiStr` separated by a character. - /// - /// # Examples - /// ``` - /// # use ascii::{AsciiStr, AsciiChar}; - /// let words = AsciiStr::from_ascii("apple banana lemon").unwrap() - /// .split(AsciiChar::Space) - /// .map(|a| a.as_str()) - /// .collect::>(); - /// assert_eq!(words, ["apple", "banana", "lemon"]); - /// ``` - #[must_use] - pub fn split(&self, on: AsciiChar) -> impl DoubleEndedIterator { - Split { - on, - ended: false, - chars: self.chars(), - } - } - - /// Returns an iterator over the lines of the `AsciiStr`, which are themselves `AsciiStr`s. - /// - /// Lines are ended with either `LineFeed` (`\n`), or `CarriageReturn` then `LineFeed` (`\r\n`). - /// - /// The final line ending is optional. - #[inline] - #[must_use] - pub fn lines(&self) -> impl DoubleEndedIterator { - Lines { string: self } - } - - /// Returns an ASCII string slice with leading and trailing whitespace removed. 
- /// - /// # Examples - /// ``` - /// # use ascii::AsciiStr; - /// let example = AsciiStr::from_ascii(" \twhite \tspace \t").unwrap(); - /// assert_eq!("white \tspace", example.trim()); - /// ``` - #[must_use] - pub fn trim(&self) -> &Self { - self.trim_start().trim_end() - } - - /// Returns an ASCII string slice with leading whitespace removed. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiStr; - /// let example = AsciiStr::from_ascii(" \twhite \tspace \t").unwrap(); - /// assert_eq!("white \tspace \t", example.trim_start()); - /// ``` - #[must_use] - pub fn trim_start(&self) -> &Self { - let whitespace_len = self - .chars() - .position(|ch| !ch.is_whitespace()) - .unwrap_or_else(|| self.len()); - - // SAFETY: `whitespace_len` is `0..=len`, which is at most `len`, which is a valid empty slice. - unsafe { self.as_slice().get_unchecked(whitespace_len..).into() } - } - - /// Returns an ASCII string slice with trailing whitespace removed. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiStr; - /// let example = AsciiStr::from_ascii(" \twhite \tspace \t").unwrap(); - /// assert_eq!(" \twhite \tspace", example.trim_end()); - /// ``` - #[must_use] - pub fn trim_end(&self) -> &Self { - // Number of whitespace characters counting from the end - let whitespace_len = self - .chars() - .rev() - .position(|ch| !ch.is_whitespace()) - .unwrap_or_else(|| self.len()); - - // SAFETY: `whitespace_len` is `0..=len`, which is at most `len`, which is a valid empty slice, and at least `0`, which is the whole slice. - unsafe { - self.as_slice() - .get_unchecked(..self.len() - whitespace_len) - .into() - } - } - - /// Compares two strings case-insensitively. - #[must_use] - pub fn eq_ignore_ascii_case(&self, other: &Self) -> bool { - self.len() == other.len() - && self - .chars() - .zip(other.chars()) - .all(|(ch, other_ch)| ch.eq_ignore_ascii_case(&other_ch)) - } - - /// Replaces lowercase letters with their uppercase equivalent. 
- pub fn make_ascii_uppercase(&mut self) { - for ch in self.chars_mut() { - *ch = ch.to_ascii_uppercase(); - } - } - - /// Replaces uppercase letters with their lowercase equivalent. - pub fn make_ascii_lowercase(&mut self) { - for ch in self.chars_mut() { - *ch = ch.to_ascii_lowercase(); - } - } - - /// Returns a copy of this string where letters 'a' to 'z' are mapped to 'A' to 'Z'. - #[cfg(feature = "alloc")] - #[must_use] - pub fn to_ascii_uppercase(&self) -> AsciiString { - let mut ascii_string = self.to_ascii_string(); - ascii_string.make_ascii_uppercase(); - ascii_string - } - - /// Returns a copy of this string where letters 'A' to 'Z' are mapped to 'a' to 'z'. - #[cfg(feature = "alloc")] - #[must_use] - pub fn to_ascii_lowercase(&self) -> AsciiString { - let mut ascii_string = self.to_ascii_string(); - ascii_string.make_ascii_lowercase(); - ascii_string - } - - /// Returns the first character if the string is not empty. - #[inline] - #[must_use] - pub fn first(&self) -> Option { - self.slice.first().copied() - } - - /// Returns the last character if the string is not empty. - #[inline] - #[must_use] - pub fn last(&self) -> Option { - self.slice.last().copied() - } - - /// Converts a [`Box`] into a [`AsciiString`] without copying or allocating. - #[cfg(feature = "alloc")] - #[inline] - #[must_use] - pub fn into_ascii_string(self: Box) -> AsciiString { - let slice = Box::<[AsciiChar]>::from(self); - AsciiString::from(slice.into_vec()) - } -} - -macro_rules! impl_partial_eq { - ($wider: ty) => { - impl PartialEq<$wider> for AsciiStr { - #[inline] - fn eq(&self, other: &$wider) -> bool { - >::as_ref(self) == other - } - } - impl PartialEq for $wider { - #[inline] - fn eq(&self, other: &AsciiStr) -> bool { - self == >::as_ref(other) - } - } - }; -} - -impl_partial_eq! {str} -impl_partial_eq! {[u8]} -impl_partial_eq! 
{[AsciiChar]} - -#[cfg(feature = "alloc")] -impl ToOwned for AsciiStr { - type Owned = AsciiString; - - #[inline] - fn to_owned(&self) -> AsciiString { - self.to_ascii_string() - } -} - -impl AsRef<[u8]> for AsciiStr { - #[inline] - fn as_ref(&self) -> &[u8] { - self.as_bytes() - } -} -impl AsRef for AsciiStr { - #[inline] - fn as_ref(&self) -> &str { - self.as_str() - } -} -impl AsRef<[AsciiChar]> for AsciiStr { - #[inline] - fn as_ref(&self) -> &[AsciiChar] { - &self.slice - } -} -impl AsMut<[AsciiChar]> for AsciiStr { - #[inline] - fn as_mut(&mut self) -> &mut [AsciiChar] { - &mut self.slice - } -} - -impl Default for &'static AsciiStr { - #[inline] - fn default() -> &'static AsciiStr { - From::from(&[] as &[AsciiChar]) - } -} -impl<'a> From<&'a [AsciiChar]> for &'a AsciiStr { - #[inline] - fn from(slice: &[AsciiChar]) -> &AsciiStr { - let ptr = slice as *const [AsciiChar] as *const AsciiStr; - unsafe { &*ptr } - } -} -impl<'a> From<&'a mut [AsciiChar]> for &'a mut AsciiStr { - #[inline] - fn from(slice: &mut [AsciiChar]) -> &mut AsciiStr { - let ptr = slice as *mut [AsciiChar] as *mut AsciiStr; - unsafe { &mut *ptr } - } -} -#[cfg(feature = "alloc")] -impl From> for Box { - #[inline] - fn from(owned: Box<[AsciiChar]>) -> Box { - let ptr = Box::into_raw(owned) as *mut AsciiStr; - unsafe { Box::from_raw(ptr) } - } -} - -impl AsRef for AsciiStr { - #[inline] - fn as_ref(&self) -> &AsciiStr { - self - } -} -impl AsMut for AsciiStr { - #[inline] - fn as_mut(&mut self) -> &mut AsciiStr { - self - } -} -impl AsRef for [AsciiChar] { - #[inline] - fn as_ref(&self) -> &AsciiStr { - self.into() - } -} -impl AsMut for [AsciiChar] { - #[inline] - fn as_mut(&mut self) -> &mut AsciiStr { - self.into() - } -} - -impl<'a> From<&'a AsciiStr> for &'a [AsciiChar] { - #[inline] - fn from(astr: &AsciiStr) -> &[AsciiChar] { - &astr.slice - } -} -impl<'a> From<&'a mut AsciiStr> for &'a mut [AsciiChar] { - #[inline] - fn from(astr: &mut AsciiStr) -> &mut [AsciiChar] { - &mut astr.slice 
- } -} -impl<'a> From<&'a AsciiStr> for &'a [u8] { - #[inline] - fn from(astr: &AsciiStr) -> &[u8] { - astr.as_bytes() - } -} -impl<'a> From<&'a AsciiStr> for &'a str { - #[inline] - fn from(astr: &AsciiStr) -> &str { - astr.as_str() - } -} -macro_rules! widen_box { - ($wider: ty) => { - #[cfg(feature = "alloc")] - impl From> for Box<$wider> { - #[inline] - fn from(owned: Box) -> Box<$wider> { - let ptr = Box::into_raw(owned) as *mut $wider; - unsafe { Box::from_raw(ptr) } - } - } - }; -} -widen_box! {[AsciiChar]} -widen_box! {[u8]} -widen_box! {str} - -// allows &AsciiChar to be used by generic AsciiString Extend and FromIterator -impl AsRef for AsciiChar { - fn as_ref(&self) -> &AsciiStr { - slice::from_ref(self).into() - } -} - -impl fmt::Display for AsciiStr { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(self.as_str(), f) - } -} - -impl fmt::Debug for AsciiStr { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(self.as_str(), f) - } -} - -macro_rules! impl_index { - ($idx:ty) => { - #[allow(clippy::indexing_slicing)] // In `Index`, if it's out of bounds, panic is the default - impl Index<$idx> for AsciiStr { - type Output = AsciiStr; - - #[inline] - fn index(&self, index: $idx) -> &AsciiStr { - self.slice[index].as_ref() - } - } - - #[allow(clippy::indexing_slicing)] // In `IndexMut`, if it's out of bounds, panic is the default - impl IndexMut<$idx> for AsciiStr { - #[inline] - fn index_mut(&mut self, index: $idx) -> &mut AsciiStr { - self.slice[index].as_mut() - } - } - }; -} - -impl_index! { Range } -impl_index! { RangeTo } -impl_index! { RangeFrom } -impl_index! { RangeFull } -impl_index! { RangeInclusive } -impl_index! 
{ RangeToInclusive } - -#[allow(clippy::indexing_slicing)] // In `Index`, if it's out of bounds, panic is the default -impl Index for AsciiStr { - type Output = AsciiChar; - - #[inline] - fn index(&self, index: usize) -> &AsciiChar { - &self.slice[index] - } -} - -#[allow(clippy::indexing_slicing)] // In `IndexMut`, if it's out of bounds, panic is the default -impl IndexMut for AsciiStr { - #[inline] - fn index_mut(&mut self, index: usize) -> &mut AsciiChar { - &mut self.slice[index] - } -} - -/// Produces references for compatibility with `[u8]`. -/// -/// (`str` doesn't implement `IntoIterator` for its references, -/// so there is no compatibility to lose.) -impl<'a> IntoIterator for &'a AsciiStr { - type Item = &'a AsciiChar; - type IntoIter = CharsRef<'a>; - #[inline] - fn into_iter(self) -> Self::IntoIter { - CharsRef(self.as_slice().iter()) - } -} - -impl<'a> IntoIterator for &'a mut AsciiStr { - type Item = &'a mut AsciiChar; - type IntoIter = CharsMut<'a>; - #[inline] - fn into_iter(self) -> Self::IntoIter { - self.chars_mut() - } -} - -/// A copying iterator over the characters of an `AsciiStr`. -#[derive(Clone, Debug)] -pub struct Chars<'a>(Iter<'a, AsciiChar>); -impl<'a> Chars<'a> { - /// Returns the ascii string slice with the remaining characters. - #[must_use] - pub fn as_str(&self) -> &'a AsciiStr { - self.0.as_slice().into() - } -} -impl<'a> Iterator for Chars<'a> { - type Item = AsciiChar; - #[inline] - fn next(&mut self) -> Option { - self.0.next().copied() - } - fn size_hint(&self) -> (usize, Option) { - self.0.size_hint() - } -} -impl<'a> DoubleEndedIterator for Chars<'a> { - #[inline] - fn next_back(&mut self) -> Option { - self.0.next_back().copied() - } -} -impl<'a> ExactSizeIterator for Chars<'a> { - fn len(&self) -> usize { - self.0.len() - } -} - -/// A mutable iterator over the characters of an `AsciiStr`. 
-#[derive(Debug)] -pub struct CharsMut<'a>(IterMut<'a, AsciiChar>); -impl<'a> CharsMut<'a> { - /// Returns the ascii string slice with the remaining characters. - #[must_use] - pub fn into_str(self) -> &'a mut AsciiStr { - self.0.into_slice().into() - } -} -impl<'a> Iterator for CharsMut<'a> { - type Item = &'a mut AsciiChar; - #[inline] - fn next(&mut self) -> Option<&'a mut AsciiChar> { - self.0.next() - } - fn size_hint(&self) -> (usize, Option) { - self.0.size_hint() - } -} -impl<'a> DoubleEndedIterator for CharsMut<'a> { - #[inline] - fn next_back(&mut self) -> Option<&'a mut AsciiChar> { - self.0.next_back() - } -} -impl<'a> ExactSizeIterator for CharsMut<'a> { - fn len(&self) -> usize { - self.0.len() - } -} - -/// An immutable iterator over the characters of an `AsciiStr`. -#[derive(Clone, Debug)] -pub struct CharsRef<'a>(Iter<'a, AsciiChar>); -impl<'a> CharsRef<'a> { - /// Returns the ascii string slice with the remaining characters. - #[must_use] - pub fn as_str(&self) -> &'a AsciiStr { - self.0.as_slice().into() - } -} -impl<'a> Iterator for CharsRef<'a> { - type Item = &'a AsciiChar; - #[inline] - fn next(&mut self) -> Option<&'a AsciiChar> { - self.0.next() - } - fn size_hint(&self) -> (usize, Option) { - self.0.size_hint() - } -} -impl<'a> DoubleEndedIterator for CharsRef<'a> { - #[inline] - fn next_back(&mut self) -> Option<&'a AsciiChar> { - self.0.next_back() - } -} - -/// An iterator over parts of an `AsciiStr` separated by an `AsciiChar`. -/// -/// This type is created by [`AsciiChar::split()`](struct.AsciiChar.html#method.split). 
-#[derive(Clone, Debug)] -struct Split<'a> { - on: AsciiChar, - ended: bool, - chars: Chars<'a>, -} -impl<'a> Iterator for Split<'a> { - type Item = &'a AsciiStr; - - fn next(&mut self) -> Option<&'a AsciiStr> { - if !self.ended { - let start: &AsciiStr = self.chars.as_str(); - let split_on = self.on; - - if let Some(at) = self.chars.position(|ch| ch == split_on) { - // SAFETY: `at` is guaranteed to be in bounds, as `position` returns `Ok(0..len)`. - Some(unsafe { start.as_slice().get_unchecked(..at).into() }) - } else { - self.ended = true; - Some(start) - } - } else { - None - } - } -} -impl<'a> DoubleEndedIterator for Split<'a> { - fn next_back(&mut self) -> Option<&'a AsciiStr> { - if !self.ended { - let start: &AsciiStr = self.chars.as_str(); - let split_on = self.on; - - if let Some(at) = self.chars.rposition(|ch| ch == split_on) { - // SAFETY: `at` is guaranteed to be in bounds, as `rposition` returns `Ok(0..len)`, and slices `1..`, `2..`, etc... until `len..` inclusive, are valid. - Some(unsafe { start.as_slice().get_unchecked(at + 1..).into() }) - } else { - self.ended = true; - Some(start) - } - } else { - None - } - } -} - -/// An iterator over the lines of the internal character array. -#[derive(Clone, Debug)] -struct Lines<'a> { - string: &'a AsciiStr, -} -impl<'a> Iterator for Lines<'a> { - type Item = &'a AsciiStr; - - fn next(&mut self) -> Option<&'a AsciiStr> { - if let Some(idx) = self - .string - .chars() - .position(|chr| chr == AsciiChar::LineFeed) - { - // SAFETY: `idx` is guaranteed to be `1..len`, as we get it from `position` as `0..len` and make sure it's not `0`. 
- let line = if idx > 0 - && *unsafe { self.string.as_slice().get_unchecked(idx - 1) } - == AsciiChar::CarriageReturn - { - // SAFETY: As per above, `idx` is guaranteed to be `1..len` - unsafe { self.string.as_slice().get_unchecked(..idx - 1).into() } - } else { - // SAFETY: As per above, `idx` is guaranteed to be `0..len` - unsafe { self.string.as_slice().get_unchecked(..idx).into() } - }; - // SAFETY: As per above, `idx` is guaranteed to be `0..len`, so at the extreme, slicing `len..` is a valid empty slice. - self.string = unsafe { self.string.as_slice().get_unchecked(idx + 1..).into() }; - Some(line) - } else if self.string.is_empty() { - None - } else { - let line = self.string; - // SAFETY: An empty string is a valid string. - self.string = unsafe { AsciiStr::from_ascii_unchecked(b"") }; - Some(line) - } - } -} - -impl<'a> DoubleEndedIterator for Lines<'a> { - fn next_back(&mut self) -> Option<&'a AsciiStr> { - if self.string.is_empty() { - return None; - } - - // If we end with `LF` / `CR/LF`, remove them - if let Some(AsciiChar::LineFeed) = self.string.last() { - // SAFETY: `last()` returned `Some`, so our len is at least 1. - self.string = unsafe { - self.string - .as_slice() - .get_unchecked(..self.string.len() - 1) - .into() - }; - - if let Some(AsciiChar::CarriageReturn) = self.string.last() { - // SAFETY: `last()` returned `Some`, so our len is at least 1. - self.string = unsafe { - self.string - .as_slice() - .get_unchecked(..self.string.len() - 1) - .into() - }; - } - } - - // Get the position of the first `LF` from the end. - let lf_rev_pos = self - .string - .chars() - .rev() - .position(|ch| ch == AsciiChar::LineFeed) - .unwrap_or_else(|| self.string.len()); - - // SAFETY: `lf_rev_pos` will be in range `0..=len`, so `len - lf_rev_pos` - // will be within `0..=len`, making it correct as a start and end - // point for the strings. - let line = unsafe { - self.string - .as_slice() - .get_unchecked(self.string.len() - lf_rev_pos..) 
- .into() - }; - self.string = unsafe { - self.string - .as_slice() - .get_unchecked(..self.string.len() - lf_rev_pos) - .into() - }; - Some(line) - } -} - -/// Error that is returned when a sequence of `u8` are not all ASCII. -/// -/// Is used by `As[Mut]AsciiStr` and the `from_ascii` method on `AsciiStr` and `AsciiString`. -#[derive(Clone, Copy, PartialEq, Eq, Debug)] -pub struct AsAsciiStrError(usize); - -const ERRORMSG_STR: &str = "one or more bytes are not ASCII"; - -impl AsAsciiStrError { - /// Returns the index of the first non-ASCII byte. - /// - /// It is the maximum index such that `from_ascii(input[..index])` would return `Ok(_)`. - #[inline] - #[must_use] - pub const fn valid_up_to(self) -> usize { - self.0 - } - #[cfg(not(feature = "std"))] - /// Returns a description for this error, like `std::error::Error::description`. - #[inline] - #[must_use] - #[allow(clippy::unused_self)] - pub const fn description(&self) -> &'static str { - ERRORMSG_STR - } -} -impl fmt::Display for AsAsciiStrError { - fn fmt(&self, fmtr: &mut fmt::Formatter) -> fmt::Result { - write!(fmtr, "the byte at index {} is not ASCII", self.0) - } -} -#[cfg(feature = "std")] -impl Error for AsAsciiStrError { - #[inline] - fn description(&self) -> &'static str { - ERRORMSG_STR - } -} - -/// Convert slices of bytes or [`AsciiChar`] to [`AsciiStr`]. -// Could nearly replace this trait with SliceIndex, but its methods isn't even -// on a path for stabilization. -pub trait AsAsciiStr { - /// Used to constrain `SliceIndex` - #[doc(hidden)] - type Inner; - /// Convert a subslice to an ASCII slice. - /// - /// # Errors - /// Returns `Err` if the range is out of bounds or if not all bytes in the - /// slice are ASCII. The value in the error will be the index of the first - /// non-ASCII byte or the end of the slice. 
- /// - /// # Examples - /// ``` - /// use ascii::AsAsciiStr; - /// assert!("'zoä'".slice_ascii(..3).is_ok()); - /// assert!("'zoä'".slice_ascii(0..4).is_err()); - /// assert!("'zoä'".slice_ascii(5..=5).is_ok()); - /// assert!("'zoä'".slice_ascii(4..).is_err()); - /// assert!(b"\r\n".slice_ascii(..).is_ok()); - /// ``` - fn slice_ascii(&self, range: R) -> Result<&AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[Self::Inner], Output = [Self::Inner]>; - /// Convert to an ASCII slice. - /// - /// # Errors - /// Returns `Err` if not all bytes are valid ascii values. - /// - /// # Example - /// ``` - /// use ascii::{AsAsciiStr, AsciiChar}; - /// assert!("ASCII".as_ascii_str().is_ok()); - /// assert!(b"\r\n".as_ascii_str().is_ok()); - /// assert!("'zoä'".as_ascii_str().is_err()); - /// assert!(b"\xff".as_ascii_str().is_err()); - /// assert!([AsciiChar::C][..].as_ascii_str().is_ok()); // infallible - /// ``` - fn as_ascii_str(&self) -> Result<&AsciiStr, AsAsciiStrError> { - self.slice_ascii(..) - } - /// Get a single ASCII character from the slice. - /// - /// Returns `None` if the index is out of bounds or the byte is not ASCII. - /// - /// # Examples - /// ``` - /// use ascii::{AsAsciiStr, AsciiChar}; - /// assert_eq!("'zoä'".get_ascii(4), None); - /// assert_eq!("'zoä'".get_ascii(5), Some(AsciiChar::Apostrophe)); - /// assert_eq!("'zoä'".get_ascii(6), None); - /// ``` - fn get_ascii(&self, index: usize) -> Option { - self.slice_ascii(index..=index) - .ok() - .and_then(AsciiStr::first) - } - /// Convert to an ASCII slice without checking for non-ASCII characters. - /// - /// # Safety - /// Calling this function when `self` contains non-ascii characters is - /// undefined behavior. - /// - /// # Examples - /// - unsafe fn as_ascii_str_unchecked(&self) -> &AsciiStr; -} - -/// Convert mutable slices of bytes or [`AsciiChar`] to [`AsciiStr`]. -pub trait AsMutAsciiStr: AsAsciiStr { - /// Convert a subslice to an ASCII slice. 
- /// - /// # Errors - /// This function returns `Err` if range is out of bounds, or if - /// `self` contains non-ascii values - fn slice_ascii_mut(&mut self, range: R) -> Result<&mut AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[Self::Inner], Output = [Self::Inner]>; - - /// Convert to a mutable ASCII slice. - /// - /// # Errors - /// This function returns `Err` if `self` contains non-ascii values - fn as_mut_ascii_str(&mut self) -> Result<&mut AsciiStr, AsAsciiStrError> { - self.slice_ascii_mut(..) - } - - /// Convert to a mutable ASCII slice without checking for non-ASCII characters. - /// - /// # Safety - /// Calling this function when `self` contains non-ascii characters is - /// undefined behavior. - unsafe fn as_mut_ascii_str_unchecked(&mut self) -> &mut AsciiStr; -} - -// These generic implementations mirror the generic implementations for AsRef in core. -impl<'a, T> AsAsciiStr for &'a T -where - T: AsAsciiStr + ?Sized, -{ - type Inner = ::Inner; - fn slice_ascii(&self, range: R) -> Result<&AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[Self::Inner], Output = [Self::Inner]>, - { - ::slice_ascii(*self, range) - } - - unsafe fn as_ascii_str_unchecked(&self) -> &AsciiStr { - // SAFETY: Caller guarantees `self` does not contain non-ascii characters - unsafe { ::as_ascii_str_unchecked(*self) } - } -} - -impl<'a, T> AsAsciiStr for &'a mut T -where - T: AsAsciiStr + ?Sized, -{ - type Inner = ::Inner; - fn slice_ascii(&self, range: R) -> Result<&AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[Self::Inner], Output = [Self::Inner]>, - { - ::slice_ascii(*self, range) - } - - unsafe fn as_ascii_str_unchecked(&self) -> &AsciiStr { - // SAFETY: Caller guarantees `self` does not contain non-ascii characters - unsafe { ::as_ascii_str_unchecked(*self) } - } -} - -impl<'a, T> AsMutAsciiStr for &'a mut T -where - T: AsMutAsciiStr + ?Sized, -{ - fn slice_ascii_mut(&mut self, range: R) -> Result<&mut AsciiStr, AsAsciiStrError> - where - R: 
SliceIndex<[Self::Inner], Output = [Self::Inner]>, - { - ::slice_ascii_mut(*self, range) - } - - unsafe fn as_mut_ascii_str_unchecked(&mut self) -> &mut AsciiStr { - // SAFETY: Caller guarantees `self` does not contain non-ascii characters - unsafe { ::as_mut_ascii_str_unchecked(*self) } - } -} - -impl AsAsciiStr for AsciiStr { - type Inner = AsciiChar; - - fn slice_ascii(&self, range: R) -> Result<&AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[AsciiChar], Output = [AsciiChar]>, - { - self.slice.slice_ascii(range) - } - - #[inline] - fn as_ascii_str(&self) -> Result<&AsciiStr, AsAsciiStrError> { - Ok(self) - } - - #[inline] - unsafe fn as_ascii_str_unchecked(&self) -> &AsciiStr { - self - } - - #[inline] - fn get_ascii(&self, index: usize) -> Option { - self.slice.get_ascii(index) - } -} -impl AsMutAsciiStr for AsciiStr { - fn slice_ascii_mut(&mut self, range: R) -> Result<&mut AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[AsciiChar], Output = [AsciiChar]>, - { - self.slice.slice_ascii_mut(range) - } - - #[inline] - unsafe fn as_mut_ascii_str_unchecked(&mut self) -> &mut AsciiStr { - self - } -} - -impl AsAsciiStr for [AsciiChar] { - type Inner = AsciiChar; - fn slice_ascii(&self, range: R) -> Result<&AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[AsciiChar], Output = [AsciiChar]>, - { - match self.get(range) { - Some(slice) => Ok(slice.into()), - None => Err(AsAsciiStrError(self.len())), - } - } - - #[inline] - fn as_ascii_str(&self) -> Result<&AsciiStr, AsAsciiStrError> { - Ok(self.into()) - } - - #[inline] - unsafe fn as_ascii_str_unchecked(&self) -> &AsciiStr { - <&AsciiStr>::from(self) - } - - #[inline] - fn get_ascii(&self, index: usize) -> Option { - self.get(index).copied() - } -} -impl AsMutAsciiStr for [AsciiChar] { - fn slice_ascii_mut(&mut self, range: R) -> Result<&mut AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[AsciiChar], Output = [AsciiChar]>, - { - let len = self.len(); - match self.get_mut(range) { - Some(slice) => 
Ok(slice.into()), - None => Err(AsAsciiStrError(len)), - } - } - #[inline] - unsafe fn as_mut_ascii_str_unchecked(&mut self) -> &mut AsciiStr { - <&mut AsciiStr>::from(self) - } -} - -impl AsAsciiStr for [u8] { - type Inner = u8; - - fn slice_ascii(&self, range: R) -> Result<&AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[u8], Output = [u8]>, - { - if let Some(slice) = self.get(range) { - slice.as_ascii_str().map_err(|AsAsciiStrError(not_ascii)| { - let offset = slice.as_ptr() as usize - self.as_ptr() as usize; - AsAsciiStrError(offset + not_ascii) - }) - } else { - Err(AsAsciiStrError(self.len())) - } - } - - fn as_ascii_str(&self) -> Result<&AsciiStr, AsAsciiStrError> { - // is_ascii is likely optimized - if self.is_ascii() { - // SAFETY: `is_ascii` guarantees all bytes are within ascii range. - unsafe { Ok(self.as_ascii_str_unchecked()) } - } else { - Err(AsAsciiStrError( - self.iter().take_while(|&b| b.is_ascii()).count(), - )) - } - } - - #[inline] - unsafe fn as_ascii_str_unchecked(&self) -> &AsciiStr { - // SAFETY: Caller guarantees `self` does not contain non-ascii characters - unsafe { &*(self as *const [u8] as *const AsciiStr) } - } -} -impl AsMutAsciiStr for [u8] { - fn slice_ascii_mut(&mut self, range: R) -> Result<&mut AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[u8], Output = [u8]>, - { - let (ptr, len) = (self.as_ptr(), self.len()); - if let Some(slice) = self.get_mut(range) { - let slice_ptr = slice.as_ptr(); - slice - .as_mut_ascii_str() - .map_err(|AsAsciiStrError(not_ascii)| { - let offset = slice_ptr as usize - ptr as usize; - AsAsciiStrError(offset + not_ascii) - }) - } else { - Err(AsAsciiStrError(len)) - } - } - - fn as_mut_ascii_str(&mut self) -> Result<&mut AsciiStr, AsAsciiStrError> { - // is_ascii() is likely optimized - if self.is_ascii() { - // SAFETY: `is_ascii` guarantees all bytes are within ascii range. 
- unsafe { Ok(self.as_mut_ascii_str_unchecked()) } - } else { - Err(AsAsciiStrError( - self.iter().take_while(|&b| b.is_ascii()).count(), - )) - } - } - - #[inline] - unsafe fn as_mut_ascii_str_unchecked(&mut self) -> &mut AsciiStr { - // SAFETY: Caller guarantees `self` does not contain non-ascii characters - unsafe { &mut *(self as *mut [u8] as *mut AsciiStr) } - } -} - -impl AsAsciiStr for str { - type Inner = u8; - fn slice_ascii(&self, range: R) -> Result<&AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[u8], Output = [u8]>, - { - self.as_bytes().slice_ascii(range) - } - fn as_ascii_str(&self) -> Result<&AsciiStr, AsAsciiStrError> { - self.as_bytes().as_ascii_str() - } - #[inline] - unsafe fn as_ascii_str_unchecked(&self) -> &AsciiStr { - // SAFETY: Caller guarantees `self` does not contain non-ascii characters - unsafe { self.as_bytes().as_ascii_str_unchecked() } - } -} -impl AsMutAsciiStr for str { - fn slice_ascii_mut(&mut self, range: R) -> Result<&mut AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[u8], Output = [u8]>, - { - // SAFETY: We don't modify the reference in this function, and the caller may - // only modify it to include valid ascii characters. - let bytes = unsafe { self.as_bytes_mut() }; - match bytes.get_mut(range) { - // Valid ascii slice - Some(slice) if slice.is_ascii() => { - // SAFETY: All bytes are ascii, so this cast is valid - let ptr = slice.as_mut_ptr().cast::(); - let len = slice.len(); - - // SAFETY: The pointer is valid for `len` elements, as it came - // from a slice. 
- unsafe { - let slice = core::slice::from_raw_parts_mut(ptr, len); - Ok(<&mut AsciiStr>::from(slice)) - } - } - Some(slice) => { - let not_ascii_len = slice.iter().copied().take_while(u8::is_ascii).count(); - let offset = slice.as_ptr() as usize - self.as_ptr() as usize; - - Err(AsAsciiStrError(offset + not_ascii_len)) - } - None => Err(AsAsciiStrError(self.len())), - } - } - fn as_mut_ascii_str(&mut self) -> Result<&mut AsciiStr, AsAsciiStrError> { - match self.bytes().position(|b| !b.is_ascii()) { - Some(index) => Err(AsAsciiStrError(index)), - // SAFETY: All bytes were iterated, and all were ascii - None => unsafe { Ok(self.as_mut_ascii_str_unchecked()) }, - } - } - #[inline] - unsafe fn as_mut_ascii_str_unchecked(&mut self) -> &mut AsciiStr { - // SAFETY: Caller guarantees `self` does not contain non-ascii characters - &mut *(self as *mut str as *mut AsciiStr) - } -} - -/// Note that the trailing null byte will be removed in the conversion. -#[cfg(feature = "std")] -impl AsAsciiStr for CStr { - type Inner = u8; - fn slice_ascii(&self, range: R) -> Result<&AsciiStr, AsAsciiStrError> - where - R: SliceIndex<[u8], Output = [u8]>, - { - self.to_bytes().slice_ascii(range) - } - #[inline] - fn as_ascii_str(&self) -> Result<&AsciiStr, AsAsciiStrError> { - self.to_bytes().as_ascii_str() - } - #[inline] - unsafe fn as_ascii_str_unchecked(&self) -> &AsciiStr { - // SAFETY: Caller guarantees `self` does not contain non-ascii characters - unsafe { self.to_bytes().as_ascii_str_unchecked() } - } -} - -#[cfg(test)] -mod tests { - use super::{AsAsciiStr, AsAsciiStrError, AsMutAsciiStr, AsciiStr}; - #[cfg(feature = "alloc")] - use alloc::string::{String, ToString}; - #[cfg(feature = "alloc")] - use alloc::vec::Vec; - use AsciiChar; - - /// Ensures that common types, `str`, `[u8]`, `AsciiStr` and their - /// references, shared and mutable implement `AsAsciiStr`. 
- #[test] - fn generic_as_ascii_str() { - // Generic function to ensure `C` implements `AsAsciiStr` - fn generic(c: &C) -> Result<&AsciiStr, AsAsciiStrError> { - c.as_ascii_str() - } - - let arr = [AsciiChar::A]; - let ascii_str = arr.as_ref().into(); - let mut mut_arr = arr; // Note: We need a second copy to prevent overlapping mutable borrows. - let mut_ascii_str = mut_arr.as_mut().into(); - let mut_arr_mut_ref: &mut [AsciiChar] = &mut [AsciiChar::A]; - let mut string_bytes = [b'A']; - let string_mut = unsafe { core::str::from_utf8_unchecked_mut(&mut string_bytes) }; // SAFETY: 'A' is a valid string. - let string_mut_bytes: &mut [u8] = &mut [b'A']; - - // Note: This is a trick because `rustfmt` doesn't support - // attributes on blocks yet. - #[rustfmt::skip] - let _ = [ - assert_eq!(generic::("A" ), Ok(ascii_str)), - assert_eq!(generic::<[u8] >(&b"A"[..] ), Ok(ascii_str)), - assert_eq!(generic::(ascii_str ), Ok(ascii_str)), - assert_eq!(generic::<[AsciiChar] >(&arr ), Ok(ascii_str)), - assert_eq!(generic::<&str >(&"A" ), Ok(ascii_str)), - assert_eq!(generic::<&[u8] >(&&b"A"[..] ), Ok(ascii_str)), - assert_eq!(generic::<&AsciiStr >(&ascii_str ), Ok(ascii_str)), - assert_eq!(generic::<&[AsciiChar] >(&&arr[..] 
), Ok(ascii_str)), - assert_eq!(generic::<&mut str >(&string_mut ), Ok(ascii_str)), - assert_eq!(generic::<&mut [u8] >(&string_mut_bytes), Ok(ascii_str)), - assert_eq!(generic::<&mut AsciiStr >(&mut_ascii_str ), Ok(ascii_str)), - assert_eq!(generic::<&mut [AsciiChar]>(&mut_arr_mut_ref ), Ok(ascii_str)), - ]; - } - - #[cfg(feature = "std")] - #[test] - fn cstring_as_ascii_str() { - use std::ffi::CString; - fn generic(c: &C) -> Result<&AsciiStr, AsAsciiStrError> { - c.as_ascii_str() - } - let arr = [AsciiChar::A]; - let ascii_str: &AsciiStr = arr.as_ref().into(); - let cstr = CString::new("A").unwrap(); - assert_eq!(generic(&*cstr), Ok(ascii_str)); - } - - #[test] - fn generic_as_mut_ascii_str() { - fn generic_mut( - c: &mut C, - ) -> Result<&mut AsciiStr, AsAsciiStrError> { - c.as_mut_ascii_str() - } - - let mut arr_mut = [AsciiChar::B]; - let mut ascii_str_mut: &mut AsciiStr = arr_mut.as_mut().into(); - // Need a second reference to prevent overlapping mutable borrows - let mut arr_mut_2 = [AsciiChar::B]; - let ascii_str_mut_2: &mut AsciiStr = arr_mut_2.as_mut().into(); - assert_eq!(generic_mut(&mut ascii_str_mut), Ok(&mut *ascii_str_mut_2)); - assert_eq!(generic_mut(ascii_str_mut), Ok(&mut *ascii_str_mut_2)); - } - - #[test] - fn as_ascii_str() { - macro_rules! 
err {{$i:expr} => {Err(AsAsciiStrError($i))}} - let s = "abčd"; - let b = s.as_bytes(); - assert_eq!(s.as_ascii_str(), err!(2)); - assert_eq!(b.as_ascii_str(), err!(2)); - let a: &AsciiStr = [AsciiChar::a, AsciiChar::b][..].as_ref(); - assert_eq!(s[..2].as_ascii_str(), Ok(a)); - assert_eq!(b[..2].as_ascii_str(), Ok(a)); - assert_eq!(s.slice_ascii(..2), Ok(a)); - assert_eq!(b.slice_ascii(..2), Ok(a)); - assert_eq!(s.slice_ascii(..=2), err!(2)); - assert_eq!(b.slice_ascii(..=2), err!(2)); - assert_eq!(s.get_ascii(4), Some(AsciiChar::d)); - assert_eq!(b.get_ascii(4), Some(AsciiChar::d)); - assert_eq!(s.get_ascii(3), None); - assert_eq!(b.get_ascii(3), None); - assert_eq!(s.get_ascii(b.len()), None); - assert_eq!(b.get_ascii(b.len()), None); - assert_eq!(a.get_ascii(0), Some(AsciiChar::a)); - assert_eq!(a.get_ascii(a.len()), None); - } - - #[test] - #[cfg(feature = "std")] - fn cstr_as_ascii_str() { - use std::ffi::CStr; - macro_rules! err {{$i:expr} => {Err(AsAsciiStrError($i))}} - let cstr = CStr::from_bytes_with_nul(b"a\xbbcde\xffg\0").unwrap(); - assert_eq!(cstr.as_ascii_str(), err!(1)); - assert_eq!(cstr.slice_ascii(2..), err!(5)); - assert_eq!(cstr.get_ascii(5), None); - assert_eq!(cstr.get_ascii(6), Some(AsciiChar::g)); - assert_eq!(cstr.get_ascii(7), None); - let ascii_slice = &[AsciiChar::X, AsciiChar::Y, AsciiChar::Z, AsciiChar::Null][..]; - let ascii_str: &AsciiStr = ascii_slice.as_ref(); - let cstr = CStr::from_bytes_with_nul(ascii_str.as_bytes()).unwrap(); - assert_eq!(cstr.slice_ascii(..2), Ok(&ascii_str[..2])); - assert_eq!(cstr.as_ascii_str(), Ok(&ascii_str[..3])); - } - - #[test] - #[cfg(feature = "alloc")] - fn as_mut_ascii_str() { - macro_rules! 
err {{$i:expr} => {Err(AsAsciiStrError($i))}} - let mut s: String = "abčd".to_string(); - let mut b: Vec = s.clone().into(); - let mut first = [AsciiChar::a, AsciiChar::b]; - let mut second = [AsciiChar::d]; - assert_eq!(s.as_mut_ascii_str(), err!(2)); - assert_eq!(b.as_mut_ascii_str(), err!(2)); - assert_eq!(s.slice_ascii_mut(..), err!(2)); - assert_eq!(b.slice_ascii_mut(..), err!(2)); - assert_eq!(s[..2].as_mut_ascii_str(), Ok((&mut first[..]).into())); - assert_eq!(b[..2].as_mut_ascii_str(), Ok((&mut first[..]).into())); - assert_eq!(s.slice_ascii_mut(0..2), Ok((&mut first[..]).into())); - assert_eq!(b.slice_ascii_mut(0..2), Ok((&mut first[..]).into())); - assert_eq!(s.slice_ascii_mut(4..), Ok((&mut second[..]).into())); - assert_eq!(b.slice_ascii_mut(4..), Ok((&mut second[..]).into())); - assert_eq!(s.slice_ascii_mut(4..=10), err!(5)); - assert_eq!(b.slice_ascii_mut(4..=10), err!(5)); - } - - #[test] - fn default() { - let default: &'static AsciiStr = Default::default(); - assert!(default.is_empty()); - } - - #[test] - #[allow(clippy::redundant_slicing)] - fn index() { - let mut arr = [AsciiChar::A, AsciiChar::B, AsciiChar::C, AsciiChar::D]; - { - let a: &AsciiStr = arr[..].into(); - assert_eq!(a[..].as_slice(), &a.as_slice()[..]); - assert_eq!(a[..4].as_slice(), &a.as_slice()[..4]); - assert_eq!(a[4..].as_slice(), &a.as_slice()[4..]); - assert_eq!(a[2..3].as_slice(), &a.as_slice()[2..3]); - assert_eq!(a[..=3].as_slice(), &a.as_slice()[..=3]); - assert_eq!(a[1..=1].as_slice(), &a.as_slice()[1..=1]); - } - let mut copy = arr; - let a_mut: &mut AsciiStr = { &mut arr[..] 
}.into(); - assert_eq!(a_mut[..].as_mut_slice(), &mut copy[..]); - assert_eq!(a_mut[..2].as_mut_slice(), &mut copy[..2]); - assert_eq!(a_mut[3..].as_mut_slice(), &mut copy[3..]); - assert_eq!(a_mut[4..4].as_mut_slice(), &mut copy[4..4]); - assert_eq!(a_mut[..=0].as_mut_slice(), &mut copy[..=0]); - assert_eq!(a_mut[0..=2].as_mut_slice(), &mut copy[0..=2]); - } - - #[test] - fn as_str() { - let b = b"( ;"; - let v = AsciiStr::from_ascii(b).unwrap(); - assert_eq!(v.as_str(), "( ;"); - assert_eq!(AsRef::::as_ref(v), "( ;"); - } - - #[test] - fn as_bytes() { - let b = b"( ;"; - let v = AsciiStr::from_ascii(b).unwrap(); - assert_eq!(v.as_bytes(), b"( ;"); - assert_eq!(AsRef::<[u8]>::as_ref(v), b"( ;"); - } - - #[test] - fn make_ascii_case() { - let mut bytes = ([b'a', b'@', b'A'], [b'A', b'@', b'a']); - let a = bytes.0.as_mut_ascii_str().unwrap(); - let b = bytes.1.as_mut_ascii_str().unwrap(); - assert!(a.eq_ignore_ascii_case(b)); - assert!(b.eq_ignore_ascii_case(a)); - a.make_ascii_lowercase(); - b.make_ascii_uppercase(); - assert_eq!(a, "a@a"); - assert_eq!(b, "A@A"); - } - - #[test] - #[cfg(feature = "alloc")] - fn to_ascii_case() { - let bytes = ([b'a', b'@', b'A'], [b'A', b'@', b'a']); - let a = bytes.0.as_ascii_str().unwrap(); - let b = bytes.1.as_ascii_str().unwrap(); - assert_eq!(a.to_ascii_lowercase().as_str(), "a@a"); - assert_eq!(a.to_ascii_uppercase().as_str(), "A@A"); - assert_eq!(b.to_ascii_lowercase().as_str(), "a@a"); - assert_eq!(b.to_ascii_uppercase().as_str(), "A@A"); - } - - #[test] - fn chars_iter() { - let chars = &[ - b'h', b'e', b'l', b'l', b'o', b' ', b'w', b'o', b'r', b'l', b'd', b'\0', - ]; - let ascii = AsciiStr::from_ascii(chars).unwrap(); - for (achar, byte) in ascii.chars().zip(chars.iter().copied()) { - assert_eq!(achar, byte); - } - } - - #[test] - fn chars_iter_mut() { - let chars = &mut [ - b'h', b'e', b'l', b'l', b'o', b' ', b'w', b'o', b'r', b'l', b'd', b'\0', - ]; - let ascii = chars.as_mut_ascii_str().unwrap(); - 
*ascii.chars_mut().next().unwrap() = AsciiChar::H; - assert_eq!(ascii[0], b'H'); - } - - #[test] - fn lines_iter() { - use core::iter::Iterator; - - let lines: [&str; 4] = ["foo", "bar", "", "baz"]; - let joined = "foo\r\nbar\n\nbaz\n"; - let ascii = AsciiStr::from_ascii(joined.as_bytes()).unwrap(); - for (asciiline, line) in ascii.lines().zip(&lines) { - assert_eq!(asciiline, *line); - } - assert_eq!(ascii.lines().count(), lines.len()); - - let lines: [&str; 4] = ["foo", "bar", "", "baz"]; - let joined = "foo\r\nbar\n\nbaz"; - let ascii = AsciiStr::from_ascii(joined.as_bytes()).unwrap(); - for (asciiline, line) in ascii.lines().zip(&lines) { - assert_eq!(asciiline, *line); - } - assert_eq!(ascii.lines().count(), lines.len()); - - let trailing_line_break = b"\n"; - let ascii = AsciiStr::from_ascii(&trailing_line_break).unwrap(); - let mut line_iter = ascii.lines(); - assert_eq!(line_iter.next(), Some(AsciiStr::from_ascii("").unwrap())); - assert_eq!(line_iter.next(), None); - - let empty_lines = b"\n\r\n\n\r\n"; - let mut iter_count = 0; - let ascii = AsciiStr::from_ascii(&empty_lines).unwrap(); - for line in ascii.lines() { - iter_count += 1; - assert!(line.is_empty()); - } - assert_eq!(4, iter_count); - } - - #[test] - fn lines_iter_rev() { - let joined = "foo\r\nbar\n\nbaz\n"; - let ascii = AsciiStr::from_ascii(joined.as_bytes()).unwrap(); - assert_eq!(ascii.lines().rev().count(), 4); - assert_eq!(ascii.lines().rev().count(), joined.lines().rev().count()); - for (asciiline, line) in ascii.lines().rev().zip(joined.lines().rev()) { - assert_eq!(asciiline, line); - } - let mut iter = ascii.lines(); - assert_eq!(iter.next(), Some("foo".as_ascii_str().unwrap())); - assert_eq!(iter.next_back(), Some("baz".as_ascii_str().unwrap())); - assert_eq!(iter.next_back(), Some("".as_ascii_str().unwrap())); - assert_eq!(iter.next(), Some("bar".as_ascii_str().unwrap())); - - let empty_lines = b"\n\r\n\n\r\n"; - let mut iter_count = 0; - let ascii = 
AsciiStr::from_ascii(&empty_lines).unwrap(); - for line in ascii.lines().rev() { - iter_count += 1; - assert!(line.is_empty()); - } - assert_eq!(4, iter_count); - } - - #[test] - fn lines_iter_empty() { - assert_eq!("".as_ascii_str().unwrap().lines().next(), None); - assert_eq!("".as_ascii_str().unwrap().lines().next_back(), None); - assert_eq!("".lines().next(), None); - } - - #[test] - fn split_str() { - fn split_equals_str(haystack: &str, needle: char) { - let mut strs = haystack.split(needle); - let mut asciis = haystack - .as_ascii_str() - .unwrap() - .split(AsciiChar::from_ascii(needle).unwrap()) - .map(AsciiStr::as_str); - loop { - assert_eq!(asciis.size_hint(), strs.size_hint()); - let (a, s) = (asciis.next(), strs.next()); - assert_eq!(a, s); - if a == None { - break; - } - } - // test fusedness if str's version is fused - if strs.next() == None { - assert_eq!(asciis.next(), None); - } - } - split_equals_str("", '='); - split_equals_str("1,2,3", ','); - split_equals_str("foo;bar;baz;", ';'); - split_equals_str("|||", '|'); - split_equals_str(" a b c ", ' '); - } - - #[test] - fn split_str_rev() { - let words = " foo bar baz "; - let ascii = words.as_ascii_str().unwrap(); - for (word, asciiword) in words - .split(' ') - .rev() - .zip(ascii.split(AsciiChar::Space).rev()) - { - assert_eq!(asciiword, word); - } - let mut iter = ascii.split(AsciiChar::Space); - assert_eq!(iter.next(), Some("".as_ascii_str().unwrap())); - assert_eq!(iter.next_back(), Some("".as_ascii_str().unwrap())); - assert_eq!(iter.next(), Some("foo".as_ascii_str().unwrap())); - assert_eq!(iter.next_back(), Some("baz".as_ascii_str().unwrap())); - assert_eq!(iter.next_back(), Some("bar".as_ascii_str().unwrap())); - assert_eq!(iter.next(), Some("".as_ascii_str().unwrap())); - assert_eq!(iter.next_back(), None); - } - - #[test] - fn split_str_empty() { - let empty = <&AsciiStr>::default(); - let mut iter = empty.split(AsciiChar::NAK); - assert_eq!(iter.next(), Some(empty)); - 
assert_eq!(iter.next(), None); - let mut iter = empty.split(AsciiChar::NAK); - assert_eq!(iter.next_back(), Some(empty)); - assert_eq!(iter.next_back(), None); - assert_eq!("".split('s').next(), Some("")); // str.split() also produces one element - } - - #[test] - #[cfg(feature = "std")] - fn fmt_ascii_str() { - let s = "abc".as_ascii_str().unwrap(); - assert_eq!(format!("{}", s), "abc".to_string()); - assert_eq!(format!("{:?}", s), "\"abc\"".to_string()); - } -} diff --git a/anneal/vendor/ascii/src/ascii_string.rs b/anneal/vendor/ascii/src/ascii_string.rs deleted file mode 100644 index 4cb6a17356..0000000000 --- a/anneal/vendor/ascii/src/ascii_string.rs +++ /dev/null @@ -1,1057 +0,0 @@ -use alloc::borrow::{Borrow, BorrowMut, Cow, ToOwned}; -use alloc::fmt; -use alloc::string::String; -use alloc::vec::Vec; -use alloc::boxed::Box; -use alloc::rc::Rc; -use alloc::sync::Arc; -#[cfg(feature = "std")] -use core::any::Any; -use core::iter::FromIterator; -use core::mem; -use core::ops::{Add, AddAssign, Deref, DerefMut, Index, IndexMut}; -use core::str::FromStr; -#[cfg(feature = "std")] -use std::error::Error; -#[cfg(feature = "std")] -use std::ffi::{CStr, CString}; - -use ascii_char::AsciiChar; -use ascii_str::{AsAsciiStr, AsAsciiStrError, AsciiStr}; - -/// A growable string stored as an ASCII encoded buffer. -#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] -#[repr(transparent)] -pub struct AsciiString { - vec: Vec, -} - -impl AsciiString { - /// Creates a new, empty ASCII string buffer without allocating. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let mut s = AsciiString::new(); - /// ``` - #[inline] - #[must_use] - pub const fn new() -> Self { - AsciiString { vec: Vec::new() } - } - - /// Creates a new ASCII string buffer with the given capacity. - /// The string will be able to hold exactly `capacity` bytes without reallocating. - /// If `capacity` is 0, the ASCII string will not allocate. 
- /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let mut s = AsciiString::with_capacity(10); - /// ``` - #[inline] - #[must_use] - pub fn with_capacity(capacity: usize) -> Self { - AsciiString { - vec: Vec::with_capacity(capacity), - } - } - - /// Creates a new `AsciiString` from a length, capacity and pointer. - /// - /// # Safety - /// - /// This is highly unsafe, due to the number of invariants that aren't checked: - /// - /// * The memory at `buf` need to have been previously allocated by the same allocator this - /// library uses, with an alignment of 1. - /// * `length` needs to be less than or equal to `capacity`. - /// * `capacity` needs to be the correct value. - /// * `buf` must have `length` valid ascii elements and contain a total of `capacity` total, - /// possibly, uninitialized, elements. - /// * Nothing else must be using the memory `buf` points to. - /// - /// Violating these may cause problems like corrupting the allocator's internal data structures. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// # use ascii::AsciiString; - /// use std::mem; - /// - /// unsafe { - /// let mut s = AsciiString::from_ascii("hello").unwrap(); - /// let ptr = s.as_mut_ptr(); - /// let len = s.len(); - /// let capacity = s.capacity(); - /// - /// mem::forget(s); - /// - /// let s = AsciiString::from_raw_parts(ptr, len, capacity); - /// - /// assert_eq!(AsciiString::from_ascii("hello").unwrap(), s); - /// } - /// ``` - #[inline] - #[must_use] - pub unsafe fn from_raw_parts(buf: *mut AsciiChar, length: usize, capacity: usize) -> Self { - AsciiString { - // SAFETY: Caller guarantees that `buf` was previously allocated by this library, - // that `buf` contains `length` valid ascii elements and has a total capacity - // of `capacity` elements, and that nothing else is using the momory. 
- vec: unsafe { Vec::from_raw_parts(buf, length, capacity) }, - } - } - - /// Converts a vector of bytes to an `AsciiString` without checking for non-ASCII characters. - /// - /// # Safety - /// This function is unsafe because it does not check that the bytes passed to it are valid - /// ASCII characters. If this constraint is violated, it may cause memory unsafety issues with - /// future of the `AsciiString`, as the rest of this library assumes that `AsciiString`s are - /// ASCII encoded. - #[inline] - #[must_use] - pub unsafe fn from_ascii_unchecked(bytes: B) -> Self - where - B: Into>, - { - let mut bytes = bytes.into(); - // SAFETY: The caller guarantees all bytes are valid ascii bytes. - let ptr = bytes.as_mut_ptr().cast::(); - let length = bytes.len(); - let capacity = bytes.capacity(); - mem::forget(bytes); - - // SAFETY: We guarantee all invariants, as we got the - // pointer, length and capacity from a `Vec`, - // and we also guarantee the pointer is valid per - // the `SAFETY` notice above. - let vec = Vec::from_raw_parts(ptr, length, capacity); - - Self { vec } - } - - /// Converts anything that can represent a byte buffer into an `AsciiString`. - /// - /// # Errors - /// Returns the byte buffer if not all of the bytes are ASCII characters. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let foo = AsciiString::from_ascii("foo".to_string()).unwrap(); - /// let err = AsciiString::from_ascii("Ŋ".to_string()).unwrap_err(); - /// assert_eq!(foo.as_str(), "foo"); - /// assert_eq!(err.into_source(), "Ŋ"); - /// ``` - pub fn from_ascii(bytes: B) -> Result> - where - B: Into> + AsRef<[u8]>, - { - match bytes.as_ref().as_ascii_str() { - // SAFETY: `as_ascii_str` guarantees all bytes are valid ascii bytes. - Ok(_) => Ok(unsafe { AsciiString::from_ascii_unchecked(bytes) }), - Err(e) => Err(FromAsciiError { - error: e, - owner: bytes, - }), - } - } - - /// Pushes the given ASCII string onto this ASCII string buffer. 
- /// - /// # Examples - /// ``` - /// # use ascii::{AsciiString, AsAsciiStr}; - /// use std::str::FromStr; - /// let mut s = AsciiString::from_str("foo").unwrap(); - /// s.push_str("bar".as_ascii_str().unwrap()); - /// assert_eq!(s, "foobar".as_ascii_str().unwrap()); - /// ``` - #[inline] - pub fn push_str(&mut self, string: &AsciiStr) { - self.vec.extend(string.chars()); - } - - /// Inserts the given ASCII string at the given place in this ASCII string buffer. - /// - /// # Panics - /// - /// Panics if `idx` is larger than the `AsciiString`'s length. - /// - /// # Examples - /// ``` - /// # use ascii::{AsciiString, AsAsciiStr}; - /// use std::str::FromStr; - /// let mut s = AsciiString::from_str("abc").unwrap(); - /// s.insert_str(1, "def".as_ascii_str().unwrap()); - /// assert_eq!(&*s, "adefbc"); - #[inline] - pub fn insert_str(&mut self, idx: usize, string: &AsciiStr) { - self.vec.reserve(string.len()); - self.vec.splice(idx..idx, string.into_iter().copied()); - } - - /// Returns the number of bytes that this ASCII string buffer can hold without reallocating. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let s = String::with_capacity(10); - /// assert!(s.capacity() >= 10); - /// ``` - #[inline] - #[must_use] - pub fn capacity(&self) -> usize { - self.vec.capacity() - } - - /// Reserves capacity for at least `additional` more bytes to be inserted in the given - /// `AsciiString`. The collection may reserve more space to avoid frequent reallocations. - /// - /// # Panics - /// Panics if the new capacity overflows `usize`. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let mut s = AsciiString::new(); - /// s.reserve(10); - /// assert!(s.capacity() >= 10); - /// ``` - #[inline] - pub fn reserve(&mut self, additional: usize) { - self.vec.reserve(additional); - } - - /// Reserves the minimum capacity for exactly `additional` more bytes to be inserted in the - /// given `AsciiString`. 
Does nothing if the capacity is already sufficient. - /// - /// Note that the allocator may give the collection more space than it requests. Therefore - /// capacity can not be relied upon to be precisely minimal. Prefer `reserve` if future - /// insertions are expected. - /// - /// # Panics - /// Panics if the new capacity overflows `usize`. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let mut s = AsciiString::new(); - /// s.reserve_exact(10); - /// assert!(s.capacity() >= 10); - /// ``` - #[inline] - - pub fn reserve_exact(&mut self, additional: usize) { - self.vec.reserve_exact(additional); - } - - /// Shrinks the capacity of this ASCII string buffer to match it's length. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// use std::str::FromStr; - /// let mut s = AsciiString::from_str("foo").unwrap(); - /// s.reserve(100); - /// assert!(s.capacity() >= 100); - /// s.shrink_to_fit(); - /// assert_eq!(s.capacity(), 3); - /// ``` - #[inline] - - pub fn shrink_to_fit(&mut self) { - self.vec.shrink_to_fit(); - } - - /// Adds the given ASCII character to the end of the ASCII string. - /// - /// # Examples - /// ``` - /// # use ascii::{ AsciiChar, AsciiString}; - /// let mut s = AsciiString::from_ascii("abc").unwrap(); - /// s.push(AsciiChar::from_ascii('1').unwrap()); - /// s.push(AsciiChar::from_ascii('2').unwrap()); - /// s.push(AsciiChar::from_ascii('3').unwrap()); - /// assert_eq!(s, "abc123"); - /// ``` - #[inline] - - pub fn push(&mut self, ch: AsciiChar) { - self.vec.push(ch); - } - - /// Shortens a ASCII string to the specified length. - /// - /// # Panics - /// Panics if `new_len` > current length. 
- /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let mut s = AsciiString::from_ascii("hello").unwrap(); - /// s.truncate(2); - /// assert_eq!(s, "he"); - /// ``` - #[inline] - - pub fn truncate(&mut self, new_len: usize) { - self.vec.truncate(new_len); - } - - /// Removes the last character from the ASCII string buffer and returns it. - /// Returns `None` if this string buffer is empty. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let mut s = AsciiString::from_ascii("foo").unwrap(); - /// assert_eq!(s.pop().map(|c| c.as_char()), Some('o')); - /// assert_eq!(s.pop().map(|c| c.as_char()), Some('o')); - /// assert_eq!(s.pop().map(|c| c.as_char()), Some('f')); - /// assert_eq!(s.pop(), None); - /// ``` - #[inline] - #[must_use] - pub fn pop(&mut self) -> Option { - self.vec.pop() - } - - /// Removes the ASCII character at position `idx` from the buffer and returns it. - /// - /// # Warning - /// This is an O(n) operation as it requires copying every element in the buffer. - /// - /// # Panics - /// If `idx` is out of bounds this function will panic. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let mut s = AsciiString::from_ascii("foo").unwrap(); - /// assert_eq!(s.remove(0).as_char(), 'f'); - /// assert_eq!(s.remove(1).as_char(), 'o'); - /// assert_eq!(s.remove(0).as_char(), 'o'); - /// ``` - #[inline] - #[must_use] - pub fn remove(&mut self, idx: usize) -> AsciiChar { - self.vec.remove(idx) - } - - /// Inserts an ASCII character into the buffer at position `idx`. - /// - /// # Warning - /// This is an O(n) operation as it requires copying every element in the buffer. - /// - /// # Panics - /// If `idx` is out of bounds this function will panic. 
- /// - /// # Examples - /// ``` - /// # use ascii::{AsciiString,AsciiChar}; - /// let mut s = AsciiString::from_ascii("foo").unwrap(); - /// s.insert(2, AsciiChar::b); - /// assert_eq!(s, "fobo"); - /// ``` - #[inline] - - pub fn insert(&mut self, idx: usize, ch: AsciiChar) { - self.vec.insert(idx, ch); - } - - /// Returns the number of bytes in this ASCII string. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let s = AsciiString::from_ascii("foo").unwrap(); - /// assert_eq!(s.len(), 3); - /// ``` - #[inline] - #[must_use] - pub fn len(&self) -> usize { - self.vec.len() - } - - /// Returns true if the ASCII string contains zero bytes. - /// - /// # Examples - /// ``` - /// # use ascii::{AsciiChar, AsciiString}; - /// let mut s = AsciiString::new(); - /// assert!(s.is_empty()); - /// s.push(AsciiChar::from_ascii('a').unwrap()); - /// assert!(!s.is_empty()); - /// ``` - #[inline] - #[must_use] - pub fn is_empty(&self) -> bool { - self.len() == 0 - } - - /// Truncates the ASCII string, setting length (but not capacity) to zero. - /// - /// # Examples - /// ``` - /// # use ascii::AsciiString; - /// let mut s = AsciiString::from_ascii("foo").unwrap(); - /// s.clear(); - /// assert!(s.is_empty()); - /// ``` - #[inline] - - pub fn clear(&mut self) { - self.vec.clear(); - } - - /// Converts this [`AsciiString`] into a [`Box`]`<`[`AsciiStr`]`>`. 
- /// - /// This will drop any excess capacity - #[inline] - #[must_use] - pub fn into_boxed_ascii_str(self) -> Box { - let slice = self.vec.into_boxed_slice(); - Box::from(slice) - } -} - -impl Deref for AsciiString { - type Target = AsciiStr; - - #[inline] - fn deref(&self) -> &AsciiStr { - self.vec.as_slice().as_ref() - } -} - -impl DerefMut for AsciiString { - #[inline] - fn deref_mut(&mut self) -> &mut AsciiStr { - self.vec.as_mut_slice().as_mut() - } -} - -impl PartialEq for AsciiString { - #[inline] - fn eq(&self, other: &str) -> bool { - **self == *other - } -} - -impl PartialEq for str { - #[inline] - fn eq(&self, other: &AsciiString) -> bool { - **other == *self - } -} - -macro_rules! impl_eq { - ($lhs:ty, $rhs:ty) => { - impl PartialEq<$rhs> for $lhs { - #[inline] - fn eq(&self, other: &$rhs) -> bool { - PartialEq::eq(&**self, &**other) - } - } - }; -} - -impl_eq! { AsciiString, String } -impl_eq! { String, AsciiString } -impl_eq! { &AsciiStr, String } -impl_eq! { String, &AsciiStr } -impl_eq! { &AsciiStr, AsciiString } -impl_eq! { AsciiString, &AsciiStr } -impl_eq! { &str, AsciiString } -impl_eq! { AsciiString, &str } - -impl Borrow for AsciiString { - #[inline] - fn borrow(&self) -> &AsciiStr { - &**self - } -} - -impl BorrowMut for AsciiString { - #[inline] - fn borrow_mut(&mut self) -> &mut AsciiStr { - &mut **self - } -} - -impl From> for AsciiString { - #[inline] - fn from(vec: Vec) -> Self { - AsciiString { vec } - } -} - -impl From for AsciiString { - #[inline] - fn from(ch: AsciiChar) -> Self { - AsciiString { vec: vec![ch] } - } -} - -impl From for Vec { - fn from(mut s: AsciiString) -> Vec { - // SAFETY: All ascii bytes are valid `u8`, as we are `repr(u8)`. - // Note: We forget `self` to avoid `self.vec` from being deallocated. 
- let ptr = s.vec.as_mut_ptr().cast::(); - let length = s.vec.len(); - let capacity = s.vec.capacity(); - mem::forget(s); - - // SAFETY: We guarantee all invariants due to getting `ptr`, `length` - // and `capacity` from a `Vec`. We also guarantee `ptr` is valid - // due to the `SAFETY` block above. - unsafe { Vec::from_raw_parts(ptr, length, capacity) } - } -} - -impl From for Vec { - fn from(s: AsciiString) -> Vec { - s.vec - } -} - -impl<'a> From<&'a AsciiStr> for AsciiString { - #[inline] - fn from(s: &'a AsciiStr) -> Self { - s.to_ascii_string() - } -} - -impl<'a> From<&'a [AsciiChar]> for AsciiString { - #[inline] - fn from(s: &'a [AsciiChar]) -> AsciiString { - s.iter().copied().collect() - } -} - -impl From for String { - #[inline] - fn from(s: AsciiString) -> String { - // SAFETY: All ascii bytes are `utf8`. - unsafe { String::from_utf8_unchecked(s.into()) } - } -} - -impl From> for AsciiString { - #[inline] - fn from(boxed: Box) -> Self { - boxed.into_ascii_string() - } -} - -impl From for Box { - #[inline] - fn from(string: AsciiString) -> Self { - string.into_boxed_ascii_str() - } -} - -impl From for Rc { - fn from(s: AsciiString) -> Rc { - let var: Rc<[AsciiChar]> = s.vec.into(); - // SAFETY: AsciiStr is repr(transparent) and thus has the same layout as [AsciiChar] - unsafe { Rc::from_raw(Rc::into_raw(var) as *const AsciiStr) } - } -} - -impl From for Arc { - fn from(s: AsciiString) -> Arc { - let var: Arc<[AsciiChar]> = s.vec.into(); - // SAFETY: AsciiStr is repr(transparent) and thus has the same layout as [AsciiChar] - unsafe { Arc::from_raw(Arc::into_raw(var) as *const AsciiStr) } - } -} - -impl<'a> From> for AsciiString { - fn from(cow: Cow<'a, AsciiStr>) -> AsciiString { - cow.into_owned() - } -} - -impl From for Cow<'static, AsciiStr> { - fn from(string: AsciiString) -> Cow<'static, AsciiStr> { - Cow::Owned(string) - } -} - -impl<'a> From<&'a AsciiStr> for Cow<'a, AsciiStr> { - fn from(s: &'a AsciiStr) -> Cow<'a, AsciiStr> { - Cow::Borrowed(s) - 
} -} - -impl AsRef for AsciiString { - #[inline] - fn as_ref(&self) -> &AsciiStr { - &**self - } -} - -impl AsRef<[AsciiChar]> for AsciiString { - #[inline] - fn as_ref(&self) -> &[AsciiChar] { - &self.vec - } -} - -impl AsRef<[u8]> for AsciiString { - #[inline] - fn as_ref(&self) -> &[u8] { - self.as_bytes() - } -} - -impl AsRef for AsciiString { - #[inline] - fn as_ref(&self) -> &str { - self.as_str() - } -} - -impl AsMut for AsciiString { - #[inline] - fn as_mut(&mut self) -> &mut AsciiStr { - &mut *self - } -} - -impl AsMut<[AsciiChar]> for AsciiString { - #[inline] - fn as_mut(&mut self) -> &mut [AsciiChar] { - &mut self.vec - } -} - -impl FromStr for AsciiString { - type Err = AsAsciiStrError; - - fn from_str(s: &str) -> Result { - s.as_ascii_str().map(AsciiStr::to_ascii_string) - } -} - -impl fmt::Display for AsciiString { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&**self, f) - } -} - -impl fmt::Debug for AsciiString { - #[inline] - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(&**self, f) - } -} - -/// Please note that the `std::fmt::Result` returned by these methods does not support -/// transmission of an error other than that an error occurred. 
-impl fmt::Write for AsciiString { - fn write_str(&mut self, s: &str) -> fmt::Result { - if let Ok(astr) = AsciiStr::from_ascii(s) { - self.push_str(astr); - Ok(()) - } else { - Err(fmt::Error) - } - } - - fn write_char(&mut self, c: char) -> fmt::Result { - if let Ok(achar) = AsciiChar::from_ascii(c) { - self.push(achar); - Ok(()) - } else { - Err(fmt::Error) - } - } -} - -impl> FromIterator for AsciiString { - fn from_iter>(iter: I) -> AsciiString { - let mut buf = AsciiString::new(); - buf.extend(iter); - buf - } -} - -impl> Extend for AsciiString { - fn extend>(&mut self, iterable: I) { - let iterator = iterable.into_iter(); - let (lower_bound, _) = iterator.size_hint(); - self.reserve(lower_bound); - for item in iterator { - self.push_str(item.as_ref()); - } - } -} - -impl<'a> Add<&'a AsciiStr> for AsciiString { - type Output = AsciiString; - - #[inline] - fn add(mut self, other: &AsciiStr) -> AsciiString { - self.push_str(other); - self - } -} - -impl<'a> AddAssign<&'a AsciiStr> for AsciiString { - #[inline] - fn add_assign(&mut self, other: &AsciiStr) { - self.push_str(other); - } -} - -#[allow(clippy::indexing_slicing)] // In `Index`, if it's out of bounds, panic is the default -impl Index for AsciiString -where - AsciiStr: Index, -{ - type Output = >::Output; - - #[inline] - fn index(&self, index: T) -> &>::Output { - &(**self)[index] - } -} - -#[allow(clippy::indexing_slicing)] // In `IndexMut`, if it's out of bounds, panic is the default -impl IndexMut for AsciiString -where - AsciiStr: IndexMut, -{ - #[inline] - fn index_mut(&mut self, index: T) -> &mut >::Output { - &mut (**self)[index] - } -} - -/// A possible error value when converting an `AsciiString` from a byte vector or string. -/// It wraps an `AsAsciiStrError` which you can get through the `ascii_error()` method. -/// -/// This is the error type for `AsciiString::from_ascii()` and -/// `IntoAsciiString::into_ascii_string()`. 
They will never clone or touch the content of the -/// original type; It can be extracted by the `into_source` method. -/// -/// #Examples -/// ``` -/// # use ascii::IntoAsciiString; -/// let err = "bø!".to_string().into_ascii_string().unwrap_err(); -/// assert_eq!(err.ascii_error().valid_up_to(), 1); -/// assert_eq!(err.into_source(), "bø!".to_string()); -/// ``` -#[derive(Clone, Copy, PartialEq, Eq)] -pub struct FromAsciiError { - error: AsAsciiStrError, - owner: O, -} -impl FromAsciiError { - /// Get the position of the first non-ASCII byte or character. - #[inline] - #[must_use] - pub fn ascii_error(&self) -> AsAsciiStrError { - self.error - } - /// Get back the original, unmodified type. - #[inline] - #[must_use] - pub fn into_source(self) -> O { - self.owner - } -} -impl fmt::Debug for FromAsciiError { - #[inline] - fn fmt(&self, fmtr: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(&self.error, fmtr) - } -} -impl fmt::Display for FromAsciiError { - #[inline] - fn fmt(&self, fmtr: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.error, fmtr) - } -} -#[cfg(feature = "std")] -impl Error for FromAsciiError { - #[inline] - #[allow(deprecated)] // TODO: Remove deprecation once the earliest version we support deprecates this method. - fn description(&self) -> &str { - self.error.description() - } - /// Always returns an `AsAsciiStrError` - fn cause(&self) -> Option<&dyn Error> { - Some(&self.error as &dyn Error) - } -} - -/// Convert vectors into `AsciiString`. -pub trait IntoAsciiString: Sized { - /// Convert to `AsciiString` without checking for non-ASCII characters. - /// - /// # Safety - /// If `self` contains non-ascii characters, calling this function is - /// undefined behavior. - unsafe fn into_ascii_string_unchecked(self) -> AsciiString; - - /// Convert to `AsciiString`. 
- /// - /// # Errors - /// If `self` contains non-ascii characters, this will return `Err` - fn into_ascii_string(self) -> Result>; -} - -impl IntoAsciiString for Vec { - #[inline] - unsafe fn into_ascii_string_unchecked(self) -> AsciiString { - AsciiString::from(self) - } - #[inline] - fn into_ascii_string(self) -> Result> { - Ok(AsciiString::from(self)) - } -} - -impl<'a> IntoAsciiString for &'a [AsciiChar] { - #[inline] - unsafe fn into_ascii_string_unchecked(self) -> AsciiString { - AsciiString::from(self) - } - #[inline] - fn into_ascii_string(self) -> Result> { - Ok(AsciiString::from(self)) - } -} - -impl<'a> IntoAsciiString for &'a AsciiStr { - #[inline] - unsafe fn into_ascii_string_unchecked(self) -> AsciiString { - AsciiString::from(self) - } - #[inline] - fn into_ascii_string(self) -> Result> { - Ok(AsciiString::from(self)) - } -} - -macro_rules! impl_into_ascii_string { - ('a, $wider:ty) => { - impl<'a> IntoAsciiString for $wider { - #[inline] - unsafe fn into_ascii_string_unchecked(self) -> AsciiString { - // SAFETY: Caller guarantees `self` only has valid ascii bytes - unsafe { AsciiString::from_ascii_unchecked(self) } - } - - #[inline] - fn into_ascii_string(self) -> Result> { - AsciiString::from_ascii(self) - } - } - }; - - ($wider:ty) => { - impl IntoAsciiString for $wider { - #[inline] - unsafe fn into_ascii_string_unchecked(self) -> AsciiString { - // SAFETY: Caller guarantees `self` only has valid ascii bytes - unsafe { AsciiString::from_ascii_unchecked(self) } - } - - #[inline] - fn into_ascii_string(self) -> Result> { - AsciiString::from_ascii(self) - } - } - }; -} - -impl_into_ascii_string! {AsciiString} -impl_into_ascii_string! {Vec} -impl_into_ascii_string! {'a, &'a [u8]} -impl_into_ascii_string! {String} -impl_into_ascii_string! {'a, &'a str} - -/// # Notes -/// The trailing null byte `CString` has will be removed during this conversion. 
-#[cfg(feature = "std")] -impl IntoAsciiString for CString { - #[inline] - unsafe fn into_ascii_string_unchecked(self) -> AsciiString { - // SAFETY: Caller guarantees `self` only has valid ascii bytes - unsafe { AsciiString::from_ascii_unchecked(self.into_bytes()) } - } - - fn into_ascii_string(self) -> Result> { - AsciiString::from_ascii(self.into_bytes_with_nul()) - .map_err(|FromAsciiError { error, owner }| { - FromAsciiError { - // SAFETY: We don't discard the NULL byte from the original - // string, so we ensure that it's null terminated - owner: unsafe { CString::from_vec_unchecked(owner) }, - error, - } - }) - .map(|mut s| { - let nul = s.pop(); - debug_assert_eq!(nul, Some(AsciiChar::Null)); - s - }) - } -} - -/// Note that the trailing null byte will be removed in the conversion. -#[cfg(feature = "std")] -impl<'a> IntoAsciiString for &'a CStr { - #[inline] - unsafe fn into_ascii_string_unchecked(self) -> AsciiString { - // SAFETY: Caller guarantees `self` only has valid ascii bytes - unsafe { AsciiString::from_ascii_unchecked(self.to_bytes()) } - } - - fn into_ascii_string(self) -> Result> { - AsciiString::from_ascii(self.to_bytes_with_nul()) - .map_err(|FromAsciiError { error, owner }| FromAsciiError { - // SAFETY: We don't discard the NULL byte from the original - // string, so we ensure that it's null terminated - owner: unsafe { CStr::from_ptr(owner.as_ptr().cast()) }, - error, - }) - .map(|mut s| { - let nul = s.pop(); - debug_assert_eq!(nul, Some(AsciiChar::Null)); - s - }) - } -} - -impl<'a, B> IntoAsciiString for Cow<'a, B> -where - B: 'a + ToOwned + ?Sized, - &'a B: IntoAsciiString, - ::Owned: IntoAsciiString, -{ - #[inline] - unsafe fn into_ascii_string_unchecked(self) -> AsciiString { - // SAFETY: Caller guarantees `self` only has valid ascii bytes - unsafe { IntoAsciiString::into_ascii_string_unchecked(self.into_owned()) } - } - - fn into_ascii_string(self) -> Result> { - match self { - Cow::Owned(b) => { - 
IntoAsciiString::into_ascii_string(b).map_err(|FromAsciiError { error, owner }| { - FromAsciiError { - owner: Cow::Owned(owner), - error, - } - }) - } - Cow::Borrowed(b) => { - IntoAsciiString::into_ascii_string(b).map_err(|FromAsciiError { error, owner }| { - FromAsciiError { - owner: Cow::Borrowed(owner), - error, - } - }) - } - } - } -} - -#[cfg(test)] -mod tests { - use super::{AsciiString, IntoAsciiString}; - use alloc::str::FromStr; - use alloc::string::{String, ToString}; - use alloc::vec::Vec; - use alloc::boxed::Box; - #[cfg(feature = "std")] - use std::ffi::CString; - use {AsciiChar, AsciiStr}; - - #[test] - fn into_string() { - let v = AsciiString::from_ascii(&[40_u8, 32, 59][..]).unwrap(); - assert_eq!(Into::::into(v), "( ;".to_string()); - } - - #[test] - fn into_bytes() { - let v = AsciiString::from_ascii(&[40_u8, 32, 59][..]).unwrap(); - assert_eq!(Into::>::into(v), vec![40_u8, 32, 59]); - } - - #[test] - fn from_ascii_vec() { - let vec = vec![ - AsciiChar::from_ascii('A').unwrap(), - AsciiChar::from_ascii('B').unwrap(), - ]; - assert_eq!(AsciiString::from(vec), AsciiString::from_str("AB").unwrap()); - } - - #[test] - #[cfg(feature = "std")] - fn from_cstring() { - let cstring = CString::new("baz").unwrap(); - let ascii_str = cstring.clone().into_ascii_string().unwrap(); - let expected_chars = &[AsciiChar::b, AsciiChar::a, AsciiChar::z]; - assert_eq!(ascii_str.len(), 3); - assert_eq!(ascii_str.as_slice(), expected_chars); - - // SAFETY: "baz" only contains valid ascii characters. 
- let ascii_str_unchecked = unsafe { cstring.into_ascii_string_unchecked() }; - assert_eq!(ascii_str_unchecked.len(), 3); - assert_eq!(ascii_str_unchecked.as_slice(), expected_chars); - - let sparkle_heart_bytes = vec![240_u8, 159, 146, 150]; - let cstring = CString::new(sparkle_heart_bytes).unwrap(); - let cstr = &*cstring; - let ascii_err = cstr.into_ascii_string().unwrap_err(); - assert_eq!(ascii_err.into_source(), &*cstring); - } - - #[test] - #[cfg(feature = "std")] - fn fmt_ascii_string() { - let s = "abc".to_string().into_ascii_string().unwrap(); - assert_eq!(format!("{}", s), "abc".to_string()); - assert_eq!(format!("{:?}", s), "\"abc\"".to_string()); - } - - #[test] - fn write_fmt() { - use alloc::{fmt, str}; - - let mut s0 = AsciiString::new(); - fmt::write(&mut s0, format_args!("Hello World")).unwrap(); - assert_eq!(s0, "Hello World"); - - let mut s1 = AsciiString::new(); - fmt::write(&mut s1, format_args!("{}", 9)).unwrap(); - assert_eq!(s1, "9"); - - let mut s2 = AsciiString::new(); - let sparkle_heart_bytes = [240, 159, 146, 150]; - let sparkle_heart = str::from_utf8(&sparkle_heart_bytes).unwrap(); - assert!(fmt::write(&mut s2, format_args!("{}", sparkle_heart)).is_err()); - } - - #[test] - fn to_and_from_box() { - let string = "abc".into_ascii_string().unwrap(); - let converted: Box = Box::from(string.clone()); - let converted: AsciiString = converted.into(); - assert_eq!(string, converted); - } -} diff --git a/anneal/vendor/ascii/src/free_functions.rs b/anneal/vendor/ascii/src/free_functions.rs deleted file mode 100644 index 55d97321e4..0000000000 --- a/anneal/vendor/ascii/src/free_functions.rs +++ /dev/null @@ -1,59 +0,0 @@ -use ascii_char::{AsciiChar, ToAsciiChar}; - -/// Terminals use [caret notation](https://en.wikipedia.org/wiki/Caret_notation) -/// to display some typed control codes, such as ^D for EOT and ^Z for SUB. -/// -/// This function returns the caret notation letter for control codes, -/// or `None` for printable characters. 
-/// -/// # Examples -/// ``` -/// # use ascii::{AsciiChar, caret_encode}; -/// assert_eq!(caret_encode(b'\0'), Some(AsciiChar::At)); -/// assert_eq!(caret_encode(AsciiChar::DEL), Some(AsciiChar::Question)); -/// assert_eq!(caret_encode(b'E'), None); -/// assert_eq!(caret_encode(b'\n'), Some(AsciiChar::J)); -/// ``` -pub fn caret_encode>(c: C) -> Option { - // The formula is explained in the Wikipedia article. - let c = c.into() ^ 0b0100_0000; - if (b'?'..=b'_').contains(&c) { - // SAFETY: All bytes between '?' (0x3F) and '_' (0x5f) are valid ascii characters. - Some(unsafe { c.to_ascii_char_unchecked() }) - } else { - None - } -} - -/// Returns the control code represented by a [caret notation](https://en.wikipedia.org/wiki/Caret_notation) -/// letter, or `None` if the letter is not used in caret notation. -/// -/// This function is the inverse of `caret_encode()`. -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// # use ascii::{AsciiChar, caret_decode}; -/// assert_eq!(caret_decode(b'?'), Some(AsciiChar::DEL)); -/// assert_eq!(caret_decode(AsciiChar::D), Some(AsciiChar::EOT)); -/// assert_eq!(caret_decode(b'\0'), None); -/// ``` -/// -/// Symmetry: -/// -/// ``` -/// # use ascii::{AsciiChar, caret_encode, caret_decode}; -/// assert_eq!(caret_encode(AsciiChar::US).and_then(caret_decode), Some(AsciiChar::US)); -/// assert_eq!(caret_decode(b'@').and_then(caret_encode), Some(AsciiChar::At)); -/// ``` -pub fn caret_decode>(c: C) -> Option { - // The formula is explained in the Wikipedia article. - match c.into() { - // SAFETY: All bytes between '?' 
(0x3F) and '_' (0x5f) after `xoring` with `0b0100_0000` are - // valid bytes, as they represent characters between '␀' (0x0) and '␠' (0x1f) + '␡' (0x7f) - b'?'..=b'_' => Some(unsafe { AsciiChar::from_ascii_unchecked(c.into() ^ 0b0100_0000) }), - _ => None, - } -} diff --git a/anneal/vendor/ascii/src/lib.rs b/anneal/vendor/ascii/src/lib.rs deleted file mode 100644 index 5eacc162fb..0000000000 --- a/anneal/vendor/ascii/src/lib.rs +++ /dev/null @@ -1,82 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! A library that provides ASCII-only string and character types, equivalent to the `char`, `str` -//! and `String` types in the standard library. -//! -//! Please refer to the readme file to learn about the different feature modes of this crate. -//! -//! # Minimum supported Rust version -//! -//! The minimum Rust version for 1.1.\* releases is 1.41.1. -//! Later 1.y.0 releases might require newer Rust versions, but the three most -//! recent stable releases at the time of publishing will always be supported. -//! For example this means that if the current stable Rust version is 1.70 when -//! ascii 1.2.0 is released, then ascii 1.2.\* will not require a newer -//! Rust version than 1.68. -//! -//! # History -//! -//! This package included the Ascii types that were removed from the Rust standard library by the -//! 2014-12 [reform of the `std::ascii` module](https://github.com/rust-lang/rfcs/pull/486). The -//! API changed significantly since then. 
- -#![cfg_attr(not(feature = "std"), no_std)] -// Clippy lints -#![warn( - clippy::pedantic, - clippy::decimal_literal_representation, - clippy::get_unwrap, - clippy::indexing_slicing -)] -// Naming conventions sometimes go against this lint -#![allow(clippy::module_name_repetitions)] -// We need to get literal non-asciis for tests -#![allow(clippy::non_ascii_literal)] -// Sometimes it looks better to invert the order, such as when the `else` block is small -#![allow(clippy::if_not_else)] -// Shadowing is common and doesn't affect understanding -// TODO: Consider removing `shadow_unrelated`, as it can show some actual logic errors -#![allow(clippy::shadow_unrelated, clippy::shadow_reuse, clippy::shadow_same)] -// A `if let` / `else` sometimes looks better than using iterator adaptors -#![allow(clippy::option_if_let_else)] -// In tests, we're fine with indexing, since a panic is a failure. -#![cfg_attr(test, allow(clippy::indexing_slicing))] -// for compatibility with methods on char and u8 -#![allow(clippy::trivially_copy_pass_by_ref)] -// In preparation for feature `unsafe_block_in_unsafe_fn` (https://github.com/rust-lang/rust/issues/71668) -#![allow(unused_unsafe)] - -#[cfg(feature = "alloc")] -#[macro_use] -extern crate alloc; -#[cfg(feature = "std")] -extern crate core; - -#[cfg(feature = "serde")] -extern crate serde; - -#[cfg(all(test, feature = "serde_test"))] -extern crate serde_test; - -mod ascii_char; -mod ascii_str; -#[cfg(feature = "alloc")] -mod ascii_string; -mod free_functions; -#[cfg(feature = "serde")] -mod serialization; - -pub use ascii_char::{AsciiChar, ToAsciiChar, ToAsciiCharError}; -pub use ascii_str::{AsAsciiStr, AsAsciiStrError, AsMutAsciiStr, AsciiStr}; -pub use ascii_str::{Chars, CharsMut, CharsRef}; -#[cfg(feature = "alloc")] -pub use ascii_string::{AsciiString, FromAsciiError, IntoAsciiString}; -pub use free_functions::{caret_decode, caret_encode}; diff --git a/anneal/vendor/ascii/src/serialization/ascii_char.rs 
b/anneal/vendor/ascii/src/serialization/ascii_char.rs deleted file mode 100644 index 9fd843a149..0000000000 --- a/anneal/vendor/ascii/src/serialization/ascii_char.rs +++ /dev/null @@ -1,89 +0,0 @@ -use std::fmt; - -use serde::de::{Error, Unexpected, Visitor}; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; - -use ascii_char::AsciiChar; - -impl Serialize for AsciiChar { - #[inline] - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_char(self.as_char()) - } -} - -struct AsciiCharVisitor; - -impl<'de> Visitor<'de> for AsciiCharVisitor { - type Value = AsciiChar; - - fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("an ascii character") - } - - #[inline] - fn visit_char(self, v: char) -> Result { - AsciiChar::from_ascii(v).map_err(|_| Error::invalid_value(Unexpected::Char(v), &self)) - } - - #[inline] - fn visit_str(self, v: &str) -> Result { - if v.len() == 1 { - let c = v.chars().next().unwrap(); - self.visit_char(c) - } else { - Err(Error::invalid_value(Unexpected::Str(v), &self)) - } - } -} - -impl<'de> Deserialize<'de> for AsciiChar { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_char(AsciiCharVisitor) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[cfg(feature = "serde_test")] - const ASCII_CHAR: char = 'e'; - #[cfg(feature = "serde_test")] - const ASCII_STR: &str = "e"; - #[cfg(feature = "serde_test")] - const UNICODE_CHAR: char = 'é'; - - #[test] - fn basic() { - fn assert_serialize() {} - fn assert_deserialize<'de, T: Deserialize<'de>>() {} - assert_serialize::(); - assert_deserialize::(); - } - - #[test] - #[cfg(feature = "serde_test")] - fn serialize() { - use serde_test::{assert_tokens, Token}; - let ascii_char = AsciiChar::from_ascii(ASCII_CHAR).unwrap(); - assert_tokens(&ascii_char, &[Token::Char(ASCII_CHAR)]); - } - - #[test] - #[cfg(feature = "serde_test")] - fn deserialize() { - use serde_test::{assert_de_tokens, 
assert_de_tokens_error, Token}; - let ascii_char = AsciiChar::from_ascii(ASCII_CHAR).unwrap(); - assert_de_tokens(&ascii_char, &[Token::String(ASCII_STR)]); - assert_de_tokens(&ascii_char, &[Token::Str(ASCII_STR)]); - assert_de_tokens(&ascii_char, &[Token::BorrowedStr(ASCII_STR)]); - assert_de_tokens_error::( - &[Token::Char(UNICODE_CHAR)], - "invalid value: character `é`, expected an ascii character", - ); - } -} diff --git a/anneal/vendor/ascii/src/serialization/ascii_str.rs b/anneal/vendor/ascii/src/serialization/ascii_str.rs deleted file mode 100644 index 9aa9353afb..0000000000 --- a/anneal/vendor/ascii/src/serialization/ascii_str.rs +++ /dev/null @@ -1,79 +0,0 @@ -use std::fmt; - -use serde::de::{Error, Unexpected, Visitor}; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; - -use ascii_str::AsciiStr; - -impl Serialize for AsciiStr { - #[inline] - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(self.as_str()) - } -} - -struct AsciiStrVisitor; - -impl<'a> Visitor<'a> for AsciiStrVisitor { - type Value = &'a AsciiStr; - - fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("a borrowed ascii string") - } - - fn visit_borrowed_str(self, v: &'a str) -> Result { - AsciiStr::from_ascii(v.as_bytes()) - .map_err(|_| Error::invalid_value(Unexpected::Str(v), &self)) - } - - fn visit_borrowed_bytes(self, v: &'a [u8]) -> Result { - AsciiStr::from_ascii(v).map_err(|_| Error::invalid_value(Unexpected::Bytes(v), &self)) - } -} - -impl<'de: 'a, 'a> Deserialize<'de> for &'a AsciiStr { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_str(AsciiStrVisitor) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[cfg(feature = "serde_test")] - const ASCII: &str = "Francais"; - #[cfg(feature = "serde_test")] - const UNICODE: &str = "Français"; - - #[test] - fn basic() { - fn assert_serialize() {} - fn assert_deserialize<'de, T: Deserialize<'de>>() {} - 
assert_serialize::<&AsciiStr>(); - assert_deserialize::<&AsciiStr>(); - } - - #[test] - #[cfg(feature = "serde_test")] - fn serialize() { - use serde_test::{assert_tokens, Token}; - let ascii_str = AsciiStr::from_ascii(ASCII).unwrap(); - assert_tokens(&ascii_str, &[Token::BorrowedStr(ASCII)]); - } - - #[test] - #[cfg(feature = "serde_test")] - fn deserialize() { - use serde_test::{assert_de_tokens, assert_de_tokens_error, Token}; - let ascii_str = AsciiStr::from_ascii(ASCII).unwrap(); - assert_de_tokens(&ascii_str, &[Token::BorrowedBytes(ASCII.as_bytes())]); - assert_de_tokens_error::<&AsciiStr>( - &[Token::BorrowedStr(UNICODE)], - "invalid value: string \"Français\", expected a borrowed ascii string", - ); - } -} diff --git a/anneal/vendor/ascii/src/serialization/ascii_string.rs b/anneal/vendor/ascii/src/serialization/ascii_string.rs deleted file mode 100644 index 1547655f1e..0000000000 --- a/anneal/vendor/ascii/src/serialization/ascii_string.rs +++ /dev/null @@ -1,149 +0,0 @@ -use std::fmt; - -use serde::de::{Error, Unexpected, Visitor}; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; - -use ascii_str::AsciiStr; -use ascii_string::AsciiString; - -impl Serialize for AsciiString { - #[inline] - fn serialize(&self, serializer: S) -> Result { - serializer.serialize_str(self.as_str()) - } -} - -struct AsciiStringVisitor; - -impl<'de> Visitor<'de> for AsciiStringVisitor { - type Value = AsciiString; - - fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str("an ascii string") - } - - fn visit_str(self, v: &str) -> Result { - AsciiString::from_ascii(v).map_err(|_| Error::invalid_value(Unexpected::Str(v), &self)) - } - - fn visit_string(self, v: String) -> Result { - AsciiString::from_ascii(v.as_bytes()) - .map_err(|_| Error::invalid_value(Unexpected::Str(&v), &self)) - } - - fn visit_bytes(self, v: &[u8]) -> Result { - AsciiString::from_ascii(v).map_err(|_| Error::invalid_value(Unexpected::Bytes(v), &self)) - } - - fn 
visit_byte_buf(self, v: Vec) -> Result { - AsciiString::from_ascii(v.as_slice()) - .map_err(|_| Error::invalid_value(Unexpected::Bytes(&v), &self)) - } -} - -struct AsciiStringInPlaceVisitor<'a>(&'a mut AsciiString); - -impl<'a, 'de> Visitor<'de> for AsciiStringInPlaceVisitor<'a> { - type Value = (); - - fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { - formatter.write_str("an ascii string") - } - - fn visit_str(self, v: &str) -> Result { - let ascii_str = match AsciiStr::from_ascii(v.as_bytes()) { - Ok(ascii_str) => ascii_str, - Err(_) => return Err(Error::invalid_value(Unexpected::Str(v), &self)), - }; - self.0.clear(); - self.0.push_str(ascii_str); - Ok(()) - } - - fn visit_string(self, v: String) -> Result { - let ascii_string = match AsciiString::from_ascii(v.as_bytes()) { - Ok(ascii_string) => ascii_string, - Err(_) => return Err(Error::invalid_value(Unexpected::Str(&v), &self)), - }; - *self.0 = ascii_string; - Ok(()) - } - - fn visit_bytes(self, v: &[u8]) -> Result { - let ascii_str = match AsciiStr::from_ascii(v) { - Ok(ascii_str) => ascii_str, - Err(_) => return Err(Error::invalid_value(Unexpected::Bytes(v), &self)), - }; - self.0.clear(); - self.0.push_str(ascii_str); - Ok(()) - } - - fn visit_byte_buf(self, v: Vec) -> Result { - let ascii_string = match AsciiString::from_ascii(v.as_slice()) { - Ok(ascii_string) => ascii_string, - Err(_) => return Err(Error::invalid_value(Unexpected::Bytes(&v), &self)), - }; - *self.0 = ascii_string; - Ok(()) - } -} - -impl<'de> Deserialize<'de> for AsciiString { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - deserializer.deserialize_string(AsciiStringVisitor) - } - - fn deserialize_in_place(deserializer: D, place: &mut Self) -> Result<(), D::Error> - where - D: Deserializer<'de>, - { - deserializer.deserialize_string(AsciiStringInPlaceVisitor(place)) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[cfg(feature = "serde_test")] - const ASCII: &str = 
"Francais"; - #[cfg(feature = "serde_test")] - const UNICODE: &str = "Français"; - - #[test] - fn basic() { - fn assert_serialize() {} - fn assert_deserialize<'de, T: Deserialize<'de>>() {} - assert_serialize::(); - assert_deserialize::(); - } - - #[test] - #[cfg(feature = "serde_test")] - fn serialize() { - use serde_test::{assert_tokens, Token}; - - let ascii_string = AsciiString::from_ascii(ASCII).unwrap(); - assert_tokens(&ascii_string, &[Token::String(ASCII)]); - assert_tokens(&ascii_string, &[Token::Str(ASCII)]); - assert_tokens(&ascii_string, &[Token::BorrowedStr(ASCII)]); - } - - #[test] - #[cfg(feature = "serde_test")] - fn deserialize() { - use serde_test::{assert_de_tokens, assert_de_tokens_error, Token}; - let ascii_string = AsciiString::from_ascii(ASCII).unwrap(); - assert_de_tokens(&ascii_string, &[Token::Bytes(ASCII.as_bytes())]); - assert_de_tokens(&ascii_string, &[Token::BorrowedBytes(ASCII.as_bytes())]); - assert_de_tokens(&ascii_string, &[Token::ByteBuf(ASCII.as_bytes())]); - assert_de_tokens_error::( - &[Token::String(UNICODE)], - "invalid value: string \"Français\", expected an ascii string", - ); - } -} diff --git a/anneal/vendor/ascii/src/serialization/mod.rs b/anneal/vendor/ascii/src/serialization/mod.rs deleted file mode 100644 index 69a4e03a1c..0000000000 --- a/anneal/vendor/ascii/src/serialization/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod ascii_char; -mod ascii_str; -mod ascii_string; diff --git a/anneal/vendor/ascii/tests.rs b/anneal/vendor/ascii/tests.rs deleted file mode 100644 index 017a7be781..0000000000 --- a/anneal/vendor/ascii/tests.rs +++ /dev/null @@ -1,143 +0,0 @@ -extern crate ascii; - -use ascii::{AsAsciiStr, AsciiChar, AsciiStr}; -#[cfg(feature = "std")] -use ascii::{AsciiString, IntoAsciiString}; - -#[test] -#[cfg(feature = "std")] -fn ascii_vec() { - let test = b"( ;"; - let a = AsciiStr::from_ascii(test).unwrap(); - assert_eq!(test.as_ascii_str(), Ok(a)); - assert_eq!("( ;".as_ascii_str(), Ok(a)); - let v = test.to_vec(); 
- assert_eq!(v.as_ascii_str(), Ok(a)); - assert_eq!("( ;".to_string().as_ascii_str(), Ok(a)); -} - -#[test] -fn to_ascii() { - assert!("zoä华".as_ascii_str().is_err()); - assert!([127_u8, 128, 255].as_ascii_str().is_err()); - - let arr = [AsciiChar::ParenOpen, AsciiChar::Space, AsciiChar::Semicolon]; - let a: &AsciiStr = (&arr[..]).into(); - assert_eq!(b"( ;".as_ascii_str(), Ok(a)); - assert_eq!("( ;".as_ascii_str(), Ok(a)); -} - -#[test] -#[cfg(feature = "std")] -fn into_ascii() { - let arr = [AsciiChar::ParenOpen, AsciiChar::Space, AsciiChar::Semicolon]; - let v = AsciiString::from(arr.to_vec()); - assert_eq!(b"( ;".to_vec().into_ascii_string(), Ok(v.clone())); - assert_eq!("( ;".to_string().into_ascii_string(), Ok(v.clone())); - assert_eq!(b"( ;", AsRef::<[u8]>::as_ref(&v)); - - let err = "zoä华".to_string().into_ascii_string().unwrap_err(); - assert_eq!(Err(err.ascii_error()), "zoä华".as_ascii_str()); - assert_eq!(err.into_source(), "zoä华"); - let err = vec![127, 128, 255].into_ascii_string().unwrap_err(); - assert_eq!(Err(err.ascii_error()), [127, 128, 255].as_ascii_str()); - assert_eq!(err.into_source(), &[127, 128, 255]); -} - -#[test] -#[cfg(feature = "std")] -fn compare_ascii_string_ascii_str() { - let v = b"abc"; - let ascii_string = AsciiString::from_ascii(&v[..]).unwrap(); - let ascii_str = AsciiStr::from_ascii(v).unwrap(); - assert!(ascii_string == ascii_str); - assert!(ascii_str == ascii_string); -} - -#[test] -#[cfg(feature = "std")] -fn compare_ascii_string_string() { - let v = b"abc"; - let string = String::from_utf8(v.to_vec()).unwrap(); - let ascii_string = AsciiString::from_ascii(&v[..]).unwrap(); - assert!(string == ascii_string); - assert!(ascii_string == string); -} - -#[test] -#[cfg(feature = "std")] -fn compare_ascii_str_string() { - let v = b"abc"; - let string = String::from_utf8(v.to_vec()).unwrap(); - let ascii_str = AsciiStr::from_ascii(&v[..]).unwrap(); - assert!(string == ascii_str); - assert!(ascii_str == string); -} - -#[test] 
-#[cfg(feature = "std")] -fn compare_ascii_string_str() { - let v = b"abc"; - let sstr = ::std::str::from_utf8(v).unwrap(); - let ascii_string = AsciiString::from_ascii(&v[..]).unwrap(); - assert!(sstr == ascii_string); - assert!(ascii_string == sstr); -} - -#[test] -fn compare_ascii_str_str() { - let v = b"abc"; - let sstr = ::std::str::from_utf8(v).unwrap(); - let ascii_str = AsciiStr::from_ascii(v).unwrap(); - assert!(sstr == ascii_str); - assert!(ascii_str == sstr); -} - -#[test] -#[allow(clippy::redundant_slicing)] -fn compare_ascii_str_slice() { - let b = b"abc".as_ascii_str().unwrap(); - let c = b"ab".as_ascii_str().unwrap(); - assert_eq!(&b[..2], &c[..]); - assert_eq!(c[1].as_char(), 'b'); -} - -#[test] -#[cfg(feature = "std")] -fn compare_ascii_string_slice() { - let b = AsciiString::from_ascii("abc").unwrap(); - let c = AsciiString::from_ascii("ab").unwrap(); - assert_eq!(&b[..2], &c[..]); - assert_eq!(c[1].as_char(), 'b'); -} - -#[test] -#[cfg(feature = "std")] -fn extend_from_iterator() { - use std::borrow::Cow; - - let abc = "abc".as_ascii_str().unwrap(); - let mut s = abc.chars().collect::(); - assert_eq!(s, abc); - s.extend(abc); - assert_eq!(s, "abcabc"); - - let lines = "one\ntwo\nthree".as_ascii_str().unwrap().lines(); - s.extend(lines); - assert_eq!(s, "abcabconetwothree"); - - let cows = "ASCII Ascii ascii" - .as_ascii_str() - .unwrap() - .split(AsciiChar::Space) - .map(|case| { - if case.chars().all(AsciiChar::is_uppercase) { - Cow::from(case) - } else { - Cow::from(case.to_ascii_uppercase()) - } - }); - s.extend(cows); - s.extend(&[AsciiChar::LineFeed]); - assert_eq!(s, "abcabconetwothreeASCIIASCIIASCII\n"); -} diff --git a/anneal/vendor/chunked_transfer/.cargo-checksum.json b/anneal/vendor/chunked_transfer/.cargo-checksum.json deleted file mode 100644 index 83680bf097..0000000000 --- a/anneal/vendor/chunked_transfer/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ 
-{"files":{".cargo_vcs_info.json":"6efd477ff120102f1e31ff8f7625366ab64d193a5f0bb5d70882c3413e89e00a",".github/workflows/rust.yml":"6fdda8eaeae4d2e4b64a8497b836e40a244bd8f725ec49f19a9e1095aa62e819","Cargo.toml":"7753964e6adfc969908ea8f54aaad4c4bfed26a05e44bedb0faf588b9961f746","Cargo.toml.orig":"bcc025821244a7c37bf244d24a9116c9c48a0ade225fbcd34c825ff68c8036d7","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"5eda8c6538e7d543efe31ebad3fc0e66a7c4df056ddb901a5bdfbf5ef397e116","README.md":"4c3df261bfbfe8f7ed7bef76b833c606164dc768fe2919d7faf2ad3a3c1cb749","benches/encode.rs":"39e69d513bde63cbc977a4dc70f289c2ff494919ce0b934769039ffa82579312","src/decoder.rs":"e6df82dca0f409e866898464cb83d65cefb8d99b08109b77501444ff0c5ca5ec","src/encoder.rs":"1f645db6731b69d0394cb2e06250ea1de7a07c348aacc50df07b593421846953","src/lib.rs":"b1af36d3dddf7d2f7f45732281fa3fcafb741a60146f6befd33884fab51a6b03"},"package":"6e4de3bc4ea267985becf712dc6d9eed8b04c953b3fcfb339ebc87acd9804901"} \ No newline at end of file diff --git a/anneal/vendor/chunked_transfer/.cargo_vcs_info.json b/anneal/vendor/chunked_transfer/.cargo_vcs_info.json deleted file mode 100644 index 6bade92d3b..0000000000 --- a/anneal/vendor/chunked_transfer/.cargo_vcs_info.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "git": { - "sha1": "d4637cda63ec2da7052bba9d4d37f69cbeae9ab8" - }, - "path_in_vcs": "" -} \ No newline at end of file diff --git a/anneal/vendor/chunked_transfer/.github/workflows/rust.yml b/anneal/vendor/chunked_transfer/.github/workflows/rust.yml deleted file mode 100644 index 31000a2744..0000000000 --- a/anneal/vendor/chunked_transfer/.github/workflows/rust.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Rust - -on: - push: - branches: [ "main" ] - pull_request: - branches: [ "main" ] - -env: - CARGO_TERM_COLOR: always - -jobs: - build: - - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - name: Build - run: cargo build --verbose - - name: Run tests - run: cargo 
test --verbose diff --git a/anneal/vendor/chunked_transfer/Cargo.toml b/anneal/vendor/chunked_transfer/Cargo.toml deleted file mode 100644 index 52bd6cd984..0000000000 --- a/anneal/vendor/chunked_transfer/Cargo.toml +++ /dev/null @@ -1,27 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. - -[package] -edition = "2018" -name = "chunked_transfer" -version = "1.5.0" -authors = ["Corey Farwell "] -description = "Encoder and decoder for HTTP chunked transfer coding (RFC 7230 § 4.1)" -readme = "README.md" -license = "MIT OR Apache-2.0" -repository = "https://github.com/frewsxcv/rust-chunked-transfer" - -[[bench]] -name = "encode" -harness = false - -[dev-dependencies.criterion] -version = "0.3" diff --git a/anneal/vendor/chunked_transfer/Cargo.toml.orig b/anneal/vendor/chunked_transfer/Cargo.toml.orig deleted file mode 100644 index e0eaae9dab..0000000000 --- a/anneal/vendor/chunked_transfer/Cargo.toml.orig +++ /dev/null @@ -1,15 +0,0 @@ -[package] -name = "chunked_transfer" -version = "1.5.0" -authors = ["Corey Farwell "] -license = "MIT OR Apache-2.0" -repository = "https://github.com/frewsxcv/rust-chunked-transfer" -description = "Encoder and decoder for HTTP chunked transfer coding (RFC 7230 § 4.1)" -edition = "2018" - -[dev-dependencies] -criterion = "0.3" - -[[bench]] -name = "encode" -harness = false diff --git a/anneal/vendor/chunked_transfer/LICENSE-APACHE b/anneal/vendor/chunked_transfer/LICENSE-APACHE deleted file mode 100644 index 16fe87b06e..0000000000 --- a/anneal/vendor/chunked_transfer/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 
@@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the 
following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
diff --git a/anneal/vendor/chunked_transfer/LICENSE-MIT b/anneal/vendor/chunked_transfer/LICENSE-MIT deleted file mode 100644 index 892d6c7c47..0000000000 --- a/anneal/vendor/chunked_transfer/LICENSE-MIT +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2015 The tiny-http Contributors -Copyright (c) 2015 The rust-chunked-transfer Contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/anneal/vendor/chunked_transfer/README.md b/anneal/vendor/chunked_transfer/README.md deleted file mode 100644 index 92cfe05993..0000000000 --- a/anneal/vendor/chunked_transfer/README.md +++ /dev/null @@ -1,59 +0,0 @@ -# rust-chunked-transfer - -[Documentation](https://docs.rs/chunked_transfer/) - -Encoder and decoder for HTTP chunked transfer coding. 
For more information about chunked transfer encoding: - -* [RFC 7230 § 4.1](https://tools.ietf.org/html/rfc7230#section-4.1) -* [RFC 2616 § 3.6.1](https://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.6.1) (deprecated) -* [Wikipedia: Chunked transfer encoding](https://en.wikipedia.org/wiki/Chunked_transfer_encoding) - -## Example - -### Decoding - -```rust -use chunked_transfer::Decoder; -use std::io::Read; - -let encoded = b"3\r\nhel\r\nb\r\nlo world!!!\r\n0\r\n\r\n"; -let mut decoded = String::new(); - -let mut decoder = Decoder::new(encoded as &[u8]); -decoder.read_to_string(&mut decoded); - -assert_eq!(decoded, "hello world!!!"); -``` - -### Encoding - -```rust -use chunked_transfer::Encoder; -use std::io::Write; - -let mut decoded = "hello world"; -let mut encoded: Vec = vec![]; - -{ - let mut encoder = Encoder::with_chunks_size(&mut encoded, 5); - encoder.write_all(decoded.as_bytes()); -} - -assert_eq!(encoded, b"5\r\nhello\r\n5\r\n worl\r\n1\r\nd\r\n0\r\n\r\n"); -``` - -## Authors - -* [tomaka](https://github.com/tomaka) -* [frewsxcv](https://github.com/frewsxcv) - -# License - -Licensed under either of: - - * Apache License, Version 2.0, (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) - * MIT license (LICENSE-MIT or http://opensource.org/licenses/MIT) - -## Contribution - -Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. 
diff --git a/anneal/vendor/chunked_transfer/benches/encode.rs b/anneal/vendor/chunked_transfer/benches/encode.rs deleted file mode 100644 index 76306ff64b..0000000000 --- a/anneal/vendor/chunked_transfer/benches/encode.rs +++ /dev/null @@ -1,22 +0,0 @@ -#![feature(test)] - -use chunked_transfer; -use criterion::{criterion_group, criterion_main, Criterion}; -use std::io::Write; - -extern crate test; - -fn encode_benchmark(c: &mut Criterion) { - c.bench_function("encode", |b| { - let writer = vec![]; - let mut encoder = chunked_transfer::Encoder::new(writer); - let mut to_write = vec![b'a'; 1000]; - - b.iter(|| { - test::black_box(encoder.write_all(&mut to_write)); - }); - }); -} - -criterion_group!(benches, encode_benchmark); -criterion_main!(benches); diff --git a/anneal/vendor/chunked_transfer/src/decoder.rs b/anneal/vendor/chunked_transfer/src/decoder.rs deleted file mode 100644 index 453752fd69..0000000000 --- a/anneal/vendor/chunked_transfer/src/decoder.rs +++ /dev/null @@ -1,300 +0,0 @@ -use std::error::Error; -use std::fmt; -use std::io::Error as IoError; -use std::io::ErrorKind; -use std::io::Read; -use std::io::Result as IoResult; - -/// Reads HTTP chunks and sends back real data. -/// -/// # Example -/// -/// ``` -/// use chunked_transfer::Decoder; -/// use std::io::Read; -/// -/// let encoded = b"3\r\nhel\r\nb\r\nlo world!!!\r\n0\r\n\r\n"; -/// let mut decoded = String::new(); -/// -/// let mut decoder = Decoder::new(encoded as &[u8]); -/// decoder.read_to_string(&mut decoded); -/// -/// assert_eq!(decoded, "hello world!!!"); -/// ``` -pub struct Decoder { - // where the chunks come from - source: R, - - // remaining size of the chunk being read - // none if we are not in a chunk - remaining_chunks_size: Option, -} - -impl Decoder -where - R: Read, -{ - pub fn new(source: R) -> Decoder { - Decoder { - source, - remaining_chunks_size: None, - } - } - - /// Returns the remaining bytes left in the chunk being read. 
- pub fn remaining_chunks_size(&self) -> Option { - self.remaining_chunks_size - } - - /// Unwraps the Decoder into its inner `Read` source. - pub fn into_inner(self) -> R { - self.source - } - - /// Gets a reference to the underlying value in this decoder. - pub fn get_ref(&self) -> &R { - &self.source - } - - /// Gets a mutable reference to the underlying value in this decoder. - pub fn get_mut(&mut self) -> &mut R { - &mut self.source - } - - fn read_chunk_size(&mut self) -> IoResult { - let mut chunk_size_bytes = Vec::new(); - let mut has_ext = false; - - loop { - let byte = match self.source.by_ref().bytes().next() { - Some(b) => b?, - None => return Err(IoError::new(ErrorKind::InvalidInput, DecoderError)), - }; - - if byte == b'\r' { - break; - } - - if byte == b';' { - has_ext = true; - break; - } - - chunk_size_bytes.push(byte); - } - - // Ignore extensions for now - if has_ext { - loop { - let byte = match self.source.by_ref().bytes().next() { - Some(b) => b?, - None => return Err(IoError::new(ErrorKind::InvalidInput, DecoderError)), - }; - if byte == b'\r' { - break; - } - } - } - - self.read_line_feed()?; - - let chunk_size = String::from_utf8(chunk_size_bytes) - .ok() - .and_then(|c| usize::from_str_radix(c.trim(), 16).ok()) - .ok_or_else(|| IoError::new(ErrorKind::InvalidInput, DecoderError))?; - - Ok(chunk_size) - } - - fn read_carriage_return(&mut self) -> IoResult<()> { - match self.source.by_ref().bytes().next() { - Some(Ok(b'\r')) => Ok(()), - _ => Err(IoError::new(ErrorKind::InvalidInput, DecoderError)), - } - } - - fn read_line_feed(&mut self) -> IoResult<()> { - match self.source.by_ref().bytes().next() { - Some(Ok(b'\n')) => Ok(()), - _ => Err(IoError::new(ErrorKind::InvalidInput, DecoderError)), - } - } -} - -impl Read for Decoder -where - R: Read, -{ - fn read(&mut self, buf: &mut [u8]) -> IoResult { - let remaining_chunks_size = match self.remaining_chunks_size { - Some(c) => c, - None => { - // first possibility: we are not in a chunk, so 
we'll attempt to determine - // the chunks size - let chunk_size = self.read_chunk_size()?; - - // if the chunk size is 0, we are at EOF - if chunk_size == 0 { - self.read_carriage_return()?; - self.read_line_feed()?; - return Ok(0); - } - - chunk_size - } - }; - - // second possibility: we continue reading from a chunk - if buf.len() < remaining_chunks_size { - let read = self.source.read(buf)?; - self.remaining_chunks_size = Some(remaining_chunks_size - read); - return Ok(read); - } - - // third possibility: the read request goes further than the current chunk - // we simply read until the end of the chunk and return - assert!(buf.len() >= remaining_chunks_size); - - let buf = &mut buf[..remaining_chunks_size]; - let read = self.source.read(buf)?; - - self.remaining_chunks_size = if read == remaining_chunks_size { - self.read_carriage_return()?; - self.read_line_feed()?; - None - } else { - Some(remaining_chunks_size - read) - }; - - Ok(read) - } -} - -#[derive(Debug, Copy, Clone)] -struct DecoderError; - -impl fmt::Display for DecoderError { - fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { - write!(fmt, "Error while decoding chunks") - } -} - -impl Error for DecoderError { - fn description(&self) -> &str { - "Error while decoding chunks" - } -} - -#[cfg(test)] -mod test { - use super::Decoder; - use std::io; - use std::io::Read; - - /// This unit test is taken from from Hyper - /// https://github.com/hyperium/hyper - /// Copyright (c) 2014 Sean McArthur - #[test] - fn test_read_chunk_size() { - fn read(s: &str, expected: usize) { - let mut decoded = Decoder::new(s.as_bytes()); - let actual = decoded.read_chunk_size().unwrap(); - assert_eq!(expected, actual); - } - - fn read_err(s: &str) { - let mut decoded = Decoder::new(s.as_bytes()); - let err_kind = decoded.read_chunk_size().unwrap_err().kind(); - assert_eq!(err_kind, io::ErrorKind::InvalidInput); - } - - read("1\r\n", 1); - read("01\r\n", 1); - read("0\r\n", 0); - read("00\r\n", 0); 
- read("A\r\n", 10); - read("a\r\n", 10); - read("Ff\r\n", 255); - read("Ff \r\n", 255); - // Missing LF or CRLF - read_err("F\rF"); - read_err("F"); - // Invalid hex digit - read_err("X\r\n"); - read_err("1X\r\n"); - read_err("-\r\n"); - read_err("-1\r\n"); - // Acceptable (if not fully valid) extensions do not influence the size - read("1;extension\r\n", 1); - read("a;ext name=value\r\n", 10); - read("1;extension;extension2\r\n", 1); - read("1;;; ;\r\n", 1); - read("2; extension...\r\n", 2); - read("3 ; extension=123\r\n", 3); - read("3 ;\r\n", 3); - read("3 ; \r\n", 3); - // Invalid extensions cause an error - read_err("1 invalid extension\r\n"); - read_err("1 A\r\n"); - read_err("1;no CRLF"); - } - - #[test] - fn test_valid_chunk_decode() { - let source = io::Cursor::new( - "3\r\nhel\r\nb\r\nlo world!!!\r\n0\r\n\r\n" - .to_string() - .into_bytes(), - ); - let mut decoded = Decoder::new(source); - - let mut string = String::new(); - decoded.read_to_string(&mut string).unwrap(); - - assert_eq!(string, "hello world!!!"); - } - - #[test] - fn test_decode_zero_length() { - let mut decoder = Decoder::new(b"0\r\n\r\n" as &[u8]); - - let mut decoded = String::new(); - decoder.read_to_string(&mut decoded).unwrap(); - - assert_eq!(decoded, ""); - } - - #[test] - fn test_decode_invalid_chunk_length() { - let mut decoder = Decoder::new(b"m\r\n\r\n" as &[u8]); - - let mut decoded = String::new(); - assert!(decoder.read_to_string(&mut decoded).is_err()); - } - - #[test] - fn invalid_input1() { - let source = io::Cursor::new( - "2\r\nhel\r\nb\r\nlo world!!!\r\n0\r\n" - .to_string() - .into_bytes(), - ); - let mut decoded = Decoder::new(source); - - let mut string = String::new(); - assert!(decoded.read_to_string(&mut string).is_err()); - } - - #[test] - fn invalid_input2() { - let source = io::Cursor::new( - "3\rhel\r\nb\r\nlo world!!!\r\n0\r\n" - .to_string() - .into_bytes(), - ); - let mut decoded = Decoder::new(source); - - let mut string = String::new(); - 
assert!(decoded.read_to_string(&mut string).is_err()); - } -} diff --git a/anneal/vendor/chunked_transfer/src/encoder.rs b/anneal/vendor/chunked_transfer/src/encoder.rs deleted file mode 100644 index f1f45d0cc9..0000000000 --- a/anneal/vendor/chunked_transfer/src/encoder.rs +++ /dev/null @@ -1,207 +0,0 @@ -use std::io::Result as IoResult; -use std::io::Write; - -/// Splits the incoming data into HTTP chunks. -/// -/// # Example -/// -/// ``` -/// use chunked_transfer::Encoder; -/// use std::io::Write; -/// -/// let mut decoded = "hello world"; -/// let mut encoded: Vec = vec![]; -/// -/// { -/// let mut encoder = Encoder::with_chunks_size(&mut encoded, 5); -/// encoder.write_all(decoded.as_bytes()); -/// } -/// -/// assert_eq!(encoded, b"5\r\nhello\r\n5\r\n worl\r\n1\r\nd\r\n0\r\n\r\n"); -/// ``` -pub struct Encoder -where - W: Write, -{ - // where to send the result - output: W, - - // size of each chunk - chunks_size: usize, - - // data waiting to be sent is stored here - // This will always be at least 6 bytes long. The first 6 bytes - // are reserved for the chunk size and \r\n. - buffer: Vec, - - // Flushes the internal buffer after each write. This might be useful - // if data should be sent immediately to downstream consumers - flush_after_write: bool, -} - -const MAX_CHUNK_SIZE: usize = std::u32::MAX as usize; -// This accounts for four hex digits (enough to hold a u32) plus two bytes -// for the \r\n -const MAX_HEADER_SIZE: usize = 6; - -impl Encoder -where - W: Write, -{ - pub fn new(output: W) -> Encoder { - Encoder::with_chunks_size(output, 8192) - } - - /// Gets a reference to the underlying value in this encoder. - pub fn get_ref(&self) -> &W { - &self.output - } - - /// Gets a mutable reference to the underlying value in this encoder. 
- pub fn get_mut(&mut self) -> &mut W { - &mut self.output - } - - pub fn with_chunks_size(output: W, chunks: usize) -> Encoder { - let chunks_size = chunks.min(MAX_CHUNK_SIZE); - let mut encoder = Encoder { - output, - chunks_size, - buffer: vec![0; MAX_HEADER_SIZE], - flush_after_write: false, - }; - encoder.reset_buffer(); - encoder - } - - pub fn with_flush_after_write(output: W) -> Encoder { - let mut encoder = Encoder { - output, - chunks_size: 8192, - buffer: vec![0; MAX_HEADER_SIZE], - flush_after_write: true, - }; - encoder.reset_buffer(); - encoder - } - - fn reset_buffer(&mut self) { - // Reset buffer, still leaving space for the chunk size. That space - // will be populated once we know the size of the chunk. - self.buffer.truncate(MAX_HEADER_SIZE); - } - - fn is_buffer_empty(&self) -> bool { - self.buffer.len() == MAX_HEADER_SIZE - } - - fn buffer_len(&self) -> usize { - self.buffer.len() - MAX_HEADER_SIZE - } - - fn send(&mut self) -> IoResult<()> { - // Never send an empty buffer, because that would be interpreted - // as the end of the stream, which we indicate explicitly on drop. - if self.is_buffer_empty() { - return Ok(()); - } - // Prepend the length and \r\n to the buffer. - let prelude = format!("{:x}\r\n", self.buffer_len()); - let prelude = prelude.as_bytes(); - - // This should never happen because MAX_CHUNK_SIZE of u32::MAX - // can always be encoded in 4 hex bytes. - assert!( - prelude.len() <= MAX_HEADER_SIZE, - "invariant failed: prelude longer than MAX_HEADER_SIZE" - ); - - // Copy the prelude into the buffer. For small chunks, this won't necessarily - // take up all the space that was reserved for the prelude. - let offset = MAX_HEADER_SIZE - prelude.len(); - self.buffer[offset..MAX_HEADER_SIZE].clone_from_slice(prelude); - - // Append the chunk-finishing \r\n to the buffer. 
- self.buffer.write_all(b"\r\n")?; - - self.output.write_all(&self.buffer[offset..])?; - self.reset_buffer(); - - Ok(()) - } -} - -impl Write for Encoder -where - W: Write, -{ - fn write(&mut self, data: &[u8]) -> IoResult { - let remaining_buffer_space = self.chunks_size - self.buffer_len(); - let bytes_to_buffer = std::cmp::min(remaining_buffer_space, data.len()); - self.buffer.extend_from_slice(&data[0..bytes_to_buffer]); - let more_to_write: bool = bytes_to_buffer < data.len(); - if self.flush_after_write || more_to_write { - self.send()?; - } - - // If we didn't write the whole thing, keep working on it. - if more_to_write { - self.write_all(&data[bytes_to_buffer..])?; - } - Ok(data.len()) - } - - fn flush(&mut self) -> IoResult<()> { - self.send() - } -} - -impl Drop for Encoder -where - W: Write, -{ - fn drop(&mut self) { - self.flush().ok(); - write!(self.output, "0\r\n\r\n").ok(); - } -} - -#[cfg(test)] -mod test { - use super::Encoder; - use std::io; - use std::io::Write; - use std::str::from_utf8; - - #[test] - fn test() { - let mut source = io::Cursor::new("hello world".to_string().into_bytes()); - let mut dest: Vec = vec![]; - - { - let mut encoder = Encoder::with_chunks_size(dest.by_ref(), 5); - io::copy(&mut source, &mut encoder).unwrap(); - assert!(!encoder.is_buffer_empty()); - } - - let output = from_utf8(&dest).unwrap(); - - assert_eq!(output, "5\r\nhello\r\n5\r\n worl\r\n1\r\nd\r\n0\r\n\r\n"); - } - #[test] - fn flush_after_write() { - let mut source = io::Cursor::new("hello world".to_string().into_bytes()); - let mut dest: Vec = vec![]; - - { - let mut encoder = Encoder::with_flush_after_write(dest.by_ref()); - io::copy(&mut source, &mut encoder).unwrap(); - // The internal buffer has been flushed. 
- assert!(encoder.is_buffer_empty()); - } - - let output = from_utf8(&dest).unwrap(); - - assert_eq!(output, "b\r\nhello world\r\n0\r\n\r\n"); - } -} diff --git a/anneal/vendor/chunked_transfer/src/lib.rs b/anneal/vendor/chunked_transfer/src/lib.rs deleted file mode 100644 index ba9542b334..0000000000 --- a/anneal/vendor/chunked_transfer/src/lib.rs +++ /dev/null @@ -1,5 +0,0 @@ -mod decoder; -pub use crate::decoder::Decoder; - -mod encoder; -pub use crate::encoder::Encoder; diff --git a/anneal/vendor/httpdate/.cargo-checksum.json b/anneal/vendor/httpdate/.cargo-checksum.json deleted file mode 100644 index 29b7b44f66..0000000000 --- a/anneal/vendor/httpdate/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ -{"files":{".cargo_vcs_info.json":"158299c819b7d96576c47ecdbb6d8a12749ea3f45de887679fe58e3a462cf6fc",".github/workflows/ci.yml":"9a88cd3211399918ee069a77456e6c9317efeddae0848c681d14d08364211814","Cargo.toml":"d1adbc974f706b302bc844ce7b6d6323d30a1689b9e3bf53ab78ba0bb09a9ed9","Cargo.toml.orig":"935ef51333a927e121be199efebd6aa6df22e14682770d1cfd9d32fd2bc931eb","LICENSE-APACHE":"4d10fe5f3aa176b05b229a248866bad70b834c173f1252a814ff4748d8a13837","LICENSE-MIT":"934887691e05d69d7c86ad3f2c360980fa30c15b035e351f3c9865e99da4debc","README.md":"276cab7dac6cc74706b2aec34e649ef09c5b0149dbf15329020781161bb13673","benches/benchmarks.rs":"13f1208dfb86e3c02dcd67a4c08c2bae300c0a153de5df437eac4a136579ec23","src/date.rs":"87e7de1394f6b0d37128bfbf5943e256c886c35ed3f9078d15a08309c2206c69","src/lib.rs":"1c5b99558a8b2fec28d003b58a198cb2aebd232f0a2d162906524c5e6ead162e"},"package":"df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"} \ No newline at end of file diff --git a/anneal/vendor/httpdate/.cargo_vcs_info.json b/anneal/vendor/httpdate/.cargo_vcs_info.json deleted file mode 100644 index f43f4cfa20..0000000000 --- a/anneal/vendor/httpdate/.cargo_vcs_info.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "git": { - "sha1": "638da761065df8b67282b0a1d139c0a7e4a02429" - }, - 
"path_in_vcs": "" -} \ No newline at end of file diff --git a/anneal/vendor/httpdate/.github/workflows/ci.yml b/anneal/vendor/httpdate/.github/workflows/ci.yml deleted file mode 100644 index 8a9dfcc7de..0000000000 --- a/anneal/vendor/httpdate/.github/workflows/ci.yml +++ /dev/null @@ -1,24 +0,0 @@ -on: [push, pull_request] - -name: Continuous integration - -jobs: - check-test: - name: Check and test crate - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: dtolnay/rust-toolchain@stable - - run: cargo check --all-targets - - run: cargo test - - clippy-fmt: - name: Run Clippy and format code - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: dtolnay/rust-toolchain@stable - with: - components: clippy, rustfmt - - run: cargo clippy --all-targets -- -D warnings - - run: cargo fmt --all --check diff --git a/anneal/vendor/httpdate/Cargo.toml b/anneal/vendor/httpdate/Cargo.toml deleted file mode 100644 index f5049680ff..0000000000 --- a/anneal/vendor/httpdate/Cargo.toml +++ /dev/null @@ -1,35 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. 
- -[package] -edition = "2021" -rust-version = "1.56" -name = "httpdate" -version = "1.0.3" -authors = ["Pyfisch "] -description = "HTTP date parsing and formatting" -readme = "README.md" -keywords = [ - "http", - "date", - "time", - "simple", - "timestamp", -] -license = "MIT OR Apache-2.0" -repository = "https://github.com/pyfisch/httpdate" - -[[bench]] -name = "benchmarks" -harness = false - -[dev-dependencies.criterion] -version = "0.5" diff --git a/anneal/vendor/httpdate/Cargo.toml.orig b/anneal/vendor/httpdate/Cargo.toml.orig deleted file mode 100644 index ddc17f1099..0000000000 --- a/anneal/vendor/httpdate/Cargo.toml.orig +++ /dev/null @@ -1,18 +0,0 @@ -[package] -name = "httpdate" -version = "1.0.3" -authors = ["Pyfisch "] -license = "MIT OR Apache-2.0" -description = "HTTP date parsing and formatting" -keywords = ["http", "date", "time", "simple", "timestamp"] -readme = "README.md" -repository = "https://github.com/pyfisch/httpdate" -edition = "2021" -rust-version = "1.56" - -[dev-dependencies] -criterion = "0.5" - -[[bench]] -name = "benchmarks" -harness = false diff --git a/anneal/vendor/httpdate/LICENSE-APACHE b/anneal/vendor/httpdate/LICENSE-APACHE deleted file mode 100644 index cd482d8976..0000000000 --- a/anneal/vendor/httpdate/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ -Apache License -Version 2.0, January 2004 -http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - -"License" shall mean the terms and conditions for use, reproduction, -and distribution as defined by Sections 1 through 9 of this document. - -"Licensor" shall mean the copyright owner or entity authorized by -the copyright owner that is granting the License. - -"Legal Entity" shall mean the union of the acting entity and all -other entities that control, are controlled by, or are under common -control with that entity. 
For the purposes of this definition, -"control" means (i) the power, direct or indirect, to cause the -direction or management of such entity, whether by contract or -otherwise, or (ii) ownership of fifty percent (50%) or more of the -outstanding shares, or (iii) beneficial ownership of such entity. - -"You" (or "Your") shall mean an individual or Legal Entity -exercising permissions granted by this License. - -"Source" form shall mean the preferred form for making modifications, -including but not limited to software source code, documentation -source, and configuration files. - -"Object" form shall mean any form resulting from mechanical -transformation or translation of a Source form, including but -not limited to compiled object code, generated documentation, -and conversions to other media types. - -"Work" shall mean the work of authorship, whether in Source or -Object form, made available under the License, as indicated by a -copyright notice that is included in or attached to the work -(an example is provided in the Appendix below). - -"Derivative Works" shall mean any work, whether in Source or Object -form, that is based on (or derived from) the Work and for which the -editorial revisions, annotations, elaborations, or other modifications -represent, as a whole, an original work of authorship. For the purposes -of this License, Derivative Works shall not include works that remain -separable from, or merely link (or bind by name) to the interfaces of, -the Work and Derivative Works thereof. - -"Contribution" shall mean any work of authorship, including -the original version of the Work and any modifications or additions -to that Work or Derivative Works thereof, that is intentionally -submitted to Licensor for inclusion in the Work by the copyright owner -or by an individual or Legal Entity authorized to submit on behalf of -the copyright owner. 
For the purposes of this definition, "submitted" -means any form of electronic, verbal, or written communication sent -to the Licensor or its representatives, including but not limited to -communication on electronic mailing lists, source code control systems, -and issue tracking systems that are managed by, or on behalf of, the -Licensor for the purpose of discussing and improving the Work, but -excluding communication that is conspicuously marked or otherwise -designated in writing by the copyright owner as "Not a Contribution." - -"Contributor" shall mean Licensor and any individual or Legal Entity -on behalf of whom a Contribution has been received by Licensor and -subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of -this License, each Contributor hereby grants to You a perpetual, -worldwide, non-exclusive, no-charge, royalty-free, irrevocable -copyright license to reproduce, prepare Derivative Works of, -publicly display, publicly perform, sublicense, and distribute the -Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of -this License, each Contributor hereby grants to You a perpetual, -worldwide, non-exclusive, no-charge, royalty-free, irrevocable -(except as stated in this section) patent license to make, have made, -use, offer to sell, sell, import, and otherwise transfer the Work, -where such license applies only to those patent claims licensable -by such Contributor that are necessarily infringed by their -Contribution(s) alone or by combination of their Contribution(s) -with the Work to which such Contribution(s) was submitted. 
If You -institute patent litigation against any entity (including a -cross-claim or counterclaim in a lawsuit) alleging that the Work -or a Contribution incorporated within the Work constitutes direct -or contributory patent infringement, then any patent licenses -granted to You under this License for that Work shall terminate -as of the date such litigation is filed. - -4. Redistribution. You may reproduce and distribute copies of the -Work or Derivative Works thereof in any medium, with or without -modifications, and in Source or Object form, provided that You -meet the following conditions: - -(a) You must give any other recipients of the Work or -Derivative Works a copy of this License; and - -(b) You must cause any modified files to carry prominent notices -stating that You changed the files; and - -(c) You must retain, in the Source form of any Derivative Works -that You distribute, all copyright, patent, trademark, and -attribution notices from the Source form of the Work, -excluding those notices that do not pertain to any part of -the Derivative Works; and - -(d) If the Work includes a "NOTICE" text file as part of its -distribution, then any Derivative Works that You distribute must -include a readable copy of the attribution notices contained -within such NOTICE file, excluding those notices that do not -pertain to any part of the Derivative Works, in at least one -of the following places: within a NOTICE text file distributed -as part of the Derivative Works; within the Source form or -documentation, if provided along with the Derivative Works; or, -within a display generated by the Derivative Works, if and -wherever such third-party notices normally appear. The contents -of the NOTICE file are for informational purposes only and -do not modify the License. 
You may add Your own attribution -notices within Derivative Works that You distribute, alongside -or as an addendum to the NOTICE text from the Work, provided -that such additional attribution notices cannot be construed -as modifying the License. - -You may add Your own copyright statement to Your modifications and -may provide additional or different license terms and conditions -for use, reproduction, or distribution of Your modifications, or -for any such Derivative Works as a whole, provided Your use, -reproduction, and distribution of the Work otherwise complies with -the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, -any Contribution intentionally submitted for inclusion in the Work -by You to the Licensor shall be under the terms and conditions of -this License, without any additional terms or conditions. -Notwithstanding the above, nothing herein shall supersede or modify -the terms of any separate license agreement you may have executed -with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade -names, trademarks, service marks, or product names of the Licensor, -except as required for reasonable and customary use in describing the -origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or -agreed to in writing, Licensor provides the Work (and each -Contributor provides its Contributions) on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or -implied, including, without limitation, any warranties or conditions -of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A -PARTICULAR PURPOSE. You are solely responsible for determining the -appropriateness of using or redistributing the Work and assume any -risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, -whether in tort (including negligence), contract, or otherwise, -unless required by applicable law (such as deliberate and grossly -negligent acts) or agreed to in writing, shall any Contributor be -liable to You for damages, including any direct, indirect, special, -incidental, or consequential damages of any character arising as a -result of this License or out of the use or inability to use the -Work (including but not limited to damages for loss of goodwill, -work stoppage, computer failure or malfunction, or any and all -other commercial damages or losses), even if such Contributor -has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing -the Work or Derivative Works thereof, You may choose to offer, -and charge a fee for, acceptance of support, warranty, indemnity, -or other liability obligations and/or rights consistent with this -License. However, in accepting such obligations, You may act only -on Your own behalf and on Your sole responsibility, not on behalf -of any other Contributor, and only if You agree to indemnify, -defend, and hold each Contributor harmless for any liability -incurred by, or claims asserted against, such Contributor by reason -of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - -To apply the Apache License to your work, attach the following -boilerplate notice, with the fields enclosed by brackets "[]" -replaced with your own identifying information. (Don't include -the brackets!) The text should be enclosed in the appropriate -comment syntax for the file format. We also recommend that a -file or class name and description of purpose be included on the -same "printed page" as the copyright notice for easier -identification within third-party archives. 
- -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/anneal/vendor/httpdate/LICENSE-MIT b/anneal/vendor/httpdate/LICENSE-MIT deleted file mode 100644 index 8819964156..0000000000 --- a/anneal/vendor/httpdate/LICENSE-MIT +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (c) 2016 Pyfisch - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
diff --git a/anneal/vendor/httpdate/README.md b/anneal/vendor/httpdate/README.md deleted file mode 100644 index dd436e86e0..0000000000 --- a/anneal/vendor/httpdate/README.md +++ /dev/null @@ -1,27 +0,0 @@ -# Date and time utils for HTTP. - -[![Build Status](https://github.com/pyfisch/httpdate/actions/workflows/ci.yml/badge.svg)](https://github.com/pyfisch/httpdate/actions/workflows/ci.yml) -[![Crates.io](https://img.shields.io/crates/v/httpdate.svg)](https://crates.io/crates/httpdate) -[![Documentation](https://docs.rs/httpdate/badge.svg)](https://docs.rs/httpdate) - -Multiple HTTP header fields store timestamps. -For example a response created on May 15, 2015 may contain the header -`Date: Fri, 15 May 2015 15:34:21 GMT`. Since the timestamp does not -contain any timezone or leap second information it is equvivalent to -writing 1431696861 Unix time. Rust’s `SystemTime` is used to store -these timestamps. - -This crate provides two public functions: - -* `parse_http_date` to parse a HTTP datetime string to a system time -* `fmt_http_date` to format a system time to a IMF-fixdate - -In addition it exposes the `HttpDate` type that can be used to parse -and format timestamps. Convert a sytem time to `HttpDate` and vice versa. -The `HttpDate` (8 bytes) is smaller than `SystemTime` (16 bytes) and -using the display impl avoids a temporary allocation. - -Read the [blog post](https://pyfisch.org/blog/http-datetime-handling/) to learn -more. - -Fuzz it by installing *cargo-fuzz* and running `cargo fuzz run fuzz_target_1`. 
diff --git a/anneal/vendor/httpdate/benches/benchmarks.rs b/anneal/vendor/httpdate/benches/benchmarks.rs deleted file mode 100644 index 4f82467bdb..0000000000 --- a/anneal/vendor/httpdate/benches/benchmarks.rs +++ /dev/null @@ -1,57 +0,0 @@ -use criterion::{black_box, criterion_group, criterion_main, Criterion}; - -pub fn parse_imf_fixdate(c: &mut Criterion) { - c.bench_function("parse_imf_fixdate", |b| { - b.iter(|| { - let d = black_box("Sun, 06 Nov 1994 08:49:37 GMT"); - black_box(httpdate::parse_http_date(d)).unwrap(); - }) - }); -} - -pub fn parse_rfc850_date(c: &mut Criterion) { - c.bench_function("parse_rfc850_date", |b| { - b.iter(|| { - let d = black_box("Sunday, 06-Nov-94 08:49:37 GMT"); - black_box(httpdate::parse_http_date(d)).unwrap(); - }) - }); -} - -pub fn parse_asctime(c: &mut Criterion) { - c.bench_function("parse_asctime", |b| { - b.iter(|| { - let d = black_box("Sun Nov 6 08:49:37 1994"); - black_box(httpdate::parse_http_date(d)).unwrap(); - }) - }); -} - -struct BlackBoxWrite; - -impl std::fmt::Write for BlackBoxWrite { - fn write_str(&mut self, s: &str) -> Result<(), std::fmt::Error> { - black_box(s); - Ok(()) - } -} - -pub fn encode_date(c: &mut Criterion) { - c.bench_function("encode_date", |b| { - let d = "Wed, 21 Oct 2015 07:28:00 GMT"; - black_box(httpdate::parse_http_date(d)).unwrap(); - b.iter(|| { - use std::fmt::Write; - let _ = write!(BlackBoxWrite, "{}", d); - }) - }); -} - -criterion_group!( - benches, - parse_imf_fixdate, - parse_rfc850_date, - parse_asctime, - encode_date -); -criterion_main!(benches); diff --git a/anneal/vendor/httpdate/src/date.rs b/anneal/vendor/httpdate/src/date.rs deleted file mode 100644 index 8bc0a3b33b..0000000000 --- a/anneal/vendor/httpdate/src/date.rs +++ /dev/null @@ -1,420 +0,0 @@ -use std::cmp; -use std::fmt::{self, Display, Formatter}; -use std::str::FromStr; -use std::time::{Duration, SystemTime, UNIX_EPOCH}; - -use crate::Error; - -/// HTTP timestamp type. -/// -/// Parse using `FromStr` impl. 
-/// Format using the `Display` trait. -/// Convert timestamp into/from `SytemTime` to use. -/// Supports comparsion and sorting. -#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] -pub struct HttpDate { - /// 0...59 - sec: u8, - /// 0...59 - min: u8, - /// 0...23 - hour: u8, - /// 1...31 - day: u8, - /// 1...12 - mon: u8, - /// 1970...9999 - year: u16, - /// 1...7 - wday: u8, -} - -impl HttpDate { - fn is_valid(&self) -> bool { - self.sec < 60 - && self.min < 60 - && self.hour < 24 - && self.day > 0 - && self.day < 32 - && self.mon > 0 - && self.mon <= 12 - && self.year >= 1970 - && self.year <= 9999 - && &HttpDate::from(SystemTime::from(*self)) == self - } -} - -impl From for HttpDate { - fn from(v: SystemTime) -> HttpDate { - let dur = v - .duration_since(UNIX_EPOCH) - .expect("all times should be after the epoch"); - let secs_since_epoch = dur.as_secs(); - - if secs_since_epoch >= 253402300800 { - // year 9999 - panic!("date must be before year 9999"); - } - - /* 2000-03-01 (mod 400 year, immediately after feb29 */ - const LEAPOCH: i64 = 11017; - const DAYS_PER_400Y: i64 = 365 * 400 + 97; - const DAYS_PER_100Y: i64 = 365 * 100 + 24; - const DAYS_PER_4Y: i64 = 365 * 4 + 1; - - let days = (secs_since_epoch / 86400) as i64 - LEAPOCH; - let secs_of_day = secs_since_epoch % 86400; - - let mut qc_cycles = days / DAYS_PER_400Y; - let mut remdays = days % DAYS_PER_400Y; - - if remdays < 0 { - remdays += DAYS_PER_400Y; - qc_cycles -= 1; - } - - let mut c_cycles = remdays / DAYS_PER_100Y; - if c_cycles == 4 { - c_cycles -= 1; - } - remdays -= c_cycles * DAYS_PER_100Y; - - let mut q_cycles = remdays / DAYS_PER_4Y; - if q_cycles == 25 { - q_cycles -= 1; - } - remdays -= q_cycles * DAYS_PER_4Y; - - let mut remyears = remdays / 365; - if remyears == 4 { - remyears -= 1; - } - remdays -= remyears * 365; - - let mut year = 2000 + remyears + 4 * q_cycles + 100 * c_cycles + 400 * qc_cycles; - - let months = [31, 30, 31, 30, 31, 31, 30, 31, 30, 31, 31, 29]; - let mut mon = 0; - 
for mon_len in months.iter() { - mon += 1; - if remdays < *mon_len { - break; - } - remdays -= *mon_len; - } - let mday = remdays + 1; - let mon = if mon + 2 > 12 { - year += 1; - mon - 10 - } else { - mon + 2 - }; - - let mut wday = (3 + days) % 7; - if wday <= 0 { - wday += 7 - }; - - HttpDate { - sec: (secs_of_day % 60) as u8, - min: ((secs_of_day % 3600) / 60) as u8, - hour: (secs_of_day / 3600) as u8, - day: mday as u8, - mon: mon as u8, - year: year as u16, - wday: wday as u8, - } - } -} - -impl From for SystemTime { - fn from(v: HttpDate) -> SystemTime { - let leap_years = - ((v.year - 1) - 1968) / 4 - ((v.year - 1) - 1900) / 100 + ((v.year - 1) - 1600) / 400; - let mut ydays = match v.mon { - 1 => 0, - 2 => 31, - 3 => 59, - 4 => 90, - 5 => 120, - 6 => 151, - 7 => 181, - 8 => 212, - 9 => 243, - 10 => 273, - 11 => 304, - 12 => 334, - _ => unreachable!(), - } + v.day as u64 - - 1; - if is_leap_year(v.year) && v.mon > 2 { - ydays += 1; - } - let days = (v.year as u64 - 1970) * 365 + leap_years as u64 + ydays; - UNIX_EPOCH - + Duration::from_secs( - v.sec as u64 + v.min as u64 * 60 + v.hour as u64 * 3600 + days * 86400, - ) - } -} - -impl FromStr for HttpDate { - type Err = Error; - - fn from_str(s: &str) -> Result { - if !s.is_ascii() { - return Err(Error(())); - } - let x = s.trim().as_bytes(); - let date = parse_imf_fixdate(x) - .or_else(|_| parse_rfc850_date(x)) - .or_else(|_| parse_asctime(x))?; - if !date.is_valid() { - return Err(Error(())); - } - Ok(date) - } -} - -impl Display for HttpDate { - fn fmt(&self, f: &mut Formatter) -> fmt::Result { - let wday = match self.wday { - 1 => b"Mon", - 2 => b"Tue", - 3 => b"Wed", - 4 => b"Thu", - 5 => b"Fri", - 6 => b"Sat", - 7 => b"Sun", - _ => unreachable!(), - }; - - let mon = match self.mon { - 1 => b"Jan", - 2 => b"Feb", - 3 => b"Mar", - 4 => b"Apr", - 5 => b"May", - 6 => b"Jun", - 7 => b"Jul", - 8 => b"Aug", - 9 => b"Sep", - 10 => b"Oct", - 11 => b"Nov", - 12 => b"Dec", - _ => unreachable!(), - }; - - let mut 
buf: [u8; 29] = *b" , 00 0000 00:00:00 GMT"; - buf[0] = wday[0]; - buf[1] = wday[1]; - buf[2] = wday[2]; - buf[5] = b'0' + (self.day / 10); - buf[6] = b'0' + (self.day % 10); - buf[8] = mon[0]; - buf[9] = mon[1]; - buf[10] = mon[2]; - buf[12] = b'0' + (self.year / 1000) as u8; - buf[13] = b'0' + (self.year / 100 % 10) as u8; - buf[14] = b'0' + (self.year / 10 % 10) as u8; - buf[15] = b'0' + (self.year % 10) as u8; - buf[17] = b'0' + (self.hour / 10); - buf[18] = b'0' + (self.hour % 10); - buf[20] = b'0' + (self.min / 10); - buf[21] = b'0' + (self.min % 10); - buf[23] = b'0' + (self.sec / 10); - buf[24] = b'0' + (self.sec % 10); - f.write_str(std::str::from_utf8(&buf[..]).unwrap()) - } -} - -impl Ord for HttpDate { - fn cmp(&self, other: &HttpDate) -> cmp::Ordering { - SystemTime::from(*self).cmp(&SystemTime::from(*other)) - } -} - -impl PartialOrd for HttpDate { - fn partial_cmp(&self, other: &HttpDate) -> Option { - Some(self.cmp(other)) - } -} - -fn toint_1(x: u8) -> Result { - let result = x.wrapping_sub(b'0'); - if result < 10 { - Ok(result) - } else { - Err(Error(())) - } -} - -fn toint_2(s: &[u8]) -> Result { - let high = s[0].wrapping_sub(b'0'); - let low = s[1].wrapping_sub(b'0'); - - if high < 10 && low < 10 { - Ok(high * 10 + low) - } else { - Err(Error(())) - } -} - -#[allow(clippy::many_single_char_names)] -fn toint_4(s: &[u8]) -> Result { - let a = u16::from(s[0].wrapping_sub(b'0')); - let b = u16::from(s[1].wrapping_sub(b'0')); - let c = u16::from(s[2].wrapping_sub(b'0')); - let d = u16::from(s[3].wrapping_sub(b'0')); - - if a < 10 && b < 10 && c < 10 && d < 10 { - Ok(a * 1000 + b * 100 + c * 10 + d) - } else { - Err(Error(())) - } -} - -fn parse_imf_fixdate(s: &[u8]) -> Result { - // Example: `Sun, 06 Nov 1994 08:49:37 GMT` - if s.len() != 29 || &s[25..] 
!= b" GMT" || s[16] != b' ' || s[19] != b':' || s[22] != b':' { - return Err(Error(())); - } - Ok(HttpDate { - sec: toint_2(&s[23..25])?, - min: toint_2(&s[20..22])?, - hour: toint_2(&s[17..19])?, - day: toint_2(&s[5..7])?, - mon: match &s[7..12] { - b" Jan " => 1, - b" Feb " => 2, - b" Mar " => 3, - b" Apr " => 4, - b" May " => 5, - b" Jun " => 6, - b" Jul " => 7, - b" Aug " => 8, - b" Sep " => 9, - b" Oct " => 10, - b" Nov " => 11, - b" Dec " => 12, - _ => return Err(Error(())), - }, - year: toint_4(&s[12..16])?, - wday: match &s[..5] { - b"Mon, " => 1, - b"Tue, " => 2, - b"Wed, " => 3, - b"Thu, " => 4, - b"Fri, " => 5, - b"Sat, " => 6, - b"Sun, " => 7, - _ => return Err(Error(())), - }, - }) -} - -fn parse_rfc850_date(s: &[u8]) -> Result { - // Example: `Sunday, 06-Nov-94 08:49:37 GMT` - if s.len() < 23 { - return Err(Error(())); - } - - fn wday<'a>(s: &'a [u8], wday: u8, name: &'static [u8]) -> Option<(u8, &'a [u8])> { - if &s[0..name.len()] == name { - return Some((wday, &s[name.len()..])); - } - None - } - let (wday, s) = wday(s, 1, b"Monday, ") - .or_else(|| wday(s, 2, b"Tuesday, ")) - .or_else(|| wday(s, 3, b"Wednesday, ")) - .or_else(|| wday(s, 4, b"Thursday, ")) - .or_else(|| wday(s, 5, b"Friday, ")) - .or_else(|| wday(s, 6, b"Saturday, ")) - .or_else(|| wday(s, 7, b"Sunday, ")) - .ok_or(Error(()))?; - if s.len() != 22 || s[12] != b':' || s[15] != b':' || &s[18..22] != b" GMT" { - return Err(Error(())); - } - let mut year = u16::from(toint_2(&s[7..9])?); - if year < 70 { - year += 2000; - } else { - year += 1900; - } - Ok(HttpDate { - sec: toint_2(&s[16..18])?, - min: toint_2(&s[13..15])?, - hour: toint_2(&s[10..12])?, - day: toint_2(&s[0..2])?, - mon: match &s[2..7] { - b"-Jan-" => 1, - b"-Feb-" => 2, - b"-Mar-" => 3, - b"-Apr-" => 4, - b"-May-" => 5, - b"-Jun-" => 6, - b"-Jul-" => 7, - b"-Aug-" => 8, - b"-Sep-" => 9, - b"-Oct-" => 10, - b"-Nov-" => 11, - b"-Dec-" => 12, - _ => return Err(Error(())), - }, - year, - wday, - }) -} - -fn parse_asctime(s: 
&[u8]) -> Result { - // Example: `Sun Nov 6 08:49:37 1994` - if s.len() != 24 || s[10] != b' ' || s[13] != b':' || s[16] != b':' || s[19] != b' ' { - return Err(Error(())); - } - Ok(HttpDate { - sec: toint_2(&s[17..19])?, - min: toint_2(&s[14..16])?, - hour: toint_2(&s[11..13])?, - day: { - let x = &s[8..10]; - { - if x[0] == b' ' { - toint_1(x[1]) - } else { - toint_2(x) - } - }? - }, - mon: match &s[4..8] { - b"Jan " => 1, - b"Feb " => 2, - b"Mar " => 3, - b"Apr " => 4, - b"May " => 5, - b"Jun " => 6, - b"Jul " => 7, - b"Aug " => 8, - b"Sep " => 9, - b"Oct " => 10, - b"Nov " => 11, - b"Dec " => 12, - _ => return Err(Error(())), - }, - year: toint_4(&s[20..24])?, - wday: match &s[0..4] { - b"Mon " => 1, - b"Tue " => 2, - b"Wed " => 3, - b"Thu " => 4, - b"Fri " => 5, - b"Sat " => 6, - b"Sun " => 7, - _ => return Err(Error(())), - }, - }) -} - -fn is_leap_year(y: u16) -> bool { - y % 4 == 0 && (y % 100 != 0 || y % 400 == 0) -} diff --git a/anneal/vendor/httpdate/src/lib.rs b/anneal/vendor/httpdate/src/lib.rs deleted file mode 100644 index 88cb6fa751..0000000000 --- a/anneal/vendor/httpdate/src/lib.rs +++ /dev/null @@ -1,160 +0,0 @@ -//! Date and time utils for HTTP. -//! -//! Multiple HTTP header fields store timestamps. -//! For example a response created on May 15, 2015 may contain the header -//! `Date: Fri, 15 May 2015 15:34:21 GMT`. Since the timestamp does not -//! contain any timezone or leap second information it is equvivalent to -//! writing 1431696861 Unix time. Rust’s `SystemTime` is used to store -//! these timestamps. -//! -//! This crate provides two public functions: -//! -//! * `parse_http_date` to parse a HTTP datetime string to a system time -//! * `fmt_http_date` to format a system time to a IMF-fixdate -//! -//! In addition it exposes the `HttpDate` type that can be used to parse -//! and format timestamps. Convert a sytem time to `HttpDate` and vice versa. -//! The `HttpDate` (8 bytes) is smaller than `SystemTime` (16 bytes) and -//! 
using the display impl avoids a temporary allocation. -#![forbid(unsafe_code)] - -use std::error; -use std::fmt::{self, Display, Formatter}; -use std::io; -use std::time::SystemTime; - -pub use date::HttpDate; - -mod date; - -/// An opaque error type for all parsing errors. -#[derive(Debug)] -pub struct Error(()); - -impl error::Error for Error {} - -impl Display for Error { - fn fmt(&self, f: &mut Formatter) -> Result<(), fmt::Error> { - f.write_str("string contains no or an invalid date") - } -} - -impl From for io::Error { - fn from(e: Error) -> io::Error { - io::Error::new(io::ErrorKind::Other, e) - } -} - -/// Parse a date from an HTTP header field. -/// -/// Supports the preferred IMF-fixdate and the legacy RFC 805 and -/// ascdate formats. Two digit years are mapped to dates between -/// 1970 and 2069. -pub fn parse_http_date(s: &str) -> Result { - s.parse::().map(|d| d.into()) -} - -/// Format a date to be used in a HTTP header field. -/// -/// Dates are formatted as IMF-fixdate: `Fri, 15 May 2015 15:34:21 GMT`. 
-pub fn fmt_http_date(d: SystemTime) -> String { - format!("{}", HttpDate::from(d)) -} - -#[cfg(test)] -mod tests { - use std::str; - use std::time::{Duration, UNIX_EPOCH}; - - use super::{fmt_http_date, parse_http_date, HttpDate}; - - #[test] - fn test_rfc_example() { - let d = UNIX_EPOCH + Duration::from_secs(784111777); - assert_eq!( - d, - parse_http_date("Sun, 06 Nov 1994 08:49:37 GMT").expect("#1") - ); - assert_eq!( - d, - parse_http_date("Sunday, 06-Nov-94 08:49:37 GMT").expect("#2") - ); - assert_eq!(d, parse_http_date("Sun Nov 6 08:49:37 1994").expect("#3")); - } - - #[test] - fn test2() { - let d = UNIX_EPOCH + Duration::from_secs(1475419451); - assert_eq!( - d, - parse_http_date("Sun, 02 Oct 2016 14:44:11 GMT").expect("#1") - ); - assert!(parse_http_date("Sun Nov 10 08:00:00 1000").is_err()); - assert!(parse_http_date("Sun Nov 10 08*00:00 2000").is_err()); - assert!(parse_http_date("Sunday, 06-Nov-94 08+49:37 GMT").is_err()); - } - - #[test] - fn test3() { - let mut d = UNIX_EPOCH; - assert_eq!(d, parse_http_date("Thu, 01 Jan 1970 00:00:00 GMT").unwrap()); - d += Duration::from_secs(3600); - assert_eq!(d, parse_http_date("Thu, 01 Jan 1970 01:00:00 GMT").unwrap()); - d += Duration::from_secs(86400); - assert_eq!(d, parse_http_date("Fri, 02 Jan 1970 01:00:00 GMT").unwrap()); - d += Duration::from_secs(2592000); - assert_eq!(d, parse_http_date("Sun, 01 Feb 1970 01:00:00 GMT").unwrap()); - d += Duration::from_secs(2592000); - assert_eq!(d, parse_http_date("Tue, 03 Mar 1970 01:00:00 GMT").unwrap()); - d += Duration::from_secs(31536005); - assert_eq!(d, parse_http_date("Wed, 03 Mar 1971 01:00:05 GMT").unwrap()); - d += Duration::from_secs(15552000); - assert_eq!(d, parse_http_date("Mon, 30 Aug 1971 01:00:05 GMT").unwrap()); - d += Duration::from_secs(6048000); - assert_eq!(d, parse_http_date("Mon, 08 Nov 1971 01:00:05 GMT").unwrap()); - d += Duration::from_secs(864000000); - assert_eq!(d, parse_http_date("Fri, 26 Mar 1999 01:00:05 GMT").unwrap()); - } - - 
#[test] - fn test_fmt() { - let d = UNIX_EPOCH; - assert_eq!(fmt_http_date(d), "Thu, 01 Jan 1970 00:00:00 GMT"); - let d = UNIX_EPOCH + Duration::from_secs(1475419451); - assert_eq!(fmt_http_date(d), "Sun, 02 Oct 2016 14:44:11 GMT"); - } - - #[allow(dead_code)] - fn testcase(data: &[u8]) { - if let Ok(s) = str::from_utf8(data) { - println!("{:?}", s); - if let Ok(d) = parse_http_date(s) { - let o = fmt_http_date(d); - assert!(!o.is_empty()); - } - } - } - - #[test] - fn size_of() { - assert_eq!(::std::mem::size_of::(), 8); - } - - #[test] - fn test_date_comparison() { - let a = UNIX_EPOCH + Duration::from_secs(784111777); - let b = a + Duration::from_secs(30); - assert!(a < b); - let a_date: HttpDate = a.into(); - let b_date: HttpDate = b.into(); - assert!(a_date < b_date); - assert_eq!(a_date.cmp(&b_date), ::std::cmp::Ordering::Less) - } - - #[test] - fn test_parse_bad_date() { - // 1994-11-07 is actually a Monday - let parsed = "Sun, 07 Nov 1994 08:48:37 GMT".parse::(); - assert!(parsed.is_err()) - } -} diff --git a/anneal/vendor/phf/.cargo-checksum.json b/anneal/vendor/phf/.cargo-checksum.json new file mode 100644 index 0000000000..3744ecbeb3 --- /dev/null +++ b/anneal/vendor/phf/.cargo-checksum.json @@ -0,0 +1 @@ 
+{"files":{".cargo_vcs_info.json":"613f69d02d2523c1e9279da2a336260c837c52db6b39fc3e55a05145f87884cd","CHANGELOG.md":"0daf2d29e9a4f72c9375c6af57e79388acb4ba0ed684c7de84fd05b0bb3c50ca","Cargo.lock":"510dc3c50bfae6994161baf97e2ee115c59d83ff7d38965954b302a6300a0aec","Cargo.toml":"5d422dba03226b1aa65b7f542c2d7c91a9e8cad940c484cd5ade06e22a43cb6e","Cargo.toml.orig":"6fa1203c5049c458480ff829d4aeb0673dfa934df578182713d69e2f0a2308c1","LICENSE":"0ab4d106b6faac07fb6a051815fd1b4d862d730895e2d7d7358c2f13565e7a38","README.md":"64d22574dafbd9664fbb908506d9fc219aeac6f4ba544ebd7bd14099cea4f309","src/lib.rs":"7e0acb36f16b15880c8008a7889793c1a858c922682a6f48f6bcb030f97ed75e","src/map.rs":"71d3e813b08eb4b8caf6c276c5fd94dd8c61f4d2bddc14e223a661f5d1d58294","src/ordered_map.rs":"f7488eae4f56edda0b19e5e1853333212aa630533c227798c699f2133fbf2ec7","src/ordered_set.rs":"22aa62669b18a7c7f76ceb68af6ad8694a125cd699dfc2ecc3f9c2d724040626","src/set.rs":"509c2bd7b173063485c7c766943d7f16dfe16c73ca7a6e8fdbc6ccd45150057b"},"package":"913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7"} \ No newline at end of file diff --git a/anneal/vendor/phf/.cargo_vcs_info.json b/anneal/vendor/phf/.cargo_vcs_info.json new file mode 100644 index 0000000000..b0adb02a66 --- /dev/null +++ b/anneal/vendor/phf/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "03a930696231da01005e762425841f91587b3e04" + }, + "path_in_vcs": "phf" +} \ No newline at end of file diff --git a/anneal/vendor/phf/CHANGELOG.md b/anneal/vendor/phf/CHANGELOG.md new file mode 100644 index 0000000000..5e28a5d2a6 --- /dev/null +++ b/anneal/vendor/phf/CHANGELOG.md @@ -0,0 +1,469 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## 0.12.0 (2025-06-19) + + + + +### Chore + + - Update version number in docs + - Turn off serde std feature on no-std + +### Chore + + - Update changelog + +### Commit Statistics + + + + - 10 commits contributed to the release over the course of 138 calendar days. + - 163 days passed between releases. + - 3 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Update changelog ([`51d6baa`](https://github.com/rust-phf/rust-phf/commit/51d6baaa6ffce658fb9b56a96affaf0ddd0603e5)) + - Update version number in docs ([`08e7464`](https://github.com/rust-phf/rust-phf/commit/08e74647f00f7d77cbb81e0cb73ed663798d000f)) + - Merge pull request #333 from JohnTitor/chore/serde-no-std ([`5c3ad85`](https://github.com/rust-phf/rust-phf/commit/5c3ad850fd55f8a9fd2ea8bce69fc458029242d6)) + - Turn off serde std feature on no-std ([`08897bd`](https://github.com/rust-phf/rust-phf/commit/08897bd7603c362280ad4f24feca2c99be5e9a98)) + - Merge pull request #289 from thaliaarchi/master ([`a6df856`](https://github.com/rust-phf/rust-phf/commit/a6df856ade4cfbf2666fcabbd70c666ea8234abf)) + - Add support for unicase::Ascii type ([`2806801`](https://github.com/rust-phf/rust-phf/commit/28068018dec5aab9b6ddc0da918431285db8cd34)) + - Merge pull request #309 from edef1c/uncased-macro ([`37a7794`](https://github.com/rust-phf/rust-phf/commit/37a779400b63b5d0b5d5ab63e2727bbfb9ce494d)) + - Merge branch 'master' into no-wasteful-allocations ([`33b8aff`](https://github.com/rust-phf/rust-phf/commit/33b8affe77cea8bdeccb5c8d6c730c78231fc138)) + - Update phf/examples/uncased-example/Cargo.toml ([`8193820`](https://github.com/rust-phf/rust-phf/commit/81938207ee77f787b3186a8d6060a2615fe199c6)) + - Merge branch 'master' into fastrand ([`576dd47`](https://github.com/rust-phf/rust-phf/commit/576dd47858a2db74eb4ef67a8385039ef17b867d)) +
+ +## 0.11.3 (2025-01-07) + + + +### Chore + + - bump Cargo.toml version of phf and phf_macros + +### Commit Statistics + + + + - 10 commits contributed to the release. + - 562 days passed between releases. + - 1 commit was understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Adjusting changelogs prior to release of phf_shared v0.11.3, phf_generator v0.11.3, phf_macros v0.11.3, phf v0.11.3, phf_codegen v0.11.3 ([`e111f4b`](https://github.com/rust-phf/rust-phf/commit/e111f4b53a965c188fdcbf03950321107d9b3987)) + - Merge pull request #322 from JohnTitor/release-0.11.3 ([`dc64dd6`](https://github.com/rust-phf/rust-phf/commit/dc64dd6bace986a8858590455e08659d9ea4ae4b)) + - Reset version num ([`13581f8`](https://github.com/rust-phf/rust-phf/commit/13581f8e9eefe8b8b7cb1b1ad04f2d68d97b0ffd)) + - Merge pull request #315 from LunarLambda/master ([`695a0df`](https://github.com/rust-phf/rust-phf/commit/695a0df769f3c75150a67ed9bb316579b875289d)) + - Bump Cargo.toml version of phf and phf_macros ([`a96a4e2`](https://github.com/rust-phf/rust-phf/commit/a96a4e29d63fb1ab3cc10e050571e733f5d2d0d1)) + - Macro support for uncased ([`4359e17`](https://github.com/rust-phf/rust-phf/commit/4359e17371e4b93a45b8600cc56bc27aa95c5bb1)) + - Merge pull request #290 from thaliaarchi/eq-trait ([`f89fca4`](https://github.com/rust-phf/rust-phf/commit/f89fca430205ddcbd7f41fa7c4f4f2144ae62cdb)) + - Merge pull request #300 from JohnTitor/msrv-1.61 ([`323366d`](https://github.com/rust-phf/rust-phf/commit/323366d03966ddad2eaa3432df79c9da8339e319)) + - Bump MSRV to 1.61 ([`1795f7b`](https://github.com/rust-phf/rust-phf/commit/1795f7b66b16af0191f221dc957bc8a090c891ad)) + - Implement PartialEq and Eq for map and set types ([`6e5dc32`](https://github.com/rust-phf/rust-phf/commit/6e5dc322cd3fac4eea960a6f2778989ccf985f95)) +
+ +## 0.11.2 (2023-06-24) + +### Commit Statistics + + + + - 10 commits contributed to the release. + - 319 days passed between releases. + - 0 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Release phf_shared v0.11.2, phf_generator v0.11.2, phf_macros v0.11.2, phf v0.11.2, phf_codegen v0.11.2 ([`c9c35fd`](https://github.com/rust-phf/rust-phf/commit/c9c35fd8ba3f1bc228388b0cef6e3814a02a72c0)) + - Update changelogs ([`a1e5072`](https://github.com/rust-phf/rust-phf/commit/a1e5072b8e84b108f06389a1d41ac868426a03f7)) + - Merge pull request #288 from JohnTitor/rm-phf-stats ([`8fd5b77`](https://github.com/rust-phf/rust-phf/commit/8fd5b7770d427aea5004d17ff585541d0856d40b)) + - Remove mentions to `PHF_STATS` ([`0b7a826`](https://github.com/rust-phf/rust-phf/commit/0b7a82689ceab9e0e364c1d1dbe3639d2e99320a)) + - Merge pull request #280 from jf2048/deref-bytestring ([`3776342`](https://github.com/rust-phf/rust-phf/commit/377634245c8c6f0569a2ed7b75d08366b54c8810)) + - Allow using dereferenced bytestring literal keys in phf_map! ([`8c0d057`](https://github.com/rust-phf/rust-phf/commit/8c0d0572da8c0b5e188e7fda4ab8bd4bcb97f720)) + - Merge pull request #276 from JohnTitor/playground-metadata ([`f8e9d27`](https://github.com/rust-phf/rust-phf/commit/f8e9d279c528cb6985badc3ca3a60117ef92d51b)) + - Add metadata for playground ([`7e212e3`](https://github.com/rust-phf/rust-phf/commit/7e212e345f41a16409776a59796dd9ab24d6527d)) + - Merge pull request #274 from ankane/license-files ([`21baa73`](https://github.com/rust-phf/rust-phf/commit/21baa73941a0694ec48f437c0c0a6abfcc2f32d2)) + - Include license files in crates ([`1229b2f`](https://github.com/rust-phf/rust-phf/commit/1229b2faa6b97542ab4850a1723b1723dea92814)) +
+ +## 0.11.1 (2022-08-08) + + + +### Chore + + - point to local crates for now + +### Documentation + + - state allowed key expressions in `phf_map` + +### Bug Fixes + + - remove now-unnecessary `proc-macro-hack` crate usage + Resolves . + + This resolves an issue with Windows Defender identifying `proc-macro-hack` as threats. It also sheds + a depedency that is no longer necessary, now that the MSRV of this crate is 1.46 and + `proc-macro-hack` is only useful for providing support for Rust versions 1.31 through 1.45. Per + [upstream](https://github.com/dtolnay/proc-macro-hack): + + > **Note:** _As of Rust 1.45 this crate is superseded by native support for #\[proc\_macro\] in + > expression position. Only consider using this crate if you care about supporting compilers between + > 1.31 and 1.45._ + +### Commit Statistics + + + + - 310 commits contributed to the release. + - 3 commits were understood as [conventional](https://www.conventionalcommits.org). + - 1 unique issue was worked on: [#249](https://github.com/rust-phf/rust-phf/issues/249) + +### Commit Details + + + +
view details + + * **[#249](https://github.com/rust-phf/rust-phf/issues/249)** + - Add `Map::new()` function and `Default` implementation to create new, empty map ([`baac7d0`](https://github.com/rust-phf/rust-phf/commit/baac7d065a71a388476b998ba55b1c0aedaa9d86)) + * **Uncategorized** + - Release phf_shared v0.11.1, phf_generator v0.11.1, phf_macros v0.11.1, phf v0.11.1, phf_codegen v0.11.1 ([`3897b21`](https://github.com/rust-phf/rust-phf/commit/3897b21c6d38e5adcaf9110b4bb33c19f6b41977)) + - Merge pull request #264 from rust-phf/tweak-changelog ([`97f997d`](https://github.com/rust-phf/rust-phf/commit/97f997d2be827ca636a29046c78e2c09c5c62650)) + - Replace handmade changelog with generated one by `cargo-smart-release` ([`cb84cf6`](https://github.com/rust-phf/rust-phf/commit/cb84cf6636ab52823c53e70d6abeac8f648a3482)) + - Merge pull request #260 from JohnTitor/fix-repo-link ([`1407ebe`](https://github.com/rust-phf/rust-phf/commit/1407ebe536b39611db92d765ddec4de0e6c8a16e)) + - Add category to crates ([`32a72c3`](https://github.com/rust-phf/rust-phf/commit/32a72c3859997fd6b590e9ec092ae789d2acdf55)) + - Update repository links on Cargo.toml ([`1af3b0f`](https://github.com/rust-phf/rust-phf/commit/1af3b0fe1f8fdcae7ccc1bc8d51de309fb16a6bf)) + - Merge pull request #258 from JohnTitor/release-0.11.0 ([`c0b9ef9`](https://github.com/rust-phf/rust-phf/commit/c0b9ef98e798f807f94544aeb0fff429ef280efc)) + - Release 0.11.0 ([`d2efdc0`](https://github.com/rust-phf/rust-phf/commit/d2efdc08a7eb1d0d6c414b7b2ac41ce1fe1f9a43)) + - Merge pull request #257 from JohnTitor/edition-2021 ([`36ec885`](https://github.com/rust-phf/rust-phf/commit/36ec8854a9da4f295618e98d94aaf7150df2173e)) + - Make crates edition 2021 ([`b9d25da`](https://github.com/rust-phf/rust-phf/commit/b9d25da58b912d9927fbc41901631cd77836462b)) + - Merge pull request #256 from NZXTCorp/remove-proc-macro-hack ([`a85f070`](https://github.com/rust-phf/rust-phf/commit/a85f070d641317a04b81da053cc4040619652e69)) + - Remove 
now-unnecessary `proc-macro-hack` crate usage ([`caf1ce7`](https://github.com/rust-phf/rust-phf/commit/caf1ce71aed110fb44206ce2291154572ebfe9b7)) + - Point to local crates for now ([`92e7b43`](https://github.com/rust-phf/rust-phf/commit/92e7b433a4f62cc9b070cd1d678a6061d0906ee6)) + - Merge pull request #252 from JohnTitor/clippy-fixes ([`22570b8`](https://github.com/rust-phf/rust-phf/commit/22570b89476248d22c9d77f315fd98e048c49700)) + - Fix some Clippy warnings ([`71fd47c`](https://github.com/rust-phf/rust-phf/commit/71fd47ca27a8b1fe24b2eec75efd17ddfe11835f)) + - Merge pull request #251 from JohnTitor/weak-deps ([`2e1167c`](https://github.com/rust-phf/rust-phf/commit/2e1167c2046cd20aed1a906b4e23b40303cf0c00)) + - Make "unicase + macros" features work ([`11bb242`](https://github.com/rust-phf/rust-phf/commit/11bb2426f0237b1ecea8c8038630b1231ede4871)) + - Merge pull request #245 from JohnTitor/phf-0.10.1 ([`bed0153`](https://github.com/rust-phf/rust-phf/commit/bed01538ae576876f11189d541875d228acef9e8)) + - Prepare 0.10.1 release ([`4cc8344`](https://github.com/rust-phf/rust-phf/commit/4cc8344fad640ed71d75f557ce1a3b6eded321c3)) + - Merge pull request #244 from reitermarkus/serialize-map ([`a43e0e1`](https://github.com/rust-phf/rust-phf/commit/a43e0e19459201bac496030b9a7e30267c0e6dd4)) + - Allow serializing `Map`. 
([`b6c682e`](https://github.com/rust-phf/rust-phf/commit/b6c682e81ea537b967ba055a0e464d24f5ea795c)) + - Merge pull request #243 from birkenfeld/patch-1 ([`815c17c`](https://github.com/rust-phf/rust-phf/commit/815c17cfa80a5087f91d24d56c7dae600a0df4c0)) + - State allowed key expressions in `phf_map` ([`6be1599`](https://github.com/rust-phf/rust-phf/commit/6be1599d7a0df27fd1888c78d247f8810cb8f750)) + - Merge pull request #240 from JohnTitor/docs-update ([`da98b9e`](https://github.com/rust-phf/rust-phf/commit/da98b9e80fdb22cd6d48a4a42489840afe603756)) + - Remove some stuff which is now unnecessary ([`6941e82`](https://github.com/rust-phf/rust-phf/commit/6941e825d09a98c1ea29a08ecd5fd605611584a4)) + - Refine doc comments ([`d8cfc43`](https://github.com/rust-phf/rust-phf/commit/d8cfc436059a1c2c3ede1afb0f9ec2333c046fc6)) + - Merge pull request #234 from JohnTitor/fix-ci ([`eba4cc2`](https://github.com/rust-phf/rust-phf/commit/eba4cc28d92c1db95cc430985a0fbc9ca63d1307)) + - Fix CI failure ([`d9b5ff2`](https://github.com/rust-phf/rust-phf/commit/d9b5ff23367d2bbcc385ff8243c7d972f45d459c)) + - Merge pull request #230 from JohnTitor/release-0.10 ([`3ea14b2`](https://github.com/rust-phf/rust-phf/commit/3ea14b2166553ad6e7b9afe7244144f5d661b6c6)) + - Prepare for release 0.10.0 ([`588ac25`](https://github.com/rust-phf/rust-phf/commit/588ac25dd5c0afccea084e6f94867328a6a30454)) + - Merge pull request #228 from JohnTitor/release-0.9.1 ([`d527f9d`](https://github.com/rust-phf/rust-phf/commit/d527f9d016adafe7d2930e37710291030b432838)) + - Prepare for v0.9.1 ([`9b71978`](https://github.com/rust-phf/rust-phf/commit/9b719789149ef195ef5eba093b7e73255fbef8dc)) + - Merge pull request #226 from bhgomes/iterator-traits ([`012be08`](https://github.com/rust-phf/rust-phf/commit/012be08aa1bc23092539bf617317243e672c75b1)) + - Add trait implementations to iterators mirroring std::collections ([`e47e4dc`](https://github.com/rust-phf/rust-phf/commit/e47e4dce434fd8d0ee80a3c57880f6b2465eed90)) + - Merge 
pull request #224 from bhgomes/const-fns ([`65deaf7`](https://github.com/rust-phf/rust-phf/commit/65deaf745b5175b6b8e645b6c66e53fc55bb3a85)) + - Remove Slice type and fix some docs ([`99d3533`](https://github.com/rust-phf/rust-phf/commit/99d353390f8124a283da9202fd4d163e68bc1949)) + - Add len/is_empty const-fns ([`f474922`](https://github.com/rust-phf/rust-phf/commit/f4749220eec2fccef35a66de323c01704a0eeda1)) + - Merge pull request #223 from JohnTitor/minor-cleanup ([`c746106`](https://github.com/rust-phf/rust-phf/commit/c746106ad05917ad62f244504727b07e07c3e075)) + - Minor cleanups ([`8868d08`](https://github.com/rust-phf/rust-phf/commit/8868d088e2fed36fcd7741e9a1c5bf68bef4f46e)) + - Merge pull request #222 from JohnTitor/precisify-msrv ([`50f8a0d`](https://github.com/rust-phf/rust-phf/commit/50f8a0d3d3f4cc7e15146e29e0559ba057a25a4d)) + - Precisify MSRV ([`63886f6`](https://github.com/rust-phf/rust-phf/commit/63886f6eb0d53d5bf44a10c713066b090686b8e2)) + - Merge pull request #219 from JohnTitor/release-0.9.0 ([`307969f`](https://github.com/rust-phf/rust-phf/commit/307969ff3bb8cae320e648890a9525920035944b)) + - Prepare 0.9.0 release ([`2ca46c4`](https://github.com/rust-phf/rust-phf/commit/2ca46c4f9c9083c128fcc6add33dc5986638940f)) + - Cleanup cargo metadata ([`a9e4b0a`](https://github.com/rust-phf/rust-phf/commit/a9e4b0a1e84825004fa66e938b870f83d3147d0d)) + - Merge pull request #218 from JohnTitor/cleanup ([`76f9072`](https://github.com/rust-phf/rust-phf/commit/76f907239af9b0cca7dac4e6d702cedc72f6f371)) + - Run rustfmt ([`dd86c6c`](https://github.com/rust-phf/rust-phf/commit/dd86c6c103f25021b52144085b8fab0a94582bef)) + - Fix some clippy warnings ([`9adc370`](https://github.com/rust-phf/rust-phf/commit/9adc370ead7fbcc36cd0c74f495ab7631e0c9754)) + - Cleanup docs ([`ddecc3a`](https://github.com/rust-phf/rust-phf/commit/ddecc3aa97aec6d9e9d6e59c57bc598d476335c1)) + - Merge pull request #217 from JohnTitor/rename-feature 
([`ff77659`](https://github.com/rust-phf/rust-phf/commit/ff77659a001c08f1f069a17cc5d2ff6fdd51569c)) + - Rename `unicase_support` to `unicase` ([`b47174b`](https://github.com/rust-phf/rust-phf/commit/b47174bb9ebbd68e41316e1aa39c6541a45356a6)) + - Merge pull request #197 from benesch/uncased ([`8b44f0c`](https://github.com/rust-phf/rust-phf/commit/8b44f0c4caf1a431426ff8dbae68f0693d6cef63)) + - Add support for uncased ([`2a6087f`](https://github.com/rust-phf/rust-phf/commit/2a6087fcaf99b445ff6013f693f7c4fe5d6f7387)) + - Merge pull request #211 from skyfloogle/ordered-phfborrow ([`6ec8afb`](https://github.com/rust-phf/rust-phf/commit/6ec8afb6d85121d2edb023fcf3626308a4b3dad4)) + - Replace `std::borrow::Borrow` with `PhfBorrow` for ordered maps and sets ([`f43a9cf`](https://github.com/rust-phf/rust-phf/commit/f43a9cf4aa2aefc9e743727697ec65a0ba6cc29e)) + - Merge pull request #174 from abonander/169-drop-borrow ([`3c087d4`](https://github.com/rust-phf/rust-phf/commit/3c087d4782be496e7955d2b51d5883c4ce64ccd3)) + - Replace uses of `std::borrow::Borrow` with new `PhfBorrow` trait ([`b2f3a9c`](https://github.com/rust-phf/rust-phf/commit/b2f3a9c6a95ebabc2b0ae7ed1ec3ee7d72418e85)) + - Merge pull request #205 from skyfloogle/ordered-stuff ([`9ae1678`](https://github.com/rust-phf/rust-phf/commit/9ae1678f2507d6d26a1b780385a2e17bdfbb0b5c)) + - Add back ordered_map, ordered_set ([`0ab0108`](https://github.com/rust-phf/rust-phf/commit/0ab01081e4bd8f40bc18ab554c95f217220228d5)) + - Merge pull request #208 from JohnTitor/simplify-workspace ([`a47ac36`](https://github.com/rust-phf/rust-phf/commit/a47ac36b16dd8798659be3e24f74051cd1ed760d)) + - Use `[patch.crates-io]` section instead of path key ([`f47515b`](https://github.com/rust-phf/rust-phf/commit/f47515bce5c433214dbecee262a7a6f14e6a74d4)) + - Merge pull request #194 from pickfire/patch-1 ([`caec346`](https://github.com/rust-phf/rust-phf/commit/caec346b07cf04cc7850e4aeeca077856b79256a)) + - Merge pull request #190 from 
rjsberry/phf-shared-no-default-features ([`8dce12c`](https://github.com/rust-phf/rust-phf/commit/8dce12c4716cb7eeaedd5c7f5143b9c0450cedc2)) + - Fix style in doc ([`a285906`](https://github.com/rust-phf/rust-phf/commit/a28590675293af7c8faf866c1d847b7ed6876048)) + - Fix building with no_std ([`db4ce56`](https://github.com/rust-phf/rust-phf/commit/db4ce56082aafeb1aeee7e079d2bb4ae97ae58be)) + - Merge pull request #180 from abonander/master ([`81c7cc5`](https://github.com/rust-phf/rust-phf/commit/81c7cc5b48649108428671d3b8ad151f6fbdb359)) + - Release v0.8.0 ([`4060288`](https://github.com/rust-phf/rust-phf/commit/4060288dc2c1ebe3b0630e4016ed51935bb0c863)) + - Merge pull request #171 from abonander/170-removals ([`0d00821`](https://github.com/rust-phf/rust-phf/commit/0d0082178568036736bb6d51cb91f95ca5a616c3)) + - Remove ordered_map, ordered_set, phf_builder ([`8ae2bb8`](https://github.com/rust-phf/rust-phf/commit/8ae2bb886841a69a4fc482f439e2374f2373ab15)) + - Merge pull request #168 from abonander/167-std-default ([`a932094`](https://github.com/rust-phf/rust-phf/commit/a93209486f5874515da0483002e8669b2dbf95e6)) + - Switch optional `core` feature to default `std` feature ([`645e23d`](https://github.com/rust-phf/rust-phf/commit/645e23dda30ac1b99af39f201a74211e7ac3251a)) + - Merge pull request #164 from abonander/perf-improvements ([`70129c6`](https://github.com/rust-phf/rust-phf/commit/70129c6fbcdf428ce9f1014eea935301ac70e410)) + - Use two separate hashes and full 32-bit displacements ([`9b70bd9`](https://github.com/rust-phf/rust-phf/commit/9b70bd94f8b0b74f156e75ccefbd4a4c7ba29728)) + - Merge pull request #149 from danielhenrymantilla/proc-macro-hack ([`ae649cd`](https://github.com/rust-phf/rust-phf/commit/ae649cd67d9ce1452092ee739971d8ee232505ee)) + - Made macros work in stable ([`4fc0d1a`](https://github.com/rust-phf/rust-phf/commit/4fc0d1a8c3bcc3950082b614d8bfa4a0f63d6962)) + - Merge branch 'master' into patch-1 
([`cd0d7ce`](https://github.com/rust-phf/rust-phf/commit/cd0d7ce1194252dcaca3153988ba2a4effa66b4f)) + - Merge pull request #152 from abonander/unicase-upgrade ([`27f7c2c`](https://github.com/rust-phf/rust-phf/commit/27f7c2c85efde7aeb3c5409985f2d605aff8e05b)) + - Convert to 2018 edition ([`9ff66ab`](https://github.com/rust-phf/rust-phf/commit/9ff66ab36a23c7170cc775773f042a06de426c3b)) + - Merge pull request #145 from cetra3/empty_hash ([`2d3176b`](https://github.com/rust-phf/rust-phf/commit/2d3176b384112db5ca3fea08f1973ffc8a7c729b)) + - Fix & include tests for empty maps ([`83fd51c`](https://github.com/rust-phf/rust-phf/commit/83fd51c3095cbcd22b87c4d26ee22eb27a4e98d0)) + - Release v0.7.24 ([`1287414`](https://github.com/rust-phf/rust-phf/commit/1287414b1302d2d717c5f4be81accf4c12ccad48)) + - Docs for new macro setup ([`364ed47`](https://github.com/rust-phf/rust-phf/commit/364ed47c9f4401655fe7b897ce3e01e46706c286)) + - Fix feature name ([`e3a7442`](https://github.com/rust-phf/rust-phf/commit/e3a744255582aba8c743543503c9ad4c980a1ac3)) + - Reexport macros through phf crate ([`588fd1a`](https://github.com/rust-phf/rust-phf/commit/588fd1a785492afa5ad76db0556097e32e24387d)) + - Release v0.7.23 ([`a050b6f`](https://github.com/rust-phf/rust-phf/commit/a050b6f2a6b825bf0824339266ab9545340420d4)) + - Release 0.7.22 ([`ab88405`](https://github.com/rust-phf/rust-phf/commit/ab884054fa17eef915db2bdb5259c7aa71fbfea6)) + - Release v0.7.21 ([`6c7e2d9`](https://github.com/rust-phf/rust-phf/commit/6c7e2d9ce17ff1b87507925bdbe87e6e682ed3e4)) + - Typo ([`8d23b15`](https://github.com/rust-phf/rust-phf/commit/8d23b15361094b23c4eabacdb12f2dda386cc8e0)) + - Link to docs.rs ([`61142c5`](https://github.com/rust-phf/rust-phf/commit/61142c5aa168cff1bf53a6961ddc12012b49e1bb)) + - Release v0.7.20 ([`f631f50`](https://github.com/rust-phf/rust-phf/commit/f631f50abfaf6ea3d6fc8caaada47975b6df3a62)) + - Merge branch 'release' 
([`ea7e256`](https://github.com/rust-phf/rust-phf/commit/ea7e2562706663632a0af65ae9fa94e5cf78c4ea)) + - Merge branch 'release-v0.7.19' into release ([`81a4806`](https://github.com/rust-phf/rust-phf/commit/81a4806b05f14fb49aa972de27a42926a542ec44)) + - Release v0.7.19 ([`0a98dd1`](https://github.com/rust-phf/rust-phf/commit/0a98dd1865d12a3fa4cc27bdb38fa1e7374940d9)) + - Merge branch 'release' ([`ecab54b`](https://github.com/rust-phf/rust-phf/commit/ecab54b8a028c88938f220dbb0a684e017bab62f)) + - Merge branch 'release-v0.7.18' into release ([`dfa970b`](https://github.com/rust-phf/rust-phf/commit/dfa970b229cc32cfb2da1692aa94ad8a266e704a)) + - Release v0.7.18 ([`3f71765`](https://github.com/rust-phf/rust-phf/commit/3f717650f4331f5dbb9d7a3f878228fcf1138729)) + - Merge branch 'release' ([`5f08563`](https://github.com/rust-phf/rust-phf/commit/5f0856327731107d9fada1b0318f6f15f32957c2)) + - Merge branch 'release-v0.7.17' into release ([`e073dd2`](https://github.com/rust-phf/rust-phf/commit/e073dd262d1b4c95234222ee5048fc883b9c7301)) + - Release v0.7.17 ([`21ecf72`](https://github.com/rust-phf/rust-phf/commit/21ecf72101715e4754db95a64ecd7de5a37b7f14)) + - Merge branch 'release' ([`839f06d`](https://github.com/rust-phf/rust-phf/commit/839f06d5a10c1300353b8f3c972990624695b668)) + - Merge branch 'release-v0.7.16' into release ([`6f5575c`](https://github.com/rust-phf/rust-phf/commit/6f5575c9b12d3619ea17c0825a613fcac12820f4)) + - Release v0.7.16 ([`8bf29c1`](https://github.com/rust-phf/rust-phf/commit/8bf29c10a878c83d73cc40385f0e96cb9cc95afa)) + - Merge branch 'release' ([`b4ec398`](https://github.com/rust-phf/rust-phf/commit/b4ec398f415e5cac2cd4d794b1889788e644447f)) + - Merge branch 'release-v0.7.15' into release ([`6bbc9e2`](https://github.com/rust-phf/rust-phf/commit/6bbc9e249b9a84e2019432b7d3b178851d2d776e)) + - Release v0.7.15 ([`20f896e`](https://github.com/rust-phf/rust-phf/commit/20f896e6975cabb9cf9883b08eaa5b3da8597f11)) + - Merge branch 'release' 
([`7c692d4`](https://github.com/rust-phf/rust-phf/commit/7c692d42970bf6cb2540f6b2d3c88d63b3fd1f7a)) + - Merge branch 'release-v0.7.14' into release ([`ea8dd65`](https://github.com/rust-phf/rust-phf/commit/ea8dd652c292746a20bf3a680e9f925f6f0530b1)) + - Release v0.7.14 ([`fee66fc`](https://github.com/rust-phf/rust-phf/commit/fee66fc20e33f2b119f830a8926f3b6e52abcf09)) + - Merge pull request #82 from Ryman/unicase ([`909fac5`](https://github.com/rust-phf/rust-phf/commit/909fac5d4414a7d366432de078bcc6f78a25c230)) + - Add an impl of PhfHash for UniCase ([`d761144`](https://github.com/rust-phf/rust-phf/commit/d761144daf92ce6aed83165aa840a1ae72bd0bb2)) + - Drop all rust features ([`888f623`](https://github.com/rust-phf/rust-phf/commit/888f6234cd4e26e08b1f2d3716e4d4e0b95d0196)) + - Introduce a Slice abstraction for buffers ([`0cc3844`](https://github.com/rust-phf/rust-phf/commit/0cc38449c21f29bd9348e28c5719d650e16159cf)) + - Merge branch 'release' ([`d9351e1`](https://github.com/rust-phf/rust-phf/commit/d9351e1488bd42d1a4453e4a465177fb1c781fdc)) + - Merge branch 'release-v0.7.13' into release ([`b582e4e`](https://github.com/rust-phf/rust-phf/commit/b582e4ecec23be992ba915fc7873c0d5598f388a)) + - Release v0.7.13 ([`4769a6d`](https://github.com/rust-phf/rust-phf/commit/4769a6d2ce1d392da06e4b3cb833a1cdccb1f1aa)) + - Merge branch 'release' ([`5659a9d`](https://github.com/rust-phf/rust-phf/commit/5659a9db39bc5ee2179b264fce4cba4384d6d025)) + - Merge branch 'release-v0.7.12' into release ([`2f0a5de`](https://github.com/rust-phf/rust-phf/commit/2f0a5de9f01d9d22c774d8d85daec2a047a462e8)) + - Release v0.7.12 ([`9b75ee5`](https://github.com/rust-phf/rust-phf/commit/9b75ee5ed14060c45a5785fba0387be09e698624)) + - Merge pull request #75 from aidanhs/aphs-fix-ord-set-doc ([`ae5ee38`](https://github.com/rust-phf/rust-phf/commit/ae5ee38cad084144775d89fe38d8fdda33224697)) + - Fix ordered set `index` documentation 
([`44e495f`](https://github.com/rust-phf/rust-phf/commit/44e495f634b1588ab148333cc582557f7877177f)) + - Merge branch 'release' ([`87ffab8`](https://github.com/rust-phf/rust-phf/commit/87ffab863aaeefb5ac2164da62f0407122d8057e)) + - Merge branch 'release-v0.7.11' into release ([`7260d04`](https://github.com/rust-phf/rust-phf/commit/7260d04413349bacab484afb74f9a496335278e1)) + - Release v0.7.11 ([`a004227`](https://github.com/rust-phf/rust-phf/commit/a0042277b181ec95fcbf29751b9a453f4f962ebb)) + - Merge branch 'release' ([`1579bec`](https://github.com/rust-phf/rust-phf/commit/1579bec1448c7b833f5965fe39d4ef2df66c982c)) + - Merge branch 'release-v0.7.10' into release ([`25cea13`](https://github.com/rust-phf/rust-phf/commit/25cea133fb4eec938bdfa74f04adbc8d94e30d4e)) + - Release v0.7.10 ([`c43154b`](https://github.com/rust-phf/rust-phf/commit/c43154b2661dc09620a7879c16f37b47d6ec03ae)) + - Merge branch 'release' ([`2c67ce5`](https://github.com/rust-phf/rust-phf/commit/2c67ce5a4129cd543178bf015f021a3bb83b6895)) + - Merge branch 'release-v0.7.9' into release ([`87206e1`](https://github.com/rust-phf/rust-phf/commit/87206e1c7b8d4089370dc168402ded0c0700a447)) + - Release v0.7.9 ([`b7d29df`](https://github.com/rust-phf/rust-phf/commit/b7d29dfe0df288b2da74de195f764eace1c8e443)) + - Merge branch 'release' ([`cd33902`](https://github.com/rust-phf/rust-phf/commit/cd339023e90ac1ce6971fa81badea65fb1f2b086)) + - Merge branch 'release-v0.7.8' into release ([`8bc23a0`](https://github.com/rust-phf/rust-phf/commit/8bc23a023908a038d668b6f7d8e94ee416995285)) + - Release v0.7.8 ([`aad0b9b`](https://github.com/rust-phf/rust-phf/commit/aad0b9b658fb970e3df60b066961aafca1a17c44)) + - Merge branch 'release' ([`dccff69`](https://github.com/rust-phf/rust-phf/commit/dccff69384729e3d4972174ce62d8f9db9429485)) + - Merge branch 'release-v0.7.7' into release ([`2d988b7`](https://github.com/rust-phf/rust-phf/commit/2d988b7dfb04d949246adc047f6b195263612246)) + - Release v0.7.7 
([`c9e7a93`](https://github.com/rust-phf/rust-phf/commit/c9e7a93f4d6f85a72651aba6187e4c956d8c1167)) + - Run through rustfmt ([`58e2223`](https://github.com/rust-phf/rust-phf/commit/58e222380b7fc9609a055cb5a6110ba04e47d677)) + - Merge branch 'release' ([`776046c`](https://github.com/rust-phf/rust-phf/commit/776046c961456dee9e16a6b6574d336c66e259f8)) + - Merge branch 'release-v0.7.6' into release ([`2ea7d5c`](https://github.com/rust-phf/rust-phf/commit/2ea7d5cab5e9e54952ca618b43ec3583a33a4847)) + - Release v0.7.6 ([`5bcd5c9`](https://github.com/rust-phf/rust-phf/commit/5bcd5c95215f5aa29e133cb2912662085a8158f0)) + - Fix core feature build ([`751c94b`](https://github.com/rust-phf/rust-phf/commit/751c94b208ded3b4d8ccff495513e4a55cb8fde0)) + - Use libstd debug builders ([`fd71c31`](https://github.com/rust-phf/rust-phf/commit/fd71c31288d72920a72eb73a69bc7325e7b1ba48)) + - Simplify no_std logic a bit ([`70f2ed9`](https://github.com/rust-phf/rust-phf/commit/70f2ed93d2e64b822bf2a23fde0ee848e8785bd1)) + - Merge pull request #68 from gz/master ([`44006f7`](https://github.com/rust-phf/rust-phf/commit/44006f74efca95d4f049bbf25df6321977c39577)) + - Reinstantiate no_std cargo feature flag. 
([`7c3f757`](https://github.com/rust-phf/rust-phf/commit/7c3f757cdc83b4035d81f0d521b4b80b9080155e)) + - Merge branch 'release' ([`1f770df`](https://github.com/rust-phf/rust-phf/commit/1f770df1290b586a8d641ecb0bbd105080afc0ea)) + - Merge branch 'release-v0.7.5' into release ([`bb65b8c`](https://github.com/rust-phf/rust-phf/commit/bb65b8cca30ef9d4518e3083558019a972873efa)) + - Release v0.7.5 ([`fda44f5`](https://github.com/rust-phf/rust-phf/commit/fda44f550401c1bd4aad29bb2c07030b86761028)) + - Merge branch 'release' ([`269b5dc`](https://github.com/rust-phf/rust-phf/commit/269b5dc41ebf82f423393d5219e8107e9c911a03)) + - Merge branch 'release-v0.7.4' into release ([`7c093e8`](https://github.com/rust-phf/rust-phf/commit/7c093e83ffe5192d9cdcd5402b6abb7800ffafb3)) + - Release v0.7.4 ([`c7c0d3c`](https://github.com/rust-phf/rust-phf/commit/c7c0d3c294126157f0275a05b7c3a65c419234a1)) + - Merge pull request #62 from SimonSapin/string-cache ([`6f59718`](https://github.com/rust-phf/rust-phf/commit/6f5971869e5864cae653ec3606d17b554c343ef8)) + - Add hash() and get_index() to phf_shared. 
([`d3b2ea0`](https://github.com/rust-phf/rust-phf/commit/d3b2ea0f0a9bd9cb79da90d8795f1905c3df1f5f)) + - Update PhfHash to mirror std::hash::Hash ([`96ef156`](https://github.com/rust-phf/rust-phf/commit/96ef156baae669b233673d6be2b96617ad48551e)) + - Release v0.7.3 ([`77ea239`](https://github.com/rust-phf/rust-phf/commit/77ea23917e908b10c4c5c463671a8409292f8661)) + - Merge pull request #59 from alexcrichton/update ([`6bd5a93`](https://github.com/rust-phf/rust-phf/commit/6bd5a939bda52281b0fa9844df1c42f1ce0220be)) + - Remove prelude imports ([`98183e1`](https://github.com/rust-phf/rust-phf/commit/98183e132a28b46af7bf72edd218549218d00776)) + - Release v0.7.2 ([`642b69d`](https://github.com/rust-phf/rust-phf/commit/642b69d0100a4ee7ec6e430ef1351bd1f28f9a4a)) + - Merge pull request #55 from SimonSapin/indexing ([`0cc37b2`](https://github.com/rust-phf/rust-phf/commit/0cc37b2f9e46e3c597373a8dfa669cc62acf5253)) + - Add `index` methods to `OrderedMap` and `OrderedSet`. ([`d2af00d`](https://github.com/rust-phf/rust-phf/commit/d2af00d4e32412d6f6b7597786976c1a0b642956)) + - Release v0.7.1 ([`9cb9de9`](https://github.com/rust-phf/rust-phf/commit/9cb9de911ad4e16964f0def29780dde1630c3619)) + - Release v0.7.0 ([`555a690`](https://github.com/rust-phf/rust-phf/commit/555a690561673597aee068650ac884bbcc2e31cf)) + - Stabilize phf ([`e215273`](https://github.com/rust-phf/rust-phf/commit/e2152739cbdd471116d88bb4a9cea4cdfede1e42)) + - Drop debug_builders feature ([`0b68ea5`](https://github.com/rust-phf/rust-phf/commit/0b68ea538639ebbdae032c9c3abefe547a60e982)) + - Release v0.6.19 ([`5810d30`](https://github.com/rust-phf/rust-phf/commit/5810d30ef2162f33cfb4da99c65b7344c7f2913b)) + - Clean up debug impls ([`7e32f39`](https://github.com/rust-phf/rust-phf/commit/7e32f399e150739c9cea3b9acd958d885d796372)) + - Merge pull request #53 from kmcallister/rustup ([`7f0392a`](https://github.com/rust-phf/rust-phf/commit/7f0392ad5ed9bb88a95d931f9c92e66a83aa039a)) + - Upgrade to rustc 1.0.0-dev (d8be84eb4 
2015-03-29) (built 2015-03-29) ([`7d74f1f`](https://github.com/rust-phf/rust-phf/commit/7d74f1ff5eaa6a2963b97cdd7683e449681ff9aa)) + - Release v0.6.18 ([`36efc72`](https://github.com/rust-phf/rust-phf/commit/36efc721478d097fba1e5458cbdd9f288637abae)) + - Fix for upstream changes ([`eabadcf`](https://github.com/rust-phf/rust-phf/commit/eabadcf7e8af351ba8f07d86746e35adc8c5812e)) + - Release v0.6.17 ([`271ccc2`](https://github.com/rust-phf/rust-phf/commit/271ccc27d885363d4d8c549f75624d08c48e56c5)) + - Release v0.6.15 ([`ede14df`](https://github.com/rust-phf/rust-phf/commit/ede14df1e574674852b09bcafff4ad549ebfd4ae)) + - Release v0.6.14 ([`cf64ebb`](https://github.com/rust-phf/rust-phf/commit/cf64ebb8f769c9f12c9a03d05713dde6b8caf371)) + - Release v0.6.13 ([`4fdb533`](https://github.com/rust-phf/rust-phf/commit/4fdb5331fd9978ca3e180a06fb2e34627f50fb77)) + - Fix warnings and use debug builders ([`4d28684`](https://github.com/rust-phf/rust-phf/commit/4d28684b72333e911e23b898b5780947d49822a5)) + - Release v0.6.12 ([`59ca586`](https://github.com/rust-phf/rust-phf/commit/59ca58637206c9806c13cc24cb35cb7d0ce9d23f)) + - Release v0.6.11 ([`e1e6d3b`](https://github.com/rust-phf/rust-phf/commit/e1e6d3b40a6babddd0989406f2b4e952443ff52e)) + - Release v0.6.10 ([`fc45373`](https://github.com/rust-phf/rust-phf/commit/fc45373b34a461664f532c5108f3d2625172c128)) + - Add documentation for phf_macros ([`8eca797`](https://github.com/rust-phf/rust-phf/commit/8eca79711f33d04ad773a023581b6bd0a6f1efdc)) + - Move tests to phf_macros ([`40dbc32`](https://github.com/rust-phf/rust-phf/commit/40dbc328456003484716021cc317156967f1b2c1)) + - Remove core feature ([`d4c189a`](https://github.com/rust-phf/rust-phf/commit/d4c189a2b060df33e7c97d6c1f0f430b68fc23b5)) + - Release v0.6.9 ([`822f4e3`](https://github.com/rust-phf/rust-phf/commit/822f4e3fb127dc02d36d802803d71aa5b98bed3c)) + - Fix for upstream changes ([`f014882`](https://github.com/rust-phf/rust-phf/commit/f01488236a8e944f1b12b4bc441d55c10fc47aa1)) + 
- Release v0.6.8 ([`cd637ca`](https://github.com/rust-phf/rust-phf/commit/cd637cafb6d37b1901b6c119a7d26f253e9a288e)) + - Merge pull request #49 from kmcallister/rustup ([`ee54b59`](https://github.com/rust-phf/rust-phf/commit/ee54b59ff1eb87b10aa2df60b25887fcb0afa765)) + - Upgrade to rustc 1.0.0-nightly (6c065fc8c 2015-02-17) (built 2015-02-18) ([`cbd9a41`](https://github.com/rust-phf/rust-phf/commit/cbd9a41bdf3771eceeb1d4701e1d598b1321cdad)) + - .map(|t| t.clone()) -> .cloned() ([`044f690`](https://github.com/rust-phf/rust-phf/commit/044f6903cca0a3d656e4a738cc02b1d29d80c996)) + - Add example to root module docs ([`fbbb530`](https://github.com/rust-phf/rust-phf/commit/fbbb53094e52efa19ff225d3d3ef2cbc00b4a7af)) + - Release v0.6.7 ([`bfc36c9`](https://github.com/rust-phf/rust-phf/commit/bfc36c979225f652cdb72f3b1f2a25e77b50ab8c)) + - Release v0.6.6 ([`b09a174`](https://github.com/rust-phf/rust-phf/commit/b09a174a166c7744c5989bedc6ba68340f6f7fd1)) + - Fix for upstream changse ([`9bd8705`](https://github.com/rust-phf/rust-phf/commit/9bd870597fb26a109a4f33926a299729c00aea10)) + - Release v0.6.5 ([`271e784`](https://github.com/rust-phf/rust-phf/commit/271e7848f35b31d6ce9fc9268de173738464bfc8)) + - Fix for upstream changes ([`3db7cef`](https://github.com/rust-phf/rust-phf/commit/3db7cef414e4de28eb6c18938c275a3aafbdafa4)) + - Fix doc URLs ([`e1c53fc`](https://github.com/rust-phf/rust-phf/commit/e1c53fc3d79d896ec65677ed88eda2140468e124)) + - Move docs to this repo and auto build them ([`f8ef160`](https://github.com/rust-phf/rust-phf/commit/f8ef160480e2d4ce72fa7afb6ebce70e45acbc76)) + - Release v0.6.4 ([`6866c1b`](https://github.com/rust-phf/rust-phf/commit/6866c1bf5ad5091bc969f1356884aa86c27458cb)) + - Release v0.6.3 ([`b0c5e3c`](https://github.com/rust-phf/rust-phf/commit/b0c5e3cb69742f81160ea80a3ba1782a0b4e01a2)) + - Release v0.6.2 ([`d9ddf45`](https://github.com/rust-phf/rust-phf/commit/d9ddf45b15ba812b0d3acedffb08e901742e56c4)) + - Implement IntoIterator 
([`2f63ded`](https://github.com/rust-phf/rust-phf/commit/2f63ded4b37f91215754545b828ca14a1aad2d32)) + - Link to libstd by default ([`24555b1`](https://github.com/rust-phf/rust-phf/commit/24555b19e6b54656633cc4ceac91864f14c20471)) + - Release v0.6.1 ([`ca0e9f6`](https://github.com/rust-phf/rust-phf/commit/ca0e9f6b9c737f3d11bcad2f4624bb5603a8170e)) + - Fix for upstream changes ([`69ca376`](https://github.com/rust-phf/rust-phf/commit/69ca376dc8daa094ab16f1fcbadb65f83a75939b)) + - Fix for stability changes ([`f7fb510`](https://github.com/rust-phf/rust-phf/commit/f7fb510dfe67f11522a2d214bd14d21f910bfd7b)) + - More sed fixes ([`81b54b2`](https://github.com/rust-phf/rust-phf/commit/81b54b22f2c87914a737fc4c650f95809ff1383e)) + - Release v0.6.0 ([`09d6870`](https://github.com/rust-phf/rust-phf/commit/09d687053caf4d321f72907528573b3334fae3c2)) + - Rename phf_mac to phf_macros ([`c50d107`](https://github.com/rust-phf/rust-phf/commit/c50d1077b1d53fccd703021911a7100b8937bbc7)) + - More fixes for bad sed ([`28af2aa`](https://github.com/rust-phf/rust-phf/commit/28af2aa411cc418025c8d04fd838db5cda6a792b)) + - Fix silly sed error ([`39e098a`](https://github.com/rust-phf/rust-phf/commit/39e098a7fb333cc046f4506f4c20cbc0d079c12f)) + - Show -> Debug ([`384ead4`](https://github.com/rust-phf/rust-phf/commit/384ead41f21d0cb2c46f3b6628e5ba9ee00f79c0)) + - Release v0.5.0 ([`8683be2`](https://github.com/rust-phf/rust-phf/commit/8683be260effe5605243ef230bad6154ef4e5e20)) + - Add type to Show implementations ([`c5a4f31`](https://github.com/rust-phf/rust-phf/commit/c5a4f3112e09d84332305bd7daff3a93691c7b3c)) + - Merge pull request #41 from alexcrichton/update ([`79772f4`](https://github.com/rust-phf/rust-phf/commit/79772f414fb18cedc33bf4ee95a9dcdbf9c0caad)) + - Remove unused features ([`88700a2`](https://github.com/rust-phf/rust-phf/commit/88700a2068c0901db8454119e3bcae5953d5b8a2)) + - Remove fmt::String impls for structures 
([`5135f02`](https://github.com/rust-phf/rust-phf/commit/5135f029157d13bde463740e75140f9c4403edaa)) + - Release v0.4.9 ([`28cbe70`](https://github.com/rust-phf/rust-phf/commit/28cbe704e0f96495c2527ad93c5e67315c245908)) + - Fix for upstream changes ([`0b22188`](https://github.com/rust-phf/rust-phf/commit/0b22188f5767a0a125d01ed8b176ce19fef95cad)) + - Release v0.4.8 ([`bb858f1`](https://github.com/rust-phf/rust-phf/commit/bb858f11dd88579d47b0089121f8d551731464ab)) + - Merge pull request #38 from chris-morgan/master ([`668f986`](https://github.com/rust-phf/rust-phf/commit/668f986705ba3a6385b47b851878250ce954a6dc)) + - Release v0.4.7 ([`d83f551`](https://github.com/rust-phf/rust-phf/commit/d83f551a874a24b2a4308804e7cbca32a1aa2494)) + - Fix for upstream changes ([`c3ae5ac`](https://github.com/rust-phf/rust-phf/commit/c3ae5ac94cfa11404b420d45229c3a0d0d8a4535)) + - Release v0.4.6 ([`360bf81`](https://github.com/rust-phf/rust-phf/commit/360bf81ad3aafced75dc64a49e58a867d5239264)) + - Release v0.4.5 ([`ab4786c`](https://github.com/rust-phf/rust-phf/commit/ab4786c09b55e46658f2a66092caf6a782d056a6)) + - Fix for upstream changes ([`6963a16`](https://github.com/rust-phf/rust-phf/commit/6963a16a7619c3aa4a14ed880334e5712deae20e)) + - Release v0.4.4 ([`f678635`](https://github.com/rust-phf/rust-phf/commit/f678635378555b7d086014b0466aea12a3ae5701)) + - Fix for upstream changes ([`2b4863f`](https://github.com/rust-phf/rust-phf/commit/2b4863fcb5827d5bd89cc278d2a3052b6b3ee20e)) + - Release v0.4.3 ([`4f5902c`](https://github.com/rust-phf/rust-phf/commit/4f5902c222a81da009bf7955bc96568c73b46b13)) + - Fix for weird type inference breakage ([`3c36bfb`](https://github.com/rust-phf/rust-phf/commit/3c36bfbdd6ebfc1e544cbd38473f48e91406d965)) + - Release v0.4.2 ([`69d92b8`](https://github.com/rust-phf/rust-phf/commit/69d92b869fab51a31fda6126003edadd9e832b32)) + - Merge pull request #37 from alexcrichton/update 
([`b9f0a43`](https://github.com/rust-phf/rust-phf/commit/b9f0a43500499fc08170690bdc6624f289e35841)) + - Update to rust master ([`4a0d48d`](https://github.com/rust-phf/rust-phf/commit/4a0d48d165d78d1b3e8f791503e220a032d26d24)) + - Release v0.4.1 ([`0fba837`](https://github.com/rust-phf/rust-phf/commit/0fba8374fd6fb1b10d9d456ae4b1310b00e9d9ca)) + - Release v0.4.0 ([`49dbb36`](https://github.com/rust-phf/rust-phf/commit/49dbb3636621c0436e771a4e0ebfe7342b676616)) + - Fix for upstream changes and drop xxhash ([`fc2539f`](https://github.com/rust-phf/rust-phf/commit/fc2539f7893ef0f833a8c13ec77ba317bd8bf43e)) + - Release v0.3.0 ([`0a80b06`](https://github.com/rust-phf/rust-phf/commit/0a80b06ecde77b33cec8c956c67704613fdd313e)) + - Fix for unboxed closure changes ([`d96a1e5`](https://github.com/rust-phf/rust-phf/commit/d96a1e5c7107eceb5cda147eb2ac3691ec534f68)) + - Rename Set and OrderedSet iterators ([`9103fc5`](https://github.com/rust-phf/rust-phf/commit/9103fc564121d90aa24adf1014ad82bc09119e0f)) + - Merge pull request #32 from sp3d/master ([`fc4829a`](https://github.com/rust-phf/rust-phf/commit/fc4829a292663e4e30a23a4ba1de693d154cd611)) + - Add support for [u8, ..N] keys ([`e26947c`](https://github.com/rust-phf/rust-phf/commit/e26947cc264266bcbc85b8cf5c46b2019d654c72)) + - Bump to 0.2 ([`4546f51`](https://github.com/rust-phf/rust-phf/commit/4546f51fccbd56ddf1214fe232db8926d9f471de)) + - Remove uneeded feature ([`98dde65`](https://github.com/rust-phf/rust-phf/commit/98dde65406865890af53618b7517ca8fcb2da5ad)) + - Alter entries iterator behavior ([`14627f5`](https://github.com/rust-phf/rust-phf/commit/14627f5696156b09bcc1150bee0318fa3c5c6c0f)) + - Bump to 0.1.0 ([`43d9a50`](https://github.com/rust-phf/rust-phf/commit/43d9a50e6240716d68dadd9d037f22b2f7df4b58)) + - Merge pull request #31 from jamesrhurst/exactsize ([`d20c311`](https://github.com/rust-phf/rust-phf/commit/d20c311e0e519c0ace07c0d2085d6d35e64a5ba8)) + - Make publishable on crates.io 
([`4ad2bb2`](https://github.com/rust-phf/rust-phf/commit/4ad2bb27be35015b3f37ec7025c46df9170b3ef9)) + - ExactSize is now ExactSizeIterator ([`6a7cc6e`](https://github.com/rust-phf/rust-phf/commit/6a7cc6eb9ec08b103b6b62fa39bdb3229f3cdbe4)) + - Use repository packages ([`6e3a54d`](https://github.com/rust-phf/rust-phf/commit/6e3a54d1fee637c59e86b06ee5af67ab01039338)) + - Add license and descriptions ([`ff7dad4`](https://github.com/rust-phf/rust-phf/commit/ff7dad4cb8ad84d8fe05df2f1f32d959971eaa1c)) + - Update to use BorrowFrom ([`2f3c605`](https://github.com/rust-phf/rust-phf/commit/2f3c6053c2d754974a94aa45a49b8cce10ae88ba)) + - Merge pull request #28 from cgaebel/master ([`cc0d031`](https://github.com/rust-phf/rust-phf/commit/cc0d031772c1068781eaf64878ac2cd93499d6cf)) + - S/kv/entry/ ([`bf62eb8`](https://github.com/rust-phf/rust-phf/commit/bf62eb878981115492fbac99ff4d9f6c99858f72)) + - Merge pull request #27 from cgaebel/master ([`f6ce09a`](https://github.com/rust-phf/rust-phf/commit/f6ce09a25c4468b76a48fe4e1070436121084786)) + - More code review ([`aec5aab`](https://github.com/rust-phf/rust-phf/commit/aec5aab3a95bb96bd32b560598851dfc2f322fad)) + - Code review ([`88d54c2`](https://github.com/rust-phf/rust-phf/commit/88d54c2b875830bb00170421f3ea7d74eefe3f2b)) + - Added key+value equivalents for the map getters. 
([`7ced000`](https://github.com/rust-phf/rust-phf/commit/7ced00017886acfe740ea70ba10b4d4cb9cf780f)) + - Switch from find to get ([`88abf6c`](https://github.com/rust-phf/rust-phf/commit/88abf6c8b081439c8cb1458289790d0ee8f4d04a)) + - Fix some deprecation warnings ([`af2dd53`](https://github.com/rust-phf/rust-phf/commit/af2dd53e131e950f29bb089e48bc9f42f621a9d7)) + - Update for collections traits removal ([`f585e4c`](https://github.com/rust-phf/rust-phf/commit/f585e4c88f1cd327e0b409c60deb51cd3f3d6b15)) + - Remove deprecated reexports ([`b697d13`](https://github.com/rust-phf/rust-phf/commit/b697d132b04f282bf489adde6cfe996adf8634fd)) + - Hide deprecated reexports from docs ([`d120067`](https://github.com/rust-phf/rust-phf/commit/d12006775117350d9c47e636aa3d4ba64e3a3454)) + - Add deprecated reexports ([`5752604`](https://github.com/rust-phf/rust-phf/commit/5752604bfa3d0aaad43dc4b1c50e986c6ee078e4)) + - Fix doc header size ([`8f5c0f0`](https://github.com/rust-phf/rust-phf/commit/8f5c0f0b491868a3811b434321f871892eab02c1)) + - Fix docs ([`eadea0b`](https://github.com/rust-phf/rust-phf/commit/eadea0b2c2cb9e76d0be9a209819c75a41434719)) + - Convert PhfOrderedSet to new naming conventions ([`de193c7`](https://github.com/rust-phf/rust-phf/commit/de193c767502a587d8bf4b81b6c5fb821e4a6b29)) + - Switch over PhfOrderedMap to new naming scheme ([`f17bae1`](https://github.com/rust-phf/rust-phf/commit/f17bae1c34380b0566207df8e54807f3773109ce)) + - Convert PhfSet to new naming conventions ([`b2416db`](https://github.com/rust-phf/rust-phf/commit/b2416db396bc0e35fd64fd23c367f26b5fe78f5a)) + - Move and rename PhfMap stuff ([`7fc934a`](https://github.com/rust-phf/rust-phf/commit/7fc934a23e7e25fd12014a123eea8f7707928338)) + - Update for Equiv DST changes ([`719de47`](https://github.com/rust-phf/rust-phf/commit/719de47be5881b070cdf948668ae3c71dcea51f6)) + - Clean up warnings ([`b44065b`](https://github.com/rust-phf/rust-phf/commit/b44065b78dd31d2931d5d4427b608ae907e841a9)) + - Fix docs 
([`83a8116`](https://github.com/rust-phf/rust-phf/commit/83a8116c71bf1cbf28d51d269b4c214e13748509)) + - Drop libstd requirement ([`dd3d0f1`](https://github.com/rust-phf/rust-phf/commit/dd3d0f1fedc19bbea2795bb63b9ce623618f4e31)) + - Remove unneeded import ([`15cc179`](https://github.com/rust-phf/rust-phf/commit/15cc17901777ef3e8f9a7a95f15f11e5dd29eb57)) + - Update docs location ([`49647cd`](https://github.com/rust-phf/rust-phf/commit/49647cdd0c170be43956822cc31968ac96cd31b4)) + - Misc cleanup ([`2fe6940`](https://github.com/rust-phf/rust-phf/commit/2fe6940182240e39ecd283eef00c5eff1b343a08)) + - Use XXHash instead of SipHash ([`bd10658`](https://github.com/rust-phf/rust-phf/commit/bd10658648539a13553bd9ea8853f490ee424cc8)) + - Use slice operators ([`a1b5030`](https://github.com/rust-phf/rust-phf/commit/a1b503023f516753fcd95061b1b303d21bb44a91)) + - Fix warnings in tests ([`4bf6f82`](https://github.com/rust-phf/rust-phf/commit/4bf6f824795de3c587f554119cf8d6f88c438e53)) + - Remove old crate_name attributes ([`35701e2`](https://github.com/rust-phf/rust-phf/commit/35701e2591d78d76707453376fc32b3a53de08c0)) + - Fix typo ([`68458d3`](https://github.com/rust-phf/rust-phf/commit/68458d3255af0f58510c3b502dcff4d83af19ae8)) + - Rephrase order guarantees ([`3c2661d`](https://github.com/rust-phf/rust-phf/commit/3c2661d8a421d9f9ddccdcbc51a3386480fdf59d)) + - Update examples ([`85a3b28`](https://github.com/rust-phf/rust-phf/commit/85a3b28ea9ee24f080ff02d1db390284691714a9)) + - Minor cleanup ([`2f75f5f`](https://github.com/rust-phf/rust-phf/commit/2f75f5fed1579c1f26c42f8a263977fcec50f749)) + - Merge pull request #12 from kmcallister/find-index ([`d7ae880`](https://github.com/rust-phf/rust-phf/commit/d7ae8800202cd20cf057b865d4023b28fe80c8cc)) + - Provide find_index{,_equiv} on PhfOrdered{Set,Map} ([`b16d440`](https://github.com/rust-phf/rust-phf/commit/b16d4400556f7cae3e7dcca8ba091af5459090de)) + - Update for lifetime changes 
([`af0a11c`](https://github.com/rust-phf/rust-phf/commit/af0a11c92bd531c9677bef31f6a6d8c4b59ad29b)) + - Add back crate_name for rustdoc ([`92ec57a`](https://github.com/rust-phf/rust-phf/commit/92ec57aca33e1dfeda7a6cadb0b0fd08ddc23808)) + - More cleanup ([`20dea1d`](https://github.com/rust-phf/rust-phf/commit/20dea1d778a9e5226b6ffe2b11ed37a23878863a)) + - One more where clause ([`d6e5d77`](https://github.com/rust-phf/rust-phf/commit/d6e5d774a5ab6e796da0eb5e0cf062d0f0aebec0)) + - Switch to where clause syntax ([`13b9389`](https://github.com/rust-phf/rust-phf/commit/13b93899b5679d425fdfff7695003bc52d4c8f0b)) + - Cargo update ([`2a650ef`](https://github.com/rust-phf/rust-phf/commit/2a650efcdb9f013906cdf097e7a569c38d38487e)) + - Re-disable in-crate tests ([`9c4d247`](https://github.com/rust-phf/rust-phf/commit/9c4d247cb824689791e81942fd586e36899b35aa)) + - Properly support cross compiled builds ([`b2220d9`](https://github.com/rust-phf/rust-phf/commit/b2220d9a428049fb9c52b51c16d8f6b15cd02487)) + - Reenable tests for phf crate for docs ([`3ab5bd1`](https://github.com/rust-phf/rust-phf/commit/3ab5bd117af17cc8d91816b5911a65376f2a8f7f)) + - Update for pattern changes ([`f79814a`](https://github.com/rust-phf/rust-phf/commit/f79814a6abfa3bc5d739825643ea4ecee0a3aa8a)) + - Move test to tests dir ([`c9ca9b1`](https://github.com/rust-phf/rust-phf/commit/c9ca9b118f77e0581887c0bde09e78f9f7f00d0f)) + - Add more _equiv methods ([`61eea75`](https://github.com/rust-phf/rust-phf/commit/61eea759b53785fd8233a565de0765ce66fb824d)) + - Elide lifetimes ([`20a1e83`](https://github.com/rust-phf/rust-phf/commit/20a1e838c01017d74ef48cdb40e30eaf32de834a)) + - Impl Index for PhfMap and PhfOrderedMap ([`3995dbc`](https://github.com/rust-phf/rust-phf/commit/3995dbc443f33571e15c18c45b38862a515a88c0)) + - Switch Travis to using cargo ([`95f3c90`](https://github.com/rust-phf/rust-phf/commit/95f3c9074392b7782d28e6a94e79dfc303066ea2)) + - Rename module 
([`25aeba6`](https://github.com/rust-phf/rust-phf/commit/25aeba6aeeb9f14ebabf11cd368f22840d40a245)) + - Rename phf_shared to phf ([`6372fa4`](https://github.com/rust-phf/rust-phf/commit/6372fa437f01de39cc80120f9d9ed48cee0f0b1f)) + - Turn off tests for main crates ([`6718b60`](https://github.com/rust-phf/rust-phf/commit/6718b60a55939992b7d4c5c00f57a4a81f38e5ac)) + - Pull shared code into a module ([`19c4f8d`](https://github.com/rust-phf/rust-phf/commit/19c4f8d420d3a9ff8e3ace0256198f5db9fccae0)) + - Move iterator maps to construction time ([`a8bb815`](https://github.com/rust-phf/rust-phf/commit/a8bb8156d513d0e15c476baac13a8d153f740958)) + - Implement more iterator traits for PhfMap iters ([`4b48972`](https://github.com/rust-phf/rust-phf/commit/4b4897284da11b59b4122c4b0c10b23064ca380c)) + - Add support for remaining literals ([`55ededf`](https://github.com/rust-phf/rust-phf/commit/55ededfc9ccbd3b01690e289adfc4d5e05a4064d)) + - Byte and char key support ([`789990e`](https://github.com/rust-phf/rust-phf/commit/789990ede8def8c333a305437899a953ed6f9a62)) + - Support binary literal keys! ([`6bfb12b`](https://github.com/rust-phf/rust-phf/commit/6bfb12bf3b0bffb66e44b8a5326051b58d697543)) + - Parameterize the key type of PhfOrdered* ([`f6ce641`](https://github.com/rust-phf/rust-phf/commit/f6ce641e5676be8d70e961f020d79fc3d6dcfb74)) + - Parameterize the key type of PhfMap and Set ([`cb4ed93`](https://github.com/rust-phf/rust-phf/commit/cb4ed93175b656f442802c27e039add8e2b86723)) + - Update for crate_id removal ([`a0ab8d7`](https://github.com/rust-phf/rust-phf/commit/a0ab8d7f517305c77cdb1d51076ff4b3e31923e5)) + - Split to two separate Cargo packages ([`4ff3544`](https://github.com/rust-phf/rust-phf/commit/4ff35445a4b376009d0f365bd761c2c27c174c4c)) +
+ diff --git a/anneal/vendor/phf/Cargo.lock b/anneal/vendor/phf/Cargo.lock new file mode 100644 index 0000000000..57ac1c9dc5 --- /dev/null +++ b/anneal/vendor/phf/Cargo.lock @@ -0,0 +1,136 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "phf" +version = "0.12.1" +dependencies = [ + "phf_macros", + "phf_shared", + "serde", +] + +[[package]] +name = "phf_generator" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cbb1126afed61dd6368748dae63b1ee7dc480191c6262a3b4ff1e29d86a6c5b" +dependencies = [ + "fastrand", + "phf_shared", +] + +[[package]] +name = "phf_macros" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d713258393a82f091ead52047ca779d37e5766226d009de21696c4e667044368" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2", + "quote", + "syn", + "uncased", + "unicase", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", + "uncased", + "unicase", +] + +[[package]] +name = "proc-macro2" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "syn" +version = "2.0.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "uncased" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicase" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" diff --git a/anneal/vendor/phf/Cargo.toml b/anneal/vendor/phf/Cargo.toml new file mode 100644 index 0000000000..30142cb587 --- /dev/null +++ b/anneal/vendor/phf/Cargo.toml @@ -0,0 +1,74 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading 
crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +rust-version = "1.61" +name = "phf" +version = "0.12.1" +authors = ["Steven Fackler "] +build = false +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = "Runtime support for perfect hash function data structures" +readme = "README.md" +categories = [ + "data-structures", + "no-std", +] +license = "MIT" +repository = "https://github.com/rust-phf/rust-phf" +resolver = "1" + +[package.metadata.docs.rs] +features = ["macros"] + +[package.metadata.playground] +all-features = false +default-features = true +features = ["macros"] + +[features] +default = ["std"] +macros = ["phf_macros"] +std = [ + "phf_shared/std", + "serde?/std", +] +uncased = [ + "phf_macros?/uncased", + "phf_shared/uncased", +] +unicase = [ + "phf_macros?/unicase", + "phf_shared/unicase", +] + +[lib] +name = "phf" +path = "src/lib.rs" +test = false + +[dependencies.phf_macros] +version = "^0.12.0" +optional = true + +[dependencies.phf_shared] +version = "^0.12.0" +default-features = false + +[dependencies.serde] +version = "1.0" +optional = true +default-features = false diff --git a/anneal/vendor/phf/Cargo.toml.orig b/anneal/vendor/phf/Cargo.toml.orig new file mode 100644 index 0000000000..0346ba92ea --- /dev/null +++ b/anneal/vendor/phf/Cargo.toml.orig @@ -0,0 +1,36 @@ +[package] +name = "phf" +authors = ["Steven Fackler "] +version = "0.12.1" +license = "MIT" +description = "Runtime support for perfect hash function data structures" +repository = "https://github.com/rust-phf/rust-phf" +edition = "2021" +readme 
= "../README.md" +rust-version = "1.61" +categories = ["data-structures", "no-std"] + +[lib] +name = "phf" +path = "src/lib.rs" +test = false + +[features] +default = ["std"] +std = ["phf_shared/std", "serde?/std"] +uncased = ["phf_macros?/uncased", "phf_shared/uncased"] +unicase = ["phf_macros?/unicase", "phf_shared/unicase"] +macros = ["phf_macros"] + +[dependencies] +phf_macros = { version = "^0.12.0", optional = true, path = "../phf_macros" } +phf_shared = { version = "^0.12.0", default-features = false, path = "../phf_shared" } +serde = { version = "1.0", default-features = false, optional = true } + +[package.metadata.docs.rs] +features = ["macros"] + +[package.metadata.playground] +default-features = true +features = ["macros"] +all-features = false diff --git a/anneal/vendor/phf/LICENSE b/anneal/vendor/phf/LICENSE new file mode 100644 index 0000000000..cd5bf6855b --- /dev/null +++ b/anneal/vendor/phf/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014-2022 Steven Fackler, Yuki Okushi + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/anneal/vendor/phf/README.md b/anneal/vendor/phf/README.md new file mode 100644 index 0000000000..4f6b5dcf7d --- /dev/null +++ b/anneal/vendor/phf/README.md @@ -0,0 +1,127 @@ +# Rust-PHF + +[![CI](https://github.com/rust-phf/rust-phf/actions/workflows/ci.yml/badge.svg)](https://github.com/rust-phf/rust-phf/actions/workflows/ci.yml) [![Latest Version](https://img.shields.io/crates/v/phf.svg)](https://crates.io/crates/phf) + +[Documentation](https://docs.rs/phf) + +Rust-PHF is a library to generate efficient lookup tables at compile time using +[perfect hash functions](http://en.wikipedia.org/wiki/Perfect_hash_function). + +It currently uses the +[CHD algorithm](http://cmph.sourceforge.net/papers/esa09.pdf) and can generate +a 100,000 entry map in roughly .4 seconds. + +MSRV (minimum supported rust version) is Rust 1.61. + +## Usage + +PHF data structures can be constructed via either the procedural +macros in the `phf_macros` crate or code generation supported by the +`phf_codegen` crate. + +To compile the `phf` crate with a dependency on +libcore instead of libstd, enabling use in environments where libstd +will not work, set `default-features = false` for the dependency: + +```toml +[dependencies] +# to use `phf` in `no_std` environments +phf = { version = "0.12", default-features = false } +``` + +### phf_macros + +```rust +use phf::phf_map; + +#[derive(Clone)] +pub enum Keyword { + Loop, + Continue, + Break, + Fn, + Extern, +} + +static KEYWORDS: phf::Map<&'static str, Keyword> = phf_map! 
{ + "loop" => Keyword::Loop, + "continue" => Keyword::Continue, + "break" => Keyword::Break, + "fn" => Keyword::Fn, + "extern" => Keyword::Extern, +}; + +pub fn parse_keyword(keyword: &str) -> Option { + KEYWORDS.get(keyword).cloned() +} +``` + +```toml +[dependencies] +phf = { version = "0.12", features = ["macros"] } +``` + +#### Note + +Currently, the macro syntax has some limitations and may not +work as you want. See [#183] or [#196] for example. + +[#183]: https://github.com/rust-phf/rust-phf/issues/183 +[#196]: https://github.com/rust-phf/rust-phf/issues/196 + +### phf_codegen + +To use `phf_codegen` on build.rs, you have to add dependencies under `[build-dependencies]`: + +```toml +[build-dependencies] +phf = { version = "0.12", default-features = false } +phf_codegen = "0.12" +``` + +Then put code on build.rs: + +```rust +use std::env; +use std::fs::File; +use std::io::{BufWriter, Write}; +use std::path::Path; + +fn main() { + let path = Path::new(&env::var("OUT_DIR").unwrap()).join("codegen.rs"); + let mut file = BufWriter::new(File::create(&path).unwrap()); + + write!( + &mut file, + "static KEYWORDS: phf::Map<&'static str, Keyword> = {}", + phf_codegen::Map::new() + .entry("loop", "Keyword::Loop") + .entry("continue", "Keyword::Continue") + .entry("break", "Keyword::Break") + .entry("fn", "Keyword::Fn") + .entry("extern", "Keyword::Extern") + .build() + ) + .unwrap(); + write!(&mut file, ";\n").unwrap(); +} +``` + +and lib.rs: + +```rust +#[derive(Clone)] +enum Keyword { + Loop, + Continue, + Break, + Fn, + Extern, +} + +include!(concat!(env!("OUT_DIR"), "/codegen.rs")); + +pub fn parse_keyword(keyword: &str) -> Option { + KEYWORDS.get(keyword).cloned() +} +``` diff --git a/anneal/vendor/phf/src/lib.rs b/anneal/vendor/phf/src/lib.rs new file mode 100644 index 0000000000..18e2a9efe2 --- /dev/null +++ b/anneal/vendor/phf/src/lib.rs @@ -0,0 +1,153 @@ +//! Rust-PHF is a library to generate efficient lookup tables at compile time using +//! 
[perfect hash functions](http://en.wikipedia.org/wiki/Perfect_hash_function). +//! +//! It currently uses the +//! [CHD algorithm](http://cmph.sourceforge.net/papers/esa09.pdf) and can generate +//! a 100,000 entry map in roughly .4 seconds. +//! +//! MSRV (minimum supported rust version) is Rust 1.61. +//! +//! ## Usage +//! +//! PHF data structures can be constructed via either the procedural +//! macros in the `phf_macros` crate or code generation supported by the +//! `phf_codegen` crate. If you prefer macros, you can easily use them by +//! enabling the `macros` feature of the `phf` crate, like: +//! +//!```toml +//! [dependencies] +//! phf = { version = "0.12", features = ["macros"] } +//! ``` +//! +//! To compile the `phf` crate with a dependency on +//! libcore instead of libstd, enabling use in environments where libstd +//! will not work, set `default-features = false` for the dependency: +//! +//! ```toml +//! [dependencies] +//! # to use `phf` in `no_std` environments +//! phf = { version = "0.12", default-features = false } +//! ``` +//! +//! ## Example (with the `macros` feature enabled) +//! +//! ```rust +//! use phf::phf_map; +//! +//! #[derive(Clone)] +//! pub enum Keyword { +//! Loop, +//! Continue, +//! Break, +//! Fn, +//! Extern, +//! } +//! +//! static KEYWORDS: phf::Map<&'static str, Keyword> = phf_map! { +//! "loop" => Keyword::Loop, +//! "continue" => Keyword::Continue, +//! "break" => Keyword::Break, +//! "fn" => Keyword::Fn, +//! "extern" => Keyword::Extern, +//! }; +//! +//! pub fn parse_keyword(keyword: &str) -> Option { +//! KEYWORDS.get(keyword).cloned() +//! } +//! ``` +//! +//! Alternatively, you can use the [`phf_codegen`] crate to generate PHF datatypes +//! in a build script. +//! +//! [`phf_codegen`]: https://docs.rs/phf_codegen +//! +//! ## Note +//! +//! Currently, the macro syntax has some limitations and may not +//! work as you want. See [#183] or [#196] for example. +//! +//! 
[#183]: https://github.com/rust-phf/rust-phf/issues/183 +//! [#196]: https://github.com/rust-phf/rust-phf/issues/196 + +#![doc(html_root_url = "https://docs.rs/phf/0.12")] +#![warn(missing_docs)] +#![cfg_attr(not(feature = "std"), no_std)] + +#[cfg(feature = "std")] +extern crate std as core; + +#[cfg(feature = "macros")] +/// Macro to create a `static` (compile-time) [`Map`]. +/// +/// Requires the `macros` feature. +/// +/// Supported key expressions are: +/// - literals: bools, (byte) strings, bytes, chars, and integers (these must have a type suffix) +/// - arrays of `u8` integers +/// - dereferenced byte string literals +/// - `UniCase::unicode(string)`, `UniCase::ascii(string)`, or `Ascii::new(string)` if the `unicase` feature is enabled +/// - `UncasedStr::new(string)` if the `uncased` feature is enabled +/// +/// # Example +/// +/// ``` +/// use phf::{phf_map, Map}; +/// +/// static MY_MAP: Map<&'static str, u32> = phf_map! { +/// "hello" => 1, +/// "world" => 2, +/// }; +/// +/// fn main () { +/// assert_eq!(MY_MAP["hello"], 1); +/// } +/// ``` +pub use phf_macros::phf_map; + +#[cfg(feature = "macros")] +/// Macro to create a `static` (compile-time) [`OrderedMap`]. +/// +/// Requires the `macros` feature. Same usage as [`phf_map`]. +pub use phf_macros::phf_ordered_map; + +#[cfg(feature = "macros")] +/// Macro to create a `static` (compile-time) [`Set`]. +/// +/// Requires the `macros` feature. +/// +/// # Example +/// +/// ``` +/// use phf::{phf_set, Set}; +/// +/// static MY_SET: Set<&'static str> = phf_set! { +/// "hello world", +/// "hola mundo", +/// }; +/// +/// fn main () { +/// assert!(MY_SET.contains("hello world")); +/// } +/// ``` +pub use phf_macros::phf_set; + +#[cfg(feature = "macros")] +/// Macro to create a `static` (compile-time) [`OrderedSet`]. +/// +/// Requires the `macros` feature. Same usage as [`phf_set`]. 
+pub use phf_macros::phf_ordered_set; + +#[doc(inline)] +pub use self::map::Map; +#[doc(inline)] +pub use self::ordered_map::OrderedMap; +#[doc(inline)] +pub use self::ordered_set::OrderedSet; +#[doc(inline)] +pub use self::set::Set; +pub use phf_shared::PhfHash; + +pub mod map; +pub mod ordered_map; +pub mod ordered_set; +pub mod set; diff --git a/anneal/vendor/phf/src/map.rs b/anneal/vendor/phf/src/map.rs new file mode 100644 index 0000000000..13f649b3fa --- /dev/null +++ b/anneal/vendor/phf/src/map.rs @@ -0,0 +1,334 @@ +//! An immutable map constructed at compile time. +use core::fmt; +use core::iter::FusedIterator; +use core::iter::IntoIterator; +use core::ops::Index; +use core::slice; +use phf_shared::{self, HashKey, PhfBorrow, PhfHash}; +#[cfg(feature = "serde")] +use serde::ser::{Serialize, SerializeMap, Serializer}; + +/// An immutable map constructed at compile time. +/// +/// ## Note +/// +/// The fields of this struct are public so that they may be initialized by the +/// `phf_map!` macro and code generation. They are subject to change at any +/// time and should never be accessed directly. 
+pub struct Map { + #[doc(hidden)] + pub key: HashKey, + #[doc(hidden)] + pub disps: &'static [(u32, u32)], + #[doc(hidden)] + pub entries: &'static [(K, V)], +} + +impl fmt::Debug for Map +where + K: fmt::Debug, + V: fmt::Debug, +{ + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.debug_map().entries(self.entries()).finish() + } +} + +impl<'a, K, V, T: ?Sized> Index<&'a T> for Map +where + T: Eq + PhfHash, + K: PhfBorrow, +{ + type Output = V; + + fn index(&self, k: &'a T) -> &V { + self.get(k).expect("invalid key") + } +} + +impl Default for Map { + fn default() -> Self { + Self::new() + } +} + +impl PartialEq for Map +where + K: PartialEq, + V: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + self.key == other.key && self.disps == other.disps && self.entries == other.entries + } +} + +impl Eq for Map +where + K: Eq, + V: Eq, +{ +} + +impl Map { + /// Create a new, empty, immutable map. + #[inline] + pub const fn new() -> Self { + Self { + key: 0, + disps: &[], + entries: &[], + } + } + + /// Returns the number of entries in the `Map`. + #[inline] + pub const fn len(&self) -> usize { + self.entries.len() + } + + /// Returns true if the `Map` is empty. + #[inline] + pub const fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Determines if `key` is in the `Map`. + pub fn contains_key(&self, key: &T) -> bool + where + T: Eq + PhfHash, + K: PhfBorrow, + { + self.get(key).is_some() + } + + /// Returns a reference to the value that `key` maps to. + pub fn get(&self, key: &T) -> Option<&V> + where + T: Eq + PhfHash, + K: PhfBorrow, + { + self.get_entry(key).map(|e| e.1) + } + + /// Returns a reference to the map's internal static instance of the given + /// key. + /// + /// This can be useful for interning schemes. + pub fn get_key(&self, key: &T) -> Option<&K> + where + T: Eq + PhfHash, + K: PhfBorrow, + { + self.get_entry(key).map(|e| e.0) + } + + /// Like `get`, but returns both the key and the value. 
+ pub fn get_entry(&self, key: &T) -> Option<(&K, &V)> + where + T: Eq + PhfHash, + K: PhfBorrow, + { + if self.disps.is_empty() { + return None; + } //Prevent panic on empty map + let hashes = phf_shared::hash(key, &self.key); + let index = phf_shared::get_index(&hashes, self.disps, self.entries.len()); + let entry = &self.entries[index as usize]; + let b: &T = entry.0.borrow(); + if b == key { + Some((&entry.0, &entry.1)) + } else { + None + } + } + + /// Returns an iterator over the key/value pairs in the map. + /// + /// Entries are returned in an arbitrary but fixed order. + pub fn entries(&self) -> Entries<'_, K, V> { + Entries { + iter: self.entries.iter(), + } + } + + /// Returns an iterator over the keys in the map. + /// + /// Keys are returned in an arbitrary but fixed order. + pub fn keys(&self) -> Keys<'_, K, V> { + Keys { + iter: self.entries(), + } + } + + /// Returns an iterator over the values in the map. + /// + /// Values are returned in an arbitrary but fixed order. + pub fn values(&self) -> Values<'_, K, V> { + Values { + iter: self.entries(), + } + } +} + +impl<'a, K, V> IntoIterator for &'a Map { + type Item = (&'a K, &'a V); + type IntoIter = Entries<'a, K, V>; + + fn into_iter(self) -> Entries<'a, K, V> { + self.entries() + } +} + +/// An iterator over the key/value pairs in a `Map`. 
+pub struct Entries<'a, K, V> { + iter: slice::Iter<'a, (K, V)>, +} + +impl<'a, K, V> Clone for Entries<'a, K, V> { + #[inline] + fn clone(&self) -> Self { + Self { + iter: self.iter.clone(), + } + } +} + +impl<'a, K, V> fmt::Debug for Entries<'a, K, V> +where + K: fmt::Debug, + V: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, K, V> Iterator for Entries<'a, K, V> { + type Item = (&'a K, &'a V); + + fn next(&mut self) -> Option<(&'a K, &'a V)> { + self.iter.next().map(|&(ref k, ref v)| (k, v)) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a, K, V> DoubleEndedIterator for Entries<'a, K, V> { + fn next_back(&mut self) -> Option<(&'a K, &'a V)> { + self.iter.next_back().map(|e| (&e.0, &e.1)) + } +} + +impl<'a, K, V> ExactSizeIterator for Entries<'a, K, V> {} + +impl<'a, K, V> FusedIterator for Entries<'a, K, V> {} + +/// An iterator over the keys in a `Map`. +pub struct Keys<'a, K, V> { + iter: Entries<'a, K, V>, +} + +impl<'a, K, V> Clone for Keys<'a, K, V> { + #[inline] + fn clone(&self) -> Self { + Self { + iter: self.iter.clone(), + } + } +} + +impl<'a, K, V> fmt::Debug for Keys<'a, K, V> +where + K: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, K, V> Iterator for Keys<'a, K, V> { + type Item = &'a K; + + fn next(&mut self) -> Option<&'a K> { + self.iter.next().map(|e| e.0) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> { + fn next_back(&mut self) -> Option<&'a K> { + self.iter.next_back().map(|e| e.0) + } +} + +impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {} + +impl<'a, K, V> FusedIterator for Keys<'a, K, V> {} + +/// An iterator over the values in a `Map`. 
+pub struct Values<'a, K, V> { + iter: Entries<'a, K, V>, +} + +impl<'a, K, V> Clone for Values<'a, K, V> { + #[inline] + fn clone(&self) -> Self { + Self { + iter: self.iter.clone(), + } + } +} + +impl<'a, K, V> fmt::Debug for Values<'a, K, V> +where + V: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, K, V> Iterator for Values<'a, K, V> { + type Item = &'a V; + + fn next(&mut self) -> Option<&'a V> { + self.iter.next().map(|e| e.1) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> { + fn next_back(&mut self) -> Option<&'a V> { + self.iter.next_back().map(|e| e.1) + } +} + +impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {} + +impl<'a, K, V> FusedIterator for Values<'a, K, V> {} + +#[cfg(feature = "serde")] +impl Serialize for Map +where + K: Serialize, + V: Serialize, +{ + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + let mut map = serializer.serialize_map(Some(self.len()))?; + for (k, v) in self.entries() { + map.serialize_entry(k, v)?; + } + map.end() + } +} diff --git a/anneal/vendor/phf/src/ordered_map.rs b/anneal/vendor/phf/src/ordered_map.rs new file mode 100644 index 0000000000..bf6473ca5c --- /dev/null +++ b/anneal/vendor/phf/src/ordered_map.rs @@ -0,0 +1,332 @@ +//! An order-preserving immutable map constructed at compile time. +use core::fmt; +use core::iter::FusedIterator; +use core::iter::IntoIterator; +use core::ops::Index; +use core::slice; +use phf_shared::{self, HashKey, PhfBorrow, PhfHash}; + +/// An order-preserving immutable map constructed at compile time. +/// +/// Unlike a `Map`, iteration order is guaranteed to match the definition +/// order. +/// +/// ## Note +/// +/// The fields of this struct are public so that they may be initialized by the +/// `phf_ordered_map!` macro and code generation. 
They are subject to change at +/// any time and should never be accessed directly. +pub struct OrderedMap { + #[doc(hidden)] + pub key: HashKey, + #[doc(hidden)] + pub disps: &'static [(u32, u32)], + #[doc(hidden)] + pub idxs: &'static [usize], + #[doc(hidden)] + pub entries: &'static [(K, V)], +} + +impl fmt::Debug for OrderedMap +where + K: fmt::Debug, + V: fmt::Debug, +{ + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.debug_map().entries(self.entries()).finish() + } +} + +impl<'a, K, V, T: ?Sized> Index<&'a T> for OrderedMap +where + T: Eq + PhfHash, + K: PhfBorrow, +{ + type Output = V; + + fn index(&self, k: &'a T) -> &V { + self.get(k).expect("invalid key") + } +} + +impl PartialEq for OrderedMap +where + K: PartialEq, + V: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + self.key == other.key + && self.disps == other.disps + && self.idxs == other.idxs + && self.entries == other.entries + } +} + +impl Eq for OrderedMap +where + K: Eq, + V: Eq, +{ +} + +impl OrderedMap { + /// Returns the number of entries in the `OrderedMap`. + #[inline] + pub const fn len(&self) -> usize { + self.entries.len() + } + + /// Returns true if the `OrderedMap` is empty. + #[inline] + pub const fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns a reference to the value that `key` maps to. + pub fn get(&self, key: &T) -> Option<&V> + where + T: Eq + PhfHash, + K: PhfBorrow, + { + self.get_entry(key).map(|e| e.1) + } + + /// Returns a reference to the map's internal static instance of the given + /// key. + /// + /// This can be useful for interning schemes. + pub fn get_key(&self, key: &T) -> Option<&K> + where + T: Eq + PhfHash, + K: PhfBorrow, + { + self.get_entry(key).map(|e| e.0) + } + + /// Determines if `key` is in the `OrderedMap`. 
+ pub fn contains_key(&self, key: &T) -> bool + where + T: Eq + PhfHash, + K: PhfBorrow, + { + self.get(key).is_some() + } + + /// Returns the index of the key within the list used to initialize + /// the ordered map. + pub fn get_index(&self, key: &T) -> Option + where + T: Eq + PhfHash, + K: PhfBorrow, + { + self.get_internal(key).map(|(i, _)| i) + } + + /// Returns references to both the key and values at an index + /// within the list used to initialize the ordered map. See `.get_index(key)`. + pub fn index(&self, index: usize) -> Option<(&K, &V)> { + self.entries.get(index).map(|&(ref k, ref v)| (k, v)) + } + + /// Like `get`, but returns both the key and the value. + pub fn get_entry(&self, key: &T) -> Option<(&K, &V)> + where + T: Eq + PhfHash, + K: PhfBorrow, + { + self.get_internal(key).map(|(_, e)| e) + } + + fn get_internal(&self, key: &T) -> Option<(usize, (&K, &V))> + where + T: Eq + PhfHash, + K: PhfBorrow, + { + if self.disps.is_empty() { + return None; + } //Prevent panic on empty map + let hashes = phf_shared::hash(key, &self.key); + let idx_index = phf_shared::get_index(&hashes, self.disps, self.idxs.len()); + let idx = self.idxs[idx_index as usize]; + let entry = &self.entries[idx]; + + let b: &T = entry.0.borrow(); + if b == key { + Some((idx, (&entry.0, &entry.1))) + } else { + None + } + } + + /// Returns an iterator over the key/value pairs in the map. + /// + /// Entries are returned in the same order in which they were defined. + pub fn entries(&self) -> Entries<'_, K, V> { + Entries { + iter: self.entries.iter(), + } + } + + /// Returns an iterator over the keys in the map. + /// + /// Keys are returned in the same order in which they were defined. + pub fn keys(&self) -> Keys<'_, K, V> { + Keys { + iter: self.entries(), + } + } + + /// Returns an iterator over the values in the map. + /// + /// Values are returned in the same order in which they were defined. 
+ pub fn values(&self) -> Values<'_, K, V> { + Values { + iter: self.entries(), + } + } +} + +impl<'a, K, V> IntoIterator for &'a OrderedMap { + type Item = (&'a K, &'a V); + type IntoIter = Entries<'a, K, V>; + + fn into_iter(self) -> Entries<'a, K, V> { + self.entries() + } +} + +/// An iterator over the entries in a `OrderedMap`. +pub struct Entries<'a, K, V> { + iter: slice::Iter<'a, (K, V)>, +} + +impl<'a, K, V> Clone for Entries<'a, K, V> { + #[inline] + fn clone(&self) -> Self { + Self { + iter: self.iter.clone(), + } + } +} + +impl<'a, K, V> fmt::Debug for Entries<'a, K, V> +where + K: fmt::Debug, + V: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, K, V> Iterator for Entries<'a, K, V> { + type Item = (&'a K, &'a V); + + fn next(&mut self) -> Option<(&'a K, &'a V)> { + self.iter.next().map(|e| (&e.0, &e.1)) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a, K, V> DoubleEndedIterator for Entries<'a, K, V> { + fn next_back(&mut self) -> Option<(&'a K, &'a V)> { + self.iter.next_back().map(|e| (&e.0, &e.1)) + } +} + +impl<'a, K, V> ExactSizeIterator for Entries<'a, K, V> {} + +impl<'a, K, V> FusedIterator for Entries<'a, K, V> {} + +/// An iterator over the keys in a `OrderedMap`. 
+pub struct Keys<'a, K, V> { + iter: Entries<'a, K, V>, +} + +impl<'a, K, V> Clone for Keys<'a, K, V> { + #[inline] + fn clone(&self) -> Self { + Self { + iter: self.iter.clone(), + } + } +} + +impl<'a, K, V> fmt::Debug for Keys<'a, K, V> +where + K: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, K, V> Iterator for Keys<'a, K, V> { + type Item = &'a K; + + fn next(&mut self) -> Option<&'a K> { + self.iter.next().map(|e| e.0) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> { + fn next_back(&mut self) -> Option<&'a K> { + self.iter.next_back().map(|e| e.0) + } +} + +impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {} + +impl<'a, K, V> FusedIterator for Keys<'a, K, V> {} + +/// An iterator over the values in a `OrderedMap`. +pub struct Values<'a, K, V> { + iter: Entries<'a, K, V>, +} + +impl<'a, K, V> Clone for Values<'a, K, V> { + #[inline] + fn clone(&self) -> Self { + Self { + iter: self.iter.clone(), + } + } +} + +impl<'a, K, V> fmt::Debug for Values<'a, K, V> +where + V: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, K, V> Iterator for Values<'a, K, V> { + type Item = &'a V; + + fn next(&mut self) -> Option<&'a V> { + self.iter.next().map(|e| e.1) + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> { + fn next_back(&mut self) -> Option<&'a V> { + self.iter.next_back().map(|e| e.1) + } +} + +impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {} + +impl<'a, K, V> FusedIterator for Values<'a, K, V> {} diff --git a/anneal/vendor/phf/src/ordered_set.rs b/anneal/vendor/phf/src/ordered_set.rs new file mode 100644 index 0000000000..11e4f749fb --- /dev/null +++ b/anneal/vendor/phf/src/ordered_set.rs @@ -0,0 
+1,181 @@ +//! An order-preserving immutable set constructed at compile time. +use crate::{ordered_map, OrderedMap, PhfHash}; +use core::fmt; +use core::iter::FusedIterator; +use core::iter::IntoIterator; +use phf_shared::PhfBorrow; + +/// An order-preserving immutable set constructed at compile time. +/// +/// Unlike a `Set`, iteration order is guaranteed to match the definition +/// order. +/// +/// ## Note +/// +/// The fields of this struct are public so that they may be initialized by the +/// `phf_ordered_set!` macro and code generation. They are subject to change at +/// any time and should never be accessed directly. +pub struct OrderedSet { + #[doc(hidden)] + pub map: OrderedMap, +} + +impl fmt::Debug for OrderedSet +where + T: fmt::Debug, +{ + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.debug_set().entries(self).finish() + } +} + +impl PartialEq for OrderedSet +where + T: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + self.map == other.map + } +} + +impl Eq for OrderedSet where T: Eq {} + +impl OrderedSet { + /// Returns the number of elements in the `OrderedSet`. + #[inline] + pub const fn len(&self) -> usize { + self.map.len() + } + + /// Returns true if the `OrderedSet` contains no elements. + #[inline] + pub const fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns a reference to the set's internal static instance of the given + /// key. + /// + /// This can be useful for interning schemes. + pub fn get_key(&self, key: &U) -> Option<&T> + where + U: Eq + PhfHash, + T: PhfBorrow, + { + self.map.get_key(key) + } + + /// Returns the index of the key within the list used to initialize + /// the ordered set. + pub fn get_index(&self, key: &U) -> Option + where + U: Eq + PhfHash, + T: PhfBorrow, + { + self.map.get_index(key) + } + + /// Returns a reference to the key at an index + /// within the list used to initialize the ordered set. See `.get_index(key)`. 
+ pub fn index(&self, index: usize) -> Option<&T> { + self.map.index(index).map(|(k, &())| k) + } + + /// Returns true if `value` is in the `OrderedSet`. + pub fn contains(&self, value: &U) -> bool + where + U: Eq + PhfHash, + T: PhfBorrow, + { + self.map.contains_key(value) + } + + /// Returns an iterator over the values in the set. + /// + /// Values are returned in the same order in which they were defined. + pub fn iter(&self) -> Iter<'_, T> { + Iter { + iter: self.map.keys(), + } + } +} + +impl OrderedSet +where + T: Eq + PhfHash + PhfBorrow, +{ + /// Returns true if `other` shares no elements with `self`. + #[inline] + pub fn is_disjoint(&self, other: &OrderedSet) -> bool { + !self.iter().any(|value| other.contains(value)) + } + + /// Returns true if `other` contains all values in `self`. + #[inline] + pub fn is_subset(&self, other: &OrderedSet) -> bool { + self.iter().all(|value| other.contains(value)) + } + + /// Returns true if `self` contains all values in `other`. + #[inline] + pub fn is_superset(&self, other: &OrderedSet) -> bool { + other.is_subset(self) + } +} + +impl<'a, T> IntoIterator for &'a OrderedSet { + type Item = &'a T; + type IntoIter = Iter<'a, T>; + + fn into_iter(self) -> Iter<'a, T> { + self.iter() + } +} + +/// An iterator over the values in a `OrderedSet`. 
+pub struct Iter<'a, T> { + iter: ordered_map::Keys<'a, T, ()>, +} + +impl<'a, T> Clone for Iter<'a, T> { + #[inline] + fn clone(&self) -> Self { + Self { + iter: self.iter.clone(), + } + } +} + +impl<'a, T> fmt::Debug for Iter<'a, T> +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, T> Iterator for Iter<'a, T> { + type Item = &'a T; + + #[inline] + fn next(&mut self) -> Option<&'a T> { + self.iter.next() + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a, T> DoubleEndedIterator for Iter<'a, T> { + #[inline] + fn next_back(&mut self) -> Option<&'a T> { + self.iter.next_back() + } +} + +impl<'a, T> ExactSizeIterator for Iter<'a, T> {} + +impl<'a, T> FusedIterator for Iter<'a, T> {} diff --git a/anneal/vendor/phf/src/set.rs b/anneal/vendor/phf/src/set.rs new file mode 100644 index 0000000000..dbba392379 --- /dev/null +++ b/anneal/vendor/phf/src/set.rs @@ -0,0 +1,158 @@ +//! An immutable set constructed at compile time. +use core::fmt; +use core::iter::FusedIterator; +use core::iter::IntoIterator; + +use phf_shared::{PhfBorrow, PhfHash}; + +use crate::{map, Map}; + +/// An immutable set constructed at compile time. +/// +/// ## Note +/// +/// The fields of this struct are public so that they may be initialized by the +/// `phf_set!` macro and code generation. They are subject to change at any +/// time and should never be accessed directly. +pub struct Set { + #[doc(hidden)] + pub map: Map, +} + +impl fmt::Debug for Set +where + T: fmt::Debug, +{ + fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt.debug_set().entries(self).finish() + } +} + +impl PartialEq for Set +where + T: PartialEq, +{ + fn eq(&self, other: &Self) -> bool { + self.map == other.map + } +} + +impl Eq for Set where T: Eq {} + +impl Set { + /// Returns the number of elements in the `Set`. 
+ #[inline] + pub const fn len(&self) -> usize { + self.map.len() + } + + /// Returns true if the `Set` contains no elements. + #[inline] + pub const fn is_empty(&self) -> bool { + self.len() == 0 + } + + /// Returns a reference to the set's internal static instance of the given + /// key. + /// + /// This can be useful for interning schemes. + pub fn get_key(&self, key: &U) -> Option<&T> + where + U: Eq + PhfHash, + T: PhfBorrow, + { + self.map.get_key(key) + } + + /// Returns true if `value` is in the `Set`. + pub fn contains(&self, value: &U) -> bool + where + U: Eq + PhfHash, + T: PhfBorrow, + { + self.map.contains_key(value) + } + + /// Returns an iterator over the values in the set. + /// + /// Values are returned in an arbitrary but fixed order. + pub fn iter(&self) -> Iter<'_, T> { + Iter { + iter: self.map.keys(), + } + } +} + +impl Set +where + T: Eq + PhfHash + PhfBorrow, +{ + /// Returns true if `other` shares no elements with `self`. + pub fn is_disjoint(&self, other: &Set) -> bool { + !self.iter().any(|value| other.contains(value)) + } + + /// Returns true if `other` contains all values in `self`. + pub fn is_subset(&self, other: &Set) -> bool { + self.iter().all(|value| other.contains(value)) + } + + /// Returns true if `self` contains all values in `other`. + pub fn is_superset(&self, other: &Set) -> bool { + other.is_subset(self) + } +} + +impl<'a, T> IntoIterator for &'a Set { + type Item = &'a T; + type IntoIter = Iter<'a, T>; + + fn into_iter(self) -> Iter<'a, T> { + self.iter() + } +} + +/// An iterator over the values in a `Set`. 
+pub struct Iter<'a, T: 'static> { + iter: map::Keys<'a, T, ()>, +} + +impl<'a, T> Clone for Iter<'a, T> { + #[inline] + fn clone(&self) -> Self { + Self { + iter: self.iter.clone(), + } + } +} + +impl<'a, T> fmt::Debug for Iter<'a, T> +where + T: fmt::Debug, +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_list().entries(self.clone()).finish() + } +} + +impl<'a, T> Iterator for Iter<'a, T> { + type Item = &'a T; + + fn next(&mut self) -> Option<&'a T> { + self.iter.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +impl<'a, T> DoubleEndedIterator for Iter<'a, T> { + fn next_back(&mut self) -> Option<&'a T> { + self.iter.next_back() + } +} + +impl<'a, T> ExactSizeIterator for Iter<'a, T> {} + +impl<'a, T> FusedIterator for Iter<'a, T> {} diff --git a/anneal/vendor/phf_generator/.cargo-checksum.json b/anneal/vendor/phf_generator/.cargo-checksum.json new file mode 100644 index 0000000000..87303bd6c6 --- /dev/null +++ b/anneal/vendor/phf_generator/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{".cargo_vcs_info.json":"42e0892769e5beff2dddcd87b7c483eca12d2603b5eb9bb0175b1f41b4def4b2","CHANGELOG.md":"c96cd8896e8df8ae351942a10e01741f23d103b9ab2553bcb3d35c40d53964ed","Cargo.lock":"acf9c980d9a6e09fd9ef5d1d92c4ce7e0ae25c423849b2c565272e60c2da3c49","Cargo.toml":"48a189b2b68ef3b407b84c09366e017eb07d761858dc153916956ddfecf5d427","Cargo.toml.orig":"9a6ad8582dc49fb7fdbc09c6008d7620d1dc3efb8166700a55932c6c9292bdec","LICENSE":"0ab4d106b6faac07fb6a051815fd1b4d862d730895e2d7d7358c2f13565e7a38","README.md":"c750896a79b343bdeee6fa2e1c5cebc80408ebf8d6b5c1fd92dba5cb02026ff2","benches/benches.rs":"7559ed47e52a39d898ea66ab4decb319f01a5d6b7f2ef0542d2ffd23d1e60b78","src/bin/gen_hash_test.rs":"37d4c9720b3268a71ab54b2ca6ffe845539570292e25a54bcdf9bd38342693ca","src/lib.rs":"9b24fbe05731227a5507ba1aabf34fbc156035594fed99d0ce7a0035112e84db"},"package":"2cbb1126afed61dd6368748dae63b1ee7dc480191c6262a3b4ff1e29d86a6c5b"} \ No newline at end 
of file diff --git a/anneal/vendor/phf_generator/.cargo_vcs_info.json b/anneal/vendor/phf_generator/.cargo_vcs_info.json new file mode 100644 index 0000000000..ae6567d0ee --- /dev/null +++ b/anneal/vendor/phf_generator/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "03a930696231da01005e762425841f91587b3e04" + }, + "path_in_vcs": "phf_generator" +} \ No newline at end of file diff --git a/anneal/vendor/phf_generator/CHANGELOG.md b/anneal/vendor/phf_generator/CHANGELOG.md new file mode 100644 index 0000000000..4357ff47ea --- /dev/null +++ b/anneal/vendor/phf_generator/CHANGELOG.md @@ -0,0 +1,255 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## 0.12.0 (2025-06-19) + + + +### Chore + + - Update version number in docs + +### Chore + + - Update changelog + +### Commit Statistics + + + + - 5 commits contributed to the release over the course of 138 calendar days. + - 163 days passed between releases. + - 2 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Update changelog ([`51d6baa`](https://github.com/rust-phf/rust-phf/commit/51d6baaa6ffce658fb9b56a96affaf0ddd0603e5)) + - Update version number in docs ([`08e7464`](https://github.com/rust-phf/rust-phf/commit/08e74647f00f7d77cbb81e0cb73ed663798d000f)) + - Merge branch 'master' into no-wasteful-allocations ([`33b8aff`](https://github.com/rust-phf/rust-phf/commit/33b8affe77cea8bdeccb5c8d6c730c78231fc138)) + - Merge pull request #312 from goffrie/fastrand ([`24d8867`](https://github.com/rust-phf/rust-phf/commit/24d8867429d2338631b851db29c7057afccac987)) + - Merge branch 'master' into fastrand ([`576dd47`](https://github.com/rust-phf/rust-phf/commit/576dd47858a2db74eb4ef67a8385039ef17b867d)) +
+ +## 0.11.3 (2025-01-07) + +### Commit Statistics + + + + - 8 commits contributed to the release. + - 562 days passed between releases. + - 0 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Adjusting changelogs prior to release of phf_shared v0.11.3, phf_generator v0.11.3, phf_macros v0.11.3, phf v0.11.3, phf_codegen v0.11.3 ([`e111f4b`](https://github.com/rust-phf/rust-phf/commit/e111f4b53a965c188fdcbf03950321107d9b3987)) + - Switch from rand to fastrand ([`13ddcb5`](https://github.com/rust-phf/rust-phf/commit/13ddcb5e1028776da6ce23ae922f31a8749c2452)) + - Merge pull request #300 from JohnTitor/msrv-1.61 ([`323366d`](https://github.com/rust-phf/rust-phf/commit/323366d03966ddad2eaa3432df79c9da8339e319)) + - Bump MSRV to 1.61 ([`1795f7b`](https://github.com/rust-phf/rust-phf/commit/1795f7b66b16af0191f221dc957bc8a090c891ad)) + - Merge pull request #295 from Swatinem/intermediate-vec ([`ac49464`](https://github.com/rust-phf/rust-phf/commit/ac494646b0e05688671e652c87bc9a2e906cb3b0)) + - Merge pull request #294 from Swatinem/generate-with-fn ([`8ecc019`](https://github.com/rust-phf/rust-phf/commit/8ecc019c494207c7cc09bf9e9452a5e1627716b1)) + - Avoid intermediate `Vec` allocations ([`4d67564`](https://github.com/rust-phf/rust-phf/commit/4d67564a7e840a660d9ccd4fe8e9d04a19a95bd1)) + - Allow using an arbitrary hash_fn for generation ([`a714e10`](https://github.com/rust-phf/rust-phf/commit/a714e10eb8438f2b8aedb88a9bed9f632a8c3425)) +
+ +## 0.11.2 (2023-06-24) + +### Commit Statistics + + + + - 4 commits contributed to the release. + - 319 days passed between releases. + - 0 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Release phf_shared v0.11.2, phf_generator v0.11.2, phf_macros v0.11.2, phf v0.11.2, phf_codegen v0.11.2 ([`c9c35fd`](https://github.com/rust-phf/rust-phf/commit/c9c35fd8ba3f1bc228388b0cef6e3814a02a72c0)) + - Update changelogs ([`a1e5072`](https://github.com/rust-phf/rust-phf/commit/a1e5072b8e84b108f06389a1d41ac868426a03f7)) + - Merge pull request #274 from ankane/license-files ([`21baa73`](https://github.com/rust-phf/rust-phf/commit/21baa73941a0694ec48f437c0c0a6abfcc2f32d2)) + - Include license files in crates ([`1229b2f`](https://github.com/rust-phf/rust-phf/commit/1229b2faa6b97542ab4850a1723b1723dea92814)) +
+ +## 0.11.1 (2022-08-08) + + + +### Other + + - Unpin the `criterion` dependency + + +### Commit Statistics + + + + - 135 commits contributed to the release. + - 1 commit was understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Release phf_shared v0.11.1, phf_generator v0.11.1, phf_macros v0.11.1, phf v0.11.1, phf_codegen v0.11.1 ([`3897b21`](https://github.com/rust-phf/rust-phf/commit/3897b21c6d38e5adcaf9110b4bb33c19f6b41977)) + - Merge pull request #265 from rust-phf/unpin-criterion ([`3c6af3f`](https://github.com/rust-phf/rust-phf/commit/3c6af3f7d783a6018070944a00fa29e0ff48b0dc)) + - Unpin the `criterion` dependency ([`27a2ce4`](https://github.com/rust-phf/rust-phf/commit/27a2ce4b07ddc68c45e2faccbfef52d22375c1b0)) + - Merge pull request #264 from rust-phf/tweak-changelog ([`97f997d`](https://github.com/rust-phf/rust-phf/commit/97f997d2be827ca636a29046c78e2c09c5c62650)) + - Replace handmade changelog with generated one by `cargo-smart-release` ([`cb84cf6`](https://github.com/rust-phf/rust-phf/commit/cb84cf6636ab52823c53e70d6abeac8f648a3482)) + - Merge pull request #263 from lopopolo/lopopolo/rand-no-default-features ([`d441940`](https://github.com/rust-phf/rust-phf/commit/d441940cbb1a4653d2b33467e2449e6178ad53a7)) + - Disable default features for rand dep in phf_generator ([`deefda1`](https://github.com/rust-phf/rust-phf/commit/deefda1cdff6ced54526ddb702b13282e0c8c66b)) + - Merge pull request #260 from JohnTitor/fix-repo-link ([`1407ebe`](https://github.com/rust-phf/rust-phf/commit/1407ebe536b39611db92d765ddec4de0e6c8a16e)) + - Add README.md for some crates ([`e0b34fa`](https://github.com/rust-phf/rust-phf/commit/e0b34fa0a697f45f2c41a875bf84b78a6d3ce079)) + - Add category to crates ([`32a72c3`](https://github.com/rust-phf/rust-phf/commit/32a72c3859997fd6b590e9ec092ae789d2acdf55)) + - Update repository links on Cargo.toml ([`1af3b0f`](https://github.com/rust-phf/rust-phf/commit/1af3b0fe1f8fdcae7ccc1bc8d51de309fb16a6bf)) + - Merge pull request #258 from JohnTitor/release-0.11.0 ([`c0b9ef9`](https://github.com/rust-phf/rust-phf/commit/c0b9ef98e798f807f94544aeb0fff429ef280efc)) + - Release 0.11.0 
([`d2efdc0`](https://github.com/rust-phf/rust-phf/commit/d2efdc08a7eb1d0d6c414b7b2ac41ce1fe1f9a43)) + - Merge pull request #257 from JohnTitor/edition-2021 ([`36ec885`](https://github.com/rust-phf/rust-phf/commit/36ec8854a9da4f295618e98d94aaf7150df2173e)) + - Make crates edition 2021 ([`b9d25da`](https://github.com/rust-phf/rust-phf/commit/b9d25da58b912d9927fbc41901631cd77836462b)) + - Merge pull request #251 from JohnTitor/weak-deps ([`2e1167c`](https://github.com/rust-phf/rust-phf/commit/2e1167c2046cd20aed1a906b4e23b40303cf0c00)) + - Make "unicase + macros" features work ([`11bb242`](https://github.com/rust-phf/rust-phf/commit/11bb2426f0237b1ecea8c8038630b1231ede4871)) + - Merge pull request #240 from JohnTitor/docs-update ([`da98b9e`](https://github.com/rust-phf/rust-phf/commit/da98b9e80fdb22cd6d48a4a42489840afe603756)) + - Refine doc comments ([`d8cfc43`](https://github.com/rust-phf/rust-phf/commit/d8cfc436059a1c2c3ede1afb0f9ec2333c046fc6)) + - Merge pull request #234 from JohnTitor/fix-ci ([`eba4cc2`](https://github.com/rust-phf/rust-phf/commit/eba4cc28d92c1db95cc430985a0fbc9ca63d1307)) + - Fix CI failure ([`d9b5ff2`](https://github.com/rust-phf/rust-phf/commit/d9b5ff23367d2bbcc385ff8243c7d972f45d459c)) + - Merge pull request #230 from JohnTitor/release-0.10 ([`3ea14b2`](https://github.com/rust-phf/rust-phf/commit/3ea14b2166553ad6e7b9afe7244144f5d661b6c6)) + - Prepare for release 0.10.0 ([`588ac25`](https://github.com/rust-phf/rust-phf/commit/588ac25dd5c0afccea084e6f94867328a6a30454)) + - Merge pull request #228 from JohnTitor/release-0.9.1 ([`d527f9d`](https://github.com/rust-phf/rust-phf/commit/d527f9d016adafe7d2930e37710291030b432838)) + - Prepare for v0.9.1 ([`9b71978`](https://github.com/rust-phf/rust-phf/commit/9b719789149ef195ef5eba093b7e73255fbef8dc)) + - Merge pull request #227 from JohnTitor/pin-criterion ([`d71851e`](https://github.com/rust-phf/rust-phf/commit/d71851ef62092143914cc5a2bbbb780029a55ceb)) + - Pin `criterion` version 
([`b19afb6`](https://github.com/rust-phf/rust-phf/commit/b19afb6544c4c04fb7893661455191942d14e4af)) + - Fix included files ([`0442122`](https://github.com/rust-phf/rust-phf/commit/04421227eb627eab52ddc9195874ed67be9044df)) + - Merge pull request #219 from JohnTitor/release-0.9.0 ([`307969f`](https://github.com/rust-phf/rust-phf/commit/307969ff3bb8cae320e648890a9525920035944b)) + - Prepare 0.9.0 release ([`2ca46c4`](https://github.com/rust-phf/rust-phf/commit/2ca46c4f9c9083c128fcc6add33dc5986638940f)) + - Cleanup cargo metadata ([`a9e4b0a`](https://github.com/rust-phf/rust-phf/commit/a9e4b0a1e84825004fa66e938b870f83d3147d0d)) + - Merge pull request #218 from JohnTitor/cleanup ([`76f9072`](https://github.com/rust-phf/rust-phf/commit/76f907239af9b0cca7dac4e6d702cedc72f6f371)) + - Run rustfmt ([`dd86c6c`](https://github.com/rust-phf/rust-phf/commit/dd86c6c103f25021b52144085b8fab0a94582bef)) + - Cleanup docs ([`ddecc3a`](https://github.com/rust-phf/rust-phf/commit/ddecc3aa97aec6d9e9d6e59c57bc598d476335c1)) + - Merge pull request #212 from JohnTitor/tweak-benches ([`f12cf10`](https://github.com/rust-phf/rust-phf/commit/f12cf1051439664284f671c1371fb7197748e97c)) + - Tweak benchmarks ([`5ea2854`](https://github.com/rust-phf/rust-phf/commit/5ea2854f1e58bb4a821559b94c42f446e953eccb)) + - Merge pull request #208 from JohnTitor/simplify-workspace ([`a47ac36`](https://github.com/rust-phf/rust-phf/commit/a47ac36b16dd8798659be3e24f74051cd1ed760d)) + - Use `[patch.crates-io]` section instead of path key ([`f47515b`](https://github.com/rust-phf/rust-phf/commit/f47515bce5c433214dbecee262a7a6f14e6a74d4)) + - Merge pull request #206 from Kazurin-775/master ([`7ebc9e7`](https://github.com/rust-phf/rust-phf/commit/7ebc9e7986ca9ae86c6e871b4fd495a401d6b5ca)) + - Fix phf_macros on no_std ([`d7af3dc`](https://github.com/rust-phf/rust-phf/commit/d7af3dc96a67070e2f9000158d074825f0a9d592)) + - Merge pull request #201 from benesch/rand-08-redux 
([`73a6799`](https://github.com/rust-phf/rust-phf/commit/73a6799f048228039af32c8e21246a63d977c9e3)) + - Update to rand v0.8 ([`6d5bfb4`](https://github.com/rust-phf/rust-phf/commit/6d5bfb4a377270d2ae69e05347044b1a95499973)) + - Merge pull request #180 from abonander/master ([`81c7cc5`](https://github.com/rust-phf/rust-phf/commit/81c7cc5b48649108428671d3b8ad151f6fbdb359)) + - Release v0.8.0 ([`4060288`](https://github.com/rust-phf/rust-phf/commit/4060288dc2c1ebe3b0630e4016ed51935bb0c863)) + - Merge pull request #181 from mati865/criterion ([`696eee1`](https://github.com/rust-phf/rust-phf/commit/696eee1f38213fe4a404ddfb9ef10d8e61ef0700)) + - Update criterion ([`9de3d83`](https://github.com/rust-phf/rust-phf/commit/9de3d836537b1360a3a1edf07ce5a9009f9c71c1)) + - Merge pull request #164 from abonander/perf-improvements ([`70129c6`](https://github.com/rust-phf/rust-phf/commit/70129c6fbcdf428ce9f1014eea935301ac70e410)) + - Use two separate hashes and full 32-bit displacements ([`9b70bd9`](https://github.com/rust-phf/rust-phf/commit/9b70bd94f8b0b74f156e75ccefbd4a4c7ba29728)) + - Add simple test for timing checks ([`ecb9fd5`](https://github.com/rust-phf/rust-phf/commit/ecb9fd58437722d568b82a52fb4750f9d0acecc1)) + - Merge pull request #159 from upsuper/rand-07 ([`f6407a0`](https://github.com/rust-phf/rust-phf/commit/f6407a056d432326bbfa42f476736ce754354e3e)) + - Upgrade rand to 0.7 ([`522f823`](https://github.com/rust-phf/rust-phf/commit/522f8230b9e738707764aed699bafc7c7ca997d0)) + - Merge branch 'master' into patch-1 ([`cd0d7ce`](https://github.com/rust-phf/rust-phf/commit/cd0d7ce1194252dcaca3153988ba2a4effa66b4f)) + - Merge pull request #152 from abonander/unicase-upgrade ([`27f7c2c`](https://github.com/rust-phf/rust-phf/commit/27f7c2c85efde7aeb3c5409985f2d605aff8e05b)) + - Convert to 2018 edition ([`9ff66ab`](https://github.com/rust-phf/rust-phf/commit/9ff66ab36a23c7170cc775773f042a06de426c3b)) + - Release v0.7.24 
([`1287414`](https://github.com/rust-phf/rust-phf/commit/1287414b1302d2d717c5f4be81accf4c12ccad48)) + - Upgrade rand and siphasher ([`80d9894`](https://github.com/rust-phf/rust-phf/commit/80d9894e5db7b5a8acf5b89716ee506de2a95b99)) + - Release v0.7.23 ([`a050b6f`](https://github.com/rust-phf/rust-phf/commit/a050b6f2a6b825bf0824339266ab9545340420d4)) + - Upgrade rand ([`9098872`](https://github.com/rust-phf/rust-phf/commit/9098872d320ad7c48fe1f58fedd7113aa08c8200)) + - Release 0.7.22 ([`ab88405`](https://github.com/rust-phf/rust-phf/commit/ab884054fa17eef915db2bdb5259c7aa71fbfea6)) + - Upgrade rand ([`e7b5a35`](https://github.com/rust-phf/rust-phf/commit/e7b5a35d14f6927a748f3c55a1c87b5b751ececd)) + - Release v0.7.21 ([`6c7e2d9`](https://github.com/rust-phf/rust-phf/commit/6c7e2d9ce17ff1b87507925bdbe87e6e682ed3e4)) + - Link to docs.rs ([`61142c5`](https://github.com/rust-phf/rust-phf/commit/61142c5aa168cff1bf53a6961ddc12012b49e1bb)) + - Dependency cleanup ([`f106aa6`](https://github.com/rust-phf/rust-phf/commit/f106aa66d85abfba3d627d12fd46a9b080c83e95)) + - Release v0.7.20 ([`f631f50`](https://github.com/rust-phf/rust-phf/commit/f631f50abfaf6ea3d6fc8caaada47975b6df3a62)) + - Merge branch 'release' ([`ea7e256`](https://github.com/rust-phf/rust-phf/commit/ea7e2562706663632a0af65ae9fa94e5cf78c4ea)) + - Merge branch 'release-v0.7.19' into release ([`81a4806`](https://github.com/rust-phf/rust-phf/commit/81a4806b05f14fb49aa972de27a42926a542ec44)) + - Release v0.7.19 ([`0a98dd1`](https://github.com/rust-phf/rust-phf/commit/0a98dd1865d12a3fa4cc27bdb38fa1e7374940d9)) + - Merge branch 'release' ([`ecab54b`](https://github.com/rust-phf/rust-phf/commit/ecab54b8a028c88938f220dbb0a684e017bab62f)) + - Merge branch 'release-v0.7.18' into release ([`dfa970b`](https://github.com/rust-phf/rust-phf/commit/dfa970b229cc32cfb2da1692aa94ad8a266e704a)) + - Release v0.7.18 ([`3f71765`](https://github.com/rust-phf/rust-phf/commit/3f717650f4331f5dbb9d7a3f878228fcf1138729)) + - Merge branch 
'release' ([`5f08563`](https://github.com/rust-phf/rust-phf/commit/5f0856327731107d9fada1b0318f6f15f32957c2)) + - Merge branch 'release-v0.7.17' into release ([`e073dd2`](https://github.com/rust-phf/rust-phf/commit/e073dd262d1b4c95234222ee5048fc883b9c7301)) + - Release v0.7.17 ([`21ecf72`](https://github.com/rust-phf/rust-phf/commit/21ecf72101715e4754db95a64ecd7de5a37b7f14)) + - Merge branch 'release' ([`839f06d`](https://github.com/rust-phf/rust-phf/commit/839f06d5a10c1300353b8f3c972990624695b668)) + - Merge branch 'release-v0.7.16' into release ([`6f5575c`](https://github.com/rust-phf/rust-phf/commit/6f5575c9b12d3619ea17c0825a613fcac12820f4)) + - Release v0.7.16 ([`8bf29c1`](https://github.com/rust-phf/rust-phf/commit/8bf29c10a878c83d73cc40385f0e96cb9cc95afa)) + - Merge branch 'release' ([`b4ec398`](https://github.com/rust-phf/rust-phf/commit/b4ec398f415e5cac2cd4d794b1889788e644447f)) + - Merge branch 'release-v0.7.15' into release ([`6bbc9e2`](https://github.com/rust-phf/rust-phf/commit/6bbc9e249b9a84e2019432b7d3b178851d2d776e)) + - Release v0.7.15 ([`20f896e`](https://github.com/rust-phf/rust-phf/commit/20f896e6975cabb9cf9883b08eaa5b3da8597f11)) + - Merge branch 'release' ([`7c692d4`](https://github.com/rust-phf/rust-phf/commit/7c692d42970bf6cb2540f6b2d3c88d63b3fd1f7a)) + - Merge branch 'release-v0.7.14' into release ([`ea8dd65`](https://github.com/rust-phf/rust-phf/commit/ea8dd652c292746a20bf3a680e9f925f6f0530b1)) + - Release v0.7.14 ([`fee66fc`](https://github.com/rust-phf/rust-phf/commit/fee66fc20e33f2b119f830a8926f3b6e52abcf09)) + - Merge branch 'release' ([`d9351e1`](https://github.com/rust-phf/rust-phf/commit/d9351e1488bd42d1a4453e4a465177fb1c781fdc)) + - Merge branch 'release-v0.7.13' into release ([`b582e4e`](https://github.com/rust-phf/rust-phf/commit/b582e4ecec23be992ba915fc7873c0d5598f388a)) + - Release v0.7.13 ([`4769a6d`](https://github.com/rust-phf/rust-phf/commit/4769a6d2ce1d392da06e4b3cb833a1cdccb1f1aa)) + - Merge branch 'release' 
([`5659a9d`](https://github.com/rust-phf/rust-phf/commit/5659a9db39bc5ee2179b264fce4cba4384d6d025)) + - Merge branch 'release-v0.7.12' into release ([`2f0a5de`](https://github.com/rust-phf/rust-phf/commit/2f0a5de9f01d9d22c774d8d85daec2a047a462e8)) + - Release v0.7.12 ([`9b75ee5`](https://github.com/rust-phf/rust-phf/commit/9b75ee5ed14060c45a5785fba0387be09e698624)) + - Merge branch 'release' ([`87ffab8`](https://github.com/rust-phf/rust-phf/commit/87ffab863aaeefb5ac2164da62f0407122d8057e)) + - Merge branch 'release-v0.7.11' into release ([`7260d04`](https://github.com/rust-phf/rust-phf/commit/7260d04413349bacab484afb74f9a496335278e1)) + - Release v0.7.11 ([`a004227`](https://github.com/rust-phf/rust-phf/commit/a0042277b181ec95fcbf29751b9a453f4f962ebb)) + - Merge branch 'release' ([`1579bec`](https://github.com/rust-phf/rust-phf/commit/1579bec1448c7b833f5965fe39d4ef2df66c982c)) + - Merge branch 'release-v0.7.10' into release ([`25cea13`](https://github.com/rust-phf/rust-phf/commit/25cea133fb4eec938bdfa74f04adbc8d94e30d4e)) + - Release v0.7.10 ([`c43154b`](https://github.com/rust-phf/rust-phf/commit/c43154b2661dc09620a7879c16f37b47d6ec03ae)) + - Merge branch 'release' ([`2c67ce5`](https://github.com/rust-phf/rust-phf/commit/2c67ce5a4129cd543178bf015f021a3bb83b6895)) + - Merge branch 'release-v0.7.9' into release ([`87206e1`](https://github.com/rust-phf/rust-phf/commit/87206e1c7b8d4089370dc168402ded0c0700a447)) + - Release v0.7.9 ([`b7d29df`](https://github.com/rust-phf/rust-phf/commit/b7d29dfe0df288b2da74de195f764eace1c8e443)) + - Merge branch 'release' ([`cd33902`](https://github.com/rust-phf/rust-phf/commit/cd339023e90ac1ce6971fa81badea65fb1f2b086)) + - Merge branch 'release-v0.7.8' into release ([`8bc23a0`](https://github.com/rust-phf/rust-phf/commit/8bc23a023908a038d668b6f7d8e94ee416995285)) + - Release v0.7.8 ([`aad0b9b`](https://github.com/rust-phf/rust-phf/commit/aad0b9b658fb970e3df60b066961aafca1a17c44)) + - Merge branch 'release' 
([`dccff69`](https://github.com/rust-phf/rust-phf/commit/dccff69384729e3d4972174ce62d8f9db9429485)) + - Merge branch 'release-v0.7.7' into release ([`2d988b7`](https://github.com/rust-phf/rust-phf/commit/2d988b7dfb04d949246adc047f6b195263612246)) + - Release v0.7.7 ([`c9e7a93`](https://github.com/rust-phf/rust-phf/commit/c9e7a93f4d6f85a72651aba6187e4c956d8c1167)) + - Run through rustfmt ([`58e2223`](https://github.com/rust-phf/rust-phf/commit/58e222380b7fc9609a055cb5a6110ba04e47d677)) + - Merge branch 'release' ([`776046c`](https://github.com/rust-phf/rust-phf/commit/776046c961456dee9e16a6b6574d336c66e259f8)) + - Merge branch 'release-v0.7.6' into release ([`2ea7d5c`](https://github.com/rust-phf/rust-phf/commit/2ea7d5cab5e9e54952ca618b43ec3583a33a4847)) + - Release v0.7.6 ([`5bcd5c9`](https://github.com/rust-phf/rust-phf/commit/5bcd5c95215f5aa29e133cb2912662085a8158f0)) + - Merge branch 'release' ([`1f770df`](https://github.com/rust-phf/rust-phf/commit/1f770df1290b586a8d641ecb0bbd105080afc0ea)) + - Merge branch 'release-v0.7.5' into release ([`bb65b8c`](https://github.com/rust-phf/rust-phf/commit/bb65b8cca30ef9d4518e3083558019a972873efa)) + - Release v0.7.5 ([`fda44f5`](https://github.com/rust-phf/rust-phf/commit/fda44f550401c1bd4aad29bb2c07030b86761028)) + - Merge branch 'release' ([`269b5dc`](https://github.com/rust-phf/rust-phf/commit/269b5dc41ebf82f423393d5219e8107e9c911a03)) + - Merge branch 'release-v0.7.4' into release ([`7c093e8`](https://github.com/rust-phf/rust-phf/commit/7c093e83ffe5192d9cdcd5402b6abb7800ffafb3)) + - Release v0.7.4 ([`c7c0d3c`](https://github.com/rust-phf/rust-phf/commit/c7c0d3c294126157f0275a05b7c3a65c419234a1)) + - Merge pull request #62 from SimonSapin/string-cache ([`6f59718`](https://github.com/rust-phf/rust-phf/commit/6f5971869e5864cae653ec3606d17b554c343ef8)) + - Add hash() and get_index() to phf_shared. 
([`d3b2ea0`](https://github.com/rust-phf/rust-phf/commit/d3b2ea0f0a9bd9cb79da90d8795f1905c3df1f5f)) + - Update PhfHash to mirror std::hash::Hash ([`96ef156`](https://github.com/rust-phf/rust-phf/commit/96ef156baae669b233673d6be2b96617ad48551e)) + - Release v0.7.3 ([`77ea239`](https://github.com/rust-phf/rust-phf/commit/77ea23917e908b10c4c5c463671a8409292f8661)) + - Minor generator cleanup ([`14e81a9`](https://github.com/rust-phf/rust-phf/commit/14e81a96bf567e06ea671535108c94e974113c9c)) + - Release v0.7.2 ([`642b69d`](https://github.com/rust-phf/rust-phf/commit/642b69d0100a4ee7ec6e430ef1351bd1f28f9a4a)) + - Release v0.7.1 ([`9cb9de9`](https://github.com/rust-phf/rust-phf/commit/9cb9de911ad4e16964f0def29780dde1630c3619)) + - Release v0.7.0 ([`555a690`](https://github.com/rust-phf/rust-phf/commit/555a690561673597aee068650ac884bbcc2e31cf)) + - Release v0.6.19 ([`5810d30`](https://github.com/rust-phf/rust-phf/commit/5810d30ef2162f33cfb4da99c65b7344c7f2913b)) + - Release v0.6.18 ([`36efc72`](https://github.com/rust-phf/rust-phf/commit/36efc721478d097fba1e5458cbdd9f288637abae)) + - Release v0.6.17 ([`271ccc2`](https://github.com/rust-phf/rust-phf/commit/271ccc27d885363d4d8c549f75624d08c48e56c5)) + - Bump rand version ([`8959cee`](https://github.com/rust-phf/rust-phf/commit/8959cee53fff8b45f548b236d13eca9ae84308a6)) + - Release v0.6.15 ([`ede14df`](https://github.com/rust-phf/rust-phf/commit/ede14df1e574674852b09bcafff4ad549ebfd4ae)) + - Merge pull request #51 from mbrubeck/rand ([`0923103`](https://github.com/rust-phf/rust-phf/commit/092310344f2397b8bdc27894248f69a6c29ed5cc)) + - Update to rand 0.2 ([`ab49d38`](https://github.com/rust-phf/rust-phf/commit/ab49d388e8e49a39f386100f5903e2a74a6b53c5)) + - Release v0.6.14 ([`cf64ebb`](https://github.com/rust-phf/rust-phf/commit/cf64ebb8f769c9f12c9a03d05713dde6b8caf371)) + - Release v0.6.13 ([`4fdb533`](https://github.com/rust-phf/rust-phf/commit/4fdb5331fd9978ca3e180a06fb2e34627f50fb77)) + - Release v0.6.12 
([`59ca586`](https://github.com/rust-phf/rust-phf/commit/59ca58637206c9806c13cc24cb35cb7d0ce9d23f)) + - Release v0.6.11 ([`e1e6d3b`](https://github.com/rust-phf/rust-phf/commit/e1e6d3b40a6babddd0989406f2b4e952443ff52e)) + - Release v0.6.10 ([`fc45373`](https://github.com/rust-phf/rust-phf/commit/fc45373b34a461664f532c5108f3d2625172c128)) + - Add doc roots ([`e22cdea`](https://github.com/rust-phf/rust-phf/commit/e22cdea2b24ce65b4de25adbb6cce727f20ac2f5)) + - Move generation logic to its own crate ([`cfeee87`](https://github.com/rust-phf/rust-phf/commit/cfeee8714caa4ecb3199df2a2ac149fe6a28ecc0)) +
+ diff --git a/anneal/vendor/phf_generator/Cargo.lock b/anneal/vendor/phf_generator/Cargo.lock new file mode 100644 index 0000000000..0fe45ee8d0 --- /dev/null +++ b/anneal/vendor/phf_generator/Cargo.lock @@ -0,0 +1,649 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "aho-corasick" +version = "1.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" +dependencies = [ + "memchr", +] + +[[package]] +name = "atty" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" +dependencies = [ + "hermit-abi", + "libc", + "winapi", +] + +[[package]] +name = "autocfg" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bumpalo" +version = "3.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db76d6187cd04dff33004d8e6c9cc4e05cd330500379d2394209271b4aeee" + +[[package]] +name = "cast" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" + +[[package]] +name = "cfg-if" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" + +[[package]] +name = "clap" +version = "2.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" 
+dependencies = [ + "bitflags", + "textwrap", + "unicode-width", +] + +[[package]] +name = "criterion" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" +dependencies = [ + "atty", + "cast", + "clap", + "criterion-plot", + "csv", + "itertools", + "lazy_static", + "num-traits", + "oorandom", + "plotters", + "rayon", + "regex", + "serde", + "serde_cbor", + "serde_derive", + "serde_json", + "tinytemplate", + "walkdir", +] + +[[package]] +name = "criterion-plot" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" +dependencies = [ + "cast", + "itertools", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9dd111b7b7f7d55b72c0a6ae361660ee5853c9af73f70c3c2ef6858b950e2e51" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-utils" +version = "0.8.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" + +[[package]] +name = "csv" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acdc4883a9c96732e4733212c01447ebd805833b7275a73ca3ee080fd77afdaf" +dependencies = [ + "csv-core", + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "csv-core" +version = "0.1.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d02f3b0da4c6504f86e9cd789d8dbafab48c2321be74e9987593de5a894d93d" +dependencies = [ + "memchr", +] 
+ +[[package]] +name = "either" +version = "1.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "half" +version = "1.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" + +[[package]] +name = "hermit-abi" +version = "0.1.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" +dependencies = [ + "libc", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" + +[[package]] +name = "js-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +dependencies = [ + "once_cell", + "wasm-bindgen", +] + +[[package]] +name = "lazy_static" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" + +[[package]] +name = "libc" +version = "0.2.174" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1171693293099992e19cddea4e8b849964e9846f4acee11b3948bcc337be8776" + +[[package]] +name = "log" +version = "0.4.27" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "num-traits" +version = "0.2.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.21.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" + +[[package]] +name = "oorandom" +version = "11.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" + +[[package]] +name = "phf_generator" +version = "0.12.1" +dependencies = [ + "criterion", + "fastrand", + "phf_shared", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", +] + +[[package]] +name = "plotters" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" +dependencies = [ + "num-traits", + "plotters-backend", + "plotters-svg", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "plotters-backend" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" + +[[package]] +name = "plotters-svg" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" +dependencies = [ + "plotters-backend", +] + +[[package]] +name = "proc-macro2" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rayon" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b418a60154510ca1a002a752ca9714984e21e4241e804d32555251faf8b78ffa" +dependencies = [ + "either", + "rayon-core", +] + +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + +[[package]] +name = "regex" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +dependencies = [ + "aho-corasick", + "memchr", + "regex-automata", + "regex-syntax", +] + +[[package]] +name = "regex-automata" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax", +] + +[[package]] +name = "regex-syntax" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" + +[[package]] +name = "rustversion" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8a0d197bd2c9dc6e53b84da9556a69ba4cdfab8619eb41a8bd1cc2027a0f6b1d" + +[[package]] +name = "ryu" +version = "1.0.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_cbor" +version = "0.11.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" +dependencies = [ + "half", + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.140" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" +dependencies = [ + "itoa", + "memchr", + "ryu", + "serde", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "syn" +version = "2.0.103" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "textwrap" 
+version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d326610f408c7a4eb6f51c37c330e496b08506c9457c9d34287ecc38809fb060" +dependencies = [ + "unicode-width", +] + +[[package]] +name = "tinytemplate" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" +dependencies = [ + "serde", + "serde_json", +] + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "unicode-width" +version = "0.1.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" + +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + +[[package]] +name = "wasm-bindgen" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +dependencies = [ + "cfg-if", + "once_cell", + "rustversion", + "wasm-bindgen-macro", +] + +[[package]] +name = "wasm-bindgen-backend" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +dependencies = [ + "bumpalo", + "log", + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-macro" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +dependencies = [ + "quote", + "wasm-bindgen-macro-support", +] + 
+[[package]] +name = "wasm-bindgen-macro-support" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +dependencies = [ + "proc-macro2", + "quote", + "syn", + "wasm-bindgen-backend", + "wasm-bindgen-shared", +] + +[[package]] +name = "wasm-bindgen-shared" +version = "0.2.100" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "web-sys" +version = "0.3.77" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +dependencies = [ + "windows-sys", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = 
[ + "windows-targets", +] + +[[package]] +name = "windows-targets" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" +dependencies = [ + "windows_aarch64_gnullvm", + "windows_aarch64_msvc", + "windows_i686_gnu", + "windows_i686_gnullvm", + "windows_i686_msvc", + "windows_x86_64_gnu", + "windows_x86_64_gnullvm", + "windows_x86_64_msvc", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" + +[[package]] +name = "windows_i686_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" + +[[package]] +name = "windows_i686_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" + +[[package]] +name = "windows_i686_msvc" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.6" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" diff --git a/anneal/vendor/phf_generator/Cargo.toml b/anneal/vendor/phf_generator/Cargo.toml new file mode 100644 index 0000000000..01133c632a --- /dev/null +++ b/anneal/vendor/phf_generator/Cargo.toml @@ -0,0 +1,58 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +rust-version = "1.61" +name = "phf_generator" +version = "0.12.1" +authors = ["Steven Fackler "] +build = false +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = "PHF generation logic" +readme = "README.md" +categories = ["data-structures"] +license = "MIT" +repository = "https://github.com/rust-phf/rust-phf" +resolver = "1" + +[lib] +name = "phf_generator" +path = "src/lib.rs" + +[[bin]] +name = "gen_hash_test" +path = "src/bin/gen_hash_test.rs" +required-features = ["criterion"] + +[[bench]] +name = "benches" +path = "benches/benches.rs" +harness = false + +[dependencies.criterion] +version = "0.3.6" +optional = true + +[dependencies.fastrand] +version = "2.1.0" +default-features = false + +[dependencies.phf_shared] +version = "^0.12" +default-features = false + +[dev-dependencies.criterion] +version = "0.3.6" diff --git a/anneal/vendor/phf_generator/Cargo.toml.orig b/anneal/vendor/phf_generator/Cargo.toml.orig new file mode 100644 index 0000000000..d05bae4195 --- /dev/null +++ b/anneal/vendor/phf_generator/Cargo.toml.orig @@ -0,0 +1,28 @@ 
+[package] +name = "phf_generator" +authors = ["Steven Fackler "] +version = "0.12.1" +license = "MIT" +description = "PHF generation logic" +repository = "https://github.com/rust-phf/rust-phf" +edition = "2021" +rust-version = "1.61" +categories = ["data-structures"] +readme = "README.md" + +[dependencies] +fastrand = { version = "2.1.0", default-features = false } +phf_shared = { version = "^0.12", default-features = false } +# for stable black_box() +criterion = { version = "0.3.6", optional = true } + +[dev-dependencies] +criterion = "0.3.6" + +[[bench]] +name = "benches" +harness = false + +[[bin]] +name = "gen_hash_test" +required-features = ["criterion"] diff --git a/anneal/vendor/phf_generator/LICENSE b/anneal/vendor/phf_generator/LICENSE new file mode 100644 index 0000000000..cd5bf6855b --- /dev/null +++ b/anneal/vendor/phf_generator/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014-2022 Steven Fackler, Yuki Okushi + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/anneal/vendor/phf_generator/README.md b/anneal/vendor/phf_generator/README.md new file mode 100644 index 0000000000..017e1d7591 --- /dev/null +++ b/anneal/vendor/phf_generator/README.md @@ -0,0 +1,5 @@ +# phf_generator + +This crate is for the `phf` crate, [find it on crates.io][phf] for details. + +[phf]: https://crates.io/crates/phf diff --git a/anneal/vendor/phf_generator/benches/benches.rs b/anneal/vendor/phf_generator/benches/benches.rs new file mode 100644 index 0000000000..ef06ff3b9f --- /dev/null +++ b/anneal/vendor/phf_generator/benches/benches.rs @@ -0,0 +1,56 @@ +use std::iter; + +use criterion::measurement::Measurement; +use criterion::{criterion_group, criterion_main, Bencher, BenchmarkId, Criterion}; + +use fastrand::Rng; + +use phf_generator::generate_hash; + +fn gen_vec(len: usize) -> Vec { + let mut rng = Rng::with_seed(0xAAAAAAAAAAAAAAAA); + iter::repeat_with(|| rng.u64(..)).take(len).collect() +} + +fn bench_hash(b: &mut Bencher, len: &usize) { + let vec = gen_vec(*len); + b.iter(|| generate_hash(&vec)) +} + +fn gen_hash_small(c: &mut Criterion) { + let sizes = vec![0, 1, 2, 5, 10, 25, 50, 75]; + for size in &sizes { + c.bench_with_input(BenchmarkId::new("gen_hash_small", *size), size, bench_hash); + } +} + +fn gen_hash_med(c: &mut Criterion) { + let sizes = vec![100, 250, 500, 1000, 2500, 5000, 7500]; + for size in &sizes { + c.bench_with_input(BenchmarkId::new("gen_hash_medium", *size), size, bench_hash); + } +} + +fn gen_hash_large(c: &mut Criterion) { + let sizes = vec![10_000, 25_000, 50_000, 75_000]; + for size in &sizes { + c.bench_with_input(BenchmarkId::new("gen_hash_large", *size), size, bench_hash); + } +} + +fn gen_hash_xlarge(c: &mut Criterion) { + let sizes = vec![100_000, 250_000, 500_000, 750_000, 1_000_000]; + for size in &sizes { + c.bench_with_input(BenchmarkId::new("gen_hash_xlarge", *size), size, bench_hash); + } +} + +criterion_group!( + benches, + gen_hash_small, + gen_hash_med, + gen_hash_large, + 
gen_hash_xlarge +); + +criterion_main!(benches); diff --git a/anneal/vendor/phf_generator/src/bin/gen_hash_test.rs b/anneal/vendor/phf_generator/src/bin/gen_hash_test.rs new file mode 100644 index 0000000000..b599190bb5 --- /dev/null +++ b/anneal/vendor/phf_generator/src/bin/gen_hash_test.rs @@ -0,0 +1,21 @@ +use std::iter; + +use criterion::*; + +use fastrand::Rng; + +use phf_generator::generate_hash; + +fn gen_vec(len: usize) -> Vec { + let mut rng = Rng::with_seed(0xAAAAAAAAAAAAAAAA); + let mut chars = iter::repeat_with(|| rng.alphanumeric()); + + (0..len) + .map(move |_| chars.by_ref().take(64).collect::()) + .collect() +} + +fn main() { + let data = black_box(gen_vec(1_000_000)); + black_box(generate_hash(&data)); +} diff --git a/anneal/vendor/phf_generator/src/lib.rs b/anneal/vendor/phf_generator/src/lib.rs new file mode 100644 index 0000000000..6fe262d6b3 --- /dev/null +++ b/anneal/vendor/phf_generator/src/lib.rs @@ -0,0 +1,156 @@ +//! See [the `phf` crate's documentation][phf] for details. +//! +//! 
[phf]: https://docs.rs/phf + +#![doc(html_root_url = "https://docs.rs/phf_generator/0.12")] +use std::iter; + +use fastrand::Rng; +use phf_shared::{HashKey, Hashes, PhfHash}; + +const DEFAULT_LAMBDA: usize = 5; + +const FIXED_SEED: u64 = 1234567890; + +pub struct HashState { + pub key: HashKey, + pub disps: Vec<(u32, u32)>, + pub map: Vec, +} + +pub fn generate_hash(entries: &[H]) -> HashState { + generate_hash_with_hash_fn(entries, phf_shared::hash) +} + +pub fn generate_hash_with_hash_fn(entries: &[T], hash_fn: F) -> HashState +where + F: Fn(&T, &HashKey) -> Hashes, +{ + let mut generator = Generator::new(entries.len()); + let mut rng = Rng::with_seed(FIXED_SEED); + + iter::repeat_with(|| rng.u64(..)) + .find(|key| { + let hashes = entries.iter().map(|entry| hash_fn(entry, key)); + generator.reset(hashes); + + generator.try_generate_hash() + }) + .map(|key| HashState { + key, + disps: generator.disps, + map: generator.map.into_iter().map(|i| i.unwrap()).collect(), + }) + .expect("failed to solve PHF") +} + +struct Bucket { + idx: usize, + keys: Vec, +} + +struct Generator { + hashes: Vec, + buckets: Vec, + disps: Vec<(u32, u32)>, + map: Vec>, + try_map: Vec, +} + +impl Generator { + fn new(table_len: usize) -> Self { + let hashes = Vec::with_capacity(table_len); + + let buckets_len = (table_len + DEFAULT_LAMBDA - 1) / DEFAULT_LAMBDA; + let buckets: Vec<_> = (0..buckets_len) + .map(|i| Bucket { + idx: i, + keys: vec![], + }) + .collect(); + let disps = vec![(0u32, 0u32); buckets_len]; + + let map = vec![None; table_len]; + let try_map = vec![0u64; table_len]; + + Self { + hashes, + buckets, + disps, + map, + try_map, + } + } + + fn reset(&mut self, hashes: I) + where + I: Iterator, + { + self.buckets.iter_mut().for_each(|b| b.keys.clear()); + self.buckets.sort_by_key(|b| b.idx); + self.disps.iter_mut().for_each(|d| *d = (0, 0)); + self.map.iter_mut().for_each(|m| *m = None); + self.try_map.iter_mut().for_each(|m| *m = 0); + + self.hashes.clear(); + 
self.hashes.extend(hashes); + } + + fn try_generate_hash(&mut self) -> bool { + let buckets_len = self.buckets.len() as u32; + for (i, hash) in self.hashes.iter().enumerate() { + self.buckets[(hash.g % buckets_len) as usize].keys.push(i); + } + + // Sort descending + self.buckets + .sort_by(|a, b| a.keys.len().cmp(&b.keys.len()).reverse()); + + let table_len = self.hashes.len(); + + // store whether an element from the bucket being placed is + // located at a certain position, to allow for efficient overlap + // checks. It works by storing the generation in each cell and + // each new placement-attempt is a new generation, so you can tell + // if this is legitimately full by checking that the generations + // are equal. (A u64 is far too large to overflow in a reasonable + // time for current hardware.) + let mut generation = 0u64; + + // the actual values corresponding to the markers above, as + // (index, key) pairs, for adding to the main map once we've + // chosen the right disps. + let mut values_to_add = vec![]; + + 'buckets: for bucket in &self.buckets { + for d1 in 0..(table_len as u32) { + 'disps: for d2 in 0..(table_len as u32) { + values_to_add.clear(); + generation += 1; + + for &key in &bucket.keys { + let idx = + (phf_shared::displace(self.hashes[key].f1, self.hashes[key].f2, d1, d2) + % (table_len as u32)) as usize; + if self.map[idx].is_some() || self.try_map[idx] == generation { + continue 'disps; + } + self.try_map[idx] = generation; + values_to_add.push((idx, key)); + } + + // We've picked a good set of disps + self.disps[bucket.idx] = (d1, d2); + for &(idx, key) in &values_to_add { + self.map[idx] = Some(key); + } + continue 'buckets; + } + } + + // Unable to find displacements for a bucket + return false; + } + true + } +} diff --git a/anneal/vendor/phf_macros/.cargo-checksum.json b/anneal/vendor/phf_macros/.cargo-checksum.json new file mode 100644 index 0000000000..9ab838c87c --- /dev/null +++ b/anneal/vendor/phf_macros/.cargo-checksum.json @@ 
-0,0 +1 @@ +{"files":{".cargo_vcs_info.json":"493fd54f48db56223a1f59959df880797f1c375af4066f5fb8dbe6e4fe538633","CHANGELOG.md":"31db3c33a42a6b9eb9c4cc5df96c3f4aedee3c0f111ecb913023b3f41abe347c","Cargo.lock":"72d1cb1b978e47840fd32682cf306389f763031bf504d559791140de116eac51","Cargo.toml":"8a6612d2902f5d299d9a19bff5c4a2dffbcbda9cf881c1e9d9bdc4bc7d9fd151","Cargo.toml.orig":"3d10849fced893af900fe58fb9a6e67a2f3ccc9fec8873ecff76c2af1da55e32","LICENSE":"0ab4d106b6faac07fb6a051815fd1b4d862d730895e2d7d7358c2f13565e7a38","README.md":"64d22574dafbd9664fbb908506d9fc219aeac6f4ba544ebd7bd14099cea4f309","src/lib.rs":"776ade96b063c5f0e5b684ed22e8f8010ac2d0f7b170a6aadcd59c418cda2f3a"},"package":"d713258393a82f091ead52047ca779d37e5766226d009de21696c4e667044368"} \ No newline at end of file diff --git a/anneal/vendor/phf_macros/.cargo_vcs_info.json b/anneal/vendor/phf_macros/.cargo_vcs_info.json new file mode 100644 index 0000000000..3f1c248a81 --- /dev/null +++ b/anneal/vendor/phf_macros/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "03a930696231da01005e762425841f91587b3e04" + }, + "path_in_vcs": "phf_macros" +} \ No newline at end of file diff --git a/anneal/vendor/phf_macros/CHANGELOG.md b/anneal/vendor/phf_macros/CHANGELOG.md new file mode 100644 index 0000000000..07592054af --- /dev/null +++ b/anneal/vendor/phf_macros/CHANGELOG.md @@ -0,0 +1,401 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## 0.12.0 (2025-06-19) + +### Chore + + - Update changelog + +### Commit Statistics + + + + - 6 commits contributed to the release over the course of 138 calendar days. + - 163 days passed between releases. + - 1 commit was understood as [conventional](https://www.conventionalcommits.org). 
+ - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Update changelog ([`51d6baa`](https://github.com/rust-phf/rust-phf/commit/51d6baaa6ffce658fb9b56a96affaf0ddd0603e5)) + - Merge pull request #289 from thaliaarchi/master ([`a6df856`](https://github.com/rust-phf/rust-phf/commit/a6df856ade4cfbf2666fcabbd70c666ea8234abf)) + - Add support for unicase::Ascii type ([`2806801`](https://github.com/rust-phf/rust-phf/commit/28068018dec5aab9b6ddc0da918431285db8cd34)) + - Merge pull request #309 from edef1c/uncased-macro ([`37a7794`](https://github.com/rust-phf/rust-phf/commit/37a779400b63b5d0b5d5ab63e2727bbfb9ce494d)) + - Merge branch 'master' into no-wasteful-allocations ([`33b8aff`](https://github.com/rust-phf/rust-phf/commit/33b8affe77cea8bdeccb5c8d6c730c78231fc138)) + - Merge branch 'master' into fastrand ([`576dd47`](https://github.com/rust-phf/rust-phf/commit/576dd47858a2db74eb4ef67a8385039ef17b867d)) +
+ +## 0.11.3 (2025-01-07) + + + + + +### Other + + - include LICENSE and changelog files in published crates + The restrictive "include" directive is only present in the phf_macros + crate but in none of the others. This commit brings phf_macros crate + in line with other crates in this workspace. + +### Other + + - unwrap groups in call arguments + +### Bug Fixes + + - Add isize/usize as valid key types + These types already implement PhfHash as of #262, but were not supported + as valid key expressions in `phf_map!` and co. + +### Chore + + - bump Cargo.toml version of phf and phf_macros + +### Commit Statistics + + + + - 12 commits contributed to the release. + - 562 days passed between releases. + - 4 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Adjusting changelogs prior to release of phf_shared v0.11.3, phf_generator v0.11.3, phf_macros v0.11.3, phf v0.11.3, phf_codegen v0.11.3 ([`e111f4b`](https://github.com/rust-phf/rust-phf/commit/e111f4b53a965c188fdcbf03950321107d9b3987)) + - Merge pull request #322 from JohnTitor/release-0.11.3 ([`dc64dd6`](https://github.com/rust-phf/rust-phf/commit/dc64dd6bace986a8858590455e08659d9ea4ae4b)) + - Reset version num ([`13581f8`](https://github.com/rust-phf/rust-phf/commit/13581f8e9eefe8b8b7cb1b1ad04f2d68d97b0ffd)) + - Merge pull request #315 from LunarLambda/master ([`695a0df`](https://github.com/rust-phf/rust-phf/commit/695a0df769f3c75150a67ed9bb316579b875289d)) + - Bump Cargo.toml version of phf and phf_macros ([`a96a4e2`](https://github.com/rust-phf/rust-phf/commit/a96a4e29d63fb1ab3cc10e050571e733f5d2d0d1)) + - Add isize/usize as valid key types ([`b54c740`](https://github.com/rust-phf/rust-phf/commit/b54c740086a96da85056b0df28174122bd73d5b0)) + - Unwrap groups in call arguments ([`3a86c8a`](https://github.com/rust-phf/rust-phf/commit/3a86c8a152b587aa572bd17e318b3cf61133040d)) + - Macro support for uncased ([`4359e17`](https://github.com/rust-phf/rust-phf/commit/4359e17371e4b93a45b8600cc56bc27aa95c5bb1)) + - Merge pull request #300 from JohnTitor/msrv-1.61 ([`323366d`](https://github.com/rust-phf/rust-phf/commit/323366d03966ddad2eaa3432df79c9da8339e319)) + - Bump MSRV to 1.61 ([`1795f7b`](https://github.com/rust-phf/rust-phf/commit/1795f7b66b16af0191f221dc957bc8a090c891ad)) + - Merge pull request #293 from decathorpe/master ([`e03f456`](https://github.com/rust-phf/rust-phf/commit/e03f4562957afd89d8d95a19d563eda9f0db7e8c)) + - Include LICENSE and changelog files in published crates ([`9ed628f`](https://github.com/rust-phf/rust-phf/commit/9ed628fe2ca954fdc49a93331188a99b58c9363a)) +
+ +## 0.11.2 (2023-06-24) + +### Commit Statistics + + + + - 10 commits contributed to the release. + - 319 days passed between releases. + - 0 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Release phf_shared v0.11.2, phf_generator v0.11.2, phf_macros v0.11.2, phf v0.11.2, phf_codegen v0.11.2 ([`c9c35fd`](https://github.com/rust-phf/rust-phf/commit/c9c35fd8ba3f1bc228388b0cef6e3814a02a72c0)) + - Update changelogs ([`a1e5072`](https://github.com/rust-phf/rust-phf/commit/a1e5072b8e84b108f06389a1d41ac868426a03f7)) + - Merge pull request #280 from jf2048/deref-bytestring ([`3776342`](https://github.com/rust-phf/rust-phf/commit/377634245c8c6f0569a2ed7b75d08366b54c8810)) + - Merge pull request #284 from nickelc/deps/syn2 ([`5ec8936`](https://github.com/rust-phf/rust-phf/commit/5ec8936369ca9eb6392a4aeb878d9bfef88d0d17)) + - Update `syn` to 2.0 ([`8e3e3e5`](https://github.com/rust-phf/rust-phf/commit/8e3e3e554433a2bcb6bf84805b1d03a49780d8c3)) + - Allow using dereferenced bytestring literal keys in phf_map! ([`8c0d057`](https://github.com/rust-phf/rust-phf/commit/8c0d0572da8c0b5e188e7fda4ab8bd4bcb97f720)) + - Merge pull request #274 from ankane/license-files ([`21baa73`](https://github.com/rust-phf/rust-phf/commit/21baa73941a0694ec48f437c0c0a6abfcc2f32d2)) + - Include license files in crates ([`1229b2f`](https://github.com/rust-phf/rust-phf/commit/1229b2faa6b97542ab4850a1723b1723dea92814)) + - Merge pull request #271 from DavidS/bump-dep ([`ea8df2c`](https://github.com/rust-phf/rust-phf/commit/ea8df2caad5b20f927be1f0174dfa4e68e8a95f6)) + - Fix missed dependency bump in phf_macros ([`b7fd8f1`](https://github.com/rust-phf/rust-phf/commit/b7fd8f183f266cf7f0bf0ca8e89b03453f3f35b7)) +
+ +## 0.11.1 (2022-08-08) + + + + +### Chore + + - upgrade syn/proc-macro + +### Bug Fixes + + - remove now-unnecessary `proc-macro-hack` crate usage + Resolves . + + This resolves an issue with Windows Defender identifying `proc-macro-hack` as threats. It also sheds + a depedency that is no longer necessary, now that the MSRV of this crate is 1.46 and + `proc-macro-hack` is only useful for providing support for Rust versions 1.31 through 1.45. Per + [upstream](https://github.com/dtolnay/proc-macro-hack): + + > **Note:** _As of Rust 1.45 this crate is superseded by native support for #\[proc\_macro\] in + > expression position. Only consider using this crate if you care about supporting compilers between + > 1.31 and 1.45._ + +### Other + + - Update code for changes in Rust + LitBinary is now LitByteStr + +### Commit Statistics + + + + - 232 commits contributed to the release. + - 3 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Release phf_shared v0.11.1, phf_generator v0.11.1, phf_macros v0.11.1, phf v0.11.1, phf_codegen v0.11.1 ([`3897b21`](https://github.com/rust-phf/rust-phf/commit/3897b21c6d38e5adcaf9110b4bb33c19f6b41977)) + - Merge pull request #264 from rust-phf/tweak-changelog ([`97f997d`](https://github.com/rust-phf/rust-phf/commit/97f997d2be827ca636a29046c78e2c09c5c62650)) + - Replace handmade changelog with generated one by `cargo-smart-release` ([`cb84cf6`](https://github.com/rust-phf/rust-phf/commit/cb84cf6636ab52823c53e70d6abeac8f648a3482)) + - Merge pull request #260 from JohnTitor/fix-repo-link ([`1407ebe`](https://github.com/rust-phf/rust-phf/commit/1407ebe536b39611db92d765ddec4de0e6c8a16e)) + - Add category to crates ([`32a72c3`](https://github.com/rust-phf/rust-phf/commit/32a72c3859997fd6b590e9ec092ae789d2acdf55)) + - Update repository links on Cargo.toml ([`1af3b0f`](https://github.com/rust-phf/rust-phf/commit/1af3b0fe1f8fdcae7ccc1bc8d51de309fb16a6bf)) + - Merge pull request #258 from JohnTitor/release-0.11.0 ([`c0b9ef9`](https://github.com/rust-phf/rust-phf/commit/c0b9ef98e798f807f94544aeb0fff429ef280efc)) + - Release 0.11.0 ([`d2efdc0`](https://github.com/rust-phf/rust-phf/commit/d2efdc08a7eb1d0d6c414b7b2ac41ce1fe1f9a43)) + - Merge pull request #257 from JohnTitor/edition-2021 ([`36ec885`](https://github.com/rust-phf/rust-phf/commit/36ec8854a9da4f295618e98d94aaf7150df2173e)) + - Make crates edition 2021 ([`b9d25da`](https://github.com/rust-phf/rust-phf/commit/b9d25da58b912d9927fbc41901631cd77836462b)) + - Merge pull request #256 from NZXTCorp/remove-proc-macro-hack ([`a85f070`](https://github.com/rust-phf/rust-phf/commit/a85f070d641317a04b81da053cc4040619652e69)) + - Remove now-unnecessary `proc-macro-hack` crate usage ([`caf1ce7`](https://github.com/rust-phf/rust-phf/commit/caf1ce71aed110fb44206ce2291154572ebfe9b7)) + - Merge pull request #251 from JohnTitor/weak-deps 
([`2e1167c`](https://github.com/rust-phf/rust-phf/commit/2e1167c2046cd20aed1a906b4e23b40303cf0c00)) + - Make "unicase + macros" features work ([`11bb242`](https://github.com/rust-phf/rust-phf/commit/11bb2426f0237b1ecea8c8038630b1231ede4871)) + - Merge pull request #241 from JohnTitor/extract-macro-tests ([`7b0a313`](https://github.com/rust-phf/rust-phf/commit/7b0a3130a55176d2570300f92cb7ddca6c23da83)) + - Extract `phf_macros` tests as a separated crate ([`8cf694d`](https://github.com/rust-phf/rust-phf/commit/8cf694d76e0991b4e24ecdc5d2a88bb74713d9cd)) + - Merge pull request #240 from JohnTitor/docs-update ([`da98b9e`](https://github.com/rust-phf/rust-phf/commit/da98b9e80fdb22cd6d48a4a42489840afe603756)) + - Remove some stuff which is now unnecessary ([`6941e82`](https://github.com/rust-phf/rust-phf/commit/6941e825d09a98c1ea29a08ecd5fd605611584a4)) + - Refine doc comments ([`d8cfc43`](https://github.com/rust-phf/rust-phf/commit/d8cfc436059a1c2c3ede1afb0f9ec2333c046fc6)) + - Merge pull request #234 from JohnTitor/fix-ci ([`eba4cc2`](https://github.com/rust-phf/rust-phf/commit/eba4cc28d92c1db95cc430985a0fbc9ca63d1307)) + - Fix CI failure ([`d9b5ff2`](https://github.com/rust-phf/rust-phf/commit/d9b5ff23367d2bbcc385ff8243c7d972f45d459c)) + - Fix `phf` dev dep version ([`3cc6f05`](https://github.com/rust-phf/rust-phf/commit/3cc6f05cb07933af4cf886645d1170bdcb306b6b)) + - Merge pull request #230 from JohnTitor/release-0.10 ([`3ea14b2`](https://github.com/rust-phf/rust-phf/commit/3ea14b2166553ad6e7b9afe7244144f5d661b6c6)) + - Prepare for release 0.10.0 ([`588ac25`](https://github.com/rust-phf/rust-phf/commit/588ac25dd5c0afccea084e6f94867328a6a30454)) + - Fix publish failure ([`fbb18f9`](https://github.com/rust-phf/rust-phf/commit/fbb18f925018fa621ce8a8d334f6746ae0f1d072)) + - Merge pull request #228 from JohnTitor/release-0.9.1 ([`d527f9d`](https://github.com/rust-phf/rust-phf/commit/d527f9d016adafe7d2930e37710291030b432838)) + - Prepare for v0.9.1 
([`9b71978`](https://github.com/rust-phf/rust-phf/commit/9b719789149ef195ef5eba093b7e73255fbef8dc)) + - Merge pull request #224 from bhgomes/const-fns ([`65deaf7`](https://github.com/rust-phf/rust-phf/commit/65deaf745b5175b6b8e645b6c66e53fc55bb3a85)) + - Remove Slice type and fix some docs ([`99d3533`](https://github.com/rust-phf/rust-phf/commit/99d353390f8124a283da9202fd4d163e68bc1949)) + - Merge pull request #223 from JohnTitor/minor-cleanup ([`c746106`](https://github.com/rust-phf/rust-phf/commit/c746106ad05917ad62f244504727b07e07c3e075)) + - Minor cleanups ([`8868d08`](https://github.com/rust-phf/rust-phf/commit/8868d088e2fed36fcd7741e9a1c5bf68bef4f46e)) + - Merge pull request #222 from JohnTitor/precisify-msrv ([`50f8a0d`](https://github.com/rust-phf/rust-phf/commit/50f8a0d3d3f4cc7e15146e29e0559ba057a25a4d)) + - Bless tests ([`dab668c`](https://github.com/rust-phf/rust-phf/commit/dab668ccc8b638548cd78678de8427ed5e765b21)) + - Merge pull request #220 from JohnTitor/fix-release-process ([`29f9100`](https://github.com/rust-phf/rust-phf/commit/29f910079b75623420a19f3bd91a341821e02118)) + - Fix the release failure ([`647f331`](https://github.com/rust-phf/rust-phf/commit/647f331d43dcf2b61625cccffbd31f95ad076d05)) + - Downgrade `phf` dev-dep version for now ([`7dd8a1b`](https://github.com/rust-phf/rust-phf/commit/7dd8a1b410fea96820bfe489f53f1c6fd9d64ba5)) + - Merge pull request #219 from JohnTitor/release-0.9.0 ([`307969f`](https://github.com/rust-phf/rust-phf/commit/307969ff3bb8cae320e648890a9525920035944b)) + - Prepare 0.9.0 release ([`2ca46c4`](https://github.com/rust-phf/rust-phf/commit/2ca46c4f9c9083c128fcc6add33dc5986638940f)) + - Cleanup cargo metadata ([`a9e4b0a`](https://github.com/rust-phf/rust-phf/commit/a9e4b0a1e84825004fa66e938b870f83d3147d0d)) + - Merge pull request #218 from JohnTitor/cleanup ([`76f9072`](https://github.com/rust-phf/rust-phf/commit/76f907239af9b0cca7dac4e6d702cedc72f6f371)) + - Fix test 
([`ffa7e41`](https://github.com/rust-phf/rust-phf/commit/ffa7e41a767dd6021a7f42f012dab0befe6d0932)) + - Run rustfmt check on CI ([`1adfb30`](https://github.com/rust-phf/rust-phf/commit/1adfb305704cbced7c63e58b99bd53847298dbe6)) + - Run rustfmt ([`dd86c6c`](https://github.com/rust-phf/rust-phf/commit/dd86c6c103f25021b52144085b8fab0a94582bef)) + - Merge pull request #217 from JohnTitor/rename-feature ([`ff77659`](https://github.com/rust-phf/rust-phf/commit/ff77659a001c08f1f069a17cc5d2ff6fdd51569c)) + - Rename `unicase_support` to `unicase` ([`b47174b`](https://github.com/rust-phf/rust-phf/commit/b47174bb9ebbd68e41316e1aa39c6541a45356a6)) + - Merge pull request #215 from rust-phf/gha ([`12121ec`](https://github.com/rust-phf/rust-phf/commit/12121ec6d16d79d73cf9a2a7cdae1681798351b4)) + - Run UI tests only on stable ([`7522b16`](https://github.com/rust-phf/rust-phf/commit/7522b160e76e981e430f6586dbfa8747c85f2f76)) + - Merge pull request #205 from skyfloogle/ordered-stuff ([`9ae1678`](https://github.com/rust-phf/rust-phf/commit/9ae1678f2507d6d26a1b780385a2e17bdfbb0b5c)) + - Add back ordered_map, ordered_set ([`0ab0108`](https://github.com/rust-phf/rust-phf/commit/0ab01081e4bd8f40bc18ab554c95f217220228d5)) + - Merge pull request #209 from JohnTitor/unicase_support ([`ec43f5c`](https://github.com/rust-phf/rust-phf/commit/ec43f5c912e48d7f56a4126fca8247733baee18f)) + - Improve implementation for unicase support ([`6957e47`](https://github.com/rust-phf/rust-phf/commit/6957e470b6fcd3b389440bf3d2ddcb12e1d38911)) + - Restore unicase_support for phf_macros ([`77e6cce`](https://github.com/rust-phf/rust-phf/commit/77e6cce1931fe8b43e434061a369f3620b3e97e0)) + - Merge pull request #208 from JohnTitor/simplify-workspace ([`a47ac36`](https://github.com/rust-phf/rust-phf/commit/a47ac36b16dd8798659be3e24f74051cd1ed760d)) + - Use `[patch.crates-io]` section instead of path key ([`f47515b`](https://github.com/rust-phf/rust-phf/commit/f47515bce5c433214dbecee262a7a6f14e6a74d4)) + - Merge pull 
request #206 from Kazurin-775/master ([`7ebc9e7`](https://github.com/rust-phf/rust-phf/commit/7ebc9e7986ca9ae86c6e871b4fd495a401d6b5ca)) + - Fix phf_macros on no_std ([`d7af3dc`](https://github.com/rust-phf/rust-phf/commit/d7af3dc96a67070e2f9000158d074825f0a9d592)) + - Merge pull request #207 from JohnTitor/fix-ci ([`5b42ba6`](https://github.com/rust-phf/rust-phf/commit/5b42ba673ac03299799a69b317dfff90a994b240)) + - Update stderrs ([`0f1407e`](https://github.com/rust-phf/rust-phf/commit/0f1407ec8aa6df74e7ed95dd073685295958d5d5)) + - Merge pull request #201 from benesch/rand-08-redux ([`73a6799`](https://github.com/rust-phf/rust-phf/commit/73a6799f048228039af32c8e21246a63d977c9e3)) + - Update expected test case output for latest nightly ([`e387f69`](https://github.com/rust-phf/rust-phf/commit/e387f69540138026ab679537322c94500876fe8d)) + - Merge pull request #180 from abonander/master ([`81c7cc5`](https://github.com/rust-phf/rust-phf/commit/81c7cc5b48649108428671d3b8ad151f6fbdb359)) + - Release v0.8.0 ([`4060288`](https://github.com/rust-phf/rust-phf/commit/4060288dc2c1ebe3b0630e4016ed51935bb0c863)) + - Merge pull request #181 from mati865/criterion ([`696eee1`](https://github.com/rust-phf/rust-phf/commit/696eee1f38213fe4a404ddfb9ef10d8e61ef0700)) + - Avoid missing main error in tests ([`1992222`](https://github.com/rust-phf/rust-phf/commit/19922229dfe8c25076ab13344a0b876fe2c3bda3)) + - Merge pull request #179 from FauxFaux/bumps ([`5f86fa4`](https://github.com/rust-phf/rust-phf/commit/5f86fa46ebf28eb6ef83d70d58b1212795639ba3)) + - Upgrade syn/proc-macro ([`d40d663`](https://github.com/rust-phf/rust-phf/commit/d40d663ca96f668bcd6f86cc691085629111c0b5)) + - Merge pull request #171 from abonander/170-removals ([`0d00821`](https://github.com/rust-phf/rust-phf/commit/0d0082178568036736bb6d51cb91f95ca5a616c3)) + - Remove ordered_map, ordered_set, phf_builder ([`8ae2bb8`](https://github.com/rust-phf/rust-phf/commit/8ae2bb886841a69a4fc482f439e2374f2373ab15)) + - Merge pull 
request #166 from abonander/158-trybuild ([`50c6c75`](https://github.com/rust-phf/rust-phf/commit/50c6c75d406b529601f0377afba93e562bbff2aa)) + - Port compile-fail tests to trybuild ([`4a4256c`](https://github.com/rust-phf/rust-phf/commit/4a4256cf1963a349c8d63f4f93c7c562e8963d59)) + - Merge pull request #161 from abonander/display-builders ([`171f7ed`](https://github.com/rust-phf/rust-phf/commit/171f7edccb71766e9381600108a0d996513ec7ea)) + - Create `Display` adapters for `phf_codegen` builders ([`93aa7ae`](https://github.com/rust-phf/rust-phf/commit/93aa7ae1de87345ea19f38e747283bc712384650)) + - Merge pull request #164 from abonander/perf-improvements ([`70129c6`](https://github.com/rust-phf/rust-phf/commit/70129c6fbcdf428ce9f1014eea935301ac70e410)) + - Ignore compiletest ([`f1362b2`](https://github.com/rust-phf/rust-phf/commit/f1362b25674538ed02d41fcc9f7cc1c8ba6ec57c)) + - Merge pull request #160 from abonander/readme-edits ([`6e1f6ac`](https://github.com/rust-phf/rust-phf/commit/6e1f6ac9b1f917089a4501ccb32f4f477799e39c)) + - Proc_macro_hygiene is not needed with proc-macro-hack ([`ab473a4`](https://github.com/rust-phf/rust-phf/commit/ab473a4c7fcc1a8e8a99594c261fe00b4ad96865)) + - Merge pull request #149 from danielhenrymantilla/proc-macro-hack ([`ae649cd`](https://github.com/rust-phf/rust-phf/commit/ae649cd67d9ce1452092ee739971d8ee232505ee)) + - Made macros work in stable ([`4fc0d1a`](https://github.com/rust-phf/rust-phf/commit/4fc0d1a8c3bcc3950082b614d8bfa4a0f63d6962)) + - Merge branch 'master' into patch-1 ([`cd0d7ce`](https://github.com/rust-phf/rust-phf/commit/cd0d7ce1194252dcaca3153988ba2a4effa66b4f)) + - Merge pull request #155 from abonander/128-bit-ints ([`6749552`](https://github.com/rust-phf/rust-phf/commit/674955292a7028752f2eb25e34c27e881f6b11a1)) + - Implement support for 128-bit ints and fix high magnitude vals ([`5be5919`](https://github.com/rust-phf/rust-phf/commit/5be59199389c0703fff62f640eb1a0d19243fc48)) + - Merge pull request #146 from 
Benjamin-L/master ([`d41f27d`](https://github.com/rust-phf/rust-phf/commit/d41f27d3e2bcbb4a2868a62b0e022b4bdb267d8b)) + - Fixed typo in benchmark ([`f46b2e1`](https://github.com/rust-phf/rust-phf/commit/f46b2e19622de2f845ea5eb8e8d4f54ece364242)) + - Fix tests ([`ae4ef3e`](https://github.com/rust-phf/rust-phf/commit/ae4ef3ea68d6baca0916b5ef2a15245ad78674ae)) + - Release v0.7.24 ([`1287414`](https://github.com/rust-phf/rust-phf/commit/1287414b1302d2d717c5f4be81accf4c12ccad48)) + - Reexport macros through phf crate ([`588fd1a`](https://github.com/rust-phf/rust-phf/commit/588fd1a785492afa5ad76db0556097e32e24387d)) + - Convert phf_macros to new-style proc-macros ([`5ae4131`](https://github.com/rust-phf/rust-phf/commit/5ae413129c391223782bc2944ec0ffbded103791)) + - Release v0.7.23 ([`a050b6f`](https://github.com/rust-phf/rust-phf/commit/a050b6f2a6b825bf0824339266ab9545340420d4)) + - Update to nightly-2018-08-23 ([`e03f536`](https://github.com/rust-phf/rust-phf/commit/e03f536f32a8a2a31d07e43b19e05c7d4fd1cb82)) + - Release 0.7.22 ([`ab88405`](https://github.com/rust-phf/rust-phf/commit/ab884054fa17eef915db2bdb5259c7aa71fbfea6)) + - Fix build ([`2071d25`](https://github.com/rust-phf/rust-phf/commit/2071d2515ff37590c45ee2e88cead583cdb81089)) + - Update to latest nightly ([`fcf758f`](https://github.com/rust-phf/rust-phf/commit/fcf758faa21c6c2c93dbab9fe6ac82a36bab0dd9)) + - Upgrade rand ([`e7b5a35`](https://github.com/rust-phf/rust-phf/commit/e7b5a35d14f6927a748f3c55a1c87b5b751ececd)) + - Release v0.7.21 ([`6c7e2d9`](https://github.com/rust-phf/rust-phf/commit/6c7e2d9ce17ff1b87507925bdbe87e6e682ed3e4)) + - Merge pull request #101 from SimonSapin/rustup ([`8889199`](https://github.com/rust-phf/rust-phf/commit/888919958cd0b8bb1ca81b3e4d59fdb6716d30f1)) + - Upgrade to rustc 1.16.0-nightly (c07a6ae77 2017-01-17) ([`dc756bf`](https://github.com/rust-phf/rust-phf/commit/dc756bfb1400715eeedd0dfaa394296274f59be4)) + - Don't ICE on bad syntax 
([`e87e95f`](https://github.com/rust-phf/rust-phf/commit/e87e95fb96cfad1cc6699b828fb8994d2429f424)) + - Link to docs.rs ([`61142c5`](https://github.com/rust-phf/rust-phf/commit/61142c5aa168cff1bf53a6961ddc12012b49e1bb)) + - Cleanup ([`9278c47`](https://github.com/rust-phf/rust-phf/commit/9278c470b33571de286314cae555c4de9dd7d177)) + - Fix tests ([`5947cd1`](https://github.com/rust-phf/rust-phf/commit/5947cd14b9aac452f4f8feb25b57fd11240970ee)) + - Remove time dependency ([`98f56e5`](https://github.com/rust-phf/rust-phf/commit/98f56e53c212795e048c7baa0f488e1b294e9c37)) + - Dependency cleanup ([`f106aa6`](https://github.com/rust-phf/rust-phf/commit/f106aa66d85abfba3d627d12fd46a9b080c83e95)) + - Release v0.7.20 ([`f631f50`](https://github.com/rust-phf/rust-phf/commit/f631f50abfaf6ea3d6fc8caaada47975b6df3a62)) + - Merge pull request #96 from nox/rustup ([`2f509ca`](https://github.com/rust-phf/rust-phf/commit/2f509ca1a5e7910c3bc7aec773418098bc27d3ea)) + - Update to Rust 1.15.0-nightly (7b3eeea22 2016-11-21) ([`39cc485`](https://github.com/rust-phf/rust-phf/commit/39cc485f777daaf2076f1da7337cc5ad7e9f00ad)) + - Merge branch 'release' ([`ea7e256`](https://github.com/rust-phf/rust-phf/commit/ea7e2562706663632a0af65ae9fa94e5cf78c4ea)) + - Merge branch 'release-v0.7.19' into release ([`81a4806`](https://github.com/rust-phf/rust-phf/commit/81a4806b05f14fb49aa972de27a42926a542ec44)) + - Release v0.7.19 ([`0a98dd1`](https://github.com/rust-phf/rust-phf/commit/0a98dd1865d12a3fa4cc27bdb38fa1e7374940d9)) + - Merge pull request #95 from nox/rustup ([`969bcd5`](https://github.com/rust-phf/rust-phf/commit/969bcd57629b97f06f3cf05453e36cd584cd85f7)) + - Update phf_macros to Rust 1.14.0-nightly (7c69b0d5a 2016-11-01) ([`b7d2d4d`](https://github.com/rust-phf/rust-phf/commit/b7d2d4d36cb43a8fa159135250bd2265cb30f523)) + - Merge branch 'release' ([`ecab54b`](https://github.com/rust-phf/rust-phf/commit/ecab54b8a028c88938f220dbb0a684e017bab62f)) + - Merge branch 'release-v0.7.18' into release 
([`dfa970b`](https://github.com/rust-phf/rust-phf/commit/dfa970b229cc32cfb2da1692aa94ad8a266e704a)) + - Release v0.7.18 ([`3f71765`](https://github.com/rust-phf/rust-phf/commit/3f717650f4331f5dbb9d7a3f878228fcf1138729)) + - Merge pull request #94 from Bobo1239/master ([`81f2a5d`](https://github.com/rust-phf/rust-phf/commit/81f2a5d7bc9897711a064b343b8a8b6216e252b7)) + - Fix for latest nightly ([`35e991b`](https://github.com/rust-phf/rust-phf/commit/35e991b11efca3bd065a28f661ab76f423a83601)) + - Merge branch 'release' ([`5f08563`](https://github.com/rust-phf/rust-phf/commit/5f0856327731107d9fada1b0318f6f15f32957c2)) + - Merge branch 'release-v0.7.17' into release ([`e073dd2`](https://github.com/rust-phf/rust-phf/commit/e073dd262d1b4c95234222ee5048fc883b9c7301)) + - Release v0.7.17 ([`21ecf72`](https://github.com/rust-phf/rust-phf/commit/21ecf72101715e4754db95a64ecd7de5a37b7f14)) + - Merge pull request #92 from Bobo1239/master ([`d4b788d`](https://github.com/rust-phf/rust-phf/commit/d4b788dbce05fa8e103bd9d0a3022230ae738b81)) + - Fix for latest nightly ([`cb1ec95`](https://github.com/rust-phf/rust-phf/commit/cb1ec955442750fc712d155346beeb9562905602)) + - Merge pull request #91 from Bobo1239/master ([`bf472f2`](https://github.com/rust-phf/rust-phf/commit/bf472f2baed1552530a80c95ba5872a78fd68a5c)) + - Remove dead code ([`df0d8e8`](https://github.com/rust-phf/rust-phf/commit/df0d8e8ae9b23482fb19ca70f1f3bd6cdfe59358)) + - Add compile-fail test for equivalent UniCase keys ([`711515a`](https://github.com/rust-phf/rust-phf/commit/711515ad0ab53c14303b6c659a1fb3c2b3c86df5)) + - Add UniCase support to phf_macros and bump unicase version ([`2af3abb`](https://github.com/rust-phf/rust-phf/commit/2af3abb00cafc85d43755e43767a2a8b274f6670)) + - Merge branch 'release' ([`839f06d`](https://github.com/rust-phf/rust-phf/commit/839f06d5a10c1300353b8f3c972990624695b668)) + - Merge branch 'release-v0.7.16' into release 
([`6f5575c`](https://github.com/rust-phf/rust-phf/commit/6f5575c9b12d3619ea17c0825a613fcac12820f4)) + - Release v0.7.16 ([`8bf29c1`](https://github.com/rust-phf/rust-phf/commit/8bf29c10a878c83d73cc40385f0e96cb9cc95afa)) + - Merge pull request #89 from Machtan/master ([`ce387c3`](https://github.com/rust-phf/rust-phf/commit/ce387c3e2fb64ee031e812b93a64064098c5d617)) + - Update the TokenTree import ([`f404629`](https://github.com/rust-phf/rust-phf/commit/f40462989e75ce85de8c88d6faaee934d05fe006)) + - Merge branch 'release' ([`b4ec398`](https://github.com/rust-phf/rust-phf/commit/b4ec398f415e5cac2cd4d794b1889788e644447f)) + - Merge branch 'release-v0.7.15' into release ([`6bbc9e2`](https://github.com/rust-phf/rust-phf/commit/6bbc9e249b9a84e2019432b7d3b178851d2d776e)) + - Release v0.7.15 ([`20f896e`](https://github.com/rust-phf/rust-phf/commit/20f896e6975cabb9cf9883b08eaa5b3da8597f11)) + - Merge branch 'release' ([`7c692d4`](https://github.com/rust-phf/rust-phf/commit/7c692d42970bf6cb2540f6b2d3c88d63b3fd1f7a)) + - Merge branch 'release-v0.7.14' into release ([`ea8dd65`](https://github.com/rust-phf/rust-phf/commit/ea8dd652c292746a20bf3a680e9f925f6f0530b1)) + - Release v0.7.14 ([`fee66fc`](https://github.com/rust-phf/rust-phf/commit/fee66fc20e33f2b119f830a8926f3b6e52abcf09)) + - Introduce a Slice abstraction for buffers ([`0cc3844`](https://github.com/rust-phf/rust-phf/commit/0cc38449c21f29bd9348e28c5719d650e16159cf)) + - Merge branch 'release' ([`d9351e1`](https://github.com/rust-phf/rust-phf/commit/d9351e1488bd42d1a4453e4a465177fb1c781fdc)) + - Merge branch 'release-v0.7.13' into release ([`b582e4e`](https://github.com/rust-phf/rust-phf/commit/b582e4ecec23be992ba915fc7873c0d5598f388a)) + - Release v0.7.13 ([`4769a6d`](https://github.com/rust-phf/rust-phf/commit/4769a6d2ce1d392da06e4b3cb833a1cdccb1f1aa)) + - Merge pull request #80 from nox/rustup ([`6d17c1f`](https://github.com/rust-phf/rust-phf/commit/6d17c1ffe01d82eaeb0d087762c73ed6ab288bbe)) + - Update to Rust 
2016-02-22 ([`c995514`](https://github.com/rust-phf/rust-phf/commit/c9955143ffdb07bf85a525494811bd96517bf688)) + - Merge branch 'release' ([`5659a9d`](https://github.com/rust-phf/rust-phf/commit/5659a9db39bc5ee2179b264fce4cba4384d6d025)) + - Merge branch 'release-v0.7.12' into release ([`2f0a5de`](https://github.com/rust-phf/rust-phf/commit/2f0a5de9f01d9d22c774d8d85daec2a047a462e8)) + - Release v0.7.12 ([`9b75ee5`](https://github.com/rust-phf/rust-phf/commit/9b75ee5ed14060c45a5785fba0387be09e698624)) + - Merge pull request #77 from nox/byte-string-key ([`75606bc`](https://github.com/rust-phf/rust-phf/commit/75606bc371b532dddb814588bc65a9a2a5343ddb)) + - Support byte string keys in phf_macros (fixes #76) ([`652beae`](https://github.com/rust-phf/rust-phf/commit/652beae0cac6711ab0931d8dc844cd291559dad7)) + - Merge branch 'release' ([`87ffab8`](https://github.com/rust-phf/rust-phf/commit/87ffab863aaeefb5ac2164da62f0407122d8057e)) + - Merge branch 'release-v0.7.11' into release ([`7260d04`](https://github.com/rust-phf/rust-phf/commit/7260d04413349bacab484afb74f9a496335278e1)) + - Release v0.7.11 ([`a004227`](https://github.com/rust-phf/rust-phf/commit/a0042277b181ec95fcbf29751b9a453f4f962ebb)) + - Merge pull request #74 from djudd/fix-eat-retval ([`4791e96`](https://github.com/rust-phf/rust-phf/commit/4791e9602bc00e67bc9dd22fa55a58d7609d469c)) + - Update for changed return value of parser.eat ([`82da9f0`](https://github.com/rust-phf/rust-phf/commit/82da9f00f404634c09097f9116cda9e8e742d556)) + - Switch timing info back to a hint ([`771e781`](https://github.com/rust-phf/rust-phf/commit/771e781e704e581c1a103f56ed0f6f2a68917883)) + - Merge branch 'release' ([`1579bec`](https://github.com/rust-phf/rust-phf/commit/1579bec1448c7b833f5965fe39d4ef2df66c982c)) + - Merge branch 'release-v0.7.10' into release ([`25cea13`](https://github.com/rust-phf/rust-phf/commit/25cea133fb4eec938bdfa74f04adbc8d94e30d4e)) + - Release v0.7.10 
([`c43154b`](https://github.com/rust-phf/rust-phf/commit/c43154b2661dc09620a7879c16f37b47d6ec03ae)) + - Update for syntax changes ([`3be2db8`](https://github.com/rust-phf/rust-phf/commit/3be2db8d9254214bf1571fafd466ed7d6b96af55)) + - Merge branch 'release' ([`2c67ce5`](https://github.com/rust-phf/rust-phf/commit/2c67ce5a4129cd543178bf015f021a3bb83b6895)) + - Merge branch 'release-v0.7.9' into release ([`87206e1`](https://github.com/rust-phf/rust-phf/commit/87206e1c7b8d4089370dc168402ded0c0700a447)) + - Release v0.7.9 ([`b7d29df`](https://github.com/rust-phf/rust-phf/commit/b7d29dfe0df288b2da74de195f764eace1c8e443)) + - Merge pull request #71 from djudd/rustc-plugin-rename ([`260437e`](https://github.com/rust-phf/rust-phf/commit/260437ee8dc5fcad43654b07ccef101089cadabd)) + - Registry now seems to live in rustc_plugin instead of rustc::plugin ([`ba8d701`](https://github.com/rust-phf/rust-phf/commit/ba8d7019599cb779b9f7ab983f6cc2aa4f422991)) + - Merge branch 'release' ([`cd33902`](https://github.com/rust-phf/rust-phf/commit/cd339023e90ac1ce6971fa81badea65fb1f2b086)) + - Merge branch 'release-v0.7.8' into release ([`8bc23a0`](https://github.com/rust-phf/rust-phf/commit/8bc23a023908a038d668b6f7d8e94ee416995285)) + - Release v0.7.8 ([`aad0b9b`](https://github.com/rust-phf/rust-phf/commit/aad0b9b658fb970e3df60b066961aafca1a17c44)) + - Merge pull request #70 from nrc/rustup ([`2cc2ed3`](https://github.com/rust-phf/rust-phf/commit/2cc2ed36e30dea0ce0411784be87b184c0c68961)) + - Rustup ([`a6c43fa`](https://github.com/rust-phf/rust-phf/commit/a6c43fa25e06684121df6a93b2b90405d8e0fc2e)) + - Merge branch 'release' ([`dccff69`](https://github.com/rust-phf/rust-phf/commit/dccff69384729e3d4972174ce62d8f9db9429485)) + - Merge branch 'release-v0.7.7' into release ([`2d988b7`](https://github.com/rust-phf/rust-phf/commit/2d988b7dfb04d949246adc047f6b195263612246)) + - Release v0.7.7 ([`c9e7a93`](https://github.com/rust-phf/rust-phf/commit/c9e7a93f4d6f85a72651aba6187e4c956d8c1167)) + - 
Merge pull request #69 from nrc/rustup ([`8185728`](https://github.com/rust-phf/rust-phf/commit/81857284f30ff832f4c8eb7c68a2957f2acdb198)) + - Rustup for phf_macros ([`4c51ffc`](https://github.com/rust-phf/rust-phf/commit/4c51ffc6d63f768dea75cab65ad6cb809bce9bb4)) + - Run through rustfmt ([`58e2223`](https://github.com/rust-phf/rust-phf/commit/58e222380b7fc9609a055cb5a6110ba04e47d677)) + - Merge branch 'release' ([`776046c`](https://github.com/rust-phf/rust-phf/commit/776046c961456dee9e16a6b6574d336c66e259f8)) + - Merge branch 'release-v0.7.6' into release ([`2ea7d5c`](https://github.com/rust-phf/rust-phf/commit/2ea7d5cab5e9e54952ca618b43ec3583a33a4847)) + - Release v0.7.6 ([`5bcd5c9`](https://github.com/rust-phf/rust-phf/commit/5bcd5c95215f5aa29e133cb2912662085a8158f0)) + - Merge branch 'release' ([`1f770df`](https://github.com/rust-phf/rust-phf/commit/1f770df1290b586a8d641ecb0bbd105080afc0ea)) + - Merge branch 'release-v0.7.5' into release ([`bb65b8c`](https://github.com/rust-phf/rust-phf/commit/bb65b8cca30ef9d4518e3083558019a972873efa)) + - Release v0.7.5 ([`fda44f5`](https://github.com/rust-phf/rust-phf/commit/fda44f550401c1bd4aad29bb2c07030b86761028)) + - Merge pull request #65 from dinfuehr/master ([`fc1f6b0`](https://github.com/rust-phf/rust-phf/commit/fc1f6b00c5aeb00b1d1e5d418b5979c7cb8b8afd)) + - Update code for changes in Rust ([`8225c4b`](https://github.com/rust-phf/rust-phf/commit/8225c4b90d6ee71483304e71342c269fca86a044)) + - Macro assemble benchmark map and match to ensure sync ([`a2486ed`](https://github.com/rust-phf/rust-phf/commit/a2486eda19c647d16c9976bb33ba8634388a0569)) + - Merge pull request #63 from erickt/master ([`e879788`](https://github.com/rust-phf/rust-phf/commit/e8797888ff6f1a7a690a44844b692107cbf2c8a9)) + - Add benchmarks ([`9585cc3`](https://github.com/rust-phf/rust-phf/commit/9585cc3c0391725d02f6199eaed500ba5fafcaf3)) + - Merge branch 'release' 
([`269b5dc`](https://github.com/rust-phf/rust-phf/commit/269b5dc41ebf82f423393d5219e8107e9c911a03)) + - Merge branch 'release-v0.7.4' into release ([`7c093e8`](https://github.com/rust-phf/rust-phf/commit/7c093e83ffe5192d9cdcd5402b6abb7800ffafb3)) + - Release v0.7.4 ([`c7c0d3c`](https://github.com/rust-phf/rust-phf/commit/c7c0d3c294126157f0275a05b7c3a65c419234a1)) + - Update PhfHash to mirror std::hash::Hash ([`96ef156`](https://github.com/rust-phf/rust-phf/commit/96ef156baae669b233673d6be2b96617ad48551e)) + - Release v0.7.3 ([`77ea239`](https://github.com/rust-phf/rust-phf/commit/77ea23917e908b10c4c5c463671a8409292f8661)) + - Release v0.7.2 ([`642b69d`](https://github.com/rust-phf/rust-phf/commit/642b69d0100a4ee7ec6e430ef1351bd1f28f9a4a)) + - Add an index test ([`f51f449`](https://github.com/rust-phf/rust-phf/commit/f51f449261ddd8ad30bfb5507b166e7980df1aa7)) + - Release v0.7.1 ([`9cb9de9`](https://github.com/rust-phf/rust-phf/commit/9cb9de911ad4e16964f0def29780dde1630c3619)) + - Fix phf-macros ([`6c98e9f`](https://github.com/rust-phf/rust-phf/commit/6c98e9f16a6d9ebf11e0a9c8e9ff91b4b320d2af)) + - Release v0.7.0 ([`555a690`](https://github.com/rust-phf/rust-phf/commit/555a690561673597aee068650ac884bbcc2e31cf)) + - Stabilize phf ([`e215273`](https://github.com/rust-phf/rust-phf/commit/e2152739cbdd471116d88bb4a9cea4cdfede1e42)) + - Release v0.6.19 ([`5810d30`](https://github.com/rust-phf/rust-phf/commit/5810d30ef2162f33cfb4da99c65b7344c7f2913b)) + - Release v0.6.18 ([`36efc72`](https://github.com/rust-phf/rust-phf/commit/36efc721478d097fba1e5458cbdd9f288637abae)) + - Fix for upstream changes ([`eabadcf`](https://github.com/rust-phf/rust-phf/commit/eabadcf7e8af351ba8f07d86746e35adc8c5812e)) + - Release v0.6.17 ([`271ccc2`](https://github.com/rust-phf/rust-phf/commit/271ccc27d885363d4d8c549f75624d08c48e56c5)) + - Release v0.6.15 ([`ede14df`](https://github.com/rust-phf/rust-phf/commit/ede14df1e574674852b09bcafff4ad549ebfd4ae)) + - Remove broken test 
([`f54adb7`](https://github.com/rust-phf/rust-phf/commit/f54adb783a71678c9397b4d7c1e02ee82b9646b8)) + - Release v0.6.14 ([`cf64ebb`](https://github.com/rust-phf/rust-phf/commit/cf64ebb8f769c9f12c9a03d05713dde6b8caf371)) + - Release v0.6.13 ([`4fdb533`](https://github.com/rust-phf/rust-phf/commit/4fdb5331fd9978ca3e180a06fb2e34627f50fb77)) + - Fix warnings and use debug builders ([`4d28684`](https://github.com/rust-phf/rust-phf/commit/4d28684b72333e911e23b898b5780947d49822a5)) + - Release v0.6.12 ([`59ca586`](https://github.com/rust-phf/rust-phf/commit/59ca58637206c9806c13cc24cb35cb7d0ce9d23f)) + - Fix phf_macros ([`6567152`](https://github.com/rust-phf/rust-phf/commit/6567152be9e018a99fedf6e54017d827812b8f13)) + - Release v0.6.11 ([`e1e6d3b`](https://github.com/rust-phf/rust-phf/commit/e1e6d3b40a6babddd0989406f2b4e952443ff52e)) + - Release v0.6.10 ([`fc45373`](https://github.com/rust-phf/rust-phf/commit/fc45373b34a461664f532c5108f3d2625172c128)) + - Add doc URLs ([`4605db3`](https://github.com/rust-phf/rust-phf/commit/4605db3e7e0c4bef09ccf6c09c7dbcc36b707a9f)) + - Add documentation for phf_macros ([`8eca797`](https://github.com/rust-phf/rust-phf/commit/8eca79711f33d04ad773a023581b6bd0a6f1efdc)) + - Move generation logic to its own crate ([`cfeee87`](https://github.com/rust-phf/rust-phf/commit/cfeee8714caa4ecb3199df2a2ac149fe6a28ecc0)) + - Move tests to phf_macros ([`40dbc32`](https://github.com/rust-phf/rust-phf/commit/40dbc328456003484716021cc317156967f1b2c1)) + - Release v0.6.9 ([`822f4e3`](https://github.com/rust-phf/rust-phf/commit/822f4e3fb127dc02d36d802803d71aa5b98bed3c)) + - More fixes ([`0c04b9c`](https://github.com/rust-phf/rust-phf/commit/0c04b9cb2679a63394778a7362ef14441b6c2032)) + - Release v0.6.8 ([`cd637ca`](https://github.com/rust-phf/rust-phf/commit/cd637cafb6d37b1901b6c119a7d26f253e9a288e)) + - Release v0.6.7 ([`bfc36c9`](https://github.com/rust-phf/rust-phf/commit/bfc36c979225f652cdb72f3b1f2a25e77b50ab8c)) + - Fix for upstream changes 
([`5ff7040`](https://github.com/rust-phf/rust-phf/commit/5ff70403a1b12c30206b128ac619b31c69e42eb4)) + - Merge pull request #47 from globin/fix/rustup ([`5aac93b`](https://github.com/rust-phf/rust-phf/commit/5aac93bad40ccac195e1f66614a29a9240dcaf54)) + - Rustup to current master ([`f6922e2`](https://github.com/rust-phf/rust-phf/commit/f6922e245752b4932f9a3a420c1f8d10e66e0b78)) + - Release v0.6.6 ([`b09a174`](https://github.com/rust-phf/rust-phf/commit/b09a174a166c7744c5989bedc6ba68340f6f7fd1)) + - Release v0.6.5 ([`271e784`](https://github.com/rust-phf/rust-phf/commit/271e7848f35b31d6ce9fc9268de173738464bfc8)) + - Move docs to this repo and auto build them ([`f8ef160`](https://github.com/rust-phf/rust-phf/commit/f8ef160480e2d4ce72fa7afb6ebce70e45acbc76)) + - Release v0.6.4 ([`6866c1b`](https://github.com/rust-phf/rust-phf/commit/6866c1bf5ad5091bc969f1356884aa86c27458cb)) + - Remove unused feature ([`2ee5f78`](https://github.com/rust-phf/rust-phf/commit/2ee5f788d493d929b669550c144ff23aad52721b)) + - Merge pull request #45 from Manishearth/internedstring ([`9b9c009`](https://github.com/rust-phf/rust-phf/commit/9b9c00934e33d920ab287765458d26ab321d8ab4)) + - InternedString.get() removal; brings us to rustc 1.0.0-dev (80627cd3c 2015-02-07 12:01:31 +0000) ([`3150bf0`](https://github.com/rust-phf/rust-phf/commit/3150bf0d608b051f2c8db3826ee21ce593f4f61c)) + - Release v0.6.3 ([`b0c5e3c`](https://github.com/rust-phf/rust-phf/commit/b0c5e3cb69742f81160ea80a3ba1782a0b4e01a2)) + - Use out of tree rand ([`9e1623b`](https://github.com/rust-phf/rust-phf/commit/9e1623bc7d1b8a432cdae47187eab40fa168401f)) + - Release v0.6.2 ([`d9ddf45`](https://github.com/rust-phf/rust-phf/commit/d9ddf45b15ba812b0d3acedffb08e901742e56c4)) + - Release v0.6.1 ([`ca0e9f6`](https://github.com/rust-phf/rust-phf/commit/ca0e9f6b9c737f3d11bcad2f4624bb5603a8170e)) + - Fix for stability changes ([`f7fb510`](https://github.com/rust-phf/rust-phf/commit/f7fb510dfe67f11522a2d214bd14d21f910bfd7b)) + - Release v0.6.0 
([`09d6870`](https://github.com/rust-phf/rust-phf/commit/09d687053caf4d321f72907528573b3334fae3c2)) + - Rename phf_mac to phf_macros ([`c50d107`](https://github.com/rust-phf/rust-phf/commit/c50d1077b1d53fccd703021911a7100b8937bbc7)) +
+ diff --git a/anneal/vendor/phf_macros/Cargo.lock b/anneal/vendor/phf_macros/Cargo.lock new file mode 100644 index 0000000000..1654882e07 --- /dev/null +++ b/anneal/vendor/phf_macros/Cargo.lock @@ -0,0 +1,105 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "phf_generator" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cbb1126afed61dd6368748dae63b1ee7dc480191c6262a3b4ff1e29d86a6c5b" +dependencies = [ + "fastrand", + "phf_shared", +] + +[[package]] +name = "phf_macros" +version = "0.12.1" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2", + "quote", + "syn", + "uncased", + "unicase", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", + "uncased", + "unicase", +] + +[[package]] +name = "proc-macro2" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "syn" +version = "2.0.103" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e4307e30089d6fd6aff212f2da3a1f9e32f3223b1f010fb09b7c95f90f3ca1e8" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "uncased" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicase" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" diff --git a/anneal/vendor/phf_macros/Cargo.toml b/anneal/vendor/phf_macros/Cargo.toml new file mode 100644 index 0000000000..2b9974b961 --- /dev/null +++ b/anneal/vendor/phf_macros/Cargo.toml @@ -0,0 +1,71 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. 
+ +[package] +edition = "2021" +rust-version = "1.61" +name = "phf_macros" +version = "0.12.1" +authors = ["Steven Fackler "] +build = false +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = "Macros to generate types in the phf crate" +readme = "README.md" +categories = ["data-structures"] +license = "MIT" +repository = "https://github.com/rust-phf/rust-phf" +resolver = "1" + +[features] +uncased = [ + "uncased_", + "phf_shared/uncased", +] +unicase = [ + "unicase_", + "phf_shared/unicase", +] + +[lib] +name = "phf_macros" +path = "src/lib.rs" +proc-macro = true + +[dependencies.phf_generator] +version = "0.12" + +[dependencies.phf_shared] +version = "^0.12" +default-features = false + +[dependencies.proc-macro2] +version = "1" + +[dependencies.quote] +version = "1" + +[dependencies.syn] +version = "2" +features = ["full"] + +[dependencies.uncased_] +version = "0.9.7" +optional = true +package = "uncased" + +[dependencies.unicase_] +version = "2.4.0" +optional = true +package = "unicase" diff --git a/anneal/vendor/phf_macros/Cargo.toml.orig b/anneal/vendor/phf_macros/Cargo.toml.orig new file mode 100644 index 0000000000..2eadec3747 --- /dev/null +++ b/anneal/vendor/phf_macros/Cargo.toml.orig @@ -0,0 +1,28 @@ +[package] +name = "phf_macros" +version = "0.12.1" +authors = ["Steven Fackler "] +edition = "2021" +license = "MIT" +description = "Macros to generate types in the phf crate" +repository = "https://github.com/rust-phf/rust-phf" +readme = "../README.md" +rust-version = "1.61" +categories = ["data-structures"] + +[lib] +proc-macro = true + +[features] +unicase = ["unicase_", "phf_shared/unicase"] +uncased = ["uncased_", "phf_shared/uncased"] + +[dependencies] +syn = { version = "2", features = ["full"] } +quote = "1" +proc-macro2 = "1" +unicase_ = { package = "unicase", version = "2.4.0", optional = true } +uncased_ = { package = "uncased", version = "0.9.7", optional = true } + +phf_generator = 
"0.12" +phf_shared = { version = "^0.12", default-features = false } diff --git a/anneal/vendor/phf_macros/LICENSE b/anneal/vendor/phf_macros/LICENSE new file mode 100644 index 0000000000..cd5bf6855b --- /dev/null +++ b/anneal/vendor/phf_macros/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014-2022 Steven Fackler, Yuki Okushi + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/anneal/vendor/phf_macros/README.md b/anneal/vendor/phf_macros/README.md new file mode 100644 index 0000000000..4f6b5dcf7d --- /dev/null +++ b/anneal/vendor/phf_macros/README.md @@ -0,0 +1,127 @@ +# Rust-PHF + +[![CI](https://github.com/rust-phf/rust-phf/actions/workflows/ci.yml/badge.svg)](https://github.com/rust-phf/rust-phf/actions/workflows/ci.yml) [![Latest Version](https://img.shields.io/crates/v/phf.svg)](https://crates.io/crates/phf) + +[Documentation](https://docs.rs/phf) + +Rust-PHF is a library to generate efficient lookup tables at compile time using +[perfect hash functions](http://en.wikipedia.org/wiki/Perfect_hash_function). + +It currently uses the +[CHD algorithm](http://cmph.sourceforge.net/papers/esa09.pdf) and can generate +a 100,000 entry map in roughly .4 seconds. + +MSRV (minimum supported rust version) is Rust 1.61. + +## Usage + +PHF data structures can be constructed via either the procedural +macros in the `phf_macros` crate or code generation supported by the +`phf_codegen` crate. + +To compile the `phf` crate with a dependency on +libcore instead of libstd, enabling use in environments where libstd +will not work, set `default-features = false` for the dependency: + +```toml +[dependencies] +# to use `phf` in `no_std` environments +phf = { version = "0.12", default-features = false } +``` + +### phf_macros + +```rust +use phf::phf_map; + +#[derive(Clone)] +pub enum Keyword { + Loop, + Continue, + Break, + Fn, + Extern, +} + +static KEYWORDS: phf::Map<&'static str, Keyword> = phf_map! { + "loop" => Keyword::Loop, + "continue" => Keyword::Continue, + "break" => Keyword::Break, + "fn" => Keyword::Fn, + "extern" => Keyword::Extern, +}; + +pub fn parse_keyword(keyword: &str) -> Option { + KEYWORDS.get(keyword).cloned() +} +``` + +```toml +[dependencies] +phf = { version = "0.12", features = ["macros"] } +``` + +#### Note + +Currently, the macro syntax has some limitations and may not +work as you want. 
See [#183] or [#196] for example. + +[#183]: https://github.com/rust-phf/rust-phf/issues/183 +[#196]: https://github.com/rust-phf/rust-phf/issues/196 + +### phf_codegen + +To use `phf_codegen` on build.rs, you have to add dependencies under `[build-dependencies]`: + +```toml +[build-dependencies] +phf = { version = "0.12", default-features = false } +phf_codegen = "0.12" +``` + +Then put code on build.rs: + +```rust +use std::env; +use std::fs::File; +use std::io::{BufWriter, Write}; +use std::path::Path; + +fn main() { + let path = Path::new(&env::var("OUT_DIR").unwrap()).join("codegen.rs"); + let mut file = BufWriter::new(File::create(&path).unwrap()); + + write!( + &mut file, + "static KEYWORDS: phf::Map<&'static str, Keyword> = {}", + phf_codegen::Map::new() + .entry("loop", "Keyword::Loop") + .entry("continue", "Keyword::Continue") + .entry("break", "Keyword::Break") + .entry("fn", "Keyword::Fn") + .entry("extern", "Keyword::Extern") + .build() + ) + .unwrap(); + write!(&mut file, ";\n").unwrap(); +} +``` + +and lib.rs: + +```rust +#[derive(Clone)] +enum Keyword { + Loop, + Continue, + Break, + Fn, + Extern, +} + +include!(concat!(env!("OUT_DIR"), "/codegen.rs")); + +pub fn parse_keyword(keyword: &str) -> Option { + KEYWORDS.get(keyword).cloned() +} +``` diff --git a/anneal/vendor/phf_macros/src/lib.rs b/anneal/vendor/phf_macros/src/lib.rs new file mode 100644 index 0000000000..7002f66532 --- /dev/null +++ b/anneal/vendor/phf_macros/src/lib.rs @@ -0,0 +1,358 @@ +//! A set of macros to generate Rust source for PHF data structures at compile time. +//! See [the `phf` crate's documentation][phf] for details. +//! +//! 
[phf]: https://docs.rs/phf + +use phf_generator::HashState; +use phf_shared::PhfHash; +use proc_macro::TokenStream; +use quote::quote; +use std::collections::HashSet; +use std::hash::Hasher; +use syn::parse::{self, Parse, ParseStream}; +use syn::punctuated::Punctuated; +use syn::{parse_macro_input, Error, Expr, ExprLit, Lit, Token, UnOp}; +#[cfg(feature = "uncased")] +use uncased_::Uncased; +#[cfg(feature = "unicase")] +use unicase_::{Ascii, UniCase}; + +#[derive(Hash, PartialEq, Eq, Clone)] +enum ParsedKey { + Str(String), + Binary(Vec), + Char(char), + I8(i8), + I16(i16), + I32(i32), + I64(i64), + I128(i128), + Isize(isize), + U8(u8), + U16(u16), + U32(u32), + U64(u64), + U128(u128), + Usize(usize), + Bool(bool), + #[cfg(feature = "unicase")] + UniCase(UniCase), + #[cfg(feature = "unicase")] + UniCaseAscii(Ascii), + #[cfg(feature = "uncased")] + Uncased(Uncased<'static>), +} + +impl PhfHash for ParsedKey { + fn phf_hash(&self, state: &mut H) + where + H: Hasher, + { + match self { + ParsedKey::Str(s) => s.phf_hash(state), + ParsedKey::Binary(s) => s.phf_hash(state), + ParsedKey::Char(s) => s.phf_hash(state), + ParsedKey::I8(s) => s.phf_hash(state), + ParsedKey::I16(s) => s.phf_hash(state), + ParsedKey::I32(s) => s.phf_hash(state), + ParsedKey::I64(s) => s.phf_hash(state), + ParsedKey::I128(s) => s.phf_hash(state), + ParsedKey::Isize(s) => s.phf_hash(state), + ParsedKey::U8(s) => s.phf_hash(state), + ParsedKey::U16(s) => s.phf_hash(state), + ParsedKey::U32(s) => s.phf_hash(state), + ParsedKey::U64(s) => s.phf_hash(state), + ParsedKey::U128(s) => s.phf_hash(state), + ParsedKey::Usize(s) => s.phf_hash(state), + ParsedKey::Bool(s) => s.phf_hash(state), + #[cfg(feature = "unicase")] + ParsedKey::UniCase(s) => s.phf_hash(state), + #[cfg(feature = "unicase")] + ParsedKey::UniCaseAscii(s) => s.phf_hash(state), + #[cfg(feature = "uncased")] + ParsedKey::Uncased(s) => s.phf_hash(state), + } + } +} + +impl ParsedKey { + fn from_expr(expr: &Expr) -> Option { + match expr { + 
Expr::Lit(lit) => match &lit.lit { + Lit::Str(s) => Some(ParsedKey::Str(s.value())), + Lit::ByteStr(s) => Some(ParsedKey::Binary(s.value())), + Lit::Byte(s) => Some(ParsedKey::U8(s.value())), + Lit::Char(s) => Some(ParsedKey::Char(s.value())), + Lit::Int(s) => match s.suffix() { + // we've lost the sign at this point, so `-128i8` looks like `128i8`, + // which doesn't fit in an `i8`; parse it as a `u8` and cast (to `0i8`), + // which is handled below, by `Unary` + "i8" => Some(ParsedKey::I8(s.base10_parse::().unwrap() as i8)), + "i16" => Some(ParsedKey::I16(s.base10_parse::().unwrap() as i16)), + "i32" => Some(ParsedKey::I32(s.base10_parse::().unwrap() as i32)), + "i64" => Some(ParsedKey::I64(s.base10_parse::().unwrap() as i64)), + "i128" => Some(ParsedKey::I128(s.base10_parse::().unwrap() as i128)), + "isize" => Some(ParsedKey::Isize(s.base10_parse::().unwrap() as isize)), + "u8" => Some(ParsedKey::U8(s.base10_parse::().unwrap())), + "u16" => Some(ParsedKey::U16(s.base10_parse::().unwrap())), + "u32" => Some(ParsedKey::U32(s.base10_parse::().unwrap())), + "u64" => Some(ParsedKey::U64(s.base10_parse::().unwrap())), + "u128" => Some(ParsedKey::U128(s.base10_parse::().unwrap())), + "usize" => Some(ParsedKey::Usize(s.base10_parse::().unwrap())), + _ => None, + }, + Lit::Bool(s) => Some(ParsedKey::Bool(s.value)), + _ => None, + }, + Expr::Array(array) => { + let mut buf = vec![]; + for expr in &array.elems { + match expr { + Expr::Lit(lit) => match &lit.lit { + Lit::Int(s) => match s.suffix() { + "u8" | "" => buf.push(s.base10_parse::().unwrap()), + _ => return None, + }, + _ => return None, + }, + _ => return None, + } + } + Some(ParsedKey::Binary(buf)) + } + Expr::Unary(unary) => { + // if we received an integer literal (always unsigned) greater than i__::max_value() + // then casting it to a signed integer type of the same width will negate it to + // the same absolute value so we don't need to negate it here + macro_rules! 
try_negate ( + ($val:expr) => {if $val < 0 { $val } else { -$val }} + ); + + match unary.op { + UnOp::Neg(_) => match ParsedKey::from_expr(&unary.expr)? { + ParsedKey::I8(v) => Some(ParsedKey::I8(try_negate!(v))), + ParsedKey::I16(v) => Some(ParsedKey::I16(try_negate!(v))), + ParsedKey::I32(v) => Some(ParsedKey::I32(try_negate!(v))), + ParsedKey::I64(v) => Some(ParsedKey::I64(try_negate!(v))), + ParsedKey::I128(v) => Some(ParsedKey::I128(try_negate!(v))), + ParsedKey::Isize(v) => Some(ParsedKey::Isize(try_negate!(v))), + _ => None, + }, + UnOp::Deref(_) => { + let mut expr = &*unary.expr; + while let Expr::Group(group) = expr { + expr = &*group.expr; + } + match expr { + Expr::Lit(ExprLit { + lit: Lit::ByteStr(s), + .. + }) => Some(ParsedKey::Binary(s.value())), + _ => None, + } + } + _ => None, + } + } + Expr::Group(group) => ParsedKey::from_expr(&group.expr), + Expr::Call(call) if call.args.len() == 1 => { + let last; + let last_ahead; + + if let Expr::Path(ep) = call.func.as_ref() { + let mut segments = ep.path.segments.iter(); + last = segments.next_back()?.ident.to_string(); + last_ahead = segments.next_back()?.ident.to_string(); + } else { + return None; + } + + let mut arg = call.args.first().unwrap(); + + while let Expr::Group(group) = arg { + arg = &group.expr; + } + + let _value = match arg { + Expr::Lit(ExprLit { + attrs: _, + lit: Lit::Str(s), + }) => s.value(), + _ => { + return None; + } + }; + + match (&*last_ahead, &*last) { + #[cfg(feature = "unicase")] + ("UniCase", "unicode") => Some(ParsedKey::UniCase(UniCase::unicode(_value))), + #[cfg(feature = "unicase")] + ("UniCase", "ascii") => Some(ParsedKey::UniCase(UniCase::ascii(_value))), + #[cfg(feature = "unicase")] + ("Ascii", "new") => Some(ParsedKey::UniCaseAscii(Ascii::new(_value))), + #[cfg(feature = "uncased")] + ("UncasedStr", "new") => Some(ParsedKey::Uncased(Uncased::new(_value))), + _ => None, + } + } + _ => None, + } + } +} + +struct Key { + parsed: ParsedKey, + expr: Expr, +} + +impl 
PhfHash for Key { + fn phf_hash(&self, state: &mut H) + where + H: Hasher, + { + self.parsed.phf_hash(state) + } +} + +impl Parse for Key { + fn parse(input: ParseStream<'_>) -> parse::Result { + let expr = input.parse()?; + let parsed = ParsedKey::from_expr(&expr) + .ok_or_else(|| Error::new_spanned(&expr, "unsupported key expression"))?; + + Ok(Key { parsed, expr }) + } +} + +struct Entry { + key: Key, + value: Expr, +} + +impl PhfHash for Entry { + fn phf_hash(&self, state: &mut H) + where + H: Hasher, + { + self.key.phf_hash(state) + } +} + +impl Parse for Entry { + fn parse(input: ParseStream<'_>) -> parse::Result { + let key = input.parse()?; + input.parse::]>()?; + let value = input.parse()?; + Ok(Entry { key, value }) + } +} + +struct Map(Vec); + +impl Parse for Map { + fn parse(input: ParseStream<'_>) -> parse::Result { + let parsed = Punctuated::::parse_terminated(input)?; + let map = parsed.into_iter().collect::>(); + check_duplicates(&map)?; + Ok(Map(map)) + } +} + +struct Set(Vec); + +impl Parse for Set { + fn parse(input: ParseStream<'_>) -> parse::Result { + let parsed = Punctuated::::parse_terminated(input)?; + let set = parsed + .into_iter() + .map(|key| Entry { + key, + value: syn::parse_str("()").unwrap(), + }) + .collect::>(); + check_duplicates(&set)?; + Ok(Set(set)) + } +} + +fn check_duplicates(entries: &[Entry]) -> parse::Result<()> { + let mut keys = HashSet::new(); + for entry in entries { + if !keys.insert(&entry.key.parsed) { + return Err(Error::new_spanned(&entry.key.expr, "duplicate key")); + } + } + Ok(()) +} + +fn build_map(entries: &[Entry], state: HashState) -> proc_macro2::TokenStream { + let key = state.key; + let disps = state.disps.iter().map(|&(d1, d2)| quote!((#d1, #d2))); + let entries = state.map.iter().map(|&idx| { + let key = &entries[idx].key.expr; + let value = &entries[idx].value; + quote!((#key, #value)) + }); + + quote! 
{ + phf::Map { + key: #key, + disps: &[#(#disps),*], + entries: &[#(#entries),*], + } + } +} + +fn build_ordered_map(entries: &[Entry], state: HashState) -> proc_macro2::TokenStream { + let key = state.key; + let disps = state.disps.iter().map(|&(d1, d2)| quote!((#d1, #d2))); + let idxs = state.map.iter().map(|idx| quote!(#idx)); + let entries = entries.iter().map(|entry| { + let key = &entry.key.expr; + let value = &entry.value; + quote!((#key, #value)) + }); + + quote! { + phf::OrderedMap { + key: #key, + disps: &[#(#disps),*], + idxs: &[#(#idxs),*], + entries: &[#(#entries),*], + } + } +} + +#[proc_macro] +pub fn phf_map(input: TokenStream) -> TokenStream { + let map = parse_macro_input!(input as Map); + let state = phf_generator::generate_hash(&map.0); + + build_map(&map.0, state).into() +} + +#[proc_macro] +pub fn phf_set(input: TokenStream) -> TokenStream { + let set = parse_macro_input!(input as Set); + let state = phf_generator::generate_hash(&set.0); + + let map = build_map(&set.0, state); + quote!(phf::Set { map: #map }).into() +} + +#[proc_macro] +pub fn phf_ordered_map(input: TokenStream) -> TokenStream { + let map = parse_macro_input!(input as Map); + let state = phf_generator::generate_hash(&map.0); + + build_ordered_map(&map.0, state).into() +} + +#[proc_macro] +pub fn phf_ordered_set(input: TokenStream) -> TokenStream { + let set = parse_macro_input!(input as Set); + let state = phf_generator::generate_hash(&set.0); + + let map = build_ordered_map(&set.0, state); + quote!(phf::OrderedSet { map: #map }).into() +} diff --git a/anneal/vendor/phf_shared/.cargo-checksum.json b/anneal/vendor/phf_shared/.cargo-checksum.json new file mode 100644 index 0000000000..893c84ef67 --- /dev/null +++ b/anneal/vendor/phf_shared/.cargo-checksum.json @@ -0,0 +1 @@ 
+{"files":{".cargo_vcs_info.json":"1f19579d11e9de065ff99a30ca9ec3feaca533751c76895967413bc842524a6d","CHANGELOG.md":"b02f7b92559e6feeeb238365be367eb4ff0ff1b53c3448e13318e4076b1de39d","Cargo.lock":"670df89f93176aa2749604cc4aa2544923bc8875dbd3c836df45751afa07e58d","Cargo.toml":"352de61f9607b84245028b42ae6d20a6e2e6a71de37462784913d30696dfa9c5","Cargo.toml.orig":"bbb60e17c72214055c849c8956a4b021df97776cc650536dca7a2b98329c2c09","LICENSE":"0ab4d106b6faac07fb6a051815fd1b4d862d730895e2d7d7358c2f13565e7a38","README.md":"9fe30ff85b7670ed85175c5f572cf499d0ed231f1b60de4189aff394afa8fe9d","src/lib.rs":"c2ac8175d317dcc94368f8da6fa8aa021d56aa9326cb6eeb45834aeb12bc8f9c"},"package":"06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981"} \ No newline at end of file diff --git a/anneal/vendor/phf_shared/.cargo_vcs_info.json b/anneal/vendor/phf_shared/.cargo_vcs_info.json new file mode 100644 index 0000000000..c51a0bad7b --- /dev/null +++ b/anneal/vendor/phf_shared/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "03a930696231da01005e762425841f91587b3e04" + }, + "path_in_vcs": "phf_shared" +} \ No newline at end of file diff --git a/anneal/vendor/phf_shared/CHANGELOG.md b/anneal/vendor/phf_shared/CHANGELOG.md new file mode 100644 index 0000000000..cd50fba1cf --- /dev/null +++ b/anneal/vendor/phf_shared/CHANGELOG.md @@ -0,0 +1,330 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## 0.12.0 (2025-06-19) + + + +### Chore + + - Update version number in docs + +### Chore + + - Update changelog + +### Commit Statistics + + + + - 6 commits contributed to the release over the course of 138 calendar days. + - 163 days passed between releases. + - 2 commits were understood as [conventional](https://www.conventionalcommits.org). 
+ - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Update changelog ([`51d6baa`](https://github.com/rust-phf/rust-phf/commit/51d6baaa6ffce658fb9b56a96affaf0ddd0603e5)) + - Update version number in docs ([`08e7464`](https://github.com/rust-phf/rust-phf/commit/08e74647f00f7d77cbb81e0cb73ed663798d000f)) + - Merge pull request #289 from thaliaarchi/master ([`a6df856`](https://github.com/rust-phf/rust-phf/commit/a6df856ade4cfbf2666fcabbd70c666ea8234abf)) + - Add support for unicase::Ascii type ([`2806801`](https://github.com/rust-phf/rust-phf/commit/28068018dec5aab9b6ddc0da918431285db8cd34)) + - Merge branch 'master' into no-wasteful-allocations ([`33b8aff`](https://github.com/rust-phf/rust-phf/commit/33b8affe77cea8bdeccb5c8d6c730c78231fc138)) + - Merge branch 'master' into fastrand ([`576dd47`](https://github.com/rust-phf/rust-phf/commit/576dd47858a2db74eb4ef67a8385039ef17b867d)) +
+ +## 0.11.3 (2025-01-07) + +### Commit Statistics + + + + - 10 commits contributed to the release. + - 562 days passed between releases. + - 0 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Adjusting changelogs prior to release of phf_shared v0.11.3, phf_generator v0.11.3, phf_macros v0.11.3, phf v0.11.3, phf_codegen v0.11.3 ([`e111f4b`](https://github.com/rust-phf/rust-phf/commit/e111f4b53a965c188fdcbf03950321107d9b3987)) + - Merge branch 'master' into master ([`43e9bb6`](https://github.com/rust-phf/rust-phf/commit/43e9bb69aca9ab8ddf8e2041d33d4ec08a6a0ce1)) + - Merge pull request #314 from TGODiamond/master ([`4abd9a0`](https://github.com/rust-phf/rust-phf/commit/4abd9a0a5eaddb1b4c18d678b2924a1cda818f85)) + - Add `PhfBorrow<[u8; N]>` ([`e44fb71`](https://github.com/rust-phf/rust-phf/commit/e44fb714ea4d5fb85bfad1496dd887313cbfa3b5)) + - Merge pull request #304 from serprex/master ([`999e6a2`](https://github.com/rust-phf/rust-phf/commit/999e6a260f03d82aa9d159465113294e7ed019e7)) + - Update siphasher to 1.0 ([`7e0482e`](https://github.com/rust-phf/rust-phf/commit/7e0482e8e80e14d66cd46dde5ee6a16f34630ab3)) + - Merge pull request #300 from JohnTitor/msrv-1.61 ([`323366d`](https://github.com/rust-phf/rust-phf/commit/323366d03966ddad2eaa3432df79c9da8339e319)) + - Bump MSRV to 1.61 ([`1795f7b`](https://github.com/rust-phf/rust-phf/commit/1795f7b66b16af0191f221dc957bc8a090c891ad)) + - Merge pull request #291 from Manishearth/uncased-transmute ([`b7116ff`](https://github.com/rust-phf/rust-phf/commit/b7116ff519415d302c070aa313831cd473b1a911)) + - Bump uncased dep; remove generated transmute ([`c70bb63`](https://github.com/rust-phf/rust-phf/commit/c70bb636dea1c5902ca0cae68df2ec387eb1d2bc)) +
+ +## 0.11.2 (2023-06-24) + +### Commit Statistics + + + + - 9 commits contributed to the release over the course of 268 calendar days. + - 319 days passed between releases. + - 0 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Release phf_shared v0.11.2, phf_generator v0.11.2, phf_macros v0.11.2, phf v0.11.2, phf_codegen v0.11.2 ([`c9c35fd`](https://github.com/rust-phf/rust-phf/commit/c9c35fd8ba3f1bc228388b0cef6e3814a02a72c0)) + - Update changelogs ([`a1e5072`](https://github.com/rust-phf/rust-phf/commit/a1e5072b8e84b108f06389a1d41ac868426a03f7)) + - Merge pull request #274 from ankane/license-files ([`21baa73`](https://github.com/rust-phf/rust-phf/commit/21baa73941a0694ec48f437c0c0a6abfcc2f32d2)) + - Include license files in crates ([`1229b2f`](https://github.com/rust-phf/rust-phf/commit/1229b2faa6b97542ab4850a1723b1723dea92814)) + - Merge pull request #273 from drewkett/impl-phf-hash-for-slices ([`757012b`](https://github.com/rust-phf/rust-phf/commit/757012bba5eb5056a29c30c708e5c37993c2be1d)) + - Add `impl PhfHash` for integer slices ([`272f3d7`](https://github.com/rust-phf/rust-phf/commit/272f3d754457328e8bc765f1f5a56fd8420b0671)) + - Merge pull request #262 from Spaceface16518/ptr-size-impl ([`1b88e07`](https://github.com/rust-phf/rust-phf/commit/1b88e07eae51277f99fc08ae6d0171aafdb8cd77)) + - Merge pull request #269 from JohnTitor/clarify-build-deps ([`3bc663e`](https://github.com/rust-phf/rust-phf/commit/3bc663eb82686caeab4f49848d867ce87d313765)) + - Mention build-dependencies on phf_codegen ([`307ef77`](https://github.com/rust-phf/rust-phf/commit/307ef778fb3aa414b70b2d9584c71ee1a361836e)) +
+ +## 0.11.1 (2022-08-08) + + + + +### Chore + + - Update siphasher to 0.3 + +### Other + + - make uncased feature compatible with no_std + + +### Commit Statistics + + + + - 197 commits contributed to the release. + - 2 commits were understood as [conventional](https://www.conventionalcommits.org). + - 0 issues like '(#ID)' were seen in commit messages + +### Commit Details + + + +
view details + + * **Uncategorized** + - Release phf_shared v0.11.1, phf_generator v0.11.1, phf_macros v0.11.1, phf v0.11.1, phf_codegen v0.11.1 ([`3897b21`](https://github.com/rust-phf/rust-phf/commit/3897b21c6d38e5adcaf9110b4bb33c19f6b41977)) + - Merge pull request #264 from rust-phf/tweak-changelog ([`97f997d`](https://github.com/rust-phf/rust-phf/commit/97f997d2be827ca636a29046c78e2c09c5c62650)) + - Replace handmade changelog with generated one by `cargo-smart-release` ([`cb84cf6`](https://github.com/rust-phf/rust-phf/commit/cb84cf6636ab52823c53e70d6abeac8f648a3482)) + - Add array impl for `usize` and `isize` ([`39fd8a4`](https://github.com/rust-phf/rust-phf/commit/39fd8a4b0042aee2a99deb214ea246610353b859)) + - Add `impl PhfBorrow` for `usize` and `isize` ([`34c92ce`](https://github.com/rust-phf/rust-phf/commit/34c92ce9c64236ade571fd1027957857e7e71525)) + - Add `impl FmtConst` for `usize` and `isize` ([`da88095`](https://github.com/rust-phf/rust-phf/commit/da8809503abfa9967ed8ed15eb8cdc7db46cf04a)) + - Add `impl PhfHash` for `usize` and `isize` ([`c217673`](https://github.com/rust-phf/rust-phf/commit/c217673eaa6370b2590c3b518db58ce1ccc53861)) + - Merge pull request #260 from JohnTitor/fix-repo-link ([`1407ebe`](https://github.com/rust-phf/rust-phf/commit/1407ebe536b39611db92d765ddec4de0e6c8a16e)) + - Add README.md for some crates ([`e0b34fa`](https://github.com/rust-phf/rust-phf/commit/e0b34fa0a697f45f2c41a875bf84b78a6d3ce079)) + - Add category to crates ([`32a72c3`](https://github.com/rust-phf/rust-phf/commit/32a72c3859997fd6b590e9ec092ae789d2acdf55)) + - Update repository links on Cargo.toml ([`1af3b0f`](https://github.com/rust-phf/rust-phf/commit/1af3b0fe1f8fdcae7ccc1bc8d51de309fb16a6bf)) + - Merge pull request #258 from JohnTitor/release-0.11.0 ([`c0b9ef9`](https://github.com/rust-phf/rust-phf/commit/c0b9ef98e798f807f94544aeb0fff429ef280efc)) + - Release 0.11.0 
([`d2efdc0`](https://github.com/rust-phf/rust-phf/commit/d2efdc08a7eb1d0d6c414b7b2ac41ce1fe1f9a43)) + - Merge pull request #257 from JohnTitor/edition-2021 ([`36ec885`](https://github.com/rust-phf/rust-phf/commit/36ec8854a9da4f295618e98d94aaf7150df2173e)) + - Make crates edition 2021 ([`b9d25da`](https://github.com/rust-phf/rust-phf/commit/b9d25da58b912d9927fbc41901631cd77836462b)) + - Merge pull request #251 from JohnTitor/weak-deps ([`2e1167c`](https://github.com/rust-phf/rust-phf/commit/2e1167c2046cd20aed1a906b4e23b40303cf0c00)) + - Make "unicase + macros" features work ([`11bb242`](https://github.com/rust-phf/rust-phf/commit/11bb2426f0237b1ecea8c8038630b1231ede4871)) + - Merge pull request #247 from turbocool3r/master ([`82f9492`](https://github.com/rust-phf/rust-phf/commit/82f9492da736e553e25f7f0b1cdce5dbd486e688)) + - Fix build issues. ([`ee82cc9`](https://github.com/rust-phf/rust-phf/commit/ee82cc994573fbb774a0006f13bceb871710fdb6)) + - Implement PhfHash for arrays of any size. ([`cf0f6ec`](https://github.com/rust-phf/rust-phf/commit/cf0f6ec2870ab8d9c7339fe72a536c526003263f)) + - Merge pull request #240 from JohnTitor/docs-update ([`da98b9e`](https://github.com/rust-phf/rust-phf/commit/da98b9e80fdb22cd6d48a4a42489840afe603756)) + - Refine doc comments ([`d8cfc43`](https://github.com/rust-phf/rust-phf/commit/d8cfc436059a1c2c3ede1afb0f9ec2333c046fc6)) + - Merge pull request #232 from petrosagg/no_std_uncased ([`96dbc0d`](https://github.com/rust-phf/rust-phf/commit/96dbc0d936d614cbee980b4e7a6050f6ba84c8a5)) + - Make uncased feature compatible with no_std ([`3068a18`](https://github.com/rust-phf/rust-phf/commit/3068a18e997ef04c302a10b51271353897a92027)) + - Merge pull request #234 from JohnTitor/fix-ci ([`eba4cc2`](https://github.com/rust-phf/rust-phf/commit/eba4cc28d92c1db95cc430985a0fbc9ca63d1307)) + - Fix CI failure ([`d9b5ff2`](https://github.com/rust-phf/rust-phf/commit/d9b5ff23367d2bbcc385ff8243c7d972f45d459c)) + - Merge pull request #230 from 
JohnTitor/release-0.10 ([`3ea14b2`](https://github.com/rust-phf/rust-phf/commit/3ea14b2166553ad6e7b9afe7244144f5d661b6c6)) + - Prepare for release 0.10.0 ([`588ac25`](https://github.com/rust-phf/rust-phf/commit/588ac25dd5c0afccea084e6f94867328a6a30454)) + - Merge pull request #223 from JohnTitor/minor-cleanup ([`c746106`](https://github.com/rust-phf/rust-phf/commit/c746106ad05917ad62f244504727b07e07c3e075)) + - Minor cleanups ([`8868d08`](https://github.com/rust-phf/rust-phf/commit/8868d088e2fed36fcd7741e9a1c5bf68bef4f46e)) + - Merge pull request #219 from JohnTitor/release-0.9.0 ([`307969f`](https://github.com/rust-phf/rust-phf/commit/307969ff3bb8cae320e648890a9525920035944b)) + - Prepare 0.9.0 release ([`2ca46c4`](https://github.com/rust-phf/rust-phf/commit/2ca46c4f9c9083c128fcc6add33dc5986638940f)) + - Merge pull request #218 from JohnTitor/cleanup ([`76f9072`](https://github.com/rust-phf/rust-phf/commit/76f907239af9b0cca7dac4e6d702cedc72f6f371)) + - Run rustfmt ([`dd86c6c`](https://github.com/rust-phf/rust-phf/commit/dd86c6c103f25021b52144085b8fab0a94582bef)) + - Fix some clippy warnings ([`9adc370`](https://github.com/rust-phf/rust-phf/commit/9adc370ead7fbcc36cd0c74f495ab7631e0c9754)) + - Cleanup docs ([`ddecc3a`](https://github.com/rust-phf/rust-phf/commit/ddecc3aa97aec6d9e9d6e59c57bc598d476335c1)) + - Merge pull request #197 from benesch/uncased ([`8b44f0c`](https://github.com/rust-phf/rust-phf/commit/8b44f0c4caf1a431426ff8dbae68f0693d6cef63)) + - Add support for uncased ([`2a6087f`](https://github.com/rust-phf/rust-phf/commit/2a6087fcaf99b445ff6013f693f7c4fe5d6f7387)) + - Merge pull request #174 from abonander/169-drop-borrow ([`3c087d4`](https://github.com/rust-phf/rust-phf/commit/3c087d4782be496e7955d2b51d5883c4ce64ccd3)) + - Replace uses of `std::borrow::Borrow` with new `PhfBorrow` trait ([`b2f3a9c`](https://github.com/rust-phf/rust-phf/commit/b2f3a9c6a95ebabc2b0ae7ed1ec3ee7d72418e85)) + - Merge pull request #199 from neandrake/fmtconst-string 
([`ff45c2e`](https://github.com/rust-phf/rust-phf/commit/ff45c2e9d504ca09494b944bf5f7b3362f97d633)) + - Allow using the owned `String` type for `phf` dynamic code generation. ([`58dfc05`](https://github.com/rust-phf/rust-phf/commit/58dfc05a7d27ac506016186970b4f3697c1c6475)) + - Merge pull request #180 from abonander/master ([`81c7cc5`](https://github.com/rust-phf/rust-phf/commit/81c7cc5b48649108428671d3b8ad151f6fbdb359)) + - Release v0.8.0 ([`4060288`](https://github.com/rust-phf/rust-phf/commit/4060288dc2c1ebe3b0630e4016ed51935bb0c863)) + - Merge pull request #168 from abonander/167-std-default ([`a932094`](https://github.com/rust-phf/rust-phf/commit/a93209486f5874515da0483002e8669b2dbf95e6)) + - Switch optional `core` feature to default `std` feature ([`645e23d`](https://github.com/rust-phf/rust-phf/commit/645e23dda30ac1b99af39f201a74211e7ac3251a)) + - Merge pull request #164 from abonander/perf-improvements ([`70129c6`](https://github.com/rust-phf/rust-phf/commit/70129c6fbcdf428ce9f1014eea935301ac70e410)) + - Use sip128 instead of hashing twice ([`a8e67c3`](https://github.com/rust-phf/rust-phf/commit/a8e67c37486974f810737add6fce73e82aeb39aa)) + - Use two separate hashes and full 32-bit displacements ([`9b70bd9`](https://github.com/rust-phf/rust-phf/commit/9b70bd94f8b0b74f156e75ccefbd4a4c7ba29728)) + - Merge pull request #157 from abonander/array-formatting ([`8fc18be`](https://github.com/rust-phf/rust-phf/commit/8fc18be75dd3cb284b0b34b6c9e99c3c92544268)) + - Fix formatting for arrays after #156 ([`40c1476`](https://github.com/rust-phf/rust-phf/commit/40c147691acd4996fc6883a05734fc6da125143d)) + - Merge pull request #156 from abonander/slice-fix ([`56ff009`](https://github.com/rust-phf/rust-phf/commit/56ff0090ed04bc4d3bbc06de36884f0dfac0d100)) + - Fix `FmtConst` for `[u8]` ([`12b4fde`](https://github.com/rust-phf/rust-phf/commit/12b4fde5850f2de79e9cf5163689624a3a9787a1)) + - Merge pull request #144 from ignatenkobrain/patch-1 
([`7b415a4`](https://github.com/rust-phf/rust-phf/commit/7b415a424ef45ae66131a004ea4180c18ff53ef2)) + - Merge branch 'master' into patch-1 ([`cd0d7ce`](https://github.com/rust-phf/rust-phf/commit/cd0d7ce1194252dcaca3153988ba2a4effa66b4f)) + - Merge pull request #155 from abonander/128-bit-ints ([`6749552`](https://github.com/rust-phf/rust-phf/commit/674955292a7028752f2eb25e34c27e881f6b11a1)) + - Implement support for 128-bit ints and fix high magnitude vals ([`5be5919`](https://github.com/rust-phf/rust-phf/commit/5be59199389c0703fff62f640eb1a0d19243fc48)) + - Merge pull request #152 from abonander/unicase-upgrade ([`27f7c2c`](https://github.com/rust-phf/rust-phf/commit/27f7c2c85efde7aeb3c5409985f2d605aff8e05b)) + - Convert to 2018 edition ([`9ff66ab`](https://github.com/rust-phf/rust-phf/commit/9ff66ab36a23c7170cc775773f042a06de426c3b)) + - Upgrade `unicase` ([`4a7f766`](https://github.com/rust-phf/rust-phf/commit/4a7f7667598e7beb4c76c72b2bf2fb6571f6dbd9)) + - Don't rely on `fmt::Debug` for codegen ([`97405f5`](https://github.com/rust-phf/rust-phf/commit/97405f5be14738dc5d03a8b309297ffa295f4702)) + - Update siphasher to 0.3 ([`71977b2`](https://github.com/rust-phf/rust-phf/commit/71977b26194ce6049a063131792760933952424b)) + - Release v0.7.24 ([`1287414`](https://github.com/rust-phf/rust-phf/commit/1287414b1302d2d717c5f4be81accf4c12ccad48)) + - Downgrade siphasher ([`54dd1e2`](https://github.com/rust-phf/rust-phf/commit/54dd1e22ccb0788fab5240feb5502e02c7b034b9)) + - Upgrade rand and siphasher ([`80d9894`](https://github.com/rust-phf/rust-phf/commit/80d9894e5db7b5a8acf5b89716ee506de2a95b99)) + - Release v0.7.23 ([`a050b6f`](https://github.com/rust-phf/rust-phf/commit/a050b6f2a6b825bf0824339266ab9545340420d4)) + - Release 0.7.22 ([`ab88405`](https://github.com/rust-phf/rust-phf/commit/ab884054fa17eef915db2bdb5259c7aa71fbfea6)) + - Release v0.7.21 ([`6c7e2d9`](https://github.com/rust-phf/rust-phf/commit/6c7e2d9ce17ff1b87507925bdbe87e6e682ed3e4)) + - Link to docs.rs 
([`61142c5`](https://github.com/rust-phf/rust-phf/commit/61142c5aa168cff1bf53a6961ddc12012b49e1bb)) + - Switch to non-deprecated SipHasher ([`fb3c115`](https://github.com/rust-phf/rust-phf/commit/fb3c115a306e6e0a12b88e12d7178857dbc5f66e)) + - Release v0.7.20 ([`f631f50`](https://github.com/rust-phf/rust-phf/commit/f631f50abfaf6ea3d6fc8caaada47975b6df3a62)) + - Merge branch 'release' ([`ea7e256`](https://github.com/rust-phf/rust-phf/commit/ea7e2562706663632a0af65ae9fa94e5cf78c4ea)) + - Merge branch 'release-v0.7.19' into release ([`81a4806`](https://github.com/rust-phf/rust-phf/commit/81a4806b05f14fb49aa972de27a42926a542ec44)) + - Release v0.7.19 ([`0a98dd1`](https://github.com/rust-phf/rust-phf/commit/0a98dd1865d12a3fa4cc27bdb38fa1e7374940d9)) + - Merge branch 'release' ([`ecab54b`](https://github.com/rust-phf/rust-phf/commit/ecab54b8a028c88938f220dbb0a684e017bab62f)) + - Merge branch 'release-v0.7.18' into release ([`dfa970b`](https://github.com/rust-phf/rust-phf/commit/dfa970b229cc32cfb2da1692aa94ad8a266e704a)) + - Release v0.7.18 ([`3f71765`](https://github.com/rust-phf/rust-phf/commit/3f717650f4331f5dbb9d7a3f878228fcf1138729)) + - Merge branch 'release' ([`5f08563`](https://github.com/rust-phf/rust-phf/commit/5f0856327731107d9fada1b0318f6f15f32957c2)) + - Merge branch 'release-v0.7.17' into release ([`e073dd2`](https://github.com/rust-phf/rust-phf/commit/e073dd262d1b4c95234222ee5048fc883b9c7301)) + - Release v0.7.17 ([`21ecf72`](https://github.com/rust-phf/rust-phf/commit/21ecf72101715e4754db95a64ecd7de5a37b7f14)) + - Merge pull request #91 from Bobo1239/master ([`bf472f2`](https://github.com/rust-phf/rust-phf/commit/bf472f2baed1552530a80c95ba5872a78fd68a5c)) + - Add UniCase support to phf_macros and bump unicase version ([`2af3abb`](https://github.com/rust-phf/rust-phf/commit/2af3abb00cafc85d43755e43767a2a8b274f6670)) + - Merge branch 'release' ([`839f06d`](https://github.com/rust-phf/rust-phf/commit/839f06d5a10c1300353b8f3c972990624695b668)) + - Merge branch 
'release-v0.7.16' into release ([`6f5575c`](https://github.com/rust-phf/rust-phf/commit/6f5575c9b12d3619ea17c0825a613fcac12820f4)) + - Release v0.7.16 ([`8bf29c1`](https://github.com/rust-phf/rust-phf/commit/8bf29c10a878c83d73cc40385f0e96cb9cc95afa)) + - Merge branch 'release' ([`b4ec398`](https://github.com/rust-phf/rust-phf/commit/b4ec398f415e5cac2cd4d794b1889788e644447f)) + - Merge branch 'release-v0.7.15' into release ([`6bbc9e2`](https://github.com/rust-phf/rust-phf/commit/6bbc9e249b9a84e2019432b7d3b178851d2d776e)) + - Release v0.7.15 ([`20f896e`](https://github.com/rust-phf/rust-phf/commit/20f896e6975cabb9cf9883b08eaa5b3da8597f11)) + - Merge branch 'release' ([`7c692d4`](https://github.com/rust-phf/rust-phf/commit/7c692d42970bf6cb2540f6b2d3c88d63b3fd1f7a)) + - Merge branch 'release-v0.7.14' into release ([`ea8dd65`](https://github.com/rust-phf/rust-phf/commit/ea8dd652c292746a20bf3a680e9f925f6f0530b1)) + - Release v0.7.14 ([`fee66fc`](https://github.com/rust-phf/rust-phf/commit/fee66fc20e33f2b119f830a8926f3b6e52abcf09)) + - Merge pull request #82 from Ryman/unicase ([`909fac5`](https://github.com/rust-phf/rust-phf/commit/909fac5d4414a7d366432de078bcc6f78a25c230)) + - Add an impl of PhfHash for UniCase ([`d761144`](https://github.com/rust-phf/rust-phf/commit/d761144daf92ce6aed83165aa840a1ae72bd0bb2)) + - Drop all rust features ([`888f623`](https://github.com/rust-phf/rust-phf/commit/888f6234cd4e26e08b1f2d3716e4d4e0b95d0196)) + - Conditionally compile String and Vec impls ([`8105ae8`](https://github.com/rust-phf/rust-phf/commit/8105ae8f6c1e4fde641716521b327eb07cf648cc)) + - Implement PhfHash for String and Vec ([`ae820e6`](https://github.com/rust-phf/rust-phf/commit/ae820e6b8c8b4a46083ea4105ec3b378d52e8db0)) + - Merge branch 'release' ([`d9351e1`](https://github.com/rust-phf/rust-phf/commit/d9351e1488bd42d1a4453e4a465177fb1c781fdc)) + - Merge branch 'release-v0.7.13' into release 
([`b582e4e`](https://github.com/rust-phf/rust-phf/commit/b582e4ecec23be992ba915fc7873c0d5598f388a)) + - Release v0.7.13 ([`4769a6d`](https://github.com/rust-phf/rust-phf/commit/4769a6d2ce1d392da06e4b3cb833a1cdccb1f1aa)) + - Merge branch 'release' ([`5659a9d`](https://github.com/rust-phf/rust-phf/commit/5659a9db39bc5ee2179b264fce4cba4384d6d025)) + - Merge branch 'release-v0.7.12' into release ([`2f0a5de`](https://github.com/rust-phf/rust-phf/commit/2f0a5de9f01d9d22c774d8d85daec2a047a462e8)) + - Release v0.7.12 ([`9b75ee5`](https://github.com/rust-phf/rust-phf/commit/9b75ee5ed14060c45a5785fba0387be09e698624)) + - Merge branch 'release' ([`87ffab8`](https://github.com/rust-phf/rust-phf/commit/87ffab863aaeefb5ac2164da62f0407122d8057e)) + - Merge branch 'release-v0.7.11' into release ([`7260d04`](https://github.com/rust-phf/rust-phf/commit/7260d04413349bacab484afb74f9a496335278e1)) + - Release v0.7.11 ([`a004227`](https://github.com/rust-phf/rust-phf/commit/a0042277b181ec95fcbf29751b9a453f4f962ebb)) + - Merge branch 'release' ([`1579bec`](https://github.com/rust-phf/rust-phf/commit/1579bec1448c7b833f5965fe39d4ef2df66c982c)) + - Merge branch 'release-v0.7.10' into release ([`25cea13`](https://github.com/rust-phf/rust-phf/commit/25cea133fb4eec938bdfa74f04adbc8d94e30d4e)) + - Release v0.7.10 ([`c43154b`](https://github.com/rust-phf/rust-phf/commit/c43154b2661dc09620a7879c16f37b47d6ec03ae)) + - Merge branch 'release' ([`2c67ce5`](https://github.com/rust-phf/rust-phf/commit/2c67ce5a4129cd543178bf015f021a3bb83b6895)) + - Merge branch 'release-v0.7.9' into release ([`87206e1`](https://github.com/rust-phf/rust-phf/commit/87206e1c7b8d4089370dc168402ded0c0700a447)) + - Release v0.7.9 ([`b7d29df`](https://github.com/rust-phf/rust-phf/commit/b7d29dfe0df288b2da74de195f764eace1c8e443)) + - Merge branch 'release' ([`cd33902`](https://github.com/rust-phf/rust-phf/commit/cd339023e90ac1ce6971fa81badea65fb1f2b086)) + - Merge branch 'release-v0.7.8' into release 
([`8bc23a0`](https://github.com/rust-phf/rust-phf/commit/8bc23a023908a038d668b6f7d8e94ee416995285)) + - Release v0.7.8 ([`aad0b9b`](https://github.com/rust-phf/rust-phf/commit/aad0b9b658fb970e3df60b066961aafca1a17c44)) + - Merge branch 'release' ([`dccff69`](https://github.com/rust-phf/rust-phf/commit/dccff69384729e3d4972174ce62d8f9db9429485)) + - Merge branch 'release-v0.7.7' into release ([`2d988b7`](https://github.com/rust-phf/rust-phf/commit/2d988b7dfb04d949246adc047f6b195263612246)) + - Release v0.7.7 ([`c9e7a93`](https://github.com/rust-phf/rust-phf/commit/c9e7a93f4d6f85a72651aba6187e4c956d8c1167)) + - Run through rustfmt ([`58e2223`](https://github.com/rust-phf/rust-phf/commit/58e222380b7fc9609a055cb5a6110ba04e47d677)) + - Merge branch 'release' ([`776046c`](https://github.com/rust-phf/rust-phf/commit/776046c961456dee9e16a6b6574d336c66e259f8)) + - Merge branch 'release-v0.7.6' into release ([`2ea7d5c`](https://github.com/rust-phf/rust-phf/commit/2ea7d5cab5e9e54952ca618b43ec3583a33a4847)) + - Release v0.7.6 ([`5bcd5c9`](https://github.com/rust-phf/rust-phf/commit/5bcd5c95215f5aa29e133cb2912662085a8158f0)) + - Simplify no_std logic a bit ([`70f2ed9`](https://github.com/rust-phf/rust-phf/commit/70f2ed93d2e64b822bf2a23fde0ee848e8785bd1)) + - Merge pull request #68 from gz/master ([`44006f7`](https://github.com/rust-phf/rust-phf/commit/44006f74efca95d4f049bbf25df6321977c39577)) + - Reinstantiate no_std cargo feature flag. 
([`7c3f757`](https://github.com/rust-phf/rust-phf/commit/7c3f757cdc83b4035d81f0d521b4b80b9080155e)) + - Merge branch 'release' ([`1f770df`](https://github.com/rust-phf/rust-phf/commit/1f770df1290b586a8d641ecb0bbd105080afc0ea)) + - Merge branch 'release-v0.7.5' into release ([`bb65b8c`](https://github.com/rust-phf/rust-phf/commit/bb65b8cca30ef9d4518e3083558019a972873efa)) + - Release v0.7.5 ([`fda44f5`](https://github.com/rust-phf/rust-phf/commit/fda44f550401c1bd4aad29bb2c07030b86761028)) + - Merge branch 'release' ([`269b5dc`](https://github.com/rust-phf/rust-phf/commit/269b5dc41ebf82f423393d5219e8107e9c911a03)) + - Merge branch 'release-v0.7.4' into release ([`7c093e8`](https://github.com/rust-phf/rust-phf/commit/7c093e83ffe5192d9cdcd5402b6abb7800ffafb3)) + - Release v0.7.4 ([`c7c0d3c`](https://github.com/rust-phf/rust-phf/commit/c7c0d3c294126157f0275a05b7c3a65c419234a1)) + - Merge pull request #62 from SimonSapin/string-cache ([`6f59718`](https://github.com/rust-phf/rust-phf/commit/6f5971869e5864cae653ec3606d17b554c343ef8)) + - Add hash() and get_index() to phf_shared. 
([`d3b2ea0`](https://github.com/rust-phf/rust-phf/commit/d3b2ea0f0a9bd9cb79da90d8795f1905c3df1f5f)) + - Update PhfHash to mirror std::hash::Hash ([`96ef156`](https://github.com/rust-phf/rust-phf/commit/96ef156baae669b233673d6be2b96617ad48551e)) + - Make PhfHash endianness-independent ([`8f406b9`](https://github.com/rust-phf/rust-phf/commit/8f406b910a2ec0f389b977614f8de3151bb17070)) + - Release v0.7.3 ([`77ea239`](https://github.com/rust-phf/rust-phf/commit/77ea23917e908b10c4c5c463671a8409292f8661)) + - Release v0.7.2 ([`642b69d`](https://github.com/rust-phf/rust-phf/commit/642b69d0100a4ee7ec6e430ef1351bd1f28f9a4a)) + - Release v0.7.1 ([`9cb9de9`](https://github.com/rust-phf/rust-phf/commit/9cb9de911ad4e16964f0def29780dde1630c3619)) + - Release v0.7.0 ([`555a690`](https://github.com/rust-phf/rust-phf/commit/555a690561673597aee068650ac884bbcc2e31cf)) + - Release v0.6.19 ([`5810d30`](https://github.com/rust-phf/rust-phf/commit/5810d30ef2162f33cfb4da99c65b7344c7f2913b)) + - Release v0.6.18 ([`36efc72`](https://github.com/rust-phf/rust-phf/commit/36efc721478d097fba1e5458cbdd9f288637abae)) + - Fix for upstream changes ([`eabadcf`](https://github.com/rust-phf/rust-phf/commit/eabadcf7e8af351ba8f07d86746e35adc8c5812e)) + - Release v0.6.17 ([`271ccc2`](https://github.com/rust-phf/rust-phf/commit/271ccc27d885363d4d8c549f75624d08c48e56c5)) + - Release v0.6.15 ([`ede14df`](https://github.com/rust-phf/rust-phf/commit/ede14df1e574674852b09bcafff4ad549ebfd4ae)) + - Release v0.6.14 ([`cf64ebb`](https://github.com/rust-phf/rust-phf/commit/cf64ebb8f769c9f12c9a03d05713dde6b8caf371)) + - Merge pull request #50 from o01eg/patch-1 ([`5a3309b`](https://github.com/rust-phf/rust-phf/commit/5a3309b50e5685816c7712072df887890eba0823)) + - Update to rustc 1.0.0-dev (e46610966 2015-03-17) (built 2015-03-17) ([`54f32dd`](https://github.com/rust-phf/rust-phf/commit/54f32dd4cba60fd4833cd2cf0e1030cfd9a9ca4b)) + - Release v0.6.13 
([`4fdb533`](https://github.com/rust-phf/rust-phf/commit/4fdb5331fd9978ca3e180a06fb2e34627f50fb77)) + - Release v0.6.12 ([`59ca586`](https://github.com/rust-phf/rust-phf/commit/59ca58637206c9806c13cc24cb35cb7d0ce9d23f)) + - Release v0.6.11 ([`e1e6d3b`](https://github.com/rust-phf/rust-phf/commit/e1e6d3b40a6babddd0989406f2b4e952443ff52e)) + - Release v0.6.10 ([`fc45373`](https://github.com/rust-phf/rust-phf/commit/fc45373b34a461664f532c5108f3d2625172c128)) + - Add doc URLs ([`4605db3`](https://github.com/rust-phf/rust-phf/commit/4605db3e7e0c4bef09ccf6c09c7dbcc36b707a9f)) + - Remove core feature ([`d4c189a`](https://github.com/rust-phf/rust-phf/commit/d4c189a2b060df33e7c97d6c1f0f430b68fc23b5)) + - Release v0.6.9 ([`822f4e3`](https://github.com/rust-phf/rust-phf/commit/822f4e3fb127dc02d36d802803d71aa5b98bed3c)) + - More fixes ([`0c04b9c`](https://github.com/rust-phf/rust-phf/commit/0c04b9cb2679a63394778a7362ef14441b6c2032)) + - Fix for upstream changes ([`f014882`](https://github.com/rust-phf/rust-phf/commit/f01488236a8e944f1b12b4bc441d55c10fc47aa1)) + - Release v0.6.8 ([`cd637ca`](https://github.com/rust-phf/rust-phf/commit/cd637cafb6d37b1901b6c119a7d26f253e9a288e)) + - Release v0.6.7 ([`bfc36c9`](https://github.com/rust-phf/rust-phf/commit/bfc36c979225f652cdb72f3b1f2a25e77b50ab8c)) + - Fix for upstream changes ([`5ff7040`](https://github.com/rust-phf/rust-phf/commit/5ff70403a1b12c30206b128ac619b31c69e42eb4)) + - Release v0.6.6 ([`b09a174`](https://github.com/rust-phf/rust-phf/commit/b09a174a166c7744c5989bedc6ba68340f6f7fd1)) + - Release v0.6.5 ([`271e784`](https://github.com/rust-phf/rust-phf/commit/271e7848f35b31d6ce9fc9268de173738464bfc8)) + - Fix for upstream changes ([`3db7cef`](https://github.com/rust-phf/rust-phf/commit/3db7cef414e4de28eb6c18938c275a3aafbdafa4)) + - Move docs to this repo and auto build them ([`f8ef160`](https://github.com/rust-phf/rust-phf/commit/f8ef160480e2d4ce72fa7afb6ebce70e45acbc76)) + - Release v0.6.4 
([`6866c1b`](https://github.com/rust-phf/rust-phf/commit/6866c1bf5ad5091bc969f1356884aa86c27458cb)) + - Release v0.6.3 ([`b0c5e3c`](https://github.com/rust-phf/rust-phf/commit/b0c5e3cb69742f81160ea80a3ba1782a0b4e01a2)) + - Release v0.6.2 ([`d9ddf45`](https://github.com/rust-phf/rust-phf/commit/d9ddf45b15ba812b0d3acedffb08e901742e56c4)) + - Link to libstd by default ([`24555b1`](https://github.com/rust-phf/rust-phf/commit/24555b19e6b54656633cc4ceac91864f14c20471)) + - Release v0.6.1 ([`ca0e9f6`](https://github.com/rust-phf/rust-phf/commit/ca0e9f6b9c737f3d11bcad2f4624bb5603a8170e)) + - Fix for stability changes ([`f7fb510`](https://github.com/rust-phf/rust-phf/commit/f7fb510dfe67f11522a2d214bd14d21f910bfd7b)) + - Release v0.6.0 ([`09d6870`](https://github.com/rust-phf/rust-phf/commit/09d687053caf4d321f72907528573b3334fae3c2)) + - Release v0.5.0 ([`8683be2`](https://github.com/rust-phf/rust-phf/commit/8683be260effe5605243ef230bad6154ef4e5e20)) + - Fix deprecation warning ([`d0fa86a`](https://github.com/rust-phf/rust-phf/commit/d0fa86a1f37f118382a3dc4400de158f8d181a2a)) + - Release v0.4.9 ([`28cbe70`](https://github.com/rust-phf/rust-phf/commit/28cbe704e0f96495c2527ad93c5e67315c245908)) + - Fix for upstream changes ([`0b22188`](https://github.com/rust-phf/rust-phf/commit/0b22188f5767a0a125d01ed8b176ce19fef95cad)) + - Release v0.4.8 ([`bb858f1`](https://github.com/rust-phf/rust-phf/commit/bb858f11dd88579d47b0089121f8d551731464ab)) + - Merge pull request #38 from chris-morgan/master ([`668f986`](https://github.com/rust-phf/rust-phf/commit/668f986705ba3a6385b47b851878250ce954a6dc)) + - Release v0.4.7 ([`d83f551`](https://github.com/rust-phf/rust-phf/commit/d83f551a874a24b2a4308804e7cbca32a1aa2494)) + - Fix for upstream changes ([`c3ae5ac`](https://github.com/rust-phf/rust-phf/commit/c3ae5ac94cfa11404b420d45229c3a0d0d8a4535)) + - Release v0.4.6 ([`360bf81`](https://github.com/rust-phf/rust-phf/commit/360bf81ad3aafced75dc64a49e58a867d5239264)) + - Release v0.4.5 
([`ab4786c`](https://github.com/rust-phf/rust-phf/commit/ab4786c09b55e46658f2a66092caf6a782d056a6)) + - Release v0.4.4 ([`f678635`](https://github.com/rust-phf/rust-phf/commit/f678635378555b7d086014b0466aea12a3ae5701)) + - Fix for upstream changes ([`2b4863f`](https://github.com/rust-phf/rust-phf/commit/2b4863fcb5827d5bd89cc278d2a3052b6b3ee20e)) + - Release v0.4.3 ([`4f5902c`](https://github.com/rust-phf/rust-phf/commit/4f5902c222a81da009bf7955bc96568c73b46b13)) + - Release v0.4.2 ([`69d92b8`](https://github.com/rust-phf/rust-phf/commit/69d92b869fab51a31fda6126003edadd9e832b32)) + - Merge pull request #37 from alexcrichton/update ([`b9f0a43`](https://github.com/rust-phf/rust-phf/commit/b9f0a43500499fc08170690bdc6624f289e35841)) + - Update to rust master ([`4a0d48d`](https://github.com/rust-phf/rust-phf/commit/4a0d48d165d78d1b3e8f791503e220a032d26d24)) + - Release v0.4.1 ([`0fba837`](https://github.com/rust-phf/rust-phf/commit/0fba8374fd6fb1b10d9d456ae4b1310b00e9d9ca)) + - Make sure we're actually no_std ([`126c6e2`](https://github.com/rust-phf/rust-phf/commit/126c6e26345113bc7492c8ef920ad609b0b25af7)) + - Re-fix str and [u8] hashing when cross compiling ([`a0eb200`](https://github.com/rust-phf/rust-phf/commit/a0eb200d87971555d3d7ce8498404844c860a47f)) + - Release v0.4.0 ([`49dbb36`](https://github.com/rust-phf/rust-phf/commit/49dbb3636621c0436e771a4e0ebfe7342b676616)) + - Fix for upstream changes and drop xxhash ([`fc2539f`](https://github.com/rust-phf/rust-phf/commit/fc2539f7893ef0f833a8c13ec77ba317bd8bf43e)) + - Release v0.3.0 ([`0a80b06`](https://github.com/rust-phf/rust-phf/commit/0a80b06ecde77b33cec8c956c67704613fdd313e)) + - Merge pull request #32 from sp3d/master ([`fc4829a`](https://github.com/rust-phf/rust-phf/commit/fc4829a292663e4e30a23a4ba1de693d154cd611)) + - Add support for [u8, ..N] keys ([`e26947c`](https://github.com/rust-phf/rust-phf/commit/e26947cc264266bcbc85b8cf5c46b2019d654c72)) + - Bump to 0.2 
([`4546f51`](https://github.com/rust-phf/rust-phf/commit/4546f51fccbd56ddf1214fe232db8926d9f471de)) + - Bump to 0.1.0 ([`43d9a50`](https://github.com/rust-phf/rust-phf/commit/43d9a50e6240716d68dadd9d037f22b2f7df4b58)) + - Make publishable on crates.io ([`4ad2bb2`](https://github.com/rust-phf/rust-phf/commit/4ad2bb27be35015b3f37ec7025c46df9170b3ef9)) + - Pull shared code into a module ([`19c4f8d`](https://github.com/rust-phf/rust-phf/commit/19c4f8d420d3a9ff8e3ace0256198f5db9fccae0)) +
+ diff --git a/anneal/vendor/phf_shared/Cargo.lock b/anneal/vendor/phf_shared/Cargo.lock new file mode 100644 index 0000000000..0205412367 --- /dev/null +++ b/anneal/vendor/phf_shared/Cargo.lock @@ -0,0 +1,39 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "phf_shared" +version = "0.12.1" +dependencies = [ + "siphasher", + "uncased", + "unicase", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "uncased" +version = "0.9.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" +dependencies = [ + "version_check", +] + +[[package]] +name = "unicase" +version = "2.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75b844d17643ee918803943289730bec8aac480150456169e647ed0b576ba539" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" diff --git a/anneal/vendor/phf_shared/Cargo.toml b/anneal/vendor/phf_shared/Cargo.toml new file mode 100644 index 0000000000..def76f8317 --- /dev/null +++ b/anneal/vendor/phf_shared/Cargo.toml @@ -0,0 +1,50 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. 
+ +[package] +edition = "2021" +rust-version = "1.61" +name = "phf_shared" +version = "0.12.1" +authors = ["Steven Fackler "] +build = false +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = "Support code shared by PHF libraries" +readme = "README.md" +categories = ["data-structures"] +license = "MIT" +repository = "https://github.com/rust-phf/rust-phf" +resolver = "1" + +[features] +default = ["std"] +std = [] + +[lib] +name = "phf_shared" +path = "src/lib.rs" +test = false + +[dependencies.siphasher] +version = "1.0" + +[dependencies.uncased] +version = "0.9.9" +optional = true +default-features = false + +[dependencies.unicase] +version = "2.4.0" +optional = true diff --git a/anneal/vendor/phf_shared/Cargo.toml.orig b/anneal/vendor/phf_shared/Cargo.toml.orig new file mode 100644 index 0000000000..e62b6cbf68 --- /dev/null +++ b/anneal/vendor/phf_shared/Cargo.toml.orig @@ -0,0 +1,25 @@ +[package] +name = "phf_shared" +authors = ["Steven Fackler "] +version = "0.12.1" +license = "MIT" +description = "Support code shared by PHF libraries" +repository = "https://github.com/rust-phf/rust-phf" +edition = "2021" +rust-version = "1.61" +categories = ["data-structures"] +readme = "README.md" + +[lib] +name = "phf_shared" +path = "src/lib.rs" +test = false + +[features] +default = ["std"] +std = [] + +[dependencies] +siphasher = "1.0" +unicase = { version = "2.4.0", optional = true } +uncased = { version = "0.9.9", optional = true, default-features = false } diff --git a/anneal/vendor/phf_shared/LICENSE b/anneal/vendor/phf_shared/LICENSE new file mode 100644 index 0000000000..cd5bf6855b --- /dev/null +++ b/anneal/vendor/phf_shared/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) + +Copyright (c) 2014-2022 Steven Fackler, Yuki Okushi + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without 
restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/anneal/vendor/phf_shared/README.md b/anneal/vendor/phf_shared/README.md new file mode 100644 index 0000000000..1674ae64fd --- /dev/null +++ b/anneal/vendor/phf_shared/README.md @@ -0,0 +1,5 @@ +# phf_shared + +This crate is for the `phf` crate, [find it on crates.io][phf] for details. + +[phf]: https://crates.io/crates/phf diff --git a/anneal/vendor/phf_shared/src/lib.rs b/anneal/vendor/phf_shared/src/lib.rs new file mode 100644 index 0000000000..d3a0465fb8 --- /dev/null +++ b/anneal/vendor/phf_shared/src/lib.rs @@ -0,0 +1,475 @@ +//! See [the `phf` crate's documentation][phf] for details. +//! +//! 
[phf]: https://docs.rs/phf + +#![doc(html_root_url = "https://docs.rs/phf_shared/0.12")] +#![cfg_attr(not(feature = "std"), no_std)] + +#[cfg(feature = "std")] +extern crate std as core; + +use core::fmt; +use core::hash::{Hash, Hasher}; +use core::num::Wrapping; +use siphasher::sip128::{Hash128, Hasher128, SipHasher13}; + +#[non_exhaustive] +pub struct Hashes { + pub g: u32, + pub f1: u32, + pub f2: u32, +} + +/// A central typedef for hash keys +/// +/// Makes experimentation easier by only needing to be updated here. +pub type HashKey = u64; + +#[inline] +pub fn displace(f1: u32, f2: u32, d1: u32, d2: u32) -> u32 { + (Wrapping(d2) + Wrapping(f1) * Wrapping(d1) + Wrapping(f2)).0 +} + +/// `key` is from `phf_generator::HashState`. +#[inline] +pub fn hash(x: &T, key: &HashKey) -> Hashes { + let mut hasher = SipHasher13::new_with_keys(0, *key); + x.phf_hash(&mut hasher); + + let Hash128 { + h1: lower, + h2: upper, + } = hasher.finish128(); + + Hashes { + g: (lower >> 32) as u32, + f1: lower as u32, + f2: upper as u32, + } +} + +/// Return an index into `phf_generator::HashState::map`. +/// +/// * `hash` is from `hash()` in this crate. +/// * `disps` is from `phf_generator::HashState::disps`. +/// * `len` is the length of `phf_generator::HashState::map`. +#[inline] +pub fn get_index(hashes: &Hashes, disps: &[(u32, u32)], len: usize) -> u32 { + let (d1, d2) = disps[(hashes.g % (disps.len() as u32)) as usize]; + displace(hashes.f1, hashes.f2, d1, d2) % (len as u32) +} + +/// A trait implemented by types which can be used in PHF data structures. +/// +/// This differs from the standard library's `Hash` trait in that `PhfHash`'s +/// results must be architecture independent so that hashes will be consistent +/// between the host and target when cross compiling. +pub trait PhfHash { + /// Feeds the value into the state given, updating the hasher as necessary. + fn phf_hash(&self, state: &mut H); + + /// Feeds a slice of this type into the state provided. 
+ fn phf_hash_slice(data: &[Self], state: &mut H) + where + Self: Sized, + { + for piece in data { + piece.phf_hash(state); + } + } +} + +/// Trait for printing types with `const` constructors, used by `phf_codegen` and `phf_macros`. +pub trait FmtConst { + /// Print a `const` expression representing this value. + fn fmt_const(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result; +} + +/// Identical to `std::borrow::Borrow` except omitting blanket impls to facilitate other +/// borrowing patterns. +/// +/// The same semantic requirements apply: +/// +/// > In particular `Eq`, `Ord` and `Hash` must be equivalent for borrowed and owned values: +/// `x.borrow() == y.borrow()` should give the same result as `x == y`. +/// +/// (This crate's API only requires `Eq` and `PhfHash`, however.) +/// +/// ### Motivation +/// The conventional signature for lookup methods on collections looks something like this: +/// +/// ```ignore +/// impl Map where K: PhfHash + Eq { +/// fn get(&self, key: &T) -> Option<&V> where T: PhfHash + Eq, K: Borrow { +/// ... +/// } +/// } +/// ``` +/// +/// This allows the key type used for lookup to be different than the key stored in the map so for +/// example you can use `&str` to look up a value in a `Map`. However, this runs into +/// a problem in the case where `T` and `K` are both a `Foo<_>` type constructor but +/// the contained type is different (even being the same type with different lifetimes). +/// +/// The main issue for this crate's API is that, with this method signature, you cannot perform a +/// lookup on a `Map, _>` with a `UniCase<&'a str>` where `'a` is not +/// `'static`; there is no impl of `Borrow` that resolves to +/// `impl Borrow> for UniCase<&'static str>` and one cannot be added either because of +/// all the blanket impls. +/// +/// Instead, this trait is implemented conservatively, without blanket impls, so that impls like +/// this may be added. 
This is feasible since the set of types that implement `PhfHash` is +/// intentionally small. +/// +/// This likely won't be fixable with specialization alone but will require full support for lattice +/// impls since we technically want to add overlapping blanket impls. +pub trait PhfBorrow { + /// Convert a reference to `self` to a reference to the borrowed type. + fn borrow(&self) -> &B; +} + +/// Create an impl of `FmtConst` delegating to `fmt::Debug` for types that can deal with it. +/// +/// Ideally with specialization this could be just one default impl and then specialized where +/// it doesn't apply. +macro_rules! delegate_debug ( + ($ty:ty) => { + impl FmtConst for $ty { + fn fmt_const(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{:?}", self) + } + } + } +); + +delegate_debug!(str); +delegate_debug!(char); +delegate_debug!(u8); +delegate_debug!(i8); +delegate_debug!(u16); +delegate_debug!(i16); +delegate_debug!(u32); +delegate_debug!(i32); +delegate_debug!(u64); +delegate_debug!(i64); +delegate_debug!(usize); +delegate_debug!(isize); +delegate_debug!(u128); +delegate_debug!(i128); +delegate_debug!(bool); + +/// `impl PhfBorrow for T` +macro_rules! 
impl_reflexive( + ($($t:ty),*) => ( + $(impl PhfBorrow<$t> for $t { + fn borrow(&self) -> &$t { + self + } + })* + ) +); + +impl_reflexive!( + str, + char, + u8, + i8, + u16, + i16, + u32, + i32, + u64, + i64, + usize, + isize, + u128, + i128, + bool, + [u8] +); + +#[cfg(feature = "std")] +impl PhfBorrow for String { + fn borrow(&self) -> &str { + self + } +} + +#[cfg(feature = "std")] +impl PhfBorrow<[u8]> for Vec { + fn borrow(&self) -> &[u8] { + self + } +} + +#[cfg(feature = "std")] +delegate_debug!(String); + +#[cfg(feature = "std")] +impl PhfHash for String { + #[inline] + fn phf_hash(&self, state: &mut H) { + (**self).phf_hash(state) + } +} + +#[cfg(feature = "std")] +impl PhfHash for Vec { + #[inline] + fn phf_hash(&self, state: &mut H) { + (**self).phf_hash(state) + } +} + +impl<'a, T: 'a + PhfHash + ?Sized> PhfHash for &'a T { + fn phf_hash(&self, state: &mut H) { + (*self).phf_hash(state) + } +} + +impl<'a, T: 'a + FmtConst + ?Sized> FmtConst for &'a T { + fn fmt_const(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (*self).fmt_const(f) + } +} + +impl<'a> PhfBorrow for &'a str { + fn borrow(&self) -> &str { + self + } +} + +impl<'a> PhfBorrow<[u8]> for &'a [u8] { + fn borrow(&self) -> &[u8] { + self + } +} + +impl<'a, const N: usize> PhfBorrow<[u8; N]> for &'a [u8; N] { + fn borrow(&self) -> &[u8; N] { + self + } +} + +impl PhfHash for str { + #[inline] + fn phf_hash(&self, state: &mut H) { + self.as_bytes().phf_hash(state) + } +} + +impl PhfHash for [u8] { + #[inline] + fn phf_hash(&self, state: &mut H) { + state.write(self); + } +} + +impl FmtConst for [u8] { + #[inline] + fn fmt_const(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + // slices need a leading reference + write!(f, "&{:?}", self) + } +} + +#[cfg(feature = "unicase")] +impl PhfHash for unicase::UniCase +where + unicase::UniCase: Hash, +{ + #[inline] + fn phf_hash(&self, state: &mut H) { + self.hash(state) + } +} + +#[cfg(feature = "unicase")] +impl FmtConst for unicase::UniCase 
+where + S: AsRef, +{ + fn fmt_const(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + if self.is_ascii() { + f.write_str("UniCase::ascii(")?; + } else { + f.write_str("UniCase::unicode(")?; + } + + self.as_ref().fmt_const(f)?; + f.write_str(")") + } +} + +#[cfg(feature = "unicase")] +impl<'b, 'a: 'b, S: ?Sized + 'a> PhfBorrow> for unicase::UniCase<&'a S> { + fn borrow(&self) -> &unicase::UniCase<&'b S> { + self + } +} + +#[cfg(feature = "unicase")] +impl PhfHash for unicase::Ascii +where + unicase::Ascii: Hash, +{ + #[inline] + fn phf_hash(&self, state: &mut H) { + self.hash(state) + } +} + +#[cfg(feature = "unicase")] +impl FmtConst for unicase::Ascii +where + S: AsRef, +{ + fn fmt_const(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("Ascii::new(")?; + self.as_ref().fmt_const(f)?; + f.write_str(")") + } +} + +#[cfg(feature = "unicase")] +impl<'b, 'a: 'b, S: ?Sized + 'a> PhfBorrow> for unicase::Ascii<&'a S> { + fn borrow(&self) -> &unicase::Ascii<&'b S> { + self + } +} + +#[cfg(feature = "uncased")] +impl PhfHash for uncased::UncasedStr { + #[inline] + fn phf_hash(&self, state: &mut H) { + self.hash(state) + } +} + +#[cfg(feature = "uncased")] +impl FmtConst for uncased::UncasedStr { + fn fmt_const(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.write_str("UncasedStr::new(")?; + self.as_str().fmt_const(f)?; + f.write_str(")") + } +} + +#[cfg(feature = "uncased")] +impl PhfBorrow for &uncased::UncasedStr { + fn borrow(&self) -> &uncased::UncasedStr { + self + } +} + +macro_rules! 
sip_impl ( + (le $t:ty) => ( + impl PhfHash for $t { + #[inline] + fn phf_hash(&self, state: &mut H) { + self.to_le().hash(state); + } + } + ); + ($t:ty) => ( + impl PhfHash for $t { + #[inline] + fn phf_hash(&self, state: &mut H) { + self.hash(state); + } + } + ) +); + +sip_impl!(u8); +sip_impl!(i8); +sip_impl!(le u16); +sip_impl!(le i16); +sip_impl!(le u32); +sip_impl!(le i32); +sip_impl!(le u64); +sip_impl!(le i64); +sip_impl!(le usize); +sip_impl!(le isize); +sip_impl!(le u128); +sip_impl!(le i128); +sip_impl!(bool); + +impl PhfHash for char { + #[inline] + fn phf_hash(&self, state: &mut H) { + (*self as u32).phf_hash(state) + } +} + +// minimize duplicated code since formatting drags in quite a bit +fn fmt_array(array: &[T], f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "{:?}", array) +} + +macro_rules! array_impl ( + ($t:ty) => ( + impl PhfHash for [$t; N] { + #[inline] + fn phf_hash(&self, state: &mut H) { + for v in &self[..] { + v.phf_hash(state); + } + } + } + + impl FmtConst for [$t; N] { + fn fmt_const(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt_array(self, f) + } + } + + impl PhfBorrow<[$t]> for [$t; N] { + fn borrow(&self) -> &[$t] { + self + } + } + ) +); + +array_impl!(u8); +array_impl!(i8); +array_impl!(u16); +array_impl!(i16); +array_impl!(u32); +array_impl!(i32); +array_impl!(u64); +array_impl!(i64); +array_impl!(usize); +array_impl!(isize); +array_impl!(u128); +array_impl!(i128); +array_impl!(bool); +array_impl!(char); + +macro_rules! 
slice_impl ( + ($t:ty) => { + impl PhfHash for [$t] { + #[inline] + fn phf_hash(&self, state: &mut H) { + for v in self { + v.phf_hash(state); + } + } + } + }; +); + +slice_impl!(i8); +slice_impl!(u16); +slice_impl!(i16); +slice_impl!(u32); +slice_impl!(i32); +slice_impl!(u64); +slice_impl!(i64); +slice_impl!(usize); +slice_impl!(isize); +slice_impl!(u128); +slice_impl!(i128); +slice_impl!(bool); +slice_impl!(char); diff --git a/anneal/vendor/siphasher/.cargo-checksum.json b/anneal/vendor/siphasher/.cargo-checksum.json new file mode 100644 index 0000000000..f616250e61 --- /dev/null +++ b/anneal/vendor/siphasher/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{".cargo_vcs_info.json":"67d171b722d201cd15f8851c8a7663d728e65cd0141f919d2571e1c21ab8b3c5",".github/dependabot.yml":"9dc3030fc87139b49c283e6fafe1d1f46b2002659ab4eea26350f67d0367b491",".github/workflows/issues.yml":"b7f30a2a870d09450e0111f7fb5b3cf08a2fb369ccbd8e97e95b2286e5b8aa9b","COPYING":"c962ee4d1d05ddc138b202b2540219ebc57893fcf97b364852094a9a94ce1365","Cargo.lock":"fc9b9efc31e46295bc47bce4cc7ff0883d757fa2f345c1b576630722fca047fd","Cargo.toml":"e7b84f7acf4a759417326d28525baa2986f25e4bf8c96b146ed5e375bafb2959","Cargo.toml.orig":"65fbc684d36923202e8472bab68a9505b9d631d362cd9bd0544015a94f9b6419","README.md":"af6d54563bbcc675b002349cd883e44d607e45cbdd640ed452804533fef00bab","src/common.rs":"7319d34054413c01a3740a1d349c487125c5ffc5a39cddf562702f3560e1525a","src/lib.rs":"b58b7067ee6cad446233fbb17a30fb4828fa0c783a4d65c110374ecadc6e3e74","src/sip.rs":"64e6d808ac93c4024ffb1ced16be1891407c6e7026dd0f4eb0145c98605392c3","src/sip128.rs":"c3d7cf996712e0184c5eaabfc2a0a5b64e2a5b89a1faa2d44da38cd5bc2590da","src/tests.rs":"6658c1ac8c688ed90682a6dbc759ab569d43a5fc1405df84575374507ce79a6a","src/tests128.rs":"ae4f63fcc9aa59614e87c53242dd8b533edfe28e55a76e6a6de13f3d51bf0b05"},"package":"8ee5873ec9cce0195efcb7a4e9507a04cd49aec9c83d0389df45b1ef7ba2e649"} \ No newline at end of file diff --git 
a/anneal/vendor/siphasher/.cargo_vcs_info.json b/anneal/vendor/siphasher/.cargo_vcs_info.json new file mode 100644 index 0000000000..f557316d68 --- /dev/null +++ b/anneal/vendor/siphasher/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "451f67d73a772cba325728109bbfa247750ed076" + }, + "path_in_vcs": "" +} \ No newline at end of file diff --git a/anneal/vendor/siphasher/.github/dependabot.yml b/anneal/vendor/siphasher/.github/dependabot.yml new file mode 100644 index 0000000000..c11601feca --- /dev/null +++ b/anneal/vendor/siphasher/.github/dependabot.yml @@ -0,0 +1,8 @@ +version: 2 +updates: +- package-ecosystem: cargo + directory: "/" + schedule: + interval: daily + time: "04:00" + open-pull-requests-limit: 10 diff --git a/anneal/vendor/siphasher/.github/workflows/issues.yml b/anneal/vendor/siphasher/.github/workflows/issues.yml new file mode 100644 index 0000000000..c5bf53073f --- /dev/null +++ b/anneal/vendor/siphasher/.github/workflows/issues.yml @@ -0,0 +1,17 @@ +name: Close inactive issues +on: + schedule: + - cron: "30 1 * * *" + +jobs: + close-issues: + runs-on: ubuntu-latest + permissions: + issues: write + pull-requests: write + steps: + - uses: actions/stale@v9 + with: + stale-issue-message: "This issue is stale because it has been open for 30 days with no activity." + close-issue-message: "This issue was closed because it has been inactive for 14 days since being marked as stale." + repo-token: ${{ secrets.GITHUB_TOKEN }} diff --git a/anneal/vendor/siphasher/COPYING b/anneal/vendor/siphasher/COPYING new file mode 100644 index 0000000000..f65e1ba098 --- /dev/null +++ b/anneal/vendor/siphasher/COPYING @@ -0,0 +1,7 @@ +Copyright 2012-2016 The Rust Project Developers. +Copyright 2016-2026 Frank Denis. + +Licensed under the Apache License, Version 2.0 or the MIT license +, at your +option. 
diff --git a/anneal/vendor/siphasher/Cargo.lock b/anneal/vendor/siphasher/Cargo.lock new file mode 100644 index 0000000000..ec68c5cc5e --- /dev/null +++ b/anneal/vendor/siphasher/Cargo.lock @@ -0,0 +1,89 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "proc-macro2" +version = "1.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "serde" +version = "1.0.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32ac8da02677876d532745a130fc9d8e6edfa81a269b107c5b00829b91d8eb3c" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.183" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aafe972d60b0b9bee71a91b92fee2d4fb3c9d7e8f6b179aa99f27203d99a4816" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_json" +version = "1.0.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "siphasher" +version = "1.0.3" +dependencies = [ 
+ "serde", + "serde_json", +] + +[[package]] +name = "syn" +version = "2.0.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "unicode-ident" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" diff --git a/anneal/vendor/siphasher/Cargo.toml b/anneal/vendor/siphasher/Cargo.toml new file mode 100644 index 0000000000..6f53788915 --- /dev/null +++ b/anneal/vendor/siphasher/Cargo.toml @@ -0,0 +1,64 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. 
+ +[package] +edition = "2018" +name = "siphasher" +version = "1.0.3" +authors = ["Frank Denis "] +build = false +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = "SipHash-2-4, SipHash-1-3 and 128-bit variants in pure Rust" +homepage = "https://docs.rs/siphasher" +documentation = "https://docs.rs/siphasher" +readme = "README.md" +keywords = [ + "crypto", + "hash", + "siphash", +] +categories = [ + "algorithms", + "cryptography", +] +license = "MIT/Apache-2.0" +repository = "https://github.com/jedisct1/rust-siphash" + +[features] +default = ["std"] +serde_no_std = ["serde/alloc"] +serde_std = [ + "std", + "serde/std", +] +std = [] + +[lib] +name = "siphasher" +path = "src/lib.rs" + +[dependencies.serde] +version = "1.0" +features = ["derive"] +optional = true + +[dependencies.serde_json] +version = "1.0" +optional = true + +[profile.release] +opt-level = 3 +lto = true +panic = "abort" diff --git a/anneal/vendor/siphasher/Cargo.toml.orig b/anneal/vendor/siphasher/Cargo.toml.orig new file mode 100644 index 0000000000..99684ca99d --- /dev/null +++ b/anneal/vendor/siphasher/Cargo.toml.orig @@ -0,0 +1,28 @@ +[package] +authors = ["Frank Denis "] +keywords = ["crypto","hash","siphash"] +license = "MIT/Apache-2.0" +name = "siphasher" +description = "SipHash-2-4, SipHash-1-3 and 128-bit variants in pure Rust" +repository = "https://github.com/jedisct1/rust-siphash" +homepage = "https://docs.rs/siphasher" +documentation = "https://docs.rs/siphasher" +readme = "README.md" +version = "1.0.3" +categories = ["algorithms", "cryptography"] +edition = "2018" + +[profile.release] +lto = true +panic = "abort" +opt-level = 3 + +[dependencies] +serde = { version = "1.0", features = ["derive"], optional = true } +serde_json = { version = "1.0", optional = true } + +[features] +default = ["std"] +serde_std = ["std", "serde/std"] +serde_no_std = ["serde/alloc"] +std = [] diff --git a/anneal/vendor/siphasher/README.md 
b/anneal/vendor/siphasher/README.md new file mode 100644 index 0000000000..e485ab4130 --- /dev/null +++ b/anneal/vendor/siphasher/README.md @@ -0,0 +1,93 @@ +SipHash implementation for Rust +=============================== + +This crates implements SipHash-2-4 and SipHash-1-3 in Rust. + +It is based on the original implementation from rust-core and exposes the +same API. + +It also implements SipHash variants returning 128-bit tags. + +The `sip` module implements the standard 64-bit mode, whereas the `sip128` +module implements the 128-bit mode. + +Usage +----- + +In `Cargo.toml`: + +```toml +[dependencies] +siphasher = "1" +``` + +If you want [serde](https://github.com/serde-rs/serde) support, include the feature like this: + +```toml +[dependencies] +siphasher = { version = "1", features = ["serde"] } +``` + +64-bit mode: + +```rust +use siphasher::sip::{SipHasher, SipHasher13, SipHasher24}; + +// one-shot: + +let array: &[u8] = &[1, 2, 3]; +let key: &[u8; 16] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; +let hasher = SipHasher13::new_with_key(key); +let h = hasher.hash(array); + +// incremental: + +use core::hash::Hasher; + +let array1: &[u8] = &[1, 2, 3]; +let array2: &[u8] = &[4, 5, 6]; +let key: &[u8; 16] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; +let mut hasher = SipHasher13::new_with_key(key); +hasher.write(array1); +hasher.write(array2); +let h = hasher.finish(); +``` + +128-bit mode: + +```rust +use siphasher::sip128::{Hasher128, SipHasher, SipHasher13, SipHasher24}; + +// one-shot: + +let array: &[u8] = &[1, 2, 3]; +let key: &[u8; 16] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; +let hasher = SipHasher13::new_with_key(key); +let h = hasher.hash(array).as_bytes(); + +// incremental: + +use core::hash::Hasher; + +let array1: &[u8] = &[1, 2, 3]; +let array2: &[u8] = &[4, 5, 6]; +let key: &[u8; 16] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; +let mut hasher = SipHasher13::new_with_key(key); 
+hasher.write(array1); +hasher.write(array2); +let h = hasher.finish128().as_bytes(); +``` + +[API documentation](https://docs.rs/siphasher/) +----------------------------------------------- + +Note +---- + +Due to a confusing and not well documented API, methods from the `Hasher` trait of the standard library (`std::hash::Hasher`, `core::hash::Hasher`) produce non-portable results. + +This is not specific to SipHash, and affects all hash functions. + +The only safe methods in that trait are `write()` and `finish()`. + +It is thus recommended to use SipHash (and all other hash functions, actually) as documented above. diff --git a/anneal/vendor/siphasher/src/common.rs b/anneal/vendor/siphasher/src/common.rs new file mode 100644 index 0000000000..ce9fde2e95 --- /dev/null +++ b/anneal/vendor/siphasher/src/common.rs @@ -0,0 +1,65 @@ +/// Compression round for SipHash algorithm. +macro_rules! compress { + ($state:expr) => {{ + compress!($state.v0, $state.v1, $state.v2, $state.v3) + }}; + ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => {{ + $v0 = $v0.wrapping_add($v1); + $v1 = $v1.rotate_left(13); + $v1 ^= $v0; + $v0 = $v0.rotate_left(32); + $v2 = $v2.wrapping_add($v3); + $v3 = $v3.rotate_left(16); + $v3 ^= $v2; + $v0 = $v0.wrapping_add($v3); + $v3 = $v3.rotate_left(21); + $v3 ^= $v0; + $v2 = $v2.wrapping_add($v1); + $v1 = $v1.rotate_left(17); + $v1 ^= $v2; + $v2 = $v2.rotate_left(32); + }}; +} + +/// Loads an integer of the desired type from a byte stream, in LE order. +macro_rules! load_int_le { + ($buf:expr, $i:expr, $int_ty:ident) => {{ + debug_assert!($i + ::core::mem::size_of::<$int_ty>() <= $buf.len()); + let mut data = 0 as $int_ty; + ::core::ptr::copy_nonoverlapping( + $buf.as_ptr().add($i), + &mut data as *mut _ as *mut u8, + ::core::mem::size_of::<$int_ty>(), + ); + data.to_le() + }}; +} + +/// Loads a u64 using up to 7 bytes of a byte slice. +/// +/// # Safety +/// +/// The caller must ensure that `start + len <= buf.len()` and `len < 8`. 
+#[inline] +pub unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 { + debug_assert!(len < 8); + let mut i = 0; + let mut out = 0; + if i + 3 < len { + out = load_int_le!(buf, start + i, u32) as u64; + i += 4; + } + if i + 1 < len { + out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8); + i += 2; + } + if i < len { + out |= (*buf.get_unchecked(start + i) as u64) << (i * 8); + i += 1; + } + debug_assert_eq!(i, len); + out +} + +pub(crate) use compress; +pub(crate) use load_int_le; diff --git a/anneal/vendor/siphasher/src/lib.rs b/anneal/vendor/siphasher/src/lib.rs new file mode 100644 index 0000000000..b66177e360 --- /dev/null +++ b/anneal/vendor/siphasher/src/lib.rs @@ -0,0 +1,30 @@ +#![doc = include_str!("../README.md")] +#![cfg_attr(not(test), no_std)] +#![allow(clippy::unreadable_literal)] +#![allow(clippy::cast_lossless)] +#![allow(clippy::many_single_char_names)] + +mod common; +pub mod sip; +pub mod sip128; + +#[cfg(test)] +mod tests; + +#[cfg(test)] +mod tests128; + +#[cfg(any(feature = "serde", feature = "serde_std", feature = "serde_no_std"))] +pub mod reexports { + pub use serde; + #[cfg(feature = "serde_json")] + pub use serde_json; +} + +pub mod prelude { + pub use core::hash::Hasher as _; + + pub use sip128::Hasher128 as _; + + pub use crate::{sip, sip128}; +} diff --git a/anneal/vendor/siphasher/src/sip.rs b/anneal/vendor/siphasher/src/sip.rs new file mode 100644 index 0000000000..1de1e30cdc --- /dev/null +++ b/anneal/vendor/siphasher/src/sip.rs @@ -0,0 +1,566 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! An implementation of SipHash. 
+ +use core::cmp; +use core::hash; +use core::hash::Hasher as _; +use core::marker::PhantomData; +use core::mem; + +use crate::common::{compress, load_int_le, u8to64_le}; + +/// An implementation of SipHash 1-3. +/// +/// See: +#[derive(Debug, Clone, Copy, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct SipHasher13 { + hasher: Hasher, +} + +/// An implementation of SipHash 2-4. +/// +/// See: +#[derive(Debug, Clone, Copy, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct SipHasher24 { + hasher: Hasher, +} + +/// An implementation of SipHash 2-4. +/// +/// See: +/// +/// SipHash is a general-purpose hashing function: it runs at a good +/// speed (competitive with Spooky and City) and permits strong _keyed_ +/// hashing. This lets you key your hashtables from a strong RNG, such as +/// [`rand::os::OsRng`](https://doc.rust-lang.org/rand/rand/os/struct.OsRng.html). +/// +/// Although the SipHash algorithm is considered to be generally strong, +/// it is not intended for cryptographic purposes. As such, all +/// cryptographic uses of this implementation are _strongly discouraged_. +#[derive(Debug, Clone, Copy, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct SipHasher(SipHasher24); + +#[derive(Debug, Clone, Copy)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +struct Hasher { + k0: u64, + k1: u64, + length: usize, // how many bytes we've processed + state: State, // hash State + tail: u64, // unprocessed bytes le + ntail: usize, // how many bytes in tail are valid + _marker: PhantomData, +} + +#[derive(Debug, Clone, Copy)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +struct State { + // v0, v2 and v1, v3 show up in pairs in the algorithm, + // and simd implementations of SipHash will use vectors + // of v02 and v13. 
By placing them in this order in the struct, + // the compiler can pick up on just a few simd optimizations by itself. + v0: u64, + v2: u64, + v1: u64, + v3: u64, +} + +impl SipHasher { + /// Creates a new `SipHasher` with the two initial keys set to 0. + #[inline] + pub fn new() -> SipHasher { + SipHasher::new_with_keys(0, 0) + } + + /// Creates a `SipHasher` that is keyed off the provided keys. + #[inline] + pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher { + SipHasher(SipHasher24::new_with_keys(key0, key1)) + } + + /// Creates a `SipHasher` from a 16 byte key. + pub fn new_with_key(key: &[u8; 16]) -> SipHasher { + let mut b0 = [0u8; 8]; + let mut b1 = [0u8; 8]; + b0.copy_from_slice(&key[0..8]); + b1.copy_from_slice(&key[8..16]); + let key0 = u64::from_le_bytes(b0); + let key1 = u64::from_le_bytes(b1); + Self::new_with_keys(key0, key1) + } + + /// Get the keys used by this hasher + pub fn keys(&self) -> (u64, u64) { + (self.0.hasher.k0, self.0.hasher.k1) + } + + /// Get the key used by this hasher as a 16 byte vector + pub fn key(&self) -> [u8; 16] { + let mut bytes = [0u8; 16]; + bytes[0..8].copy_from_slice(&self.0.hasher.k0.to_le_bytes()); + bytes[8..16].copy_from_slice(&self.0.hasher.k1.to_le_bytes()); + bytes + } + + /// Hash a byte array - This is the easiest and safest way to use SipHash. + #[inline] + pub fn hash(&self, bytes: &[u8]) -> u64 { + self.0.hasher.hash(bytes) + } +} + +impl SipHasher13 { + /// Creates a new `SipHasher13` with the two initial keys set to 0. + #[inline] + pub fn new() -> SipHasher13 { + SipHasher13::new_with_keys(0, 0) + } + + /// Creates a `SipHasher13` that is keyed off the provided keys. + #[inline] + pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 { + SipHasher13 { + hasher: Hasher::new_with_keys(key0, key1), + } + } + + /// Creates a `SipHasher13` from a 16 byte key. 
+ pub fn new_with_key(key: &[u8; 16]) -> SipHasher13 { + let mut b0 = [0u8; 8]; + let mut b1 = [0u8; 8]; + b0.copy_from_slice(&key[0..8]); + b1.copy_from_slice(&key[8..16]); + let key0 = u64::from_le_bytes(b0); + let key1 = u64::from_le_bytes(b1); + Self::new_with_keys(key0, key1) + } + + /// Get the keys used by this hasher + pub fn keys(&self) -> (u64, u64) { + (self.hasher.k0, self.hasher.k1) + } + + /// Get the key used by this hasher as a 16 byte vector + pub fn key(&self) -> [u8; 16] { + let mut bytes = [0u8; 16]; + bytes[0..8].copy_from_slice(&self.hasher.k0.to_le_bytes()); + bytes[8..16].copy_from_slice(&self.hasher.k1.to_le_bytes()); + bytes + } + + /// Hash a byte array - This is the easiest and safest way to use SipHash. + #[inline] + pub fn hash(&self, bytes: &[u8]) -> u64 { + self.hasher.hash(bytes) + } +} + +impl SipHasher24 { + /// Creates a new `SipHasher24` with the two initial keys set to 0. + #[inline] + pub fn new() -> SipHasher24 { + SipHasher24::new_with_keys(0, 0) + } + + /// Creates a `SipHasher24` that is keyed off the provided keys. + #[inline] + pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher24 { + SipHasher24 { + hasher: Hasher::new_with_keys(key0, key1), + } + } + + /// Creates a `SipHasher24` from a 16 byte key. 
+ pub fn new_with_key(key: &[u8; 16]) -> SipHasher24 { + let mut b0 = [0u8; 8]; + let mut b1 = [0u8; 8]; + b0.copy_from_slice(&key[0..8]); + b1.copy_from_slice(&key[8..16]); + let key0 = u64::from_le_bytes(b0); + let key1 = u64::from_le_bytes(b1); + Self::new_with_keys(key0, key1) + } + + /// Get the keys used by this hasher + pub fn keys(&self) -> (u64, u64) { + (self.hasher.k0, self.hasher.k1) + } + + /// Get the key used by this hasher as a 16 byte vector + pub fn key(&self) -> [u8; 16] { + let mut bytes = [0u8; 16]; + bytes[0..8].copy_from_slice(&self.hasher.k0.to_le_bytes()); + bytes[8..16].copy_from_slice(&self.hasher.k1.to_le_bytes()); + bytes + } + + /// Hash a byte array - This is the easiest and safest way to use SipHash. + #[inline] + pub fn hash(&self, bytes: &[u8]) -> u64 { + self.hasher.hash(bytes) + } +} + +impl Hasher { + #[inline] + fn new_with_keys(key0: u64, key1: u64) -> Hasher { + let mut state = Hasher { + k0: key0, + k1: key1, + length: 0, + state: State { + v0: 0, + v1: 0, + v2: 0, + v3: 0, + }, + tail: 0, + ntail: 0, + _marker: PhantomData, + }; + state.reset(); + state + } + + #[inline] + fn reset(&mut self) { + self.length = 0; + self.state.v0 = self.k0 ^ 0x736f6d6570736575; + self.state.v1 = self.k1 ^ 0x646f72616e646f6d; + self.state.v2 = self.k0 ^ 0x6c7967656e657261; + self.state.v3 = self.k1 ^ 0x7465646279746573; + self.ntail = 0; + } + + // A specialized write function for values with size <= 8. + // + // The hashing of multi-byte integers depends on endianness. E.g.: + // - little-endian: `write_u32(0xDDCCBBAA)` == `write([0xAA, 0xBB, 0xCC, 0xDD])` + // - big-endian: `write_u32(0xDDCCBBAA)` == `write([0xDD, 0xCC, 0xBB, 0xAA])` + // + // This function does the right thing for little-endian hardware. On + // big-endian hardware `x` must be byte-swapped first to give the right + // behaviour. After any byte-swapping, the input must be zero-extended to + // 64-bits. The caller is responsible for the byte-swapping and + // zero-extension. 
+ #[inline] + fn short_write(&mut self, _x: T, x: u64) { + let size = mem::size_of::(); + self.length += size; + + // The original number must be zero-extended, not sign-extended. + debug_assert!(if size < 8 { x >> (8 * size) == 0 } else { true }); + + // The number of bytes needed to fill `self.tail`. + let needed = 8 - self.ntail; + + self.tail |= x << (8 * self.ntail); + if size < needed { + self.ntail += size; + return; + } + + // `self.tail` is full, process it. + self.state.v3 ^= self.tail; + S::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + + self.ntail = size - needed; + self.tail = if needed < 8 { x >> (8 * needed) } else { 0 }; + } + + #[inline] + fn hash(&self, msg: &[u8]) -> u64 { + if self.ntail != 0 { + let mut hasher: Hasher = Hasher { + k0: self.k0, + k1: self.k1, + length: self.length, + state: self.state, + tail: self.tail, + ntail: self.ntail, + _marker: PhantomData, + }; + hasher.write(msg); + return hasher.finish(); + } + + let length = self.length + msg.len(); + let len = msg.len(); + let left = len & 0x7; + let mut state = self.state; + let mut i = 0; + + while i < len - left { + let mi = unsafe { load_int_le!(msg, i, u64) }; + + state.v3 ^= mi; + S::c_rounds(&mut state); + state.v0 ^= mi; + + i += 8; + } + + let tail = unsafe { u8to64_le(msg, i, left) }; + Self::finish_with_state(state, length, tail) + } + + #[inline] + fn finish_with_state(mut state: State, length: usize, tail: u64) -> u64 { + let b: u64 = ((length as u64 & 0xff) << 56) | tail; + + state.v3 ^= b; + S::c_rounds(&mut state); + state.v0 ^= b; + + state.v2 ^= 0xff; + S::d_rounds(&mut state); + + state.v0 ^ state.v1 ^ state.v2 ^ state.v3 + } +} + +impl hash::Hasher for SipHasher { + #[inline] + fn write(&mut self, msg: &[u8]) { + self.0.write(msg) + } + + #[inline] + fn finish(&self) -> u64 { + self.0.finish() + } + + #[inline] + fn write_usize(&mut self, i: usize) { + self.0.write_usize(i); + } + + #[inline] + fn write_u8(&mut self, i: u8) { + self.0.write_u8(i); + 
} + + #[inline] + fn write_u16(&mut self, i: u16) { + self.0.write_u16(i); + } + + #[inline] + fn write_u32(&mut self, i: u32) { + self.0.write_u32(i); + } + + #[inline] + fn write_u64(&mut self, i: u64) { + self.0.write_u64(i); + } +} + +impl hash::Hasher for SipHasher13 { + #[inline] + fn write(&mut self, msg: &[u8]) { + self.hasher.write(msg) + } + + #[inline] + fn finish(&self) -> u64 { + self.hasher.finish() + } + + #[inline] + fn write_usize(&mut self, i: usize) { + self.hasher.write_usize(i); + } + + #[inline] + fn write_u8(&mut self, i: u8) { + self.hasher.write_u8(i); + } + + #[inline] + fn write_u16(&mut self, i: u16) { + self.hasher.write_u16(i); + } + + #[inline] + fn write_u32(&mut self, i: u32) { + self.hasher.write_u32(i); + } + + #[inline] + fn write_u64(&mut self, i: u64) { + self.hasher.write_u64(i); + } +} + +impl hash::Hasher for SipHasher24 { + #[inline] + fn write(&mut self, msg: &[u8]) { + self.hasher.write(msg) + } + + #[inline] + fn finish(&self) -> u64 { + self.hasher.finish() + } + + #[inline] + fn write_usize(&mut self, i: usize) { + self.hasher.write_usize(i); + } + + #[inline] + fn write_u8(&mut self, i: u8) { + self.hasher.write_u8(i); + } + + #[inline] + fn write_u16(&mut self, i: u16) { + self.hasher.write_u16(i); + } + + #[inline] + fn write_u32(&mut self, i: u32) { + self.hasher.write_u32(i); + } + + #[inline] + fn write_u64(&mut self, i: u64) { + self.hasher.write_u64(i); + } +} + +impl hash::Hasher for Hasher { + #[inline] + fn write_usize(&mut self, i: usize) { + self.short_write(i, i.to_le() as u64); + } + + #[inline] + fn write_u8(&mut self, i: u8) { + self.short_write(i, i as u64); + } + + #[inline] + fn write_u16(&mut self, i: u16) { + self.short_write(i, i.to_le() as u64); + } + + #[inline] + fn write_u32(&mut self, i: u32) { + self.short_write(i, i.to_le() as u64); + } + + #[inline] + fn write_u64(&mut self, i: u64) { + self.short_write(i, i.to_le()); + } + + #[inline] + fn write(&mut self, msg: &[u8]) { + let length = 
msg.len(); + self.length += length; + + let mut needed = 0; + + if self.ntail != 0 { + needed = 8 - self.ntail; + self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << (8 * self.ntail); + if length < needed { + self.ntail += length; + return; + } else { + self.state.v3 ^= self.tail; + S::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + self.ntail = 0; + } + } + + // Buffered tail is now flushed, process new input. + let len = length - needed; + let left = len & 0x7; + + let mut i = needed; + while i < len - left { + let mi = unsafe { load_int_le!(msg, i, u64) }; + + self.state.v3 ^= mi; + S::c_rounds(&mut self.state); + self.state.v0 ^= mi; + + i += 8; + } + + self.tail = unsafe { u8to64_le(msg, i, left) }; + self.ntail = left; + } + + #[inline] + fn finish(&self) -> u64 { + Self::finish_with_state(self.state, self.length, self.tail) + } +} + +impl Default for Hasher { + /// Creates a `Hasher` with the two initial keys set to 0. + #[inline] + fn default() -> Hasher { + Hasher::new_with_keys(0, 0) + } +} + +#[doc(hidden)] +trait Sip { + fn c_rounds(_: &mut State); + fn d_rounds(_: &mut State); +} + +#[derive(Debug, Clone, Copy, Default)] +struct Sip13Rounds; + +impl Sip for Sip13Rounds { + #[inline] + fn c_rounds(state: &mut State) { + compress!(state); + } + + #[inline] + fn d_rounds(state: &mut State) { + compress!(state); + compress!(state); + compress!(state); + } +} + +#[derive(Debug, Clone, Copy, Default)] +struct Sip24Rounds; + +impl Sip for Sip24Rounds { + #[inline] + fn c_rounds(state: &mut State) { + compress!(state); + compress!(state); + } + + #[inline] + fn d_rounds(state: &mut State) { + compress!(state); + compress!(state); + compress!(state); + compress!(state); + } +} diff --git a/anneal/vendor/siphasher/src/sip128.rs b/anneal/vendor/siphasher/src/sip128.rs new file mode 100644 index 0000000000..fb295d60f0 --- /dev/null +++ b/anneal/vendor/siphasher/src/sip128.rs @@ -0,0 +1,667 @@ +// Copyright 2012-2015 The Rust Project 
Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! An implementation of SipHash with a 128-bit output. + +use core::cmp; +use core::hash; +use core::hash::Hasher as _; +use core::marker::PhantomData; +use core::mem; + +use crate::common::{compress, load_int_le, u8to64_le}; + +/// A 128-bit (2x64) hash output +#[derive(Debug, Clone, Copy, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Hash128 { + pub h1: u64, + pub h2: u64, +} + +impl PartialEq for Hash128 { + /// Constant-time equality comparison to prevent timing attacks. + fn eq(&self, other: &Self) -> bool { + let x = (self.h1 ^ other.h1) | (self.h2 ^ other.h2); + unsafe { core::ptr::read_volatile(&x) == 0 } + } +} + +impl Eq for Hash128 {} + +impl From for Hash128 { + fn from(v: u128) -> Self { + Hash128 { + h1: v as u64, + h2: (v >> 64) as u64, + } + } +} + +impl From for u128 { + fn from(h: Hash128) -> u128 { + (h.h1 as u128) | ((h.h2 as u128) << 64) + } +} + +/// An implementation of SipHash128 1-3. +#[derive(Debug, Clone, Copy, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct SipHasher13 { + hasher: Hasher, +} + +/// An implementation of SipHash128 2-4. +#[derive(Debug, Clone, Copy, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct SipHasher24 { + hasher: Hasher, +} + +/// An implementation of SipHash128 2-4. +/// +/// SipHash is a general-purpose hashing function: it runs at a good +/// speed (competitive with Spooky and City) and permits strong _keyed_ +/// hashing. 
This lets you key your hashtables from a strong RNG, such as +/// [`rand::os::OsRng`](https://doc.rust-lang.org/rand/rand/os/struct.OsRng.html). +/// +/// Although the SipHash algorithm is considered to be generally strong, +/// it is not intended for cryptographic purposes. As such, all +/// cryptographic uses of this implementation are _strongly discouraged_. +#[derive(Debug, Clone, Copy, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct SipHasher(SipHasher24); + +#[derive(Debug, Copy)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +struct Hasher { + k0: u64, + k1: u64, + length: usize, // how many bytes we've processed + state: State, // hash State + tail: u64, // unprocessed bytes le + ntail: usize, // how many bytes in tail are valid + _marker: PhantomData, +} + +#[derive(Debug, Clone, Copy)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +struct State { + // v0, v2 and v1, v3 show up in pairs in the algorithm, + // and simd implementations of SipHash will use vectors + // of v02 and v13. By placing them in this order in the struct, + // the compiler can pick up on just a few simd optimizations by itself. + v0: u64, + v2: u64, + v1: u64, + v3: u64, +} + +pub trait Hasher128 { + /// Return a 128-bit hash + fn finish128(&self) -> Hash128; +} + +impl SipHasher { + /// Creates a new `SipHasher` with the two initial keys set to 0. + #[inline] + pub fn new() -> SipHasher { + SipHasher::new_with_keys(0, 0) + } + + /// Creates a `SipHasher` that is keyed off the provided keys. + #[inline] + pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher { + SipHasher(SipHasher24::new_with_keys(key0, key1)) + } + + /// Creates a `SipHasher` from a 16 byte key. 
+ pub fn new_with_key(key: &[u8; 16]) -> SipHasher { + let mut b0 = [0u8; 8]; + let mut b1 = [0u8; 8]; + b0.copy_from_slice(&key[0..8]); + b1.copy_from_slice(&key[8..16]); + let key0 = u64::from_le_bytes(b0); + let key1 = u64::from_le_bytes(b1); + Self::new_with_keys(key0, key1) + } + + /// Get the keys used by this hasher + pub fn keys(&self) -> (u64, u64) { + (self.0.hasher.k0, self.0.hasher.k1) + } + + /// Get the key used by this hasher as a 16 byte vector + pub fn key(&self) -> [u8; 16] { + let mut bytes = [0u8; 16]; + bytes[0..8].copy_from_slice(&self.0.hasher.k0.to_le_bytes()); + bytes[8..16].copy_from_slice(&self.0.hasher.k1.to_le_bytes()); + bytes + } + + /// Hash a byte array - This is the easiest and safest way to use SipHash. + #[inline] + pub fn hash(&self, bytes: &[u8]) -> Hash128 { + self.0.hasher.hash128(bytes) + } +} + +impl Hasher128 for SipHasher { + /// Return a 128-bit hash + #[inline] + fn finish128(&self) -> Hash128 { + self.0.finish128() + } +} + +impl SipHasher13 { + /// Creates a new `SipHasher13` with the two initial keys set to 0. + #[inline] + pub fn new() -> SipHasher13 { + SipHasher13::new_with_keys(0, 0) + } + + /// Creates a `SipHasher13` that is keyed off the provided keys. + #[inline] + pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 { + SipHasher13 { + hasher: Hasher::new_with_keys(key0, key1), + } + } + + /// Creates a `SipHasher13` from a 16 byte key. 
+ pub fn new_with_key(key: &[u8; 16]) -> SipHasher13 { + let mut b0 = [0u8; 8]; + let mut b1 = [0u8; 8]; + b0.copy_from_slice(&key[0..8]); + b1.copy_from_slice(&key[8..16]); + let key0 = u64::from_le_bytes(b0); + let key1 = u64::from_le_bytes(b1); + Self::new_with_keys(key0, key1) + } + + /// Get the keys used by this hasher + pub fn keys(&self) -> (u64, u64) { + (self.hasher.k0, self.hasher.k1) + } + + /// Get the key used by this hasher as a 16 byte vector + pub fn key(&self) -> [u8; 16] { + let mut bytes = [0u8; 16]; + bytes[0..8].copy_from_slice(&self.hasher.k0.to_le_bytes()); + bytes[8..16].copy_from_slice(&self.hasher.k1.to_le_bytes()); + bytes + } + + /// Hash a byte array - This is the easiest and safest way to use SipHash. + #[inline] + pub fn hash(&self, bytes: &[u8]) -> Hash128 { + self.hasher.hash128(bytes) + } +} + +impl Hasher128 for SipHasher13 { + /// Return a 128-bit hash + #[inline] + fn finish128(&self) -> Hash128 { + self.hasher.finish128() + } +} + +impl SipHasher24 { + /// Creates a new `SipHasher24` with the two initial keys set to 0. + #[inline] + pub fn new() -> SipHasher24 { + SipHasher24::new_with_keys(0, 0) + } + + /// Creates a `SipHasher24` that is keyed off the provided keys. + #[inline] + pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher24 { + SipHasher24 { + hasher: Hasher::new_with_keys(key0, key1), + } + } + + /// Creates a `SipHasher24` from a 16 byte key. 
+ pub fn new_with_key(key: &[u8; 16]) -> SipHasher24 { + let mut b0 = [0u8; 8]; + let mut b1 = [0u8; 8]; + b0.copy_from_slice(&key[0..8]); + b1.copy_from_slice(&key[8..16]); + let key0 = u64::from_le_bytes(b0); + let key1 = u64::from_le_bytes(b1); + Self::new_with_keys(key0, key1) + } + + /// Get the keys used by this hasher + pub fn keys(&self) -> (u64, u64) { + (self.hasher.k0, self.hasher.k1) + } + + /// Get the key used by this hasher as a 16 byte vector + pub fn key(&self) -> [u8; 16] { + let mut bytes = [0u8; 16]; + bytes[0..8].copy_from_slice(&self.hasher.k0.to_le_bytes()); + bytes[8..16].copy_from_slice(&self.hasher.k1.to_le_bytes()); + bytes + } + + /// Hash a byte array - This is the easiest and safest way to use SipHash. + #[inline] + pub fn hash(&self, bytes: &[u8]) -> Hash128 { + self.hasher.hash128(bytes) + } +} + +impl Hasher128 for SipHasher24 { + /// Return a 128-bit hash + #[inline] + fn finish128(&self) -> Hash128 { + self.hasher.finish128() + } +} + +impl Hasher { + #[inline] + fn new_with_keys(key0: u64, key1: u64) -> Hasher { + let mut state = Hasher { + k0: key0, + k1: key1, + length: 0, + state: State { + v0: 0, + v1: 0xee, + v2: 0, + v3: 0, + }, + tail: 0, + ntail: 0, + _marker: PhantomData, + }; + state.reset(); + state + } + + #[inline] + fn reset(&mut self) { + self.length = 0; + self.state.v0 = self.k0 ^ 0x736f6d6570736575; + self.state.v1 = self.k1 ^ 0x646f72616e646f83; + self.state.v2 = self.k0 ^ 0x6c7967656e657261; + self.state.v3 = self.k1 ^ 0x7465646279746573; + self.ntail = 0; + } + + // A specialized write function for values with size <= 8. + // + // The hashing of multi-byte integers depends on endianness. E.g.: + // - little-endian: `write_u32(0xDDCCBBAA)` == `write([0xAA, 0xBB, 0xCC, 0xDD])` + // - big-endian: `write_u32(0xDDCCBBAA)` == `write([0xDD, 0xCC, 0xBB, 0xAA])` + // + // This function does the right thing for little-endian hardware. 
On + // big-endian hardware `x` must be byte-swapped first to give the right + // behaviour. After any byte-swapping, the input must be zero-extended to + // 64-bits. The caller is responsible for the byte-swapping and + // zero-extension. + #[inline] + fn short_write(&mut self, _x: T, x: u64) { + let size = mem::size_of::(); + self.length += size; + + // The original number must be zero-extended, not sign-extended. + debug_assert!(if size < 8 { x >> (8 * size) == 0 } else { true }); + + // The number of bytes needed to fill `self.tail`. + let needed = 8 - self.ntail; + + self.tail |= x << (8 * self.ntail); + if size < needed { + self.ntail += size; + return; + } + + // `self.tail` is full, process it. + self.state.v3 ^= self.tail; + S::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + + self.ntail = size - needed; + self.tail = if needed < 8 { x >> (8 * needed) } else { 0 }; + } + + #[inline] + fn hash128(&self, msg: &[u8]) -> Hash128 { + if self.ntail != 0 { + let mut hasher = self.clone(); + hasher.write(msg); + return hasher.finish128(); + } + + let length = self.length + msg.len(); + let len = msg.len(); + let left = len & 0x7; + let mut state = self.state; + let mut i = 0; + + while i < len - left { + let mi = unsafe { load_int_le!(msg, i, u64) }; + + state.v3 ^= mi; + S::c_rounds(&mut state); + state.v0 ^= mi; + + i += 8; + } + + let tail = unsafe { u8to64_le(msg, i, left) }; + Self::finish128_with_state(state, length, tail) + } + + #[inline] + fn finish128_with_state(mut state: State, length: usize, tail: u64) -> Hash128 { + let b: u64 = ((length as u64 & 0xff) << 56) | tail; + + state.v3 ^= b; + S::c_rounds(&mut state); + state.v0 ^= b; + + state.v2 ^= 0xee; + S::d_rounds(&mut state); + let h1 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; + + state.v1 ^= 0xdd; + S::d_rounds(&mut state); + let h2 = state.v0 ^ state.v1 ^ state.v2 ^ state.v3; + + Hash128 { h1, h2 } + } +} + +impl Hasher { + #[inline] + pub fn finish128(&self) -> Hash128 { + 
Self::finish128_with_state(self.state, self.length, self.tail) + } +} + +impl hash::Hasher for SipHasher { + #[inline] + fn write(&mut self, msg: &[u8]) { + self.0.write(msg) + } + + #[inline] + fn finish(&self) -> u64 { + self.0.finish() + } + + #[inline] + fn write_usize(&mut self, i: usize) { + self.0.write_usize(i); + } + + #[inline] + fn write_u8(&mut self, i: u8) { + self.0.write_u8(i); + } + + #[inline] + fn write_u16(&mut self, i: u16) { + self.0.write_u16(i); + } + + #[inline] + fn write_u32(&mut self, i: u32) { + self.0.write_u32(i); + } + + #[inline] + fn write_u64(&mut self, i: u64) { + self.0.write_u64(i); + } +} + +impl hash::Hasher for SipHasher13 { + #[inline] + fn write(&mut self, msg: &[u8]) { + self.hasher.write(msg) + } + + #[inline] + fn finish(&self) -> u64 { + self.hasher.finish() + } + + #[inline] + fn write_usize(&mut self, i: usize) { + self.hasher.write_usize(i); + } + + #[inline] + fn write_u8(&mut self, i: u8) { + self.hasher.write_u8(i); + } + + #[inline] + fn write_u16(&mut self, i: u16) { + self.hasher.write_u16(i); + } + + #[inline] + fn write_u32(&mut self, i: u32) { + self.hasher.write_u32(i); + } + + #[inline] + fn write_u64(&mut self, i: u64) { + self.hasher.write_u64(i); + } +} + +impl hash::Hasher for SipHasher24 { + #[inline] + fn write(&mut self, msg: &[u8]) { + self.hasher.write(msg) + } + + #[inline] + fn finish(&self) -> u64 { + self.hasher.finish() + } + + #[inline] + fn write_usize(&mut self, i: usize) { + self.hasher.write_usize(i); + } + + #[inline] + fn write_u8(&mut self, i: u8) { + self.hasher.write_u8(i); + } + + #[inline] + fn write_u16(&mut self, i: u16) { + self.hasher.write_u16(i); + } + + #[inline] + fn write_u32(&mut self, i: u32) { + self.hasher.write_u32(i); + } + + #[inline] + fn write_u64(&mut self, i: u64) { + self.hasher.write_u64(i); + } +} + +impl hash::Hasher for Hasher { + #[inline] + fn write_usize(&mut self, i: usize) { + self.short_write(i, i.to_le() as u64); + } + + #[inline] + fn write_u8(&mut 
self, i: u8) { + self.short_write(i, i as u64); + } + + #[inline] + fn write_u16(&mut self, i: u16) { + self.short_write(i, i.to_le() as u64); + } + + #[inline] + fn write_u32(&mut self, i: u32) { + self.short_write(i, i.to_le() as u64); + } + + #[inline] + fn write_u64(&mut self, i: u64) { + self.short_write(i, i.to_le()); + } + + #[inline] + fn write(&mut self, msg: &[u8]) { + let length = msg.len(); + self.length += length; + + let mut needed = 0; + + if self.ntail != 0 { + needed = 8 - self.ntail; + self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << (8 * self.ntail); + if length < needed { + self.ntail += length; + return; + } else { + self.state.v3 ^= self.tail; + S::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + self.ntail = 0; + } + } + + // Buffered tail is now flushed, process new input. + let len = length - needed; + let left = len & 0x7; + + let mut i = needed; + while i < len - left { + let mi = unsafe { load_int_le!(msg, i, u64) }; + + self.state.v3 ^= mi; + S::c_rounds(&mut self.state); + self.state.v0 ^= mi; + + i += 8; + } + + self.tail = unsafe { u8to64_le(msg, i, left) }; + self.ntail = left; + } + + #[inline] + fn finish(&self) -> u64 { + self.finish128().h2 + } +} + +impl Clone for Hasher { + #[inline] + fn clone(&self) -> Hasher { + Hasher { + k0: self.k0, + k1: self.k1, + length: self.length, + state: self.state, + tail: self.tail, + ntail: self.ntail, + _marker: self._marker, + } + } +} + +impl Default for Hasher { + /// Creates a `Hasher` with the two initial keys set to 0. 
+ #[inline] + fn default() -> Hasher { + Hasher::new_with_keys(0, 0) + } +} + +#[doc(hidden)] +trait Sip { + fn c_rounds(_: &mut State); + fn d_rounds(_: &mut State); +} + +#[derive(Debug, Clone, Copy, Default)] +struct Sip13Rounds; + +impl Sip for Sip13Rounds { + #[inline] + fn c_rounds(state: &mut State) { + compress!(state); + } + + #[inline] + fn d_rounds(state: &mut State) { + compress!(state); + compress!(state); + compress!(state); + } +} + +#[derive(Debug, Clone, Copy, Default)] +struct Sip24Rounds; + +impl Sip for Sip24Rounds { + #[inline] + fn c_rounds(state: &mut State) { + compress!(state); + compress!(state); + } + + #[inline] + fn d_rounds(state: &mut State) { + compress!(state); + compress!(state); + compress!(state); + compress!(state); + } +} + +impl Hash128 { + /// Convert into a 16-bytes vector + pub fn as_bytes(&self) -> [u8; 16] { + let mut bytes = [0u8; 16]; + bytes[0..8].copy_from_slice(&self.h1.to_le_bytes()); + bytes[8..16].copy_from_slice(&self.h2.to_le_bytes()); + bytes + } + + /// Convert into a `u128` + #[inline] + pub fn as_u128(&self) -> u128 { + let h1 = self.h1.to_le(); + let h2 = self.h2.to_le(); + h1 as u128 | ((h2 as u128) << 64) + } + + /// Convert into `(u64, u64)` + #[inline] + pub fn as_u64(&self) -> (u64, u64) { + let h1 = self.h1.to_le(); + let h2 = self.h2.to_le(); + (h1, h2) + } +} diff --git a/anneal/vendor/siphasher/src/tests.rs b/anneal/vendor/siphasher/src/tests.rs new file mode 100644 index 0000000000..125afacdb2 --- /dev/null +++ b/anneal/vendor/siphasher/src/tests.rs @@ -0,0 +1,332 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use std::hash::{Hash, Hasher}; + +use super::sip::{SipHasher, SipHasher13, SipHasher24}; + +// Hash just the bytes of the slice, without length prefix +struct Bytes<'a>(&'a [u8]); + +impl Hash for Bytes<'_> { + #[allow(unused_must_use)] + fn hash(&self, state: &mut H) { + let Bytes(v) = *self; + state.write(v); + } +} + +macro_rules! u8to64_le { + ($buf:expr, $i:expr) => { + $buf[0 + $i] as u64 + | ($buf[1 + $i] as u64) << 8 + | ($buf[2 + $i] as u64) << 16 + | ($buf[3 + $i] as u64) << 24 + | ($buf[4 + $i] as u64) << 32 + | ($buf[5 + $i] as u64) << 40 + | ($buf[6 + $i] as u64) << 48 + | ($buf[7 + $i] as u64) << 56 + }; + ($buf:expr, $i:expr, $len:expr) => {{ + let mut t = 0; + let mut out = 0; + while t < $len { + out |= ($buf[t + $i] as u64) << t * 8; + t += 1; + } + out + }}; +} + +fn hash_with(mut st: H, x: &T) -> u64 { + x.hash(&mut st); + st.finish() +} + +fn hash(x: &T) -> u64 { + hash_with(SipHasher::new(), x) +} + +#[test] +#[allow(unused_must_use)] +fn test_siphash_1_3() { + let vecs: [[u8; 8]; 64] = [ + [0xdc, 0xc4, 0x0f, 0x05, 0x58, 0x01, 0xac, 0xab], + [0x93, 0xca, 0x57, 0x7d, 0xf3, 0x9b, 0xf4, 0xc9], + [0x4d, 0xd4, 0xc7, 0x4d, 0x02, 0x9b, 0xcb, 0x82], + [0xfb, 0xf7, 0xdd, 0xe7, 0xb8, 0x0a, 0xf8, 0x8b], + [0x28, 0x83, 0xd3, 0x88, 0x60, 0x57, 0x75, 0xcf], + [0x67, 0x3b, 0x53, 0x49, 0x2f, 0xd5, 0xf9, 0xde], + [0xa7, 0x22, 0x9f, 0xc5, 0x50, 0x2b, 0x0d, 0xc5], + [0x40, 0x11, 0xb1, 0x9b, 0x98, 0x7d, 0x92, 0xd3], + [0x8e, 0x9a, 0x29, 0x8d, 0x11, 0x95, 0x90, 0x36], + [0xe4, 0x3d, 0x06, 0x6c, 0xb3, 0x8e, 0xa4, 0x25], + [0x7f, 0x09, 0xff, 0x92, 0xee, 0x85, 0xde, 0x79], + [0x52, 0xc3, 0x4d, 0xf9, 0xc1, 0x18, 0xc1, 0x70], + [0xa2, 0xd9, 0xb4, 0x57, 0xb1, 0x84, 0xa3, 0x78], + [0xa7, 0xff, 0x29, 0x12, 0x0c, 0x76, 0x6f, 0x30], + [0x34, 0x5d, 0xf9, 0xc0, 0x11, 0xa1, 0x5a, 0x60], + [0x56, 0x99, 0x51, 0x2a, 0x6d, 0xd8, 0x20, 0xd3], + [0x66, 0x8b, 0x90, 0x7d, 0x1a, 0xdd, 0x4f, 0xcc], + [0x0c, 0xd8, 0xdb, 0x63, 0x90, 0x68, 0xf2, 0x9c], + [0x3e, 0xe6, 0x73, 0xb4, 0x9c, 
0x38, 0xfc, 0x8f], + [0x1c, 0x7d, 0x29, 0x8d, 0xe5, 0x9d, 0x1f, 0xf2], + [0x40, 0xe0, 0xcc, 0xa6, 0x46, 0x2f, 0xdc, 0xc0], + [0x44, 0xf8, 0x45, 0x2b, 0xfe, 0xab, 0x92, 0xb9], + [0x2e, 0x87, 0x20, 0xa3, 0x9b, 0x7b, 0xfe, 0x7f], + [0x23, 0xc1, 0xe6, 0xda, 0x7f, 0x0e, 0x5a, 0x52], + [0x8c, 0x9c, 0x34, 0x67, 0xb2, 0xae, 0x64, 0xf4], + [0x79, 0x09, 0x5b, 0x70, 0x28, 0x59, 0xcd, 0x45], + [0xa5, 0x13, 0x99, 0xca, 0xe3, 0x35, 0x3e, 0x3a], + [0x35, 0x3b, 0xde, 0x4a, 0x4e, 0xc7, 0x1d, 0xa9], + [0x0d, 0xd0, 0x6c, 0xef, 0x02, 0xed, 0x0b, 0xfb], + [0xf4, 0xe1, 0xb1, 0x4a, 0xb4, 0x3c, 0xd9, 0x88], + [0x63, 0xe6, 0xc5, 0x43, 0xd6, 0x11, 0x0f, 0x54], + [0xbc, 0xd1, 0x21, 0x8c, 0x1f, 0xdd, 0x70, 0x23], + [0x0d, 0xb6, 0xa7, 0x16, 0x6c, 0x7b, 0x15, 0x81], + [0xbf, 0xf9, 0x8f, 0x7a, 0xe5, 0xb9, 0x54, 0x4d], + [0x3e, 0x75, 0x2a, 0x1f, 0x78, 0x12, 0x9f, 0x75], + [0x91, 0x6b, 0x18, 0xbf, 0xbe, 0xa3, 0xa1, 0xce], + [0x06, 0x62, 0xa2, 0xad, 0xd3, 0x08, 0xf5, 0x2c], + [0x57, 0x30, 0xc3, 0xa3, 0x2d, 0x1c, 0x10, 0xb6], + [0xa1, 0x36, 0x3a, 0xae, 0x96, 0x74, 0xf4, 0xb3], + [0x92, 0x83, 0x10, 0x7b, 0x54, 0x57, 0x6b, 0x62], + [0x31, 0x15, 0xe4, 0x99, 0x32, 0x36, 0xd2, 0xc1], + [0x44, 0xd9, 0x1a, 0x3f, 0x92, 0xc1, 0x7c, 0x66], + [0x25, 0x88, 0x13, 0xc8, 0xfe, 0x4f, 0x70, 0x65], + [0xa6, 0x49, 0x89, 0xc2, 0xd1, 0x80, 0xf2, 0x24], + [0x6b, 0x87, 0xf8, 0xfa, 0xed, 0x1c, 0xca, 0xc2], + [0x96, 0x21, 0x04, 0x9f, 0xfc, 0x4b, 0x16, 0xc2], + [0x23, 0xd6, 0xb1, 0x68, 0x93, 0x9c, 0x6e, 0xa1], + [0xfd, 0x14, 0x51, 0x8b, 0x9c, 0x16, 0xfb, 0x49], + [0x46, 0x4c, 0x07, 0xdf, 0xf8, 0x43, 0x31, 0x9f], + [0xb3, 0x86, 0xcc, 0x12, 0x24, 0xaf, 0xfd, 0xc6], + [0x8f, 0x09, 0x52, 0x0a, 0xd1, 0x49, 0xaf, 0x7e], + [0x9a, 0x2f, 0x29, 0x9d, 0x55, 0x13, 0xf3, 0x1c], + [0x12, 0x1f, 0xf4, 0xa2, 0xdd, 0x30, 0x4a, 0xc4], + [0xd0, 0x1e, 0xa7, 0x43, 0x89, 0xe9, 0xfa, 0x36], + [0xe6, 0xbc, 0xf0, 0x73, 0x4c, 0xb3, 0x8f, 0x31], + [0x80, 0xe9, 0xa7, 0x70, 0x36, 0xbf, 0x7a, 0xa2], + [0x75, 0x6d, 0x3c, 0x24, 0xdb, 0xc0, 0xbc, 0xb4], + 
[0x13, 0x15, 0xb7, 0xfd, 0x52, 0xd8, 0xf8, 0x23], + [0x08, 0x8a, 0x7d, 0xa6, 0x4d, 0x5f, 0x03, 0x8f], + [0x48, 0xf1, 0xe8, 0xb7, 0xe5, 0xd0, 0x9c, 0xd8], + [0xee, 0x44, 0xa6, 0xf7, 0xbc, 0xe6, 0xf4, 0xf6], + [0xf2, 0x37, 0x18, 0x0f, 0xd8, 0x9a, 0xc5, 0xae], + [0xe0, 0x94, 0x66, 0x4b, 0x15, 0xf6, 0xb2, 0xc3], + [0xa8, 0xb3, 0xbb, 0xb7, 0x62, 0x90, 0x19, 0x9d], + ]; + + let k0 = 0x_07_06_05_04_03_02_01_00; + let k1 = 0x_0f_0e_0d_0c_0b_0a_09_08; + let mut buf = Vec::new(); + let mut t = 0; + let mut state_inc = SipHasher13::new_with_keys(k0, k1); + + while t < 64 { + let vec = u8to64_le!(vecs[t], 0); + let out = hash_with(SipHasher13::new_with_keys(k0, k1), &Bytes(&buf)); + assert_eq!(vec, out); + + let full = hash_with(SipHasher13::new_with_keys(k0, k1), &Bytes(&buf)); + let i = state_inc.finish(); + + assert_eq!(full, i); + assert_eq!(full, vec); + + buf.push(t as u8); + Hasher::write(&mut state_inc, &[t as u8]); + + t += 1; + } +} + +#[test] +#[allow(unused_must_use)] +fn test_siphash_2_4() { + let vecs: [[u8; 8]; 64] = [ + [0x31, 0x0e, 0x0e, 0xdd, 0x47, 0xdb, 0x6f, 0x72], + [0xfd, 0x67, 0xdc, 0x93, 0xc5, 0x39, 0xf8, 0x74], + [0x5a, 0x4f, 0xa9, 0xd9, 0x09, 0x80, 0x6c, 0x0d], + [0x2d, 0x7e, 0xfb, 0xd7, 0x96, 0x66, 0x67, 0x85], + [0xb7, 0x87, 0x71, 0x27, 0xe0, 0x94, 0x27, 0xcf], + [0x8d, 0xa6, 0x99, 0xcd, 0x64, 0x55, 0x76, 0x18], + [0xce, 0xe3, 0xfe, 0x58, 0x6e, 0x46, 0xc9, 0xcb], + [0x37, 0xd1, 0x01, 0x8b, 0xf5, 0x00, 0x02, 0xab], + [0x62, 0x24, 0x93, 0x9a, 0x79, 0xf5, 0xf5, 0x93], + [0xb0, 0xe4, 0xa9, 0x0b, 0xdf, 0x82, 0x00, 0x9e], + [0xf3, 0xb9, 0xdd, 0x94, 0xc5, 0xbb, 0x5d, 0x7a], + [0xa7, 0xad, 0x6b, 0x22, 0x46, 0x2f, 0xb3, 0xf4], + [0xfb, 0xe5, 0x0e, 0x86, 0xbc, 0x8f, 0x1e, 0x75], + [0x90, 0x3d, 0x84, 0xc0, 0x27, 0x56, 0xea, 0x14], + [0xee, 0xf2, 0x7a, 0x8e, 0x90, 0xca, 0x23, 0xf7], + [0xe5, 0x45, 0xbe, 0x49, 0x61, 0xca, 0x29, 0xa1], + [0xdb, 0x9b, 0xc2, 0x57, 0x7f, 0xcc, 0x2a, 0x3f], + [0x94, 0x47, 0xbe, 0x2c, 0xf5, 0xe9, 0x9a, 0x69], + [0x9c, 0xd3, 0x8d, 
0x96, 0xf0, 0xb3, 0xc1, 0x4b], + [0xbd, 0x61, 0x79, 0xa7, 0x1d, 0xc9, 0x6d, 0xbb], + [0x98, 0xee, 0xa2, 0x1a, 0xf2, 0x5c, 0xd6, 0xbe], + [0xc7, 0x67, 0x3b, 0x2e, 0xb0, 0xcb, 0xf2, 0xd0], + [0x88, 0x3e, 0xa3, 0xe3, 0x95, 0x67, 0x53, 0x93], + [0xc8, 0xce, 0x5c, 0xcd, 0x8c, 0x03, 0x0c, 0xa8], + [0x94, 0xaf, 0x49, 0xf6, 0xc6, 0x50, 0xad, 0xb8], + [0xea, 0xb8, 0x85, 0x8a, 0xde, 0x92, 0xe1, 0xbc], + [0xf3, 0x15, 0xbb, 0x5b, 0xb8, 0x35, 0xd8, 0x17], + [0xad, 0xcf, 0x6b, 0x07, 0x63, 0x61, 0x2e, 0x2f], + [0xa5, 0xc9, 0x1d, 0xa7, 0xac, 0xaa, 0x4d, 0xde], + [0x71, 0x65, 0x95, 0x87, 0x66, 0x50, 0xa2, 0xa6], + [0x28, 0xef, 0x49, 0x5c, 0x53, 0xa3, 0x87, 0xad], + [0x42, 0xc3, 0x41, 0xd8, 0xfa, 0x92, 0xd8, 0x32], + [0xce, 0x7c, 0xf2, 0x72, 0x2f, 0x51, 0x27, 0x71], + [0xe3, 0x78, 0x59, 0xf9, 0x46, 0x23, 0xf3, 0xa7], + [0x38, 0x12, 0x05, 0xbb, 0x1a, 0xb0, 0xe0, 0x12], + [0xae, 0x97, 0xa1, 0x0f, 0xd4, 0x34, 0xe0, 0x15], + [0xb4, 0xa3, 0x15, 0x08, 0xbe, 0xff, 0x4d, 0x31], + [0x81, 0x39, 0x62, 0x29, 0xf0, 0x90, 0x79, 0x02], + [0x4d, 0x0c, 0xf4, 0x9e, 0xe5, 0xd4, 0xdc, 0xca], + [0x5c, 0x73, 0x33, 0x6a, 0x76, 0xd8, 0xbf, 0x9a], + [0xd0, 0xa7, 0x04, 0x53, 0x6b, 0xa9, 0x3e, 0x0e], + [0x92, 0x59, 0x58, 0xfc, 0xd6, 0x42, 0x0c, 0xad], + [0xa9, 0x15, 0xc2, 0x9b, 0xc8, 0x06, 0x73, 0x18], + [0x95, 0x2b, 0x79, 0xf3, 0xbc, 0x0a, 0xa6, 0xd4], + [0xf2, 0x1d, 0xf2, 0xe4, 0x1d, 0x45, 0x35, 0xf9], + [0x87, 0x57, 0x75, 0x19, 0x04, 0x8f, 0x53, 0xa9], + [0x10, 0xa5, 0x6c, 0xf5, 0xdf, 0xcd, 0x9a, 0xdb], + [0xeb, 0x75, 0x09, 0x5c, 0xcd, 0x98, 0x6c, 0xd0], + [0x51, 0xa9, 0xcb, 0x9e, 0xcb, 0xa3, 0x12, 0xe6], + [0x96, 0xaf, 0xad, 0xfc, 0x2c, 0xe6, 0x66, 0xc7], + [0x72, 0xfe, 0x52, 0x97, 0x5a, 0x43, 0x64, 0xee], + [0x5a, 0x16, 0x45, 0xb2, 0x76, 0xd5, 0x92, 0xa1], + [0xb2, 0x74, 0xcb, 0x8e, 0xbf, 0x87, 0x87, 0x0a], + [0x6f, 0x9b, 0xb4, 0x20, 0x3d, 0xe7, 0xb3, 0x81], + [0xea, 0xec, 0xb2, 0xa3, 0x0b, 0x22, 0xa8, 0x7f], + [0x99, 0x24, 0xa4, 0x3c, 0xc1, 0x31, 0x57, 0x24], + [0xbd, 0x83, 0x8d, 0x3a, 0xaf, 0xbf, 0x8d, 
0xb7], + [0x0b, 0x1a, 0x2a, 0x32, 0x65, 0xd5, 0x1a, 0xea], + [0x13, 0x50, 0x79, 0xa3, 0x23, 0x1c, 0xe6, 0x60], + [0x93, 0x2b, 0x28, 0x46, 0xe4, 0xd7, 0x06, 0x66], + [0xe1, 0x91, 0x5f, 0x5c, 0xb1, 0xec, 0xa4, 0x6c], + [0xf3, 0x25, 0x96, 0x5c, 0xa1, 0x6d, 0x62, 0x9f], + [0x57, 0x5f, 0xf2, 0x8e, 0x60, 0x38, 0x1b, 0xe5], + [0x72, 0x45, 0x06, 0xeb, 0x4c, 0x32, 0x8a, 0x95], + ]; + + let k0 = 0x_07_06_05_04_03_02_01_00; + let k1 = 0x_0f_0e_0d_0c_0b_0a_09_08; + let mut buf = Vec::new(); + let mut t = 0; + let mut state_inc = SipHasher24::new_with_keys(k0, k1); + + while t < 64 { + let vec = u8to64_le!(vecs[t], 0); + let out = hash_with(SipHasher24::new_with_keys(k0, k1), &Bytes(&buf)); + assert_eq!(vec, out); + + let full = hash_with(SipHasher24::new_with_keys(k0, k1), &Bytes(&buf)); + let i = state_inc.finish(); + + assert_eq!(full, i); + assert_eq!(full, vec); + + buf.push(t as u8); + Hasher::write(&mut state_inc, &[t as u8]); + + t += 1; + } +} +#[test] +fn test_hash_idempotent() { + let val64 = 0xdead_beef_dead_beef_u64; + assert_eq!(hash(&val64), hash(&val64)); + let val32 = 0xdeadbeef_u32; + assert_eq!(hash(&val32), hash(&val32)); +} + +#[test] +fn test_hash_no_bytes_dropped_64() { + let val = 0xdead_beef_dead_beef_u64; + + assert_ne!(hash(&val), hash(&zero_byte(val, 0))); + assert_ne!(hash(&val), hash(&zero_byte(val, 1))); + assert_ne!(hash(&val), hash(&zero_byte(val, 2))); + assert_ne!(hash(&val), hash(&zero_byte(val, 3))); + assert_ne!(hash(&val), hash(&zero_byte(val, 4))); + assert_ne!(hash(&val), hash(&zero_byte(val, 5))); + assert_ne!(hash(&val), hash(&zero_byte(val, 6))); + assert_ne!(hash(&val), hash(&zero_byte(val, 7))); + + fn zero_byte(val: u64, byte: usize) -> u64 { + assert!(byte < 8); + val & !(0xff << (byte * 8)) + } +} + +#[test] +fn test_hash_no_bytes_dropped_32() { + let val = 0xdeadbeef_u32; + + assert_ne!(hash(&val), hash(&zero_byte(val, 0))); + assert_ne!(hash(&val), hash(&zero_byte(val, 1))); + assert_ne!(hash(&val), hash(&zero_byte(val, 2))); + 
assert_ne!(hash(&val), hash(&zero_byte(val, 3))); + + fn zero_byte(val: u32, byte: usize) -> u32 { + assert!(byte < 4); + val & !(0xff << (byte * 8)) + } +} + +#[test] +fn test_hash_no_concat_alias() { + let s = ("aa", "bb"); + let t = ("aabb", ""); + let u = ("a", "abb"); + + assert!(s != t && t != u); + assert!(hash(&s) != hash(&t) && hash(&s) != hash(&u)); + + let u = [1, 0, 0, 0]; + let v = (&u[..1], &u[1..3], &u[3..]); + let w = (&u[..], &u[4..4], &u[4..4]); + + assert_ne!(v, w); + assert_ne!(hash(&v), hash(&w)); +} + +#[test] +fn test_hash_simple() { + let array: &[u8] = &[1, 2, 3]; + let key: &[u8; 16] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; + let hasher = SipHasher13::new_with_key(key); + let h = hasher.hash(array); + _ = h; +} + +#[test] +fn test_hash_incremental() { + let array1: &[u8] = &[1, 2, 3]; + let array2: &[u8] = &[4, 5, 6]; + let key: &[u8; 16] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; + let mut hasher = SipHasher13::new_with_key(key); + hasher.write(array1); + hasher.write(array2); + let h = hasher.finish(); + _ = h; +} + +#[test] +#[cfg(all(feature = "serde", feature = "serde_json"))] +fn test_hash_serde() { + let val64 = 0xdead_beef_dead_beef_u64; + let hash = hash(&val64); + let serialized = serde_json::to_string(&hash).unwrap(); + let deserialized: u64 = serde_json::from_str(&serialized).unwrap(); + assert_eq!(hash, deserialized); +} diff --git a/anneal/vendor/siphasher/src/tests128.rs b/anneal/vendor/siphasher/src/tests128.rs new file mode 100644 index 0000000000..017962a19b --- /dev/null +++ b/anneal/vendor/siphasher/src/tests128.rs @@ -0,0 +1,134 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use std::hash::{Hash, Hasher}; + +use super::sip128::{Hasher128, SipHasher, SipHasher13, SipHasher24}; + +// Hash just the bytes of the slice, without length prefix +struct Bytes<'a>(&'a [u8]); + +impl Hash for Bytes<'_> { + #[allow(unused_must_use)] + fn hash(&self, state: &mut H) { + let Bytes(v) = *self; + state.write(v); + } +} + +fn hash_with(mut st: H, x: &T) -> [u8; 16] { + x.hash(&mut st); + st.finish128().as_bytes() +} + +fn hash(x: &T) -> [u8; 16] { + hash_with(SipHasher::new(), x) +} + +#[test] +fn test_siphash128_idempotent() { + let val64 = 0xdead_beef_dead_beef_u64; + assert_eq!(hash(&val64), hash(&val64)); + let val32 = 0xdeadbeef_u32; + assert_eq!(hash(&val32), hash(&val32)); +} + +#[test] +#[allow(unused_must_use)] +fn test_siphash128_1_3() { + let vecs: [[u8; 16]; 1] = [[ + 231, 126, 188, 178, 39, 136, 165, 190, 253, 98, 219, 106, 221, 48, 48, 1, + ]]; + + let k0 = 0x_07_06_05_04_03_02_01_00; + let k1 = 0x_0f_0e_0d_0c_0b_0a_09_08; + let mut buf = Vec::new(); + let mut t = 0; + let mut state_inc = SipHasher13::new_with_keys(k0, k1); + + while t < 1 { + let vec = vecs[t]; + let out = hash_with(SipHasher13::new_with_keys(k0, k1), &Bytes(&buf)); + assert_eq!(vec, out[..]); + + let full = hash_with(SipHasher13::new_with_keys(k0, k1), &Bytes(&buf)); + let i = state_inc.finish128().as_bytes(); + + assert_eq!(full, i); + assert_eq!(full, vec); + + buf.push(t as u8); + Hasher::write(&mut state_inc, &[t as u8]); + + t += 1; + } +} + +#[test] +#[allow(unused_must_use)] +fn test_siphash128_2_4() { + let vecs: [[u8; 16]; 1] = [[ + 163, 129, 127, 4, 186, 37, 168, 230, 109, 246, 114, 20, 199, 85, 2, 147, + ]]; + + let k0 = 0x_07_06_05_04_03_02_01_00; + let k1 = 0x_0f_0e_0d_0c_0b_0a_09_08; + let mut buf = Vec::new(); + let mut t = 0; + let mut state_inc = SipHasher24::new_with_keys(k0, k1); + + while t < 1 { + let vec = vecs[t]; + let out = hash_with(SipHasher24::new_with_keys(k0, k1), &Bytes(&buf)); + assert_eq!(vec, out[..]); + + let full = 
hash_with(SipHasher24::new_with_keys(k0, k1), &Bytes(&buf)); + let i = state_inc.finish128().as_bytes(); + + assert_eq!(full, i); + assert_eq!(full, vec); + + buf.push(t as u8); + Hasher::write(&mut state_inc, &[t as u8]); + + t += 1; + } +} + +#[test] +fn test_siphash128_simple() { + let array: &[u8] = &[1, 2, 3]; + let key: &[u8; 16] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; + let hasher = SipHasher13::new_with_key(key); + let h = hasher.hash(array).as_bytes(); + _ = h; +} + +#[test] +fn test_siphash128_incremental() { + let array1: &[u8] = &[1, 2, 3]; + let array2: &[u8] = &[4, 5, 6]; + let key: &[u8; 16] = &[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]; + let mut hasher = SipHasher13::new_with_key(key); + hasher.write(array1); + hasher.write(array2); + let h = hasher.finish128().as_bytes(); + _ = h; +} + +#[test] +#[cfg(all(feature = "serde", feature = "serde_json"))] +fn test_siphash128_serde() { + let val64 = 0xdead_beef_dead_beef_u64; + let hash = hash(&val64); + let serialized = serde_json::to_string(&hash).unwrap(); + let deserialized: [u8; 16] = serde_json::from_str(&serialized).unwrap(); + assert_eq!(hash, deserialized); +} diff --git a/anneal/vendor/tiny_http/.cargo-checksum.json b/anneal/vendor/tiny_http/.cargo-checksum.json deleted file mode 100644 index 15da0bfa8a..0000000000 --- a/anneal/vendor/tiny_http/.cargo-checksum.json +++ /dev/null @@ -1 +0,0 @@ 
-{"files":{".cargo_vcs_info.json":"d2a5c2966bca9bcb96f1fb22d7839be5e6dc329dd4e3237696b5c0682720faef",".github/workflows/ci.yaml":"862b496fc6c3802d87dc5472252d5ca0c3622ac8d511938e23d0b99b0b3421a7","CHANGELOG.md":"f502b2cccd3ea9e63111b8cecd540a07ed4fedf00c46c6361607d8fbdfabc2ee","Cargo.lock":"7cb7bf8e1832e3a4d1be881a334fbe26cb63d8d1d215c9024da2a2a7defb0f3c","Cargo.toml":"03c7dfc1df0bbf4274890d33e256baddd4c77e0f9eaee1035ef07a250d229db8","Cargo.toml.orig":"43a7c801ff2acd0fd3af6c26bbdd8824bc0cf188fb19e479cd3c58cdc8d56ea6","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"e69bb98ae371fc43e5ef9cb1fc648762faf92edd93a8afd71411fac419b1b550","README.md":"3f8d7fb0101454ba4a3704d7bd4e29b81a142014b66640f3a0cce936214d95c8","benches/bench.rs":"803e8ac58aadee8bf3c037d622e04473b597c4c3399dfe0951065943599254ab","examples/hello-world.rs":"cc095046b2e9ae44dc095032185db2d478e6e556159614e93917f64c60ba3430","examples/php-cgi-example.php":"72b51bbf38c4b60638ea635752fe071c4f329dcf8a615badaf8228078d63030f","examples/php-cgi.rs":"f2650e84a807f11cec6568a9893169bea17dd2eb0013302d7dda38b5da249a55","examples/readme-example.rs":"5b93dd6584c3bea80896391842ceca5f2f350435b2470c7c0a5dd33fb4a46d92","examples/serve-root.rs":"05d84565ef1e38f5782296ae8bc014b621f4e0b9e7827274b5468efbc1aa3921","examples/ssl-cert.pem":"8a171360b51c9bf4fc9b108e263cfe90c689891c56674cea38e4c29e98c9b836","examples/ssl-key.pem":"e345c5c79dba8fd849f31fb06ee871a32d1f1bf1ef4c15b510e70a2524009626","examples/ssl.rs":"1595c78153214ffc5e5724aba903164e61f33fca1083c727f1d5c417aeee85b5","examples/websockets.rs":"69c96aa370a0378f072dab65fa3ab935309032fe2ddf537c523abce6e9662a0f","src/client.rs":"8fcc6344c2259e91677c267cd9e022a316463a1b03bb805c0a2ec692ef94c898","src/common.rs":"5cae33d9517acdf3f559488acbd4fd727b565ac5e10234b060d37f6398ef2c7b","src/connection.rs":"67dbb92f1d7b3edc8b9fb85b6d770bd50250fb08cde0fdc9e75ceacb68125ac0","src/lib.rs":"5545e94107ab9b4474ad8253d6367f0a20a696ad3f647c97136e21
c2824b4a90","src/request.rs":"ef8fcf8d36706a0e1a146f3262917b4631ecf8db925408d4b6959a0d7423695a","src/response.rs":"946c4aa551b7a16cc2ea5f86f8ae2ce9b9bda3faecb2f9d1057d5bc17cb2dbdc","src/ssl.rs":"dd38964c51c5d9fe6dbafd383e172f89d0f62f8d52042e94247cc1d67e07c29e","src/ssl/openssl.rs":"7a7317d2d6d5e75b7e57caa282180a21ec0639e71813e5498a75dcba0dab7e7d","src/ssl/rustls.rs":"d39f329d74f611ec992c0a559a8ea32280b66981380f78683142102a0b758268","src/test.rs":"b6a2a4848acb46fbacc7593629cf6d2fac745e852ead9bf57e014e32623ca1b4","src/util/custom_stream.rs":"a6921b2eb16600070647c41a339c57eb2f616f24116b8b7135cf9a26b2e5ceec","src/util/equal_reader.rs":"7d7916b42316d745c12185509fa5719dd9a032fef1c19fe8057b154f48e8db55","src/util/fused_reader.rs":"2c92bf51a069ec7d6aa647fb44709bbb013131c43cd1667a4a0d1784a57eab6b","src/util/messages_queue.rs":"ac283569e0e24d4f5ff7515a4c74ea08a2cc8439152a17dd1121030429ebcb4f","src/util/mod.rs":"107ceae83c9a10672be797a1805e13772f9c25e68b74e02bef6f45af85018293","src/util/refined_tcp_stream.rs":"f1ee3a06ded63843209120d2d9bf94ae113bc781be91daedce7f5ce9e537a6a1","src/util/sequential.rs":"5ad84c87bd412edec8b289639d6d21486201e84f25a7edc1be4284be59c5570c","src/util/task_pool.rs":"08e3cc8139d1512bb751f828e9838579d8848607e7172ae41b4033d8ecc46fdb","tests/input-tests.rs":"591757acb069d74c6f3a36acee1d931ddb13fe9bc06fe8527d23527071c36e35","tests/network.rs":"ccb382c4c2205caa6025ad8c53c8c101f4fb7f2288c6891c6e04ddb6b31882d3","tests/non-chunked-buffering.rs":"66548e609ce52e8b20e557c3cebab465ed276af5246ea60921a3c344507a0a94","tests/promptness.rs":"1a59a54cde25b90d737f1285a5173b560f962bb8ca9e0a6c74477a5b366b431d","tests/simple-test.rs":"0bf6f66c281fb0ac1b59228c6933cfe49ba8423b7c4fb211b72fa0d800153250","tests/support/mod.rs":"2aed60864d0edb33157bfb3da9fe5d373001cf411a2e6b6fcb065dc972cc8887","tests/unblock-test.rs":"f4a474b808bc0832cf6b1c0e1db754d81a2ae2ea6cf1f539838ac23051e5f8f7","tests/unix-test.rs":"9b104a35858ef9b11d8c7e8803c2399d01b01b2faaf75a10d66dc110356f7dca"},"package":"
389915df6413a2e74fb181895f933386023c71110878cd0825588928e64cdc82"} \ No newline at end of file diff --git a/anneal/vendor/tiny_http/.cargo_vcs_info.json b/anneal/vendor/tiny_http/.cargo_vcs_info.json deleted file mode 100644 index 1fbb959db3..0000000000 --- a/anneal/vendor/tiny_http/.cargo_vcs_info.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "git": { - "sha1": "212b1c45852fef2093dc1374875a9393c55eb4b9" - }, - "path_in_vcs": "" -} \ No newline at end of file diff --git a/anneal/vendor/tiny_http/.github/workflows/ci.yaml b/anneal/vendor/tiny_http/.github/workflows/ci.yaml deleted file mode 100644 index e4902a57ad..0000000000 --- a/anneal/vendor/tiny_http/.github/workflows/ci.yaml +++ /dev/null @@ -1,64 +0,0 @@ -on: [push, pull_request] -name: CI -jobs: - clippy_rustfmt: - name: Lint & Format - runs-on: ubuntu-latest - strategy: - matrix: - features: - - default - - ssl-openssl - - ssl-rustls - steps: - - uses: actions/checkout@v2 - - name: Install stable toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: stable - components: rustfmt, clippy - - - name: Clippy - uses: actions-rs/cargo@v1 - with: - command: clippy - args: --features ${{ matrix.features }} - - - name: Format - uses: actions-rs/cargo@v1 - with: - command: fmt - args: -- --check - - test: - name: Build & Test - runs-on: ubuntu-latest - strategy: - matrix: - rust: - - stable - - nightly - - 1.56 - features: - - default - - ssl-openssl - - ssl-rustls - steps: - - uses: actions/checkout@v2 - - name: Install toolchain - uses: actions-rs/toolchain@v1 - with: - toolchain: ${{ matrix.rust }} - override: true - - - name: Build - uses: actions-rs/cargo@v1 - with: - command: build - args: --features ${{ matrix.features }} - - - name: Test - uses: actions-rs/cargo@v1 - with: - command: test - args: --features ${{ matrix.features }} diff --git a/anneal/vendor/tiny_http/CHANGELOG.md b/anneal/vendor/tiny_http/CHANGELOG.md deleted file mode 100644 index 59c5ad9e1c..0000000000 --- 
a/anneal/vendor/tiny_http/CHANGELOG.md +++ /dev/null @@ -1,172 +0,0 @@ -# Changes - -## 0.12.0 -* Bumped the minimum compiler version tested by CI to 1.56 - this is necessary due to an increasing number of dependencies - introducing Cargo manifest features only supported on newer versions of Rust. - -* [Add support for UNIX sockets](https://github.com/tiny-http/tiny-http/pull/224) - - Thanks to @ColonelThirtyTwo for adding support for binding to UNIX sockets when creating a tiny-http server. This change - makes a few small breaking API modifications, if you are constructing `ServerConfig` manually you will need to use the new `ListenAddr` - type rather than directly supplying a `net::SocketAddr`. Likewise `Server::server_addr()` will now return an enum that can - represent either a TCP socket or a UNIX socket. - - Finally `Request::remote_addr()` now returns an `Option<&SocketAddr>` as UNIX sockets don't ever have a remote host. - -* [Reduce required dependencies by switching to `httpdate`](https://github.com/tiny-http/tiny-http/pull/228) - - @esheppa replaced our internal HTTPDate type with the `httpdate` library (used extensively in the community by Hyper, Tokio and others) - which reduces our baseline dependency tree from 18 crates to 5! - -* `TestRequest::path` no longer has a `'static` bound, allowing for fuzzers to generate test request paths at runtime. - -* Unpinned `zeroize` so it can float around any stable `^1` version. - -## 0.11.0 - -* [Add support for Rustls](https://github.com/tiny-http/tiny-http/pull/218) - - Thanks to @3xmblzj5 and @travispaul for their help in implementing [`Rustls`](https://github.com/rustls/rustls) as a - drop-in replacement for OpenSSL, you can now build `tiny-http` with TLS support without any external dependencies! - OpenSSL will remain the default implementation if you just enable the `ssl` feature, but you are strongly encouraged - to use `ssl-rustls` where possible! 
- -* [Fix incorrect certificate chain loading](https://github.com/tiny-http/tiny-http/commit/876efd6b752e991c699d27d3d0ad9a47e9d35c29) - - Fix a longstanding bug where we were only loading the first (i.e. the leaf) certificate from any PEM file supplied by - the user. - - -## 0.10.0 - -* [Replace chrono with time-rs](https://github.com/tiny-http/tiny-http/commit/75ac7758fd0ca660c35f58c2a36edb23a42cda32) - - `chrono` was only used to store and format `DateTime` into the slightly odd format required by RFC 7231, so to - avoid the numerous RUSTSEC advisories generated by the `localtime_r` issue, we can just drop it entirely and switch - to `time-rs`. - Unfortunately this means we need to **bump our minimum tested compiler version to 1.51**, and as such this change - requires a full minor release. - -## 0.9.0 - -* [Rust 2018 Refactor](https://github.com/tiny-http/tiny-http/pull/208) -* [Enable prompt responses, before the request has been fully read](https://github.com/tiny-http/tiny-http/pull/207) - - This isn't an API change, but does result in different behaviour to 0.8.2 and so justifies a minor version bump. - - HTTP requests now return a boxed `FusedReader` which drops the underlying - reader once it reaches EOF, such that the reader no longer needs to be - explicitly consumed and the server may now respond with e.g. a "413 Payload - too large" without waiting for the whole reader. 
- -* Bumped the minimum compiler version tested by CI to 1.48 (the version supported in Debian Bullseye) - -## 0.8.2 - -* [Add TestRequest for writing server tests more easily](https://github.com/tiny-http/tiny-http/pull/203) - -## 0.8.1 - -* [Don't set Transfer-Encoding for 1xx or 204 Responses](https://github.com/tiny-http/tiny-http/pull/198) - -## 0.8.0 - -* [Fix RUSTSEC-2020-0031](https://github.com/tiny-http/tiny-http/pull/190) -* [Filter out the same socket-closing errors on flush as on write](https://github.com/tiny-http/tiny-http/pull/192) -* [response: Drop the use of EqualReader for TransferEncoding::Identity](https://github.com/tiny-http/tiny-http/pull/183) -* [Add unblock method for graceful shutdown](https://github.com/tiny-http/tiny-http/pull/184) -* [Response: Don't forget `chunked_threshold`](https://github.com/tiny-http/tiny-http/pull/177) -* [Response: Allow manual handling of Range requests](https://github.com/tiny-http/tiny-http/pull/175) -* [Feature | Getters for Response Status Code & Data Length Properties](https://github.com/tiny-http/tiny-http/pull/186) - -## 0.7.0 - -* [Fix HTTPS deadlock](https://github.com/tiny-http/tiny-http/pull/151) -* [Relicense to MIT/Apache-2.0](https://github.com/tiny-http/tiny-http/pull/163) -* [Update `ascii` dependency](https://github.com/tiny-http/tiny-http/pull/165) -* [Fix typo in README](https://github.com/tiny-http/tiny-http/pull/171) -* [Fix compilation errors in benchmark](https://github.com/tiny-http/tiny-http/pull/170) -* [Update `url` dependency](https://github.com/tiny-http/tiny-http/pull/168) -* [Update `chunked_transfer` dependency](https://github.com/tiny-http/tiny-http/pull/166) - -## 0.6.2 - -* [Remove AsciiExt usage](https://github.com/tiny-http/tiny-http/pull/152) -* [Remove unused EncodingDecoder](https://github.com/tiny-http/tiny-http/pull/153) - -## 0.6.1 - -* [Fix documentation typo](https://github.com/tiny-http/tiny-http/pull/148) -* [Expose chunked_threshold on 
Response](https://github.com/tiny-http/tiny-http/pull/150) - -## 0.6.0 - -* [Bump dependencies](https://github.com/tiny-http/tiny-http/pull/142) -* [Fix `next_header_source` alignment](https://github.com/tiny-http/tiny-http/pull/140) - -## 0.5.9 - -* Expanded and changed status code description mapping according to IANA registry: - * https://github.com/tiny-http/tiny-http/pull/138 - -## 0.5.8 - -* Update links to reflect repository ownership change: https://github.com/frewsxcv/tiny-http -> https://github.com/tiny-http/tiny-http - -## 0.5.7 - -* Fix using Transfer-Encoding: identity with no content length - * https://github.com/tiny-http/tiny-http/pull/126 - -## 0.5.6 - -* Update link to documentation - * https://github.com/tiny-http/tiny-http/pull/123 -* Fix websockets - * https://github.com/tiny-http/tiny-http/pull/124 -* Drop the request reader earlier - * https://github.com/tiny-http/tiny-http/pull/125 - -## 0.5.5 - -* Start using the log crate - * https://github.com/tiny-http/tiny-http/pull/121 -* Unblock the accept thread on shutdown - * https://github.com/tiny-http/tiny-http/pull/120 - -## 0.5.4 - -* Fix compilation warnings - * https://github.com/tiny-http/tiny-http/pull/118 - -## 0.5.3 - -* Add try_recv_timeout function to the server - * https://github.com/tiny-http/tiny-http/pull/116 - -## 0.5.2 - -* Update ascii to version 0.7 - * https://github.com/tiny-http/tiny-http/pull/114 - -## 0.5.1 - -* Request::respond now returns an IoResult - * https://github.com/tiny-http/tiny-http/pull/110 - -## 0.5.0 - -* HTTPS support - * https://github.com/tiny-http/tiny-http/pull/107 -* Rework the server creation API - * https://github.com/tiny-http/tiny-http/pull/106 - -## 0.4.1 - -* Allow binding to a nic by specifying the socket address - * https://github.com/tiny-http/tiny-http/pull/103 - -## 0.4.0 - -* Make Method into an enum instead of a character string - * https://github.com/tiny-http/tiny-http/pull/102 diff --git a/anneal/vendor/tiny_http/Cargo.lock 
b/anneal/vendor/tiny_http/Cargo.lock deleted file mode 100644 index 1784e035ef..0000000000 --- a/anneal/vendor/tiny_http/Cargo.lock +++ /dev/null @@ -1,395 +0,0 @@ -# This file is automatically @generated by Cargo. -# It is not intended for manual editing. -version = 3 - -[[package]] -name = "ascii" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbf56136a5198c7b01a49e3afcbef6cf84597273d298f54432926024107b0109" - -[[package]] -name = "autocfg" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" - -[[package]] -name = "base64" -version = "0.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "904dfeac50f3cdaba28fc6f57fdcddb75f49ed61346676a78c4ffe55877802fd" - -[[package]] -name = "bitflags" -version = "1.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" - -[[package]] -name = "bumpalo" -version = "3.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d" - -[[package]] -name = "cc" -version = "1.0.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11" - -[[package]] -name = "cfg-if" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" - -[[package]] -name = "chunked_transfer" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fff857943da45f546682664a79488be82e69e43c1a7a2307679ab9afb3a66d2e" - -[[package]] -name = "fdlimit" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0da54a593b34c71b889ee45f5b5bb900c74148c5f7f8c6a9479ee7899f69603c" -dependencies = [ - "libc", -] - -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - -[[package]] -name = "httpdate" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a1e36c821dbe04574f602848a19f742f4fb3c98d40449f11bcad18d6b17421" - -[[package]] -name = "js-sys" -version = "0.3.59" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "258451ab10b34f8af53416d1fdab72c22e805f0c92a1136d59470ec0b11138b2" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "libc" -version = "0.2.132" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8371e4e5341c3a96db127eb2465ac681ced4c433e01dd0e938adbef26ba93ba5" - -[[package]] -name = "log" -version = "0.4.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb12e687cfb44aa40f41fc3978ef76448f9b6038cad6aef4259d3c095a2382e" -dependencies = [ - "cfg-if", -] - -[[package]] -name = "once_cell" -version = "1.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f7254b99e31cad77da24b08ebf628882739a608578bb1bcdfc1f9c21260d7c0" - -[[package]] -name = "openssl" -version = "0.10.41" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "618febf65336490dfcf20b73f885f5651a0c89c64c2d4a8c3662585a70bf5bd0" -dependencies = [ - "bitflags", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = 
"0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" -dependencies = [ - "proc-macro2", - "quote", - "syn", -] - -[[package]] -name = "openssl-sys" -version = "0.9.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5f9bd0c2710541a3cda73d6f9ac4f1b240de4ae261065d309dbe73d9dceb42f" -dependencies = [ - "autocfg", - "cc", - "libc", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "pkg-config" -version = "0.3.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" - -[[package]] -name = "proc-macro2" -version = "1.0.43" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab" -dependencies = [ - "unicode-ident", -] - -[[package]] -name = "quote" -version = "1.0.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" -dependencies = [ - "proc-macro2", -] - -[[package]] -name = "ring" -version = "0.16.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" -dependencies = [ - "cc", - "libc", - "once_cell", - "spin", - "untrusted", - "web-sys", - "winapi", -] - -[[package]] -name = "rustc-serialize" -version = "0.3.24" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda" - -[[package]] -name = "rustls" -version = "0.20.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aab8ee6c7097ed6057f43c187a62418d0c05a4bd5f18b3571db50ee0f9ce033" -dependencies = [ - "log", - "ring", - "sct", - "webpki", -] - -[[package]] -name = "rustls-pemfile" -version = "0.2.1" -source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5eebeaeb360c87bfb72e84abdb3447159c0eaececf1bef2aecd65a8be949d1c9" -dependencies = [ - "base64", -] - -[[package]] -name = "sct" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" -dependencies = [ - "ring", - "untrusted", -] - -[[package]] -name = "sha1" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1da05c97445caa12d05e848c4a4fcbbea29e748ac28f7e80e9b010392063770" -dependencies = [ - "sha1_smol", -] - -[[package]] -name = "sha1_smol" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" - -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - -[[package]] -name = "syn" -version = "1.0.99" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - -[[package]] -name = "tiny_http" -version = "0.12.0" -dependencies = [ - "ascii", - "chunked_transfer", - "fdlimit", - "httpdate", - "log", - "openssl", - "rustc-serialize", - "rustls", - "rustls-pemfile", - "sha1", - "zeroize", -] - -[[package]] -name = "unicode-ident" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf" - -[[package]] -name = "untrusted" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" - -[[package]] -name = "vcpkg" -version = "0.2.15" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - -[[package]] -name = "wasm-bindgen" -version = "0.2.82" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7652e3f6c4706c8d9cd54832c4a4ccb9b5336e2c3bd154d5cccfbf1c1f5f7d" -dependencies = [ - "cfg-if", - "wasm-bindgen-macro", -] - -[[package]] -name = "wasm-bindgen-backend" -version = "0.2.82" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "662cd44805586bd52971b9586b1df85cdbbd9112e4ef4d8f41559c334dc6ac3f" -dependencies = [ - "bumpalo", - "log", - "once_cell", - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-macro" -version = "0.2.82" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b260f13d3012071dfb1512849c033b1925038373aea48ced3012c09df952c602" -dependencies = [ - "quote", - "wasm-bindgen-macro-support", -] - -[[package]] -name = "wasm-bindgen-macro-support" -version = "0.2.82" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5be8e654bdd9b79216c2929ab90721aa82faf65c48cdf08bdc4e7f51357b80da" -dependencies = [ - "proc-macro2", - "quote", - "syn", - "wasm-bindgen-backend", - "wasm-bindgen-shared", -] - -[[package]] -name = "wasm-bindgen-shared" -version = "0.2.82" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6598dd0bd3c7d51095ff6531a5b23e02acdc81804e30d8f07afb77b7215a140a" - -[[package]] -name = "web-sys" -version = "0.3.59" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed055ab27f941423197eb86b2035720b1a3ce40504df082cac2ecc6ed73335a1" -dependencies = [ - "js-sys", - "wasm-bindgen", -] - -[[package]] -name = "webpki" -version = "0.22.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" -dependencies = [ 
- "ring", - "untrusted", -] - -[[package]] -name = "winapi" -version = "0.3.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" -dependencies = [ - "winapi-i686-pc-windows-gnu", - "winapi-x86_64-pc-windows-gnu", -] - -[[package]] -name = "winapi-i686-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" - -[[package]] -name = "winapi-x86_64-pc-windows-gnu" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" - -[[package]] -name = "zeroize" -version = "1.5.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" diff --git a/anneal/vendor/tiny_http/Cargo.toml b/anneal/vendor/tiny_http/Cargo.toml deleted file mode 100644 index 7f25f3a587..0000000000 --- a/anneal/vendor/tiny_http/Cargo.toml +++ /dev/null @@ -1,82 +0,0 @@ -# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO -# -# When uploading crates to the registry Cargo will automatically -# "normalize" Cargo.toml files for maximal compatibility -# with all versions of Cargo and also rewrite `path` dependencies -# to registry (e.g., crates.io) dependencies. -# -# If you are reading this file be aware that the original Cargo.toml -# will likely look very different (and much more reasonable). -# See Cargo.toml.orig for the original contents. 
- -[package] -edition = "2018" -name = "tiny_http" -version = "0.12.0" -authors = [ - "pierre.krieger1708@gmail.com", - "Corey Farwell ", -] -description = "Low level HTTP server library" -documentation = "https://tiny-http.github.io/tiny-http/tiny_http/index.html" -readme = "README.md" -keywords = [ - "http", - "server", - "web", -] -license = "MIT OR Apache-2.0" -repository = "https://github.com/tiny-http/tiny-http" - -[package.metadata.docs.rs] -features = ["ssl-openssl"] - -[dependencies.ascii] -version = "1.0" - -[dependencies.chunked_transfer] -version = "1" - -[dependencies.httpdate] -version = "1.0.2" - -[dependencies.log] -version = "0.4.4" - -[dependencies.openssl] -version = "0.10" -optional = true - -[dependencies.rustls] -version = "0.20" -optional = true - -[dependencies.rustls-pemfile] -version = "0.2.1" -optional = true - -[dependencies.zeroize] -version = "1" -optional = true - -[dev-dependencies.fdlimit] -version = "0.1" - -[dev-dependencies.rustc-serialize] -version = "0.3" - -[dev-dependencies.sha1] -version = "0.6.0" - -[features] -default = [] -ssl = ["ssl-openssl"] -ssl-openssl = [ - "openssl", - "zeroize", -] -ssl-rustls = [ - "rustls", - "rustls-pemfile", - "zeroize", -] diff --git a/anneal/vendor/tiny_http/Cargo.toml.orig b/anneal/vendor/tiny_http/Cargo.toml.orig deleted file mode 100644 index 1bc6851366..0000000000 --- a/anneal/vendor/tiny_http/Cargo.toml.orig +++ /dev/null @@ -1,37 +0,0 @@ -[package] -name = "tiny_http" -version = "0.12.0" -authors = ["pierre.krieger1708@gmail.com", - "Corey Farwell "] -description = "Low level HTTP server library" -documentation = "https://tiny-http.github.io/tiny-http/tiny_http/index.html" -keywords = ["http", "server", "web"] -license = "MIT OR Apache-2.0" -repository = "https://github.com/tiny-http/tiny-http" -edition = "2018" - -[features] -default = [] -ssl = ["ssl-openssl"] -ssl-openssl = ["openssl", "zeroize"] -ssl-rustls = ["rustls", "rustls-pemfile", "zeroize"] - -[dependencies] -ascii = "1.0" 
-chunked_transfer = "1" -log = "0.4.4" -httpdate = "1.0.2" - -openssl = { version = "0.10", optional = true } -rustls = { version = "0.20", optional = true } -rustls-pemfile = { version = "0.2.1", optional = true } -zeroize = { version = "1", optional = true } - -[dev-dependencies] -rustc-serialize = "0.3" -sha1 = "0.6.0" -fdlimit = "0.1" - -[package.metadata.docs.rs] -# Enable just one SSL implementation -features = ["ssl-openssl"] diff --git a/anneal/vendor/tiny_http/LICENSE-APACHE b/anneal/vendor/tiny_http/LICENSE-APACHE deleted file mode 100644 index 16fe87b06e..0000000000 --- a/anneal/vendor/tiny_http/LICENSE-APACHE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - -1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. diff --git a/anneal/vendor/tiny_http/LICENSE-MIT b/anneal/vendor/tiny_http/LICENSE-MIT deleted file mode 100644 index 474b78857f..0000000000 --- a/anneal/vendor/tiny_http/LICENSE-MIT +++ /dev/null @@ -1,25 +0,0 @@ -Copyright (c) 2014-2019 The tiny-http contributors - -Permission is hereby granted, free of charge, to any -person obtaining a copy of this software and associated -documentation files (the "Software"), to deal in the -Software without restriction, including without -limitation the rights to use, copy, modify, merge, -publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software -is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice -shall be included in all copies or substantial portions -of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -DEALINGS IN THE SOFTWARE. 
diff --git a/anneal/vendor/tiny_http/README.md b/anneal/vendor/tiny_http/README.md deleted file mode 100644 index 0d43793793..0000000000 --- a/anneal/vendor/tiny_http/README.md +++ /dev/null @@ -1,108 +0,0 @@ -# tiny-http - -[![Crate][crate_img]][crate] -[![Documentation][docs_img]][docs] -![License][license_img] -[![CI Status][ci_badge]][ci_link] - -[**Documentation**](https://docs.rs/tiny_http) - -Tiny but strong HTTP server in Rust. -Its main objectives are to be 100% compliant with the HTTP standard and to provide an easy way to create an HTTP server. - -What does **tiny-http** handle? - - Accepting and managing connections to the clients - - Parsing requests - - Requests pipelining - - HTTPS (using either OpenSSL or Rustls) - - Transfer-Encoding and Content-Encoding - - Turning user input (eg. POST input) into a contiguous UTF-8 string (**not implemented yet**) - - Ranges (**not implemented yet**) - - `Connection: upgrade` (used by websockets) - -Tiny-http handles everything that is related to client connections and data transfers and encoding. - -Everything else (parsing the values of the headers, multipart data, routing, etags, cache-control, HTML templates, etc.) must be handled by your code. -If you want to create a website in Rust, I strongly recommend using a framework instead of this library. - -### Installation - -Add this to the `Cargo.toml` file of your project: - -```toml -[dependencies] -tiny_http = "0.11" -``` - -### Usage - -```rust -use tiny_http::{Server, Response}; - -let server = Server::http("0.0.0.0:8000").unwrap(); - -for request in server.incoming_requests() { - println!("received request! method: {:?}, url: {:?}, headers: {:?}", - request.method(), - request.url(), - request.headers() - ); - - let response = Response::from_string("hello world"); - request.respond(response); -} -``` - -### Speed - -Tiny-http was designed with speed in mind: - - Each client connection will be dispatched to a thread pool. Each thread will handle one client. 
- If there is no thread available when a client connects, a new one is created. Threads that are idle - for a long time (currently 5 seconds) will automatically die. - - If multiple requests from the same client are being pipelined (ie. multiple requests - are sent without waiting for the answer), tiny-http will read them all at once and they will - all be available via `server.recv()`. Tiny-http will automatically rearrange the responses - so that they are sent in the right order. - - One exception to the previous statement exists when a request has a large body (currently > 1kB), - in which case the request handler will read the body directly from the stream and tiny-http - will wait for it to be read before processing the next request. Tiny-http will never wait for - a request to be answered to read the next one. - - When a client connection has sent its last request (by sending `Connection: close` header), - the thread will immediately stop reading from this client and can be reclaimed, even when the - request has not yet been answered. The reading part of the socket will also be immediately closed. - - Decoding the client's request is done lazily. If you don't read the request's body, it will not - be decoded. - -### Examples - -Examples of tiny-http in use: - -* [heroku-tiny-http-hello-world](https://github.com/frewsxcv/heroku-tiny-http-hello-world) - A simple web application demonstrating how to deploy tiny-http to Heroku -* [crate-deps](https://github.com/frewsxcv/crate-deps) - A web service that generates images of dependency graphs for crates hosted on crates.io -* [rouille](https://crates.io/crates/rouille) - Web framework built on tiny-http - -### License - -This project is licensed under either of - - * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or - http://www.apache.org/licenses/LICENSE-2.0) - * MIT license ([LICENSE-MIT](LICENSE-MIT) or - http://opensource.org/licenses/MIT) - -at your option. 
- -#### Contribution - -Unless you explicitly state otherwise, any contribution intentionally submitted -for inclusion in tiny-http by you, as defined in the Apache-2.0 license, shall be -dual licensed as above, without any additional terms or conditions. - - -[crate_img]: https://img.shields.io/crates/v/tiny_http.svg?logo=rust "Crate Page" -[crate]: https://crates.io/crates/tiny_http "Crate Link" -[docs]: https://docs.rs/tiny_http "Documentation" -[docs_img]: https://docs.rs/tiny_http/badge.svg "Documentation" -[license_img]: https://img.shields.io/crates/l/tiny_http.svg "License" -[ci_badge]: https://github.com/tiny-http/tiny-http/actions/workflows/ci.yaml/badge.svg "CI Status" -[ci_link]: https://github.com/tiny-http/tiny-http/actions/workflows/ci.yaml "Workflow Link" diff --git a/anneal/vendor/tiny_http/benches/bench.rs b/anneal/vendor/tiny_http/benches/bench.rs deleted file mode 100644 index f011ad793d..0000000000 --- a/anneal/vendor/tiny_http/benches/bench.rs +++ /dev/null @@ -1,80 +0,0 @@ -#![feature(test)] - -extern crate fdlimit; -extern crate test; -extern crate tiny_http; - -use std::io::Write; -use std::process::Command; -use tiny_http::Method; - -#[test] -#[ignore] -// TODO: obtain time -fn curl_bench() { - let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - let port = server.server_addr().to_ip().unwrap().port(); - let num_requests = 10usize; - - match Command::new("curl") - .arg("-s") - .arg(format!("http://localhost:{}/?[1-{}]", port, num_requests)) - .output() - { - Ok(p) => p, - Err(_) => return, // ignoring test - }; - - drop(server); -} - -#[bench] -fn sequential_requests(bencher: &mut test::Bencher) { - let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - let port = server.server_addr().to_ip().unwrap().port(); - - let mut stream = std::net::TcpStream::connect(("127.0.0.1", port)).unwrap(); - - bencher.iter(|| { - (write!(stream, "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n")).unwrap(); - - let request = server.recv().unwrap(); 
- - assert_eq!(request.method(), &Method::Get); - - request.respond(tiny_http::Response::new_empty(tiny_http::StatusCode(204))); - }); -} - -#[bench] -fn parallel_requests(bencher: &mut test::Bencher) { - fdlimit::raise_fd_limit(); - - let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - let port = server.server_addr().to_ip().unwrap().port(); - - bencher.iter(|| { - let mut streams = Vec::new(); - - for _ in 0..1000usize { - let mut stream = std::net::TcpStream::connect(("127.0.0.1", port)).unwrap(); - (write!( - stream, - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" - )) - .unwrap(); - streams.push(stream); - } - - loop { - let request = match server.try_recv().unwrap() { - None => break, - Some(rq) => rq, - }; - - assert_eq!(request.method(), &Method::Get); - - request.respond(tiny_http::Response::new_empty(tiny_http::StatusCode(204))); - } - }); -} diff --git a/anneal/vendor/tiny_http/examples/hello-world.rs b/anneal/vendor/tiny_http/examples/hello-world.rs deleted file mode 100644 index bce1a4b96e..0000000000 --- a/anneal/vendor/tiny_http/examples/hello-world.rs +++ /dev/null @@ -1,26 +0,0 @@ -extern crate tiny_http; - -use std::sync::Arc; -use std::thread; - -fn main() { - let server = Arc::new(tiny_http::Server::http("0.0.0.0:9975").unwrap()); - println!("Now listening on port 9975"); - - let mut handles = Vec::new(); - - for _ in 0..4 { - let server = server.clone(); - - handles.push(thread::spawn(move || { - for rq in server.incoming_requests() { - let response = tiny_http::Response::from_string("hello world".to_string()); - let _ = rq.respond(response); - } - })); - } - - for h in handles { - h.join().unwrap(); - } -} diff --git a/anneal/vendor/tiny_http/examples/php-cgi-example.php b/anneal/vendor/tiny_http/examples/php-cgi-example.php deleted file mode 100644 index b19249f445..0000000000 --- a/anneal/vendor/tiny_http/examples/php-cgi-example.php +++ /dev/null @@ -1,3 +0,0 @@ - - -*/ - -fn handle(rq: tiny_http::Request, 
script: &str) { - use std::io::Write; - use std::process::Command; - - let php = Command::new("php-cgi") - .arg(script) - //.stdin(Ignored) - //.extra_io(Ignored) - .env("AUTH_TYPE", "") - .env( - "CONTENT_LENGTH", - format!("{}", rq.body_length().unwrap_or(0)), - ) - .env("CONTENT_TYPE", "") - .env("GATEWAY_INTERFACE", "CGI/1.1") - .env("PATH_INFO", "") - .env("PATH_TRANSLATED", "") - .env("QUERY_STRING", format!("{}", rq.url())) - .env("REMOTE_ADDR", format!("{}", rq.remote_addr().unwrap())) - .env("REMOTE_HOST", "") - .env("REMOTE_IDENT", "") - .env("REMOTE_USER", "") - .env("REQUEST_METHOD", format!("{}", rq.method())) - .env("SCRIPT_NAME", script) - .env("SERVER_NAME", "tiny-http php-cgi example") - .env("SERVER_PORT", format!("{}", rq.remote_addr().unwrap())) - .env("SERVER_PROTOCOL", "HTTP/1.1") - .env("SERVER_SOFTWARE", "tiny-http php-cgi example") - .output() - .unwrap(); - - // note: this is not a good implementation - // cgi returns the status code in the headers ; also many headers will be missing - // in the response - match php.status { - status if status.success() => { - let mut writer = rq.into_writer(); - let writer: &mut dyn Write = &mut *writer; - - (write!(writer, "HTTP/1.1 200 OK\r\n")).unwrap(); - (write!(writer, "{}", php.stdout.clone().as_ascii_str().unwrap())).unwrap(); - - writer.flush().unwrap(); - } - _ => { - println!( - "Error in script execution: {}", - php.stderr.clone().as_ascii_str().unwrap() - ); - } - } -} - -fn main() { - use std::env; - use std::sync::Arc; - use std::thread::spawn; - - let php_script = { - let mut args = env::args(); - if args.len() < 2 { - println!("Usage: php-cgi "); - return; - } - args.nth(1).unwrap() - }; - - let server = Arc::new(tiny_http::Server::http("0.0.0.0:9975").unwrap()); - println!("Now listening on port 9975"); - - let num_cpus = 4; // TODO: dynamically generate this value - for _ in 0..num_cpus { - let server = server.clone(); - let php_script = php_script.clone(); - - spawn(move || { - for rq 
in server.incoming_requests() { - handle(rq, &php_script); - } - }); - } -} diff --git a/anneal/vendor/tiny_http/examples/readme-example.rs b/anneal/vendor/tiny_http/examples/readme-example.rs deleted file mode 100644 index e093827d5a..0000000000 --- a/anneal/vendor/tiny_http/examples/readme-example.rs +++ /dev/null @@ -1,19 +0,0 @@ -extern crate tiny_http; - -fn main() { - use tiny_http::{Response, Server}; - - let server = Server::http("0.0.0.0:8000").unwrap(); - - for request in server.incoming_requests() { - println!( - "received request! method: {:?}, url: {:?}, headers: {:?}", - request.method(), - request.url(), - request.headers() - ); - - let response = Response::from_string("hello world"); - request.respond(response).expect("Responded"); - } -} diff --git a/anneal/vendor/tiny_http/examples/serve-root.rs b/anneal/vendor/tiny_http/examples/serve-root.rs deleted file mode 100644 index 5391c1b09c..0000000000 --- a/anneal/vendor/tiny_http/examples/serve-root.rs +++ /dev/null @@ -1,58 +0,0 @@ -use ascii::AsciiString; -use std::fs; -use std::path::Path; - -extern crate ascii; -extern crate tiny_http; - -fn get_content_type(path: &Path) -> &'static str { - let extension = match path.extension() { - None => return "text/plain", - Some(e) => e, - }; - - match extension.to_str().unwrap() { - "gif" => "image/gif", - "jpg" => "image/jpeg", - "jpeg" => "image/jpeg", - "png" => "image/png", - "pdf" => "application/pdf", - "htm" => "text/html; charset=utf8", - "html" => "text/html; charset=utf8", - "txt" => "text/plain; charset=utf8", - _ => "text/plain; charset=utf8", - } -} - -fn main() { - let server = tiny_http::Server::http("0.0.0.0:8000").unwrap(); - let port = server.server_addr().to_ip().unwrap().port(); - println!("Now listening on port {}", port); - - loop { - let rq = match server.recv() { - Ok(rq) => rq, - Err(_) => break, - }; - - println!("{:?}", rq); - - let url = rq.url().to_string(); - let path = Path::new(&url); - let file = fs::File::open(&path); - - 
if file.is_ok() { - let response = tiny_http::Response::from_file(file.unwrap()); - - let response = response.with_header(tiny_http::Header { - field: "Content-Type".parse().unwrap(), - value: AsciiString::from_ascii(get_content_type(&path)).unwrap(), - }); - - let _ = rq.respond(response); - } else { - let rep = tiny_http::Response::new_empty(tiny_http::StatusCode(404)); - let _ = rq.respond(rep); - } - } -} diff --git a/anneal/vendor/tiny_http/examples/ssl-cert.pem b/anneal/vendor/tiny_http/examples/ssl-cert.pem deleted file mode 100644 index 6488a21970..0000000000 --- a/anneal/vendor/tiny_http/examples/ssl-cert.pem +++ /dev/null @@ -1,23 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDxzCCAq+gAwIBAgIUVB6JBT6sYv0g4IGfZgjelceiqBQwDQYJKoZIhvcNAQEL -BQAwcjELMAkGA1UEBhMCTk8xDTALBgNVBAgMBE5vbmUxDTALBgNVBAcMBE5vbmUx -DTALBgNVBAoMBE5vbmUxDTALBgNVBAsMBE5vbmUxEjAQBgNVBAMMCWxvY2FsaG9z -dDETMBEGCSqGSIb3DQEJARYETm9uZTAgFw0yMjAxMjgyMDQzMjFaGA8yMDcyMDEx -NjIwNDMyMVowcjELMAkGA1UEBhMCTk8xDTALBgNVBAgMBE5vbmUxDTALBgNVBAcM -BE5vbmUxDTALBgNVBAoMBE5vbmUxDTALBgNVBAsMBE5vbmUxEjAQBgNVBAMMCWxv -Y2FsaG9zdDETMBEGCSqGSIb3DQEJARYETm9uZTCCASIwDQYJKoZIhvcNAQEBBQAD -ggEPADCCAQoCggEBANgjc9hZtEwve2usjNrc+2w4bT9fJi2uVQ3eHdtECirBxHrm -rSbSeOyhvTPmonyp81LQv52KzHDLxwVSmFoJkrIKrnqqSzw/ynuqBpykhV3TKPLK -SCZiyqQmGucTIxOXM9ZEB51zCvq+2jL4v2nBueibY2SzXG6MSAjRRC5ezDTYvIMH -12uH0U4a3UMICPTEMluy+mT4S1EGZLTj37+6JQA/1xYzZAifZAGEKRcCd0q5f9IU -V8GnnYjptFFswJJF7EBpExZIlxwTn7c4Un8yjYOTAj9Yw6OiAy6MVv8NSF1DeGmY -wUFHm6eUUmv+YO/T99sdt1dpdf1+807Fa62L1d8CAwEAAaNTMFEwHQYDVR0OBBYE -FCjWLWB1sdWiGdHT/PY4BcuqnJq0MB8GA1UdIwQYMBaAFCjWLWB1sdWiGdHT/PY4 -BcuqnJq0MA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAH+Gafl1 -/yhjg4wVniQRFKqdufMmaEkAgJGaB87/Cjb5dyf1ku3ZvM7SX9MyV2R3tw6hncBQ -E3XbPhOTJXcohGzpZ5hC043eY+/yAjKgbrSH4c0z/g9iSigX1B/F1hfF4Evx1eR6 -WD5CCpA/YLF8Ik09WU2HKFT85sDIygmv0hmuI0dF+9lpqfPguhx6iLOoFyXkbgqF -RDWe8V0/GtEnX4PckdyyYk/uFX5aKMeW5dBY6GL9YDRcZvTLsjJi1wj3OcDsr7n5 
-ULkGWiWdQScpkrGWOPoM72r6yyFi5P/RCBI/p0LBseARXAAedgC8tTK6DpfRXe1M -BumieRdjmHDbE+4= ------END CERTIFICATE----- diff --git a/anneal/vendor/tiny_http/examples/ssl-key.pem b/anneal/vendor/tiny_http/examples/ssl-key.pem deleted file mode 100644 index c924759b6e..0000000000 --- a/anneal/vendor/tiny_http/examples/ssl-key.pem +++ /dev/null @@ -1,28 +0,0 @@ ------BEGIN PRIVATE KEY----- -MIIEvwIBADANBgkqhkiG9w0BAQEFAASCBKkwggSlAgEAAoIBAQDYI3PYWbRML3tr -rIza3PtsOG0/XyYtrlUN3h3bRAoqwcR65q0m0njsob0z5qJ8qfNS0L+disxwy8cF -UphaCZKyCq56qks8P8p7qgacpIVd0yjyykgmYsqkJhrnEyMTlzPWRAedcwr6vtoy -+L9pwbnom2Nks1xujEgI0UQuXsw02LyDB9drh9FOGt1DCAj0xDJbsvpk+EtRBmS0 -49+/uiUAP9cWM2QIn2QBhCkXAndKuX/SFFfBp52I6bRRbMCSRexAaRMWSJccE5+3 -OFJ/Mo2DkwI/WMOjogMujFb/DUhdQ3hpmMFBR5unlFJr/mDv0/fbHbdXaXX9fvNO -xWuti9XfAgMBAAECggEBALRhyiHKo71dd0yigh96g96KrSpRV4SSVOuw7wv6md2b -P0Yu1F1tFHywcz4ogn02PRtlmjV6DCsq9ltL1lh2WtZ6MamwDAApYOyaNtBuQdvP -CgKurU5T7rjWEGe/QevsqddtiUlvJL+lnmch0GYLxwMJBAeb5U1hiBDLzXJBrX1/ -xstCJzYC1MO3zRNuoNudpZyMU18BCbk1E+XMu/yPyJJPm5VSxS3Qe0RIX0nqC3hd -RjwiTGYIqHX/hTFuZta+lNwVSx/8TJY6wTq69r6mGf0hudXz9A0SEhSVdkU77yFE -NLCTRGD8xkF1bJliT57dGofheSrn4+ATR6eMYzsGmYECgYEA/0Ao+dccjMgqiyhR -K3TTaikSSvvTOY/muISvovlpDe1U10+HIPMfGIJUVOE7p4oAbg9W70x8hMox+0hN -yERLp4Pq9skkwxfmGjA0ooKJt0e0Ux1A5LiboLUPGlBsHwNcLqno/vj4WK/TGdZu -51zG1zvBrSIMLtT5QChsy+zTtsECgYEA2MXlkgy8W7/9BRcdzCXj2Uan5L+H6KT3 -fXHYWc5yp0QBl6dqNCtPJ2lIpZ+qrNzJ1QZM56+qPEftOFOmhnz4+U6SAaKc/75n -bAL//ggMQwHzMus6ufhXJC3AYSduq9e1hnOC+K7Hc3dPLPkJGjVZVC2kXhACTwuL -aBbi5EmWVJ8CgYByk3BRRdgQ8cD3Gi/lW9mSq8EEW6njCs88QIM+msoncENHKvGz -Pq7Up5wHRdsrR20N+mDBpgm26bQp4bjYjp+PIE4WXQ/daxrk4oKd+A6tcMhnDpiU -krF5IA0ZeMQv36g/YhGucj+4P6R40qKRxDmVX8N+XewuEXeY7wx3NWWLgQKBgQC4 -DqA0eDfet49AqTYVxv5F2GZqJe5iLOAvVWDcMBzNxUKM4AufLD7TOeQDLSUgDYAa -LnVSK6eh83iKYQx+GNLV7E6wsMAZrjPmVE3EBlVS9+7lhzGgAisLfwVf+LlRk6B/ -/shwGwcjFWTWzMVbyXyFqxNrArDTKPw/b19LcugABQKBgQD8ARwEoZRyl8kuLyLh -33FRUAVTvuCU1KvEl9CU6BcmEDNcI/O/IYOeOUXYeIeNyATNH2L8A++i53EptpMd 
-wVxEIE2YBHD2t6+OcTjFSey7/6BejBUExxMVyWnUZMq3Xvf30ecfvacSb/DexqAG -yos+VoW4PIphdW3NBfE1hlLDZQ== ------END PRIVATE KEY----- diff --git a/anneal/vendor/tiny_http/examples/ssl.rs b/anneal/vendor/tiny_http/examples/ssl.rs deleted file mode 100644 index eef2c0521c..0000000000 --- a/anneal/vendor/tiny_http/examples/ssl.rs +++ /dev/null @@ -1,42 +0,0 @@ -extern crate tiny_http; - -#[cfg(not(any(feature = "ssl-openssl", feature = "ssl-rustls")))] -fn main() { - println!("This example requires one of the supported `ssl-*` features to be enabled"); -} - -#[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] -fn main() { - use tiny_http::{Response, Server}; - - let server = Server::https( - "0.0.0.0:8000", - tiny_http::SslConfig { - certificate: include_bytes!("ssl-cert.pem").to_vec(), - private_key: include_bytes!("ssl-key.pem").to_vec(), - }, - ) - .unwrap(); - - println!( - "Note: connecting to this server will likely give you a warning from your browser \ - because the connection is unsecure. This is because the certificate used by this \ - example is self-signed. With a real certificate, you wouldn't get this warning." - ); - - for request in server.incoming_requests() { - assert!(request.secure()); - - println!( - "received request! 
method: {:?}, url: {:?}, headers: {:?}", - request.method(), - request.url(), - request.headers() - ); - - let response = Response::from_string("hello world"); - request - .respond(response) - .unwrap_or(println!("Failed to respond to request")); - } -} diff --git a/anneal/vendor/tiny_http/examples/websockets.rs b/anneal/vendor/tiny_http/examples/websockets.rs deleted file mode 100644 index acea4e4602..0000000000 --- a/anneal/vendor/tiny_http/examples/websockets.rs +++ /dev/null @@ -1,148 +0,0 @@ -extern crate rustc_serialize; -extern crate sha1; -extern crate tiny_http; - -use std::io::Cursor; -use std::io::Read; -use std::thread::spawn; - -use rustc_serialize::base64::{Config, Newline, Standard, ToBase64}; - -fn home_page(port: u16) -> tiny_http::Response>> { - tiny_http::Response::from_string(format!( - " - -

This example will receive "Hello" for each byte in the packet being sent. - Tiny-http doesn't support decoding websocket frames, so we can't do anything better.

-

-

-

Received:

-

- ", - port - )) - .with_header( - "Content-type: text/html" - .parse::() - .unwrap(), - ) -} - -/// Turns a Sec-WebSocket-Key into a Sec-WebSocket-Accept. -/// Feel free to copy-paste this function, but please use a better error handling. -fn convert_key(input: &str) -> String { - use sha1::Sha1; - - let mut input = input.to_string().into_bytes(); - let mut bytes = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11" - .to_string() - .into_bytes(); - input.append(&mut bytes); - - let mut sha1 = Sha1::new(); - sha1.update(&input); - - sha1.digest().bytes().to_base64(Config { - char_set: Standard, - pad: true, - line_length: None, - newline: Newline::LF, - }) -} - -fn main() { - let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - let port = server.server_addr().to_ip().unwrap().port(); - - println!("Server started"); - println!( - "To try this example, open a browser to http://localhost:{}/", - port - ); - - for request in server.incoming_requests() { - // we are handling this websocket connection in a new task - spawn(move || { - // checking the "Upgrade" header to check that it is a websocket - match request - .headers() - .iter() - .find(|h| h.field.equiv(&"Upgrade")) - .and_then(|hdr| { - if hdr.value == "websocket" { - Some(hdr) - } else { - None - } - }) { - None => { - // sending the HTML page - request.respond(home_page(port)).expect("Responded"); - return; - } - _ => (), - }; - - // getting the value of Sec-WebSocket-Key - let key = match request - .headers() - .iter() - .find(|h| h.field.equiv(&"Sec-WebSocket-Key")) - .map(|h| h.value.clone()) - { - None => { - let response = tiny_http::Response::new_empty(tiny_http::StatusCode(400)); - request.respond(response).expect("Responded"); - return; - } - Some(k) => k, - }; - - // building the "101 Switching Protocols" response - let response = tiny_http::Response::new_empty(tiny_http::StatusCode(101)) - .with_header("Upgrade: websocket".parse::().unwrap()) - .with_header("Connection: Upgrade".parse::().unwrap()) - 
.with_header( - "Sec-WebSocket-Protocol: ping" - .parse::() - .unwrap(), - ) - .with_header( - format!("Sec-WebSocket-Accept: {}", convert_key(key.as_str())) - .parse::() - .unwrap(), - ); - - // - let mut stream = request.upgrade("websocket", response); - - // - loop { - let mut out = Vec::new(); - match Read::by_ref(&mut stream).take(1).read_to_end(&mut out) { - Ok(n) if n >= 1 => { - // "Hello" frame - let data = [0x81, 0x05, 0x48, 0x65, 0x6c, 0x6c, 0x6f]; - stream.write(&data).ok(); - stream.flush().ok(); - } - Ok(_) => panic!("eof ; should never happen"), - Err(e) => { - println!("closing connection because: {}", e); - return; - } - }; - } - }); - } -} diff --git a/anneal/vendor/tiny_http/src/client.rs b/anneal/vendor/tiny_http/src/client.rs deleted file mode 100644 index 68bbd469a2..0000000000 --- a/anneal/vendor/tiny_http/src/client.rs +++ /dev/null @@ -1,309 +0,0 @@ -use ascii::AsciiString; - -use std::io::Error as IoError; -use std::io::Result as IoResult; -use std::io::{BufReader, BufWriter, ErrorKind, Read}; - -use std::net::SocketAddr; -use std::str::FromStr; - -use crate::common::{HTTPVersion, Method}; -use crate::util::RefinedTcpStream; -use crate::util::{SequentialReader, SequentialReaderBuilder, SequentialWriterBuilder}; -use crate::Request; - -/// A ClientConnection is an object that will store a socket to a client -/// and return Request objects. 
-pub struct ClientConnection { - // address of the client - remote_addr: IoResult>, - - // sequence of Readers to the stream, so that the data is not read in - // the wrong order - source: SequentialReaderBuilder>, - - // sequence of Writers to the stream, to avoid writing response #2 before - // response #1 - sink: SequentialWriterBuilder>, - - // Reader to read the next header from - next_header_source: SequentialReader>, - - // set to true if we know that the previous request is the last one - no_more_requests: bool, - - // true if the connection goes through SSL - secure: bool, -} - -/// Error that can happen when reading a request. -#[derive(Debug)] -enum ReadError { - WrongRequestLine, - WrongHeader(HTTPVersion), - /// the client sent an unrecognized `Expect` header - ExpectationFailed(HTTPVersion), - ReadIoError(IoError), -} - -impl ClientConnection { - /// Creates a new `ClientConnection` that takes ownership of the `TcpStream`. - pub fn new( - write_socket: RefinedTcpStream, - mut read_socket: RefinedTcpStream, - ) -> ClientConnection { - let remote_addr = read_socket.peer_addr(); - let secure = read_socket.secure(); - - let mut source = SequentialReaderBuilder::new(BufReader::with_capacity(1024, read_socket)); - let first_header = source.next().unwrap(); - - ClientConnection { - source, - sink: SequentialWriterBuilder::new(BufWriter::with_capacity(1024, write_socket)), - remote_addr, - next_header_source: first_header, - no_more_requests: false, - secure, - } - } - - /// true if the connection is HTTPS - pub fn secure(&self) -> bool { - self.secure - } - - /// Reads the next line from self.next_header_source. - /// - /// Reads until `CRLF` is reached. The next read will start - /// at the first byte of the new line. 
- fn read_next_line(&mut self) -> IoResult { - let mut buf = Vec::new(); - let mut prev_byte_was_cr = false; - - loop { - let byte = self.next_header_source.by_ref().bytes().next(); - - let byte = match byte { - Some(b) => b?, - None => return Err(IoError::new(ErrorKind::ConnectionAborted, "Unexpected EOF")), - }; - - if byte == b'\n' && prev_byte_was_cr { - buf.pop(); // removing the '\r' - return AsciiString::from_ascii(buf) - .map_err(|_| IoError::new(ErrorKind::InvalidInput, "Header is not in ASCII")); - } - - prev_byte_was_cr = byte == b'\r'; - - buf.push(byte); - } - } - - /// Reads a request from the stream. - /// Blocks until the header has been read. - fn read(&mut self) -> Result { - let (method, path, version, headers) = { - // reading the request line - let (method, path, version) = { - let line = self.read_next_line().map_err(ReadError::ReadIoError)?; - - parse_request_line( - line.as_str().trim(), // TODO: remove this conversion - )? - }; - - // getting all headers - let headers = { - let mut headers = Vec::new(); - loop { - let line = self.read_next_line().map_err(ReadError::ReadIoError)?; - - if line.is_empty() { - break; - }; - headers.push(match FromStr::from_str(line.as_str().trim()) { - // TODO: remove this conversion - Ok(h) => h, - _ => return Err(ReadError::WrongHeader(version)), - }); - } - - headers - }; - - (method, path, version, headers) - }; - - // building the writer for the request - let writer = self.sink.next().unwrap(); - - // follow-up for next potential request - let mut data_source = self.source.next().unwrap(); - std::mem::swap(&mut self.next_header_source, &mut data_source); - - // building the next reader - let request = crate::request::new_request( - self.secure, - method, - path, - version.clone(), - headers, - *self.remote_addr.as_ref().unwrap(), - data_source, - writer, - ) - .map_err(|e| { - use crate::request; - match e { - request::RequestCreationError::CreationIoError(e) => ReadError::ReadIoError(e), - 
request::RequestCreationError::ExpectationFailed => { - ReadError::ExpectationFailed(version) - } - } - })?; - - // return the request - Ok(request) - } -} - -impl Iterator for ClientConnection { - type Item = Request; - - /// Blocks until the next Request is available. - /// Returns None when no new Requests will come from the client. - fn next(&mut self) -> Option { - use crate::{Response, StatusCode}; - - // the client sent a "connection: close" header in this previous request - // or is using HTTP 1.0, meaning that no new request will come - if self.no_more_requests { - return None; - } - - loop { - let rq = match self.read() { - Err(ReadError::WrongRequestLine) => { - let writer = self.sink.next().unwrap(); - let response = Response::new_empty(StatusCode(400)); - response - .raw_print(writer, HTTPVersion(1, 1), &[], false, None) - .ok(); - return None; // we don't know where the next request would start, - // se we have to close - } - - Err(ReadError::WrongHeader(ver)) => { - let writer = self.sink.next().unwrap(); - let response = Response::new_empty(StatusCode(400)); - response.raw_print(writer, ver, &[], false, None).ok(); - return None; // we don't know where the next request would start, - // se we have to close - } - - Err(ReadError::ReadIoError(ref err)) if err.kind() == ErrorKind::TimedOut => { - // request timeout - let writer = self.sink.next().unwrap(); - let response = Response::new_empty(StatusCode(408)); - response - .raw_print(writer, HTTPVersion(1, 1), &[], false, None) - .ok(); - return None; // closing the connection - } - - Err(ReadError::ExpectationFailed(ver)) => { - let writer = self.sink.next().unwrap(); - let response = Response::new_empty(StatusCode(417)); - response.raw_print(writer, ver, &[], true, None).ok(); - return None; // TODO: should be recoverable, but needs handling in case of body - } - - Err(ReadError::ReadIoError(_)) => return None, - - Ok(rq) => rq, - }; - - // checking HTTP version - if *rq.http_version() > (1, 1) { - 
let writer = self.sink.next().unwrap(); - let response = Response::from_string( - "This server only supports HTTP versions 1.0 and 1.1".to_owned(), - ) - .with_status_code(StatusCode(505)); - response - .raw_print(writer, HTTPVersion(1, 1), &[], false, None) - .ok(); - continue; - } - - // updating the status of the connection - let connection_header = rq - .headers() - .iter() - .find(|h| h.field.equiv("Connection")) - .map(|h| h.value.as_str()); - - let lowercase = connection_header.map(|h| h.to_ascii_lowercase()); - - match lowercase { - Some(ref val) if val.contains("close") => self.no_more_requests = true, - Some(ref val) if val.contains("upgrade") => self.no_more_requests = true, - Some(ref val) - if !val.contains("keep-alive") && *rq.http_version() == HTTPVersion(1, 0) => - { - self.no_more_requests = true - } - None if *rq.http_version() == HTTPVersion(1, 0) => self.no_more_requests = true, - _ => (), - }; - - // returning the request - return Some(rq); - } - } -} - -/// Parses a "HTTP/1.1" string. -fn parse_http_version(version: &str) -> Result { - let (major, minor) = match version { - "HTTP/0.9" => (0, 9), - "HTTP/1.0" => (1, 0), - "HTTP/1.1" => (1, 1), - "HTTP/2.0" => (2, 0), - "HTTP/3.0" => (3, 0), - _ => return Err(ReadError::WrongRequestLine), - }; - - Ok(HTTPVersion(major, minor)) -} - -/// Parses the request line of the request. -/// eg. 
GET / HTTP/1.1 -fn parse_request_line(line: &str) -> Result<(Method, String, HTTPVersion), ReadError> { - let mut parts = line.split(' '); - - let method = parts.next().and_then(|w| w.parse().ok()); - let path = parts.next().map(ToOwned::to_owned); - let version = parts.next().and_then(|w| parse_http_version(w).ok()); - - method - .and_then(|method| Some((method, path?, version?))) - .ok_or(ReadError::WrongRequestLine) -} - -#[cfg(test)] -mod test { - #[test] - fn test_parse_request_line() { - let (method, path, ver) = super::parse_request_line("GET /hello HTTP/1.1").unwrap(); - - assert!(method == crate::Method::Get); - assert!(path == "/hello"); - assert!(ver == crate::common::HTTPVersion(1, 1)); - - assert!(super::parse_request_line("GET /hello").is_err()); - assert!(super::parse_request_line("qsd qsd qsd").is_err()); - } -} diff --git a/anneal/vendor/tiny_http/src/common.rs b/anneal/vendor/tiny_http/src/common.rs deleted file mode 100644 index 473fd8d488..0000000000 --- a/anneal/vendor/tiny_http/src/common.rs +++ /dev/null @@ -1,440 +0,0 @@ -use ascii::{AsciiStr, AsciiString, FromAsciiError}; -use std::cmp::Ordering; -use std::fmt::{self, Display, Formatter}; -use std::str::FromStr; - -/// Status code of a request or response. -#[derive(Eq, PartialEq, Copy, Clone, Debug, Ord, PartialOrd)] -pub struct StatusCode(pub u16); - -impl StatusCode { - /// Returns the default reason phrase for this status code. - /// For example the status code 404 corresponds to "Not Found". 
- pub fn default_reason_phrase(&self) -> &'static str { - match self.0 { - 100 => "Continue", - 101 => "Switching Protocols", - 102 => "Processing", - 103 => "Early Hints", - - 200 => "OK", - 201 => "Created", - 202 => "Accepted", - 203 => "Non-Authoritative Information", - 204 => "No Content", - 205 => "Reset Content", - 206 => "Partial Content", - 207 => "Multi-Status", - 208 => "Already Reported", - 226 => "IM Used", - - 300 => "Multiple Choices", - 301 => "Moved Permanently", - 302 => "Found", - 303 => "See Other", - 304 => "Not Modified", - 305 => "Use Proxy", - 307 => "Temporary Redirect", - 308 => "Permanent Redirect", - - 400 => "Bad Request", - 401 => "Unauthorized", - 402 => "Payment Required", - 403 => "Forbidden", - 404 => "Not Found", - 405 => "Method Not Allowed", - 406 => "Not Acceptable", - 407 => "Proxy Authentication Required", - 408 => "Request Timeout", - 409 => "Conflict", - 410 => "Gone", - 411 => "Length Required", - 412 => "Precondition Failed", - 413 => "Payload Too Large", - 414 => "URI Too Long", - 415 => "Unsupported Media Type", - 416 => "Range Not Satisfiable", - 417 => "Expectation Failed", - 421 => "Misdirected Request", - 422 => "Unprocessable Entity", - 423 => "Locked", - 424 => "Failed Dependency", - 426 => "Upgrade Required", - 428 => "Precondition Required", - 429 => "Too Many Requests", - 431 => "Request Header Fields Too Large", - 451 => "Unavailable For Legal Reasons", - - 500 => "Internal Server Error", - 501 => "Not Implemented", - 502 => "Bad Gateway", - 503 => "Service Unavailable", - 504 => "Gateway Timeout", - 505 => "HTTP Version Not Supported", - 506 => "Variant Also Negotiates", - 507 => "Insufficient Storage", - 508 => "Loop Detected", - 510 => "Not Extended", - 511 => "Network Authentication Required", - _ => "Unknown", - } - } -} - -impl From for StatusCode { - fn from(in_code: i8) -> StatusCode { - StatusCode(in_code as u16) - } -} - -impl From for StatusCode { - fn from(in_code: u8) -> StatusCode { - 
StatusCode(in_code as u16) - } -} - -impl From for StatusCode { - fn from(in_code: i16) -> StatusCode { - StatusCode(in_code as u16) - } -} - -impl From for StatusCode { - fn from(in_code: u16) -> StatusCode { - StatusCode(in_code) - } -} - -impl From for StatusCode { - fn from(in_code: i32) -> StatusCode { - StatusCode(in_code as u16) - } -} - -impl From for StatusCode { - fn from(in_code: u32) -> StatusCode { - StatusCode(in_code as u16) - } -} - -impl AsRef for StatusCode { - fn as_ref(&self) -> &u16 { - &self.0 - } -} - -impl PartialEq for StatusCode { - fn eq(&self, other: &u16) -> bool { - &self.0 == other - } -} - -impl PartialEq for u16 { - fn eq(&self, other: &StatusCode) -> bool { - self == &other.0 - } -} - -impl PartialOrd for StatusCode { - fn partial_cmp(&self, other: &u16) -> Option { - self.0.partial_cmp(other) - } -} - -impl PartialOrd for u16 { - fn partial_cmp(&self, other: &StatusCode) -> Option { - self.partial_cmp(&other.0) - } -} - -/// Represents a HTTP header. -#[derive(Debug, Clone)] -pub struct Header { - pub field: HeaderField, - pub value: AsciiString, -} - -impl Header { - /// Builds a `Header` from two `Vec`s or two `&[u8]`s. 
- /// - /// Example: - /// - /// ``` - /// let header = tiny_http::Header::from_bytes(&b"Content-Type"[..], &b"text/plain"[..]).unwrap(); - /// ``` - #[allow(clippy::result_unit_err)] - pub fn from_bytes(header: B1, value: B2) -> Result - where - B1: Into> + AsRef<[u8]>, - B2: Into> + AsRef<[u8]>, - { - let header = HeaderField::from_bytes(header).or(Err(()))?; - let value = AsciiString::from_ascii(value).or(Err(()))?; - - Ok(Header { - field: header, - value, - }) - } -} - -impl FromStr for Header { - type Err = (); - - fn from_str(input: &str) -> Result { - let mut elems = input.splitn(2, ':'); - - let field = elems.next().and_then(|f| f.parse().ok()).ok_or(())?; - let value = elems - .next() - .and_then(|v| AsciiString::from_ascii(v.trim()).ok()) - .ok_or(())?; - - Ok(Header { field, value }) - } -} - -impl Display for Header { - fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), fmt::Error> { - write!(formatter, "{}: {}", self.field, self.value.as_str()) - } -} - -/// Field of a header (eg. `Content-Type`, `Content-Length`, etc.) -/// -/// Comparison between two `HeaderField`s ignores case. 
-#[derive(Debug, Clone, Eq)] -pub struct HeaderField(AsciiString); - -impl HeaderField { - pub fn from_bytes(bytes: B) -> Result> - where - B: Into> + AsRef<[u8]>, - { - AsciiString::from_ascii(bytes).map(HeaderField) - } - - pub fn as_str(&self) -> &AsciiStr { - &self.0 - } - - pub fn equiv(&self, other: &'static str) -> bool { - other.eq_ignore_ascii_case(self.as_str().as_str()) - } -} - -impl FromStr for HeaderField { - type Err = (); - - fn from_str(s: &str) -> Result { - if s.contains(char::is_whitespace) { - Err(()) - } else { - AsciiString::from_ascii(s).map(HeaderField).map_err(|_| ()) - } - } -} - -impl Display for HeaderField { - fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), fmt::Error> { - write!(formatter, "{}", self.0.as_str()) - } -} - -impl PartialEq for HeaderField { - fn eq(&self, other: &HeaderField) -> bool { - let self_str: &str = self.as_str().as_ref(); - let other_str = other.as_str().as_ref(); - self_str.eq_ignore_ascii_case(other_str) - } -} - -/// HTTP request methods -/// -/// As per [RFC 7231](https://tools.ietf.org/html/rfc7231#section-4.1) and -/// [RFC 5789](https://tools.ietf.org/html/rfc5789) -#[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Method { - /// `GET` - Get, - - /// `HEAD` - Head, - - /// `POST` - Post, - - /// `PUT` - Put, - - /// `DELETE` - Delete, - - /// `CONNECT` - Connect, - - /// `OPTIONS` - Options, - - /// `TRACE` - Trace, - - /// `PATCH` - Patch, - - /// Request methods not standardized by the IETF - NonStandard(AsciiString), -} - -impl Method { - pub fn as_str(&self) -> &str { - match *self { - Method::Get => "GET", - Method::Head => "HEAD", - Method::Post => "POST", - Method::Put => "PUT", - Method::Delete => "DELETE", - Method::Connect => "CONNECT", - Method::Options => "OPTIONS", - Method::Trace => "TRACE", - Method::Patch => "PATCH", - Method::NonStandard(ref s) => s.as_str(), - } - } -} - -impl FromStr for Method { - type Err = (); - - fn from_str(s: &str) -> Result { - Ok(match s { - 
"GET" => Method::Get, - "HEAD" => Method::Head, - "POST" => Method::Post, - "PUT" => Method::Put, - "DELETE" => Method::Delete, - "CONNECT" => Method::Connect, - "OPTIONS" => Method::Options, - "TRACE" => Method::Trace, - "PATCH" => Method::Patch, - s => { - let ascii_string = AsciiString::from_ascii(s).map_err(|_| ())?; - Method::NonStandard(ascii_string) - } - }) - } -} - -impl Display for Method { - fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), fmt::Error> { - write!(formatter, "{}", self.as_str()) - } -} - -/// HTTP version (usually 1.0 or 1.1). -#[allow(clippy::upper_case_acronyms)] -#[derive(Debug, Clone, PartialEq, Eq)] -pub struct HTTPVersion(pub u8, pub u8); - -impl Display for HTTPVersion { - fn fmt(&self, formatter: &mut Formatter<'_>) -> Result<(), fmt::Error> { - write!(formatter, "{}.{}", self.0, self.1) - } -} - -impl Ord for HTTPVersion { - fn cmp(&self, other: &Self) -> Ordering { - let HTTPVersion(my_major, my_minor) = *self; - let HTTPVersion(other_major, other_minor) = *other; - - if my_major != other_major { - return my_major.cmp(&other_major); - } - - my_minor.cmp(&other_minor) - } -} - -impl PartialOrd for HTTPVersion { - fn partial_cmp(&self, other: &HTTPVersion) -> Option { - Some(self.cmp(other)) - } -} - -impl PartialEq<(u8, u8)> for HTTPVersion { - fn eq(&self, &(major, minor): &(u8, u8)) -> bool { - self.eq(&HTTPVersion(major, minor)) - } -} - -impl PartialEq for (u8, u8) { - fn eq(&self, other: &HTTPVersion) -> bool { - let &(major, minor) = self; - HTTPVersion(major, minor).eq(other) - } -} - -impl PartialOrd<(u8, u8)> for HTTPVersion { - fn partial_cmp(&self, &(major, minor): &(u8, u8)) -> Option { - self.partial_cmp(&HTTPVersion(major, minor)) - } -} - -impl PartialOrd for (u8, u8) { - fn partial_cmp(&self, other: &HTTPVersion) -> Option { - let &(major, minor) = self; - HTTPVersion(major, minor).partial_cmp(other) - } -} - -impl From<(u8, u8)> for HTTPVersion { - fn from((major, minor): (u8, u8)) -> HTTPVersion { - 
HTTPVersion(major, minor) - } -} - -#[cfg(test)] -mod test { - use super::Header; - use httpdate::HttpDate; - use std::time::{Duration, SystemTime}; - - #[test] - fn test_parse_header() { - let header: Header = "Content-Type: text/html".parse().unwrap(); - - assert!(header.field.equiv(&"content-type")); - assert!(header.value.as_str() == "text/html"); - - assert!("hello world".parse::
().is_err()); - } - - #[test] - fn formats_date_correctly() { - let http_date = HttpDate::from(SystemTime::UNIX_EPOCH + Duration::from_secs(420895020)); - - assert_eq!(http_date.to_string(), "Wed, 04 May 1983 11:17:00 GMT") - } - - #[test] - fn test_parse_header_with_doublecolon() { - let header: Header = "Time: 20: 34".parse().unwrap(); - - assert!(header.field.equiv(&"time")); - assert!(header.value.as_str() == "20: 34"); - } - - // This tests reslstance to RUSTSEC-2020-0031: "HTTP Request smuggling - // through malformed Transfer Encoding headers" - // (https://rustsec.org/advisories/RUSTSEC-2020-0031.html). - #[test] - fn test_strict_headers() { - assert!("Transfer-Encoding : chunked".parse::
().is_err()); - assert!(" Transfer-Encoding: chunked".parse::
().is_err()); - assert!("Transfer Encoding: chunked".parse::
().is_err()); - assert!(" Transfer\tEncoding : chunked".parse::
().is_err()); - assert!("Transfer-Encoding: chunked".parse::
().is_ok()); - assert!("Transfer-Encoding: chunked ".parse::
().is_ok()); - assert!("Transfer-Encoding: chunked ".parse::
().is_ok()); - } -} diff --git a/anneal/vendor/tiny_http/src/connection.rs b/anneal/vendor/tiny_http/src/connection.rs deleted file mode 100644 index 6d161cd35e..0000000000 --- a/anneal/vendor/tiny_http/src/connection.rs +++ /dev/null @@ -1,194 +0,0 @@ -//! Abstractions of Tcp and Unix socket types - -#[cfg(unix)] -use std::os::unix::net as unix_net; -use std::{ - net::{Shutdown, SocketAddr, TcpListener, TcpStream, ToSocketAddrs}, - path::PathBuf, -}; - -/// Unified listener. Either a [`TcpListener`] or [`std::os::unix::net::UnixListener`] -pub enum Listener { - Tcp(TcpListener), - #[cfg(unix)] - Unix(unix_net::UnixListener), -} -impl Listener { - pub(crate) fn local_addr(&self) -> std::io::Result { - match self { - Self::Tcp(l) => l.local_addr().map(ListenAddr::from), - #[cfg(unix)] - Self::Unix(l) => l.local_addr().map(ListenAddr::from), - } - } - - pub(crate) fn accept(&self) -> std::io::Result<(Connection, Option)> { - match self { - Self::Tcp(l) => l - .accept() - .map(|(conn, addr)| (Connection::from(conn), Some(addr))), - #[cfg(unix)] - Self::Unix(l) => l.accept().map(|(conn, _)| (Connection::from(conn), None)), - } - } -} -impl From for Listener { - fn from(s: TcpListener) -> Self { - Self::Tcp(s) - } -} -#[cfg(unix)] -impl From for Listener { - fn from(s: unix_net::UnixListener) -> Self { - Self::Unix(s) - } -} - -/// Unified connection. Either a [`TcpStream`] or [`std::os::unix::net::UnixStream`]. 
-#[derive(Debug)] -pub(crate) enum Connection { - Tcp(TcpStream), - #[cfg(unix)] - Unix(unix_net::UnixStream), -} -impl std::io::Read for Connection { - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { - match self { - Self::Tcp(s) => s.read(buf), - #[cfg(unix)] - Self::Unix(s) => s.read(buf), - } - } -} -impl std::io::Write for Connection { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - match self { - Self::Tcp(s) => s.write(buf), - #[cfg(unix)] - Self::Unix(s) => s.write(buf), - } - } - - fn flush(&mut self) -> std::io::Result<()> { - match self { - Self::Tcp(s) => s.flush(), - #[cfg(unix)] - Self::Unix(s) => s.flush(), - } - } -} -impl Connection { - /// Gets the peer's address. Some for TCP, None for Unix sockets. - pub(crate) fn peer_addr(&mut self) -> std::io::Result> { - match self { - Self::Tcp(s) => s.peer_addr().map(Some), - #[cfg(unix)] - Self::Unix(_) => Ok(None), - } - } - - pub(crate) fn shutdown(&self, how: Shutdown) -> std::io::Result<()> { - match self { - Self::Tcp(s) => s.shutdown(how), - #[cfg(unix)] - Self::Unix(s) => s.shutdown(how), - } - } - - pub(crate) fn try_clone(&self) -> std::io::Result { - match self { - Self::Tcp(s) => s.try_clone().map(Self::from), - #[cfg(unix)] - Self::Unix(s) => s.try_clone().map(Self::from), - } - } -} -impl From for Connection { - fn from(s: TcpStream) -> Self { - Self::Tcp(s) - } -} -#[cfg(unix)] -impl From for Connection { - fn from(s: unix_net::UnixStream) -> Self { - Self::Unix(s) - } -} - -#[derive(Debug, Clone)] -pub enum ConfigListenAddr { - IP(Vec), - #[cfg(unix)] - // TODO: use SocketAddr when bind_addr is stabilized - Unix(std::path::PathBuf), -} -impl ConfigListenAddr { - pub fn from_socket_addrs(addrs: A) -> std::io::Result { - addrs.to_socket_addrs().map(|it| Self::IP(it.collect())) - } - - #[cfg(unix)] - pub fn unix_from_path>(path: P) -> Self { - Self::Unix(path.into()) - } - - pub(crate) fn bind(&self) -> std::io::Result { - match self { - Self::IP(a) => 
TcpListener::bind(a.as_slice()).map(Listener::from), - #[cfg(unix)] - Self::Unix(a) => unix_net::UnixListener::bind(a).map(Listener::from), - } - } -} - -/// Unified listen socket address. Either a [`SocketAddr`] or [`std::os::unix::net::SocketAddr`]. -#[derive(Debug, Clone)] -pub enum ListenAddr { - IP(SocketAddr), - #[cfg(unix)] - Unix(unix_net::SocketAddr), -} -impl ListenAddr { - pub fn to_ip(self) -> Option { - match self { - Self::IP(s) => Some(s), - #[cfg(unix)] - Self::Unix(_) => None, - } - } - - /// Gets the Unix socket address. - /// - /// This is also available on non-Unix platforms, for ease of use, but always returns `None`. - #[cfg(unix)] - pub fn to_unix(self) -> Option { - match self { - Self::IP(_) => None, - Self::Unix(s) => Some(s), - } - } - #[cfg(not(unix))] - pub fn to_unix(self) -> Option { - None - } -} -impl From for ListenAddr { - fn from(s: SocketAddr) -> Self { - Self::IP(s) - } -} -#[cfg(unix)] -impl From for ListenAddr { - fn from(s: unix_net::SocketAddr) -> Self { - Self::Unix(s) - } -} -impl std::fmt::Display for ListenAddr { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::IP(s) => s.fmt(f), - #[cfg(unix)] - Self::Unix(s) => std::fmt::Debug::fmt(s, f), - } - } -} diff --git a/anneal/vendor/tiny_http/src/lib.rs b/anneal/vendor/tiny_http/src/lib.rs deleted file mode 100644 index 5e4568ab8b..0000000000 --- a/anneal/vendor/tiny_http/src/lib.rs +++ /dev/null @@ -1,445 +0,0 @@ -//! # Simple usage -//! -//! ## Creating the server -//! -//! The easiest way to create a server is to call `Server::http()`. -//! -//! The `http()` function returns an `IoResult` which will return an error -//! in the case where the server creation fails (for example if the listening port is already -//! occupied). -//! -//! ```no_run -//! let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); -//! ``` -//! -//! A newly-created `Server` will immediately start listening for incoming connections and HTTP -//! 
requests. -//! -//! ## Receiving requests -//! -//! Calling `server.recv()` will block until the next request is available. -//! This function returns an `IoResult`, so you need to handle the possible errors. -//! -//! ```no_run -//! # let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); -//! -//! loop { -//! // blocks until the next request is received -//! let request = match server.recv() { -//! Ok(rq) => rq, -//! Err(e) => { println!("error: {}", e); break } -//! }; -//! -//! // do something with the request -//! // ... -//! } -//! ``` -//! -//! In a real-case scenario, you will probably want to spawn multiple worker tasks and call -//! `server.recv()` on all of them. Like this: -//! -//! ```no_run -//! # use std::sync::Arc; -//! # use std::thread; -//! # let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); -//! let server = Arc::new(server); -//! let mut guards = Vec::with_capacity(4); -//! -//! for _ in (0 .. 4) { -//! let server = server.clone(); -//! -//! let guard = thread::spawn(move || { -//! loop { -//! let rq = server.recv().unwrap(); -//! -//! // ... -//! } -//! }); -//! -//! guards.push(guard); -//! } -//! ``` -//! -//! If you don't want to block, you can call `server.try_recv()` instead. -//! -//! ## Handling requests -//! -//! The `Request` object returned by `server.recv()` contains informations about the client's request. -//! The most useful methods are probably `request.method()` and `request.url()` which return -//! the requested method (`GET`, `POST`, etc.) and url. -//! -//! To handle a request, you need to create a `Response` object. See the docs of this object for -//! more infos. Here is an example of creating a `Response` from a file: -//! -//! ```no_run -//! # use std::fs::File; -//! # use std::path::Path; -//! let response = tiny_http::Response::from_file(File::open(&Path::new("image.png")).unwrap()); -//! ``` -//! -//! All that remains to do is call `request.respond()`: -//! -//! ```no_run -//! # use std::fs::File; -//! 
# use std::path::Path; -//! # let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); -//! # let request = server.recv().unwrap(); -//! # let response = tiny_http::Response::from_file(File::open(&Path::new("image.png")).unwrap()); -//! let _ = request.respond(response); -//! ``` -#![forbid(unsafe_code)] -#![deny(rust_2018_idioms)] -#![allow(clippy::match_like_matches_macro)] - -#[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] -use zeroize::Zeroizing; - -use std::error::Error; -use std::io::Error as IoError; -use std::io::ErrorKind as IoErrorKind; -use std::io::Result as IoResult; -use std::net::{Shutdown, TcpStream, ToSocketAddrs}; -use std::sync::atomic::AtomicBool; -use std::sync::atomic::Ordering::Relaxed; -use std::sync::mpsc; -use std::sync::Arc; -use std::thread; -use std::time::Duration; - -use client::ClientConnection; -use connection::Connection; -use util::MessagesQueue; - -pub use common::{HTTPVersion, Header, HeaderField, Method, StatusCode}; -pub use connection::{ConfigListenAddr, ListenAddr, Listener}; -pub use request::{ReadWrite, Request}; -pub use response::{Response, ResponseBox}; -pub use test::TestRequest; - -mod client; -mod common; -mod connection; -mod request; -mod response; -mod ssl; -mod test; -mod util; - -/// The main class of this library. -/// -/// Destroying this object will immediately close the listening socket and the reading -/// part of all the client's connections. Requests that have already been returned by -/// the `recv()` function will not close and the responses will be transferred to the client. 
-pub struct Server { - // should be false as long as the server exists - // when set to true, all the subtasks will close within a few hundreds ms - close: Arc, - - // queue for messages received by child threads - messages: Arc>, - - // result of TcpListener::local_addr() - listening_addr: ListenAddr, -} - -enum Message { - Error(IoError), - NewRequest(Request), -} - -impl From for Message { - fn from(e: IoError) -> Message { - Message::Error(e) - } -} - -impl From for Message { - fn from(rq: Request) -> Message { - Message::NewRequest(rq) - } -} - -// this trait is to make sure that Server implements Share and Send -#[doc(hidden)] -trait MustBeShareDummy: Sync + Send {} -#[doc(hidden)] -impl MustBeShareDummy for Server {} - -pub struct IncomingRequests<'a> { - server: &'a Server, -} - -/// Represents the parameters required to create a server. -#[derive(Debug, Clone)] -pub struct ServerConfig { - /// The addresses to try to listen to. - pub addr: ConfigListenAddr, - - /// If `Some`, then the server will use SSL to encode the communications. - pub ssl: Option, -} - -/// Configuration of the server for SSL. -#[derive(Debug, Clone)] -pub struct SslConfig { - /// Contains the public certificate to send to clients. - pub certificate: Vec, - /// Contains the ultra-secret private key used to decode communications. - pub private_key: Vec, -} - -impl Server { - /// Shortcut for a simple server on a specific address. - #[inline] - pub fn http(addr: A) -> Result> - where - A: ToSocketAddrs, - { - Server::new(ServerConfig { - addr: ConfigListenAddr::from_socket_addrs(addr)?, - ssl: None, - }) - } - - /// Shortcut for an HTTPS server on a specific address. 
- #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - #[inline] - pub fn https( - addr: A, - config: SslConfig, - ) -> Result> - where - A: ToSocketAddrs, - { - Server::new(ServerConfig { - addr: ConfigListenAddr::from_socket_addrs(addr)?, - ssl: Some(config), - }) - } - - #[cfg(unix)] - #[inline] - /// Shortcut for a UNIX socket server at a specific path - pub fn http_unix( - path: &std::path::Path, - ) -> Result> { - Server::new(ServerConfig { - addr: ConfigListenAddr::unix_from_path(path), - ssl: None, - }) - } - - /// Builds a new server that listens on the specified address. - pub fn new(config: ServerConfig) -> Result> { - let listener = config.addr.bind()?; - Self::from_listener(listener, config.ssl) - } - - /// Builds a new server using the specified TCP listener. - /// - /// This is useful if you've constructed TcpListener using some less usual method - /// such as from systemd. For other cases, you probably want the `new()` function. - pub fn from_listener>( - listener: L, - ssl_config: Option, - ) -> Result> { - let listener = listener.into(); - // building the "close" variable - let close_trigger = Arc::new(AtomicBool::new(false)); - - // building the TcpListener - let (server, local_addr) = { - let local_addr = listener.local_addr()?; - log::debug!("Server listening on {}", local_addr); - (listener, local_addr) - }; - - // building the SSL capabilities - #[cfg(all(feature = "ssl-openssl", feature = "ssl-rustls"))] - compile_error!( - "Features 'ssl-openssl' and 'ssl-rustls' must not be enabled at the same time" - ); - #[cfg(not(any(feature = "ssl-openssl", feature = "ssl-rustls")))] - type SslContext = (); - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - type SslContext = crate::ssl::SslContextImpl; - let ssl: Option = { - match ssl_config { - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Some(config) => Some(SslContext::from_pem( - config.certificate, - Zeroizing::new(config.private_key), - )?), - 
#[cfg(not(any(feature = "ssl-openssl", feature = "ssl-rustls")))] - Some(_) => return Err( - "Building a server with SSL requires enabling the `ssl` feature in tiny-http" - .into(), - ), - None => None, - } - }; - - // creating a task where server.accept() is continuously called - // and ClientConnection objects are pushed in the messages queue - let messages = MessagesQueue::with_capacity(8); - - let inside_close_trigger = close_trigger.clone(); - let inside_messages = messages.clone(); - thread::spawn(move || { - // a tasks pool is used to dispatch the connections into threads - let tasks_pool = util::TaskPool::new(); - - log::debug!("Running accept thread"); - while !inside_close_trigger.load(Relaxed) { - let new_client = match server.accept() { - Ok((sock, _)) => { - use util::RefinedTcpStream; - let (read_closable, write_closable) = match ssl { - None => RefinedTcpStream::new(sock), - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Some(ref ssl) => { - // trying to apply SSL over the connection - // if an error occurs, we just close the socket and resume listening - let sock = match ssl.accept(sock) { - Ok(s) => s, - Err(_) => continue, - }; - - RefinedTcpStream::new(sock) - } - #[cfg(not(any(feature = "ssl-openssl", feature = "ssl-rustls")))] - Some(ref _ssl) => unreachable!(), - }; - - Ok(ClientConnection::new(write_closable, read_closable)) - } - Err(e) => Err(e), - }; - - match new_client { - Ok(client) => { - let messages = inside_messages.clone(); - let mut client = Some(client); - tasks_pool.spawn(Box::new(move || { - if let Some(client) = client.take() { - // Synchronization is needed for HTTPS requests to avoid a deadlock - if client.secure() { - let (sender, receiver) = mpsc::channel(); - for rq in client { - messages.push(rq.with_notify_sender(sender.clone()).into()); - receiver.recv().unwrap(); - } - } else { - for rq in client { - messages.push(rq.into()); - } - } - } - })); - } - - Err(e) => { - log::error!("Error accepting new 
client: {}", e); - inside_messages.push(e.into()); - break; - } - } - } - log::debug!("Terminating accept thread"); - }); - - // result - Ok(Server { - messages, - close: close_trigger, - listening_addr: local_addr, - }) - } - - /// Returns an iterator for all the incoming requests. - /// - /// The iterator will return `None` if the server socket is shutdown. - #[inline] - pub fn incoming_requests(&self) -> IncomingRequests<'_> { - IncomingRequests { server: self } - } - - /// Returns the address the server is listening to. - #[inline] - pub fn server_addr(&self) -> ListenAddr { - self.listening_addr.clone() - } - - /// Returns the number of clients currently connected to the server. - pub fn num_connections(&self) -> usize { - unimplemented!() - //self.requests_receiver.lock().len() - } - - /// Blocks until an HTTP request has been submitted and returns it. - pub fn recv(&self) -> IoResult { - match self.messages.pop() { - Some(Message::Error(err)) => Err(err), - Some(Message::NewRequest(rq)) => Ok(rq), - None => Err(IoError::new(IoErrorKind::Other, "thread unblocked")), - } - } - - /// Same as `recv()` but doesn't block longer than timeout - pub fn recv_timeout(&self, timeout: Duration) -> IoResult> { - match self.messages.pop_timeout(timeout) { - Some(Message::Error(err)) => Err(err), - Some(Message::NewRequest(rq)) => Ok(Some(rq)), - None => Ok(None), - } - } - - /// Same as `recv()` but doesn't block. - pub fn try_recv(&self) -> IoResult> { - match self.messages.try_pop() { - Some(Message::Error(err)) => Err(err), - Some(Message::NewRequest(rq)) => Ok(Some(rq)), - None => Ok(None), - } - } - - /// Unblock thread stuck in recv() or incoming_requests(). - /// If there are several such threads, only one is unblocked. - /// This method allows graceful shutdown of server. 
- pub fn unblock(&self) { - self.messages.unblock(); - } -} - -impl Iterator for IncomingRequests<'_> { - type Item = Request; - fn next(&mut self) -> Option { - self.server.recv().ok() - } -} - -impl Drop for Server { - fn drop(&mut self) { - self.close.store(true, Relaxed); - // Connect briefly to ourselves to unblock the accept thread - let maybe_stream = match &self.listening_addr { - ListenAddr::IP(addr) => TcpStream::connect(addr).map(Connection::from), - #[cfg(unix)] - ListenAddr::Unix(addr) => { - // TODO: use connect_addr when its stabilized. - let path = addr.as_pathname().unwrap(); - std::os::unix::net::UnixStream::connect(path).map(Connection::from) - } - }; - if let Ok(stream) = maybe_stream { - let _ = stream.shutdown(Shutdown::Both); - } - - #[cfg(unix)] - if let ListenAddr::Unix(addr) = &self.listening_addr { - if let Some(path) = addr.as_pathname() { - let _ = std::fs::remove_file(path); - } - } - } -} diff --git a/anneal/vendor/tiny_http/src/request.rs b/anneal/vendor/tiny_http/src/request.rs deleted file mode 100644 index 531d6aa658..0000000000 --- a/anneal/vendor/tiny_http/src/request.rs +++ /dev/null @@ -1,518 +0,0 @@ -use std::io::Error as IoError; -use std::io::{self, Cursor, ErrorKind, Read, Write}; - -use std::fmt; -use std::net::SocketAddr; -use std::str::FromStr; - -use std::sync::mpsc::Sender; - -use crate::util::{EqualReader, FusedReader}; -use crate::{HTTPVersion, Header, Method, Response, StatusCode}; -use chunked_transfer::Decoder; - -/// Represents an HTTP request made by a client. -/// -/// A `Request` object is what is produced by the server, and is your what -/// your code must analyse and answer. -/// -/// This object implements the `Send` trait, therefore you can dispatch your requests to -/// worker threads. -/// -/// # Pipelining -/// -/// If a client sends multiple requests in a row (without waiting for the response), then you will -/// get multiple `Request` objects simultaneously. This is called *requests pipelining*. 
-/// Tiny-http automatically reorders the responses so that you don't need to worry about the order -/// in which you call `respond` or `into_writer`. -/// -/// This mechanic is disabled if: -/// -/// - The body of a request is large enough (handling requires pipelining requires storing the -/// body of the request in a buffer ; if the body is too big, tiny-http will avoid doing that) -/// - A request sends a `Expect: 100-continue` header (which means that the client waits to -/// know whether its body will be processed before sending it) -/// - A request sends a `Connection: close` header or `Connection: upgrade` header (used for -/// websockets), which indicates that this is the last request that will be received on this -/// connection -/// -/// # Automatic cleanup -/// -/// If a `Request` object is destroyed without `into_writer` or `respond` being called, -/// an empty response with a 500 status code (internal server error) will automatically be -/// sent back to the client. -/// This means that if your code fails during the handling of a request, this "internal server -/// error" response will automatically be sent during the stack unwinding. -/// -/// # Testing -/// -/// If you want to build fake requests to test your server, use [`TestRequest`](crate::test::TestRequest). -pub struct Request { - // where to read the body from - data_reader: Option>, - - // if this writer is empty, then the request has been answered - response_writer: Option>, - - remote_addr: Option, - - // true if HTTPS, false if HTTP - secure: bool, - - method: Method, - - path: String, - - http_version: HTTPVersion, - - headers: Vec
, - - body_length: Option, - - // true if a `100 Continue` response must be sent when `as_reader()` is called - must_send_continue: bool, - - // If Some, a message must be sent after responding - notify_when_responded: Option>, -} - -struct NotifyOnDrop { - sender: Sender<()>, - inner: R, -} - -impl Read for NotifyOnDrop { - fn read(&mut self, buf: &mut [u8]) -> io::Result { - self.inner.read(buf) - } -} -impl Write for NotifyOnDrop { - fn write(&mut self, buf: &[u8]) -> io::Result { - self.inner.write(buf) - } - fn flush(&mut self) -> io::Result<()> { - self.inner.flush() - } -} -impl Drop for NotifyOnDrop { - fn drop(&mut self) { - self.sender.send(()).unwrap(); - } -} - -/// Error that can happen when building a `Request` object. -#[derive(Debug)] -pub enum RequestCreationError { - /// The client sent an `Expect` header that was not recognized by tiny-http. - ExpectationFailed, - - /// Error while reading data from the socket during the creation of the `Request`. - CreationIoError(IoError), -} - -impl From for RequestCreationError { - fn from(err: IoError) -> RequestCreationError { - RequestCreationError::CreationIoError(err) - } -} - -/// Builds a new request. -/// -/// After the request line and headers have been read from the socket, a new `Request` object -/// is built. -/// -/// You must pass a `Read` that will allow the `Request` object to read from the incoming data. -/// It is the responsibility of the `Request` to read only the data of the request and not further. -/// -/// The `Write` object will be used by the `Request` to write the response. -#[allow(clippy::too_many_arguments)] -pub fn new_request( - secure: bool, - method: Method, - path: String, - version: HTTPVersion, - headers: Vec
, - remote_addr: Option, - mut source_data: R, - writer: W, -) -> Result -where - R: Read + Send + 'static, - W: Write + Send + 'static, -{ - // finding the transfer-encoding header - let transfer_encoding = headers - .iter() - .find(|h: &&Header| h.field.equiv("Transfer-Encoding")) - .map(|h| h.value.clone()); - - // finding the content-length header - let content_length = if transfer_encoding.is_some() { - // if transfer-encoding is specified, the Content-Length - // header must be ignored (RFC2616 #4.4) - None - } else { - headers - .iter() - .find(|h: &&Header| h.field.equiv("Content-Length")) - .and_then(|h| FromStr::from_str(h.value.as_str()).ok()) - }; - - // true if the client sent a `Expect: 100-continue` header - let expects_continue = { - match headers - .iter() - .find(|h: &&Header| h.field.equiv("Expect")) - .map(|h| h.value.as_str()) - { - None => false, - Some(v) if v.eq_ignore_ascii_case("100-continue") => true, - _ => return Err(RequestCreationError::ExpectationFailed), - } - }; - - // true if the client sent a `Connection: upgrade` header - let connection_upgrade = { - match headers - .iter() - .find(|h: &&Header| h.field.equiv("Connection")) - .map(|h| h.value.as_str()) - { - Some(v) if v.to_ascii_lowercase().contains("upgrade") => true, - _ => false, - } - }; - - // we wrap `source_data` around a reading whose nature depends on the transfer-encoding and - // content-length headers - let reader = if connection_upgrade { - // if we have a `Connection: upgrade`, always keeping the whole reader - Box::new(source_data) as Box - } else if let Some(content_length) = content_length { - if content_length == 0 { - Box::new(io::empty()) as Box - } else if content_length <= 1024 && !expects_continue { - // if the content-length is small enough, we just read everything into a buffer - - let mut buffer = vec![0; content_length]; - let mut offset = 0; - - while offset != content_length { - let read = source_data.read(&mut buffer[offset..])?; - if read == 0 { - 
// the socket returned EOF, but we were before the expected content-length - // aborting - let info = "Connection has been closed before we received enough data"; - let err = IoError::new(ErrorKind::ConnectionAborted, info); - return Err(RequestCreationError::CreationIoError(err)); - } - - offset += read; - } - - Box::new(Cursor::new(buffer)) as Box - } else { - let (data_reader, _) = EqualReader::new(source_data, content_length); // TODO: - Box::new(FusedReader::new(data_reader)) as Box - } - } else if transfer_encoding.is_some() { - // if a transfer-encoding was specified, then "chunked" is ALWAYS applied - // over the message (RFC2616 #3.6) - Box::new(FusedReader::new(Decoder::new(source_data))) as Box - } else { - // if we have neither a Content-Length nor a Transfer-Encoding, - // assuming that we have no data - // TODO: could also be multipart/byteranges - Box::new(io::empty()) as Box - }; - - Ok(Request { - data_reader: Some(reader), - response_writer: Some(Box::new(writer) as Box), - remote_addr, - secure, - method, - path, - http_version: version, - headers, - body_length: content_length, - must_send_continue: expects_continue, - notify_when_responded: None, - }) -} - -impl Request { - /// Returns true if the request was made through HTTPS. - #[inline] - pub fn secure(&self) -> bool { - self.secure - } - - /// Returns the method requested by the client (eg. `GET`, `POST`, etc.). - #[inline] - pub fn method(&self) -> &Method { - &self.method - } - - /// Returns the resource requested by the client. - #[inline] - pub fn url(&self) -> &str { - &self.path - } - - /// Returns a list of all headers sent by the client. - #[inline] - pub fn headers(&self) -> &[Header] { - &self.headers - } - - /// Returns the HTTP version of the request. - #[inline] - pub fn http_version(&self) -> &HTTPVersion { - &self.http_version - } - - /// Returns the length of the body in bytes. - /// - /// Returns `None` if the length is unknown. 
- #[inline] - pub fn body_length(&self) -> Option { - self.body_length - } - - /// Returns the address of the client that sent this request. - /// - /// The address is always `Some` for TCP listeners, but always `None` for UNIX listeners - /// (as the remote address of a UNIX client is almost always unnamed). - /// - /// Note that this is gathered from the socket. If you receive the request from a proxy, - /// this function will return the address of the proxy and not the address of the actual - /// user. - #[inline] - pub fn remote_addr(&self) -> Option<&SocketAddr> { - self.remote_addr.as_ref() - } - - /// Sends a response with a `Connection: upgrade` header, then turns the `Request` into a `Stream`. - /// - /// The main purpose of this function is to support websockets. - /// If you detect that the request wants to use some kind of protocol upgrade, you can - /// call this function to obtain full control of the socket stream. - /// - /// If you call this on a non-websocket request, tiny-http will wait until this `Stream` object - /// is destroyed before continuing to read or write on the socket. Therefore you should always - /// destroy it as soon as possible. - pub fn upgrade( - mut self, - protocol: &str, - response: Response, - ) -> Box { - use crate::util::CustomStream; - - response - .raw_print( - self.response_writer.as_mut().unwrap().by_ref(), - self.http_version.clone(), - &self.headers, - false, - Some(protocol), - ) - .ok(); // TODO: unused result - - self.response_writer.as_mut().unwrap().flush().ok(); // TODO: unused result - - let stream = CustomStream::new(self.extract_reader_impl(), self.extract_writer_impl()); - if let Some(sender) = self.notify_when_responded.take() { - let stream = NotifyOnDrop { - sender, - inner: stream, - }; - Box::new(stream) as Box - } else { - Box::new(stream) as Box - } - } - - /// Allows to read the body of the request. 
- /// - /// # Example - /// - /// ```no_run - /// # extern crate rustc_serialize; - /// # extern crate tiny_http; - /// # use rustc_serialize::json::Json; - /// # use std::io::Read; - /// # fn get_content_type(_: &tiny_http::Request) -> &'static str { "" } - /// # fn main() { - /// # let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - /// let mut request = server.recv().unwrap(); - /// - /// if get_content_type(&request) == "application/json" { - /// let mut content = String::new(); - /// request.as_reader().read_to_string(&mut content).unwrap(); - /// let json: Json = content.parse().unwrap(); - /// } - /// # } - /// ``` - /// - /// If the client sent a `Expect: 100-continue` header with the request, calling this - /// function will send back a `100 Continue` response. - #[inline] - pub fn as_reader(&mut self) -> &mut dyn Read { - if self.must_send_continue { - let msg = Response::new_empty(StatusCode(100)); - msg.raw_print( - self.response_writer.as_mut().unwrap().by_ref(), - self.http_version.clone(), - &self.headers, - true, - None, - ) - .ok(); - self.response_writer.as_mut().unwrap().flush().ok(); - self.must_send_continue = false; - } - - self.data_reader.as_mut().unwrap() - } - - /// Turns the `Request` into a writer. - /// - /// The writer has a raw access to the stream to the user. - /// This function is useful for things like CGI. - /// - /// Note that the destruction of the `Writer` object may trigger - /// some events. For exemple if a client has sent multiple requests and the requests - /// have been processed in parallel, the destruction of a writer will trigger - /// the writing of the next response. - /// Therefore you should always destroy the `Writer` as soon as possible. 
- #[inline] - pub fn into_writer(mut self) -> Box { - let writer = self.extract_writer_impl(); - if let Some(sender) = self.notify_when_responded.take() { - let writer = NotifyOnDrop { - sender, - inner: writer, - }; - Box::new(writer) as Box - } else { - writer - } - } - - /// Extract the response `Writer` object from the Request, dropping this `Writer` has the same side effects - /// as the object returned by `into_writer` above. - /// - /// This may only be called once on a single request. - fn extract_writer_impl(&mut self) -> Box { - use std::mem; - - assert!(self.response_writer.is_some()); - - let mut writer = None; - mem::swap(&mut self.response_writer, &mut writer); - writer.unwrap() - } - - /// Extract the body `Reader` object from the Request. - /// - /// This may only be called once on a single request. - fn extract_reader_impl(&mut self) -> Box { - use std::mem; - - assert!(self.data_reader.is_some()); - - let mut reader = None; - mem::swap(&mut self.data_reader, &mut reader); - reader.unwrap() - } - - /// Sends a response to this request. 
- #[inline] - pub fn respond(mut self, response: Response) -> Result<(), IoError> - where - R: Read, - { - let res = self.respond_impl(response); - if let Some(sender) = self.notify_when_responded.take() { - sender.send(()).unwrap(); - } - res - } - - fn respond_impl(&mut self, response: Response) -> Result<(), IoError> - where - R: Read, - { - let mut writer = self.extract_writer_impl(); - - let do_not_send_body = self.method == Method::Head; - - Self::ignore_client_closing_errors(response.raw_print( - writer.by_ref(), - self.http_version.clone(), - &self.headers, - do_not_send_body, - None, - ))?; - - Self::ignore_client_closing_errors(writer.flush()) - } - - fn ignore_client_closing_errors(result: io::Result<()>) -> io::Result<()> { - result.or_else(|err| match err.kind() { - ErrorKind::BrokenPipe => Ok(()), - ErrorKind::ConnectionAborted => Ok(()), - ErrorKind::ConnectionRefused => Ok(()), - ErrorKind::ConnectionReset => Ok(()), - _ => Err(err), - }) - } - - pub(crate) fn with_notify_sender(mut self, sender: Sender<()>) -> Self { - self.notify_when_responded = Some(sender); - self - } -} - -impl fmt::Debug for Request { - fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> { - write!( - formatter, - "Request({} {} from {:?})", - self.method, self.path, self.remote_addr - ) - } -} - -impl Drop for Request { - fn drop(&mut self) { - if self.response_writer.is_some() { - let response = Response::empty(500); - let _ = self.respond_impl(response); // ignoring any potential error - if let Some(sender) = self.notify_when_responded.take() { - sender.send(()).unwrap(); - } - } - } -} - -/// Dummy trait that regroups the `Read` and `Write` traits. -/// -/// Automatically implemented on all types that implement both `Read` and `Write`. 
-pub trait ReadWrite: Read + Write {} -impl ReadWrite for T where T: Read + Write {} - -#[cfg(test)] -mod tests { - use super::Request; - - #[test] - fn must_be_send() { - #![allow(dead_code)] - fn f(_: &T) {} - fn bar(rq: &Request) { - f(rq); - } - } -} diff --git a/anneal/vendor/tiny_http/src/response.rs b/anneal/vendor/tiny_http/src/response.rs deleted file mode 100644 index aaedf5c76b..0000000000 --- a/anneal/vendor/tiny_http/src/response.rs +++ /dev/null @@ -1,574 +0,0 @@ -use crate::common::{HTTPVersion, Header, StatusCode}; -use httpdate::HttpDate; -use std::cmp::Ordering; -use std::sync::mpsc::Receiver; - -use std::io::Result as IoResult; -use std::io::{self, Cursor, Read, Write}; - -use std::fs::File; - -use std::str::FromStr; -use std::time::SystemTime; - -/// Object representing an HTTP response whose purpose is to be given to a `Request`. -/// -/// Some headers cannot be changed. Trying to define the value -/// of one of these will have no effect: -/// -/// - `Connection` -/// - `Trailer` -/// - `Transfer-Encoding` -/// - `Upgrade` -/// -/// Some headers have special behaviors: -/// -/// - `Content-Encoding`: If you define this header, the library -/// will assume that the data from the `Read` object has the specified encoding -/// and will just pass-through. -/// -/// - `Content-Length`: The length of the data should be set manually -/// using the `Reponse` object's API. Attempting to set the value of this -/// header will be equivalent to modifying the size of the data but the header -/// itself may not be present in the final result. -/// -/// - `Content-Type`: You may only set this header to one value at a time. If you -/// try to set it more than once, the existing value will be overwritten. This -/// behavior differs from the default for most headers, which is to allow them to -/// be set multiple times in the same response. -/// -pub struct Response { - reader: R, - status_code: StatusCode, - headers: Vec
, - data_length: Option, - chunked_threshold: Option, -} - -/// A `Response` without a template parameter. -pub type ResponseBox = Response>; - -/// Transfer encoding to use when sending the message. -/// Note that only *supported* encoding are listed here. -#[derive(Copy, Clone)] -enum TransferEncoding { - Identity, - Chunked, -} - -impl FromStr for TransferEncoding { - type Err = (); - - fn from_str(input: &str) -> Result { - if input.eq_ignore_ascii_case("identity") { - Ok(TransferEncoding::Identity) - } else if input.eq_ignore_ascii_case("chunked") { - Ok(TransferEncoding::Chunked) - } else { - Err(()) - } - } -} - -/// Builds a Date: header with the current date. -fn build_date_header() -> Header { - let d = HttpDate::from(SystemTime::now()); - Header::from_bytes(&b"Date"[..], &d.to_string().into_bytes()[..]).unwrap() -} - -fn write_message_header( - mut writer: W, - http_version: &HTTPVersion, - status_code: &StatusCode, - headers: &[Header], -) -> IoResult<()> -where - W: Write, -{ - // writing status line - write!( - &mut writer, - "HTTP/{}.{} {} {}\r\n", - http_version.0, - http_version.1, - status_code.0, - status_code.default_reason_phrase() - )?; - - // writing headers - for header in headers.iter() { - writer.write_all(header.field.as_str().as_ref())?; - write!(&mut writer, ": ")?; - writer.write_all(header.value.as_str().as_ref())?; - write!(&mut writer, "\r\n")?; - } - - // separator between header and data - write!(&mut writer, "\r\n")?; - - Ok(()) -} - -fn choose_transfer_encoding( - status_code: StatusCode, - request_headers: &[Header], - http_version: &HTTPVersion, - entity_length: &Option, - has_additional_headers: bool, - chunked_threshold: usize, -) -> TransferEncoding { - use crate::util; - - // HTTP 1.0 doesn't support other encoding - if *http_version <= (1, 0) { - return TransferEncoding::Identity; - } - - // Per section 3.3.1 of RFC7230: - // A server MUST NOT send a Transfer-Encoding header field in any response with a status code - // 
of 1xx (Informational) or 204 (No Content). - if status_code.0 < 200 || status_code.0 == 204 { - return TransferEncoding::Identity; - } - - // parsing the request's TE header - let user_request = request_headers - .iter() - // finding TE - .find(|h| h.field.equiv("TE")) - // getting its value - .map(|h| h.value.clone()) - // getting the corresponding TransferEncoding - .and_then(|value| { - // getting list of requested elements - let mut parse = util::parse_header_value(value.as_str()); // TODO: remove conversion - - // sorting elements by most priority - parse.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(Ordering::Equal)); - - // trying to parse each requested encoding - for value in parse.iter() { - // q=0 are ignored - if value.1 <= 0.0 { - continue; - } - - if let Ok(te) = TransferEncoding::from_str(value.0) { - return Some(te); - } - } - - // encoding not found - None - }); - - if let Some(user_request) = user_request { - return user_request; - } - - // if we have additional headers, using chunked - if has_additional_headers { - return TransferEncoding::Chunked; - } - - // if we don't have a Content-Length, or if the Content-Length is too big, using chunks writer - if entity_length - .as_ref() - .map_or(true, |val| *val >= chunked_threshold) - { - return TransferEncoding::Chunked; - } - - // Identity by default - TransferEncoding::Identity -} - -impl Response -where - R: Read, -{ - /// Creates a new Response object. - /// - /// The `additional_headers` argument is a receiver that - /// may provide headers even after the response has been sent. - /// - /// All the other arguments are straight-forward. - pub fn new( - status_code: StatusCode, - headers: Vec
, - data: R, - data_length: Option, - additional_headers: Option>, - ) -> Response { - let mut response = Response { - reader: data, - status_code, - headers: Vec::with_capacity(16), - data_length, - chunked_threshold: None, - }; - - for h in headers { - response.add_header(h) - } - - // dummy implementation - if let Some(additional_headers) = additional_headers { - for h in additional_headers.iter() { - response.add_header(h) - } - } - - response - } - - /// Set a threshold for `Content-Length` where we chose chunked - /// transfer. Notice that chunked transfer might happen regardless of - /// this threshold, for instance when the request headers indicate - /// it is wanted or when there is no `Content-Length`. - pub fn with_chunked_threshold(mut self, length: usize) -> Response { - self.chunked_threshold = Some(length); - self - } - - /// Convert the response into the underlying `Read` type. - /// - /// This is mainly useful for testing as it must consume the `Response`. - pub fn into_reader(self) -> R { - self.reader - } - - /// The current `Content-Length` threshold for switching over to - /// chunked transfer. The default is 32768 bytes. Notice that - /// chunked transfer is mutually exclusive with sending a - /// `Content-Length` header as per the HTTP spec. - pub fn chunked_threshold(&self) -> usize { - self.chunked_threshold.unwrap_or(32768) - } - - /// Adds a header to the list. - /// Does all the checks. - pub fn add_header(&mut self, header: H) - where - H: Into
, - { - let header = header.into(); - - // ignoring forbidden headers - if header.field.equiv("Connection") - || header.field.equiv("Trailer") - || header.field.equiv("Transfer-Encoding") - || header.field.equiv("Upgrade") - { - return; - } - - // if the header is Content-Length, setting the data length - if header.field.equiv("Content-Length") { - if let Ok(val) = usize::from_str(header.value.as_str()) { - self.data_length = Some(val) - } - - return; - // if the header is Content-Type and it's already set, overwrite it - } else if header.field.equiv("Content-Type") { - if let Some(content_type_header) = self - .headers - .iter_mut() - .find(|h| h.field.equiv("Content-Type")) - { - content_type_header.value = header.value; - return; - } - } - - self.headers.push(header); - } - - /// Returns the same request, but with an additional header. - /// - /// Some headers cannot be modified and some other have a - /// special behavior. See the documentation above. - #[inline] - pub fn with_header(mut self, header: H) -> Response - where - H: Into
, - { - self.add_header(header.into()); - self - } - - /// Returns the same request, but with a different status code. - #[inline] - pub fn with_status_code(mut self, code: S) -> Response - where - S: Into, - { - self.status_code = code.into(); - self - } - - /// Returns the same request, but with different data. - pub fn with_data(self, reader: S, data_length: Option) -> Response - where - S: Read, - { - Response { - reader, - headers: self.headers, - status_code: self.status_code, - data_length, - chunked_threshold: self.chunked_threshold, - } - } - - /// Prints the HTTP response to a writer. - /// - /// This function is the one used to send the response to the client's socket. - /// Therefore you shouldn't expect anything pretty-printed or even readable. - /// - /// The HTTP version and headers passed as arguments are used to - /// decide which features (most notably, encoding) to use. - /// - /// Note: does not flush the writer. - pub fn raw_print( - mut self, - mut writer: W, - http_version: HTTPVersion, - request_headers: &[Header], - do_not_send_body: bool, - upgrade: Option<&str>, - ) -> IoResult<()> { - let mut transfer_encoding = Some(choose_transfer_encoding( - self.status_code, - request_headers, - &http_version, - &self.data_length, - false, /* TODO */ - self.chunked_threshold(), - )); - - // add `Date` if not in the headers - if !self.headers.iter().any(|h| h.field.equiv("Date")) { - self.headers.insert(0, build_date_header()); - } - - // add `Server` if not in the headers - if !self.headers.iter().any(|h| h.field.equiv("Server")) { - self.headers.insert( - 0, - Header::from_bytes(&b"Server"[..], &b"tiny-http (Rust)"[..]).unwrap(), - ); - } - - // handling upgrade - if let Some(upgrade) = upgrade { - self.headers.insert( - 0, - Header::from_bytes(&b"Upgrade"[..], upgrade.as_bytes()).unwrap(), - ); - self.headers.insert( - 0, - Header::from_bytes(&b"Connection"[..], &b"upgrade"[..]).unwrap(), - ); - transfer_encoding = None; - } - - // if the transfer 
encoding is identity, the content length must be known ; therefore if - // we don't know it, we buffer the entire response first here - // while this is an expensive operation, it is only ever needed for clients using HTTP 1.0 - let (mut reader, data_length): (Box, _) = - match (self.data_length, transfer_encoding) { - (Some(l), _) => (Box::new(self.reader), Some(l)), - (None, Some(TransferEncoding::Identity)) => { - let mut buf = Vec::new(); - self.reader.read_to_end(&mut buf)?; - let l = buf.len(); - (Box::new(Cursor::new(buf)), Some(l)) - } - _ => (Box::new(self.reader), None), - }; - - // checking whether to ignore the body of the response - let do_not_send_body = do_not_send_body - || match self.status_code.0 { - // status code 1xx, 204 and 304 MUST not include a body - 100..=199 | 204 | 304 => true, - _ => false, - }; - - // preparing headers for transfer - match transfer_encoding { - Some(TransferEncoding::Chunked) => self - .headers - .push(Header::from_bytes(&b"Transfer-Encoding"[..], &b"chunked"[..]).unwrap()), - - Some(TransferEncoding::Identity) => { - assert!(data_length.is_some()); - let data_length = data_length.unwrap(); - - self.headers.push( - Header::from_bytes( - &b"Content-Length"[..], - format!("{}", data_length).as_bytes(), - ) - .unwrap(), - ) - } - - _ => (), - }; - - // sending headers - write_message_header( - writer.by_ref(), - &http_version, - &self.status_code, - &self.headers, - )?; - - // sending the body - if !do_not_send_body { - match transfer_encoding { - Some(TransferEncoding::Chunked) => { - use chunked_transfer::Encoder; - - let mut writer = Encoder::new(writer); - io::copy(&mut reader, &mut writer)?; - } - - Some(TransferEncoding::Identity) => { - assert!(data_length.is_some()); - let data_length = data_length.unwrap(); - - if data_length >= 1 { - io::copy(&mut reader, &mut writer)?; - } - } - - _ => (), - } - } - - Ok(()) - } - - /// Retrieves the current value of the `Response` status code - pub fn status_code(&self) -> 
StatusCode { - self.status_code - } - - /// Retrieves the current value of the `Response` data length - pub fn data_length(&self) -> Option { - self.data_length - } - - /// Retrieves the current list of `Response` headers - pub fn headers(&self) -> &[Header] { - &self.headers - } -} - -impl Response -where - R: Read + Send + 'static, -{ - /// Turns this response into a `Response>`. - pub fn boxed(self) -> ResponseBox { - Response { - reader: Box::new(self.reader) as Box, - status_code: self.status_code, - headers: self.headers, - data_length: self.data_length, - chunked_threshold: self.chunked_threshold, - } - } -} - -impl Response { - /// Builds a new `Response` from a `File`. - /// - /// The `Content-Type` will **not** be automatically detected, - /// you must set it yourself. - pub fn from_file(file: File) -> Response { - let file_size = file.metadata().ok().map(|v| v.len() as usize); - - Response::new( - StatusCode(200), - Vec::with_capacity(0), - file, - file_size, - None, - ) - } -} - -impl Response>> { - pub fn from_data(data: D) -> Response>> - where - D: Into>, - { - let data = data.into(); - let data_len = data.len(); - - Response::new( - StatusCode(200), - Vec::with_capacity(0), - Cursor::new(data), - Some(data_len), - None, - ) - } - - pub fn from_string(data: S) -> Response>> - where - S: Into, - { - let data = data.into(); - let data_len = data.len(); - - Response::new( - StatusCode(200), - vec![ - Header::from_bytes(&b"Content-Type"[..], &b"text/plain; charset=UTF-8"[..]) - .unwrap(), - ], - Cursor::new(data.into_bytes()), - Some(data_len), - None, - ) - } -} - -impl Response { - /// Builds an empty `Response` with the given status code. - pub fn empty(status_code: S) -> Response - where - S: Into, - { - Response::new( - status_code.into(), - Vec::with_capacity(0), - io::empty(), - Some(0), - None, - ) - } - - /// DEPRECATED. Use `empty` instead. 
- pub fn new_empty(status_code: StatusCode) -> Response { - Response::empty(status_code) - } -} - -impl Clone for Response { - fn clone(&self) -> Response { - Response { - reader: io::empty(), - status_code: self.status_code, - headers: self.headers.clone(), - data_length: self.data_length, - chunked_threshold: self.chunked_threshold, - } - } -} diff --git a/anneal/vendor/tiny_http/src/ssl.rs b/anneal/vendor/tiny_http/src/ssl.rs deleted file mode 100644 index d4a8f1b914..0000000000 --- a/anneal/vendor/tiny_http/src/ssl.rs +++ /dev/null @@ -1,20 +0,0 @@ -//! Modules providing SSL/TLS implementations. For backwards compatibility, OpenSSL is the default -//! implementation, but Rustls is highly recommended as a pure Rust alternative. -//! -//! In order to simplify the swappable implementations these SSL/TLS modules adhere to an implicit -//! trait contract and specific implementations are re-exported as [`SslContextImpl`] and [`SslStream`]. -//! The concrete type of these aliases will depend on which module you enable in `Cargo.toml`. 
- -#[cfg(feature = "ssl-openssl")] -pub(crate) mod openssl; -#[cfg(feature = "ssl-openssl")] -pub(crate) use self::openssl::OpenSslContext as SslContextImpl; -#[cfg(feature = "ssl-openssl")] -pub(crate) use self::openssl::SplitOpenSslStream as SslStream; - -#[cfg(feature = "ssl-rustls")] -pub(crate) mod rustls; -#[cfg(feature = "ssl-rustls")] -pub(crate) use self::rustls::RustlsContext as SslContextImpl; -#[cfg(feature = "ssl-rustls")] -pub(crate) use self::rustls::RustlsStream as SslStream; diff --git a/anneal/vendor/tiny_http/src/ssl/openssl.rs b/anneal/vendor/tiny_http/src/ssl/openssl.rs deleted file mode 100644 index 55d4650096..0000000000 --- a/anneal/vendor/tiny_http/src/ssl/openssl.rs +++ /dev/null @@ -1,110 +0,0 @@ -use crate::connection::Connection; -use crate::util::refined_tcp_stream::Stream as RefinedStream; -use std::error::Error; -use std::io::{Read, Write}; -use std::net::{Shutdown, SocketAddr}; -use std::sync::{Arc, Mutex}; -use zeroize::Zeroizing; - -pub(crate) struct OpenSslStream { - inner: openssl::ssl::SslStream, -} - -/// An OpenSSL stream which has been split into two mutually exclusive streams (e.g. 
for read / write) -pub(crate) struct SplitOpenSslStream(Arc>); - -// These struct methods form the implict contract for swappable TLS implementations -impl SplitOpenSslStream { - pub(crate) fn peer_addr(&mut self) -> std::io::Result> { - self.0.lock().unwrap().inner.get_mut().peer_addr() - } - - pub(crate) fn shutdown(&mut self, how: Shutdown) -> std::io::Result<()> { - self.0.lock().unwrap().inner.get_mut().shutdown(how) - } -} - -impl Clone for SplitOpenSslStream { - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} - -impl Read for SplitOpenSslStream { - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { - self.0.lock().unwrap().read(buf) - } -} - -impl Write for SplitOpenSslStream { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - self.0.lock().unwrap().write(buf) - } - - fn flush(&mut self) -> std::io::Result<()> { - self.0.lock().unwrap().flush() - } -} - -impl Read for OpenSslStream { - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { - self.inner.read(buf) - } -} - -impl Write for OpenSslStream { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - self.inner.write(buf) - } - - fn flush(&mut self) -> std::io::Result<()> { - self.inner.flush() - } -} - -pub(crate) struct OpenSslContext(openssl::ssl::SslContext); - -impl OpenSslContext { - pub fn from_pem( - certificates: Vec, - private_key: Zeroizing>, - ) -> Result> { - use openssl::pkey::PKey; - use openssl::ssl::{self, SslVerifyMode}; - use openssl::x509::X509; - - let mut ctx = openssl::ssl::SslContext::builder(ssl::SslMethod::tls())?; - ctx.set_cipher_list("DEFAULT")?; - let certificate_chain = X509::stack_from_pem(&certificates)?; - if certificate_chain.is_empty() { - return Err("Couldn't extract certificate chain from config.".into()); - } - // The leaf certificate must always be first in the PEM file - ctx.set_certificate(&certificate_chain[0])?; - for chain_cert in certificate_chain.into_iter().skip(1) { - ctx.add_extra_chain_cert(chain_cert)?; - } - let key = 
PKey::private_key_from_pem(&private_key)?; - ctx.set_private_key(&key)?; - ctx.set_verify(SslVerifyMode::NONE); - ctx.check_private_key()?; - - Ok(Self(ctx.build())) - } - - pub fn accept( - &self, - stream: Connection, - ) -> Result> { - use openssl::ssl::Ssl; - let session = Ssl::new(&self.0).expect("Failed to create new OpenSSL session"); - let stream = session.accept(stream)?; - Ok(OpenSslStream { inner: stream }) - } -} - -impl From for RefinedStream { - fn from(stream: OpenSslStream) -> Self { - RefinedStream::Https(SplitOpenSslStream(Arc::new(Mutex::new(stream)))) - } -} diff --git a/anneal/vendor/tiny_http/src/ssl/rustls.rs b/anneal/vendor/tiny_http/src/ssl/rustls.rs deleted file mode 100644 index ccc6fdf466..0000000000 --- a/anneal/vendor/tiny_http/src/ssl/rustls.rs +++ /dev/null @@ -1,120 +0,0 @@ -use crate::connection::Connection; -use crate::util::refined_tcp_stream::Stream as RefinedStream; -use std::error::Error; -use std::io::{Read, Write}; -use std::net::{Shutdown, SocketAddr}; -use std::sync::{Arc, Mutex}; -use zeroize::Zeroizing; - -/// A wrapper around an owned Rustls connection and corresponding stream. -/// -/// Uses an internal Mutex to permit disparate reader & writer threads to access the stream independently. 
-pub(crate) struct RustlsStream( - Arc>>, -); - -impl RustlsStream { - pub(crate) fn peer_addr(&mut self) -> std::io::Result> { - self.0 - .lock() - .expect("Failed to lock SSL stream mutex") - .sock - .peer_addr() - } - - pub(crate) fn shutdown(&mut self, how: Shutdown) -> std::io::Result<()> { - self.0 - .lock() - .expect("Failed to lock SSL stream mutex") - .sock - .shutdown(how) - } -} - -impl Clone for RustlsStream { - fn clone(&self) -> Self { - Self(self.0.clone()) - } -} - -impl Read for RustlsStream { - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { - self.0 - .lock() - .expect("Failed to lock SSL stream mutex") - .read(buf) - } -} - -impl Write for RustlsStream { - fn write(&mut self, buf: &[u8]) -> std::io::Result { - self.0 - .lock() - .expect("Failed to lock SSL stream mutex") - .write(buf) - } - - fn flush(&mut self) -> std::io::Result<()> { - self.0 - .lock() - .expect("Failed to lock SSL stream mutex") - .flush() - } -} - -pub(crate) struct RustlsContext(Arc); - -impl RustlsContext { - pub(crate) fn from_pem( - certificates: Vec, - private_key: Zeroizing>, - ) -> Result> { - let certificate_chain: Vec = - rustls_pemfile::certs(&mut certificates.as_slice())? 
- .into_iter() - .map(|bytes| rustls::Certificate(bytes)) - .collect(); - - if certificate_chain.is_empty() { - return Err("Couldn't extract certificate chain from config.".into()); - } - - let private_key = rustls::PrivateKey({ - let pkcs8_keys = rustls_pemfile::pkcs8_private_keys( - &mut private_key.clone().as_slice(), - ) - .expect("file contains invalid pkcs8 private key (encrypted keys are not supported)"); - - if let Some(pkcs8_key) = pkcs8_keys.first() { - pkcs8_key.clone() - } else { - let rsa_keys = rustls_pemfile::rsa_private_keys(&mut private_key.as_slice()) - .expect("file contains invalid rsa private key"); - rsa_keys[0].clone() - } - }); - - let tls_conf = rustls::ServerConfig::builder() - .with_safe_defaults() - .with_no_client_auth() - .with_single_cert(certificate_chain, private_key)?; - - Ok(Self(Arc::new(tls_conf))) - } - - pub(crate) fn accept( - &self, - stream: Connection, - ) -> Result> { - let connection = rustls::ServerConnection::new(self.0.clone())?; - Ok(RustlsStream(Arc::new(Mutex::new( - rustls::StreamOwned::new(connection, stream), - )))) - } -} - -impl From for RefinedStream { - fn from(stream: RustlsStream) -> Self { - Self::Https(stream) - } -} diff --git a/anneal/vendor/tiny_http/src/test.rs b/anneal/vendor/tiny_http/src/test.rs deleted file mode 100644 index 996f025678..0000000000 --- a/anneal/vendor/tiny_http/src/test.rs +++ /dev/null @@ -1,127 +0,0 @@ -use crate::{request::new_request, HTTPVersion, Header, HeaderField, Method, Request}; -use ascii::AsciiString; -use std::net::SocketAddr; -use std::str::FromStr; - -/// A simpler version of [`Request`] that is useful for testing. No data actually goes anywhere. -/// -/// By default, `TestRequest` pretends to be an insecure GET request for the server root (`/`) -/// with no headers. 
To create a `TestRequest` with different parameters, use the builder pattern: -/// -/// ``` -/// # use tiny_http::{Method, TestRequest}; -/// let request = TestRequest::new() -/// .with_method(Method::Post) -/// .with_path("/api/widgets") -/// .with_body("42"); -/// ``` -/// -/// Then, convert the `TestRequest` into a real `Request` and pass it to the server under test: -/// -/// ``` -/// # use tiny_http::{Method, Request, Response, Server, StatusCode, TestRequest}; -/// # use std::io::Cursor; -/// # let request = TestRequest::new() -/// # .with_method(Method::Post) -/// # .with_path("/api/widgets") -/// # .with_body("42"); -/// # struct TestServer { -/// # listener: Server, -/// # } -/// # let server = TestServer { -/// # listener: Server::http("0.0.0.0:0").unwrap(), -/// # }; -/// # impl TestServer { -/// # fn handle_request(&self, request: Request) -> Response>> { -/// # Response::from_string("test") -/// # } -/// # } -/// let response = server.handle_request(request.into()); -/// assert_eq!(response.status_code(), StatusCode(200)); -/// ``` -pub struct TestRequest { - body: &'static str, - remote_addr: SocketAddr, - // true if HTTPS, false if HTTP - secure: bool, - method: Method, - path: String, - http_version: HTTPVersion, - headers: Vec
, -} - -impl From for Request { - fn from(mut mock: TestRequest) -> Request { - // if the user didn't set the Content-Length header, then set it for them - // otherwise, leave it alone (it may be under test) - if !mock - .headers - .iter_mut() - .any(|h| h.field.equiv("Content-Length")) - { - mock.headers.push(Header { - field: HeaderField::from_str("Content-Length").unwrap(), - value: AsciiString::from_ascii(mock.body.len().to_string()).unwrap(), - }); - } - new_request( - mock.secure, - mock.method, - mock.path, - mock.http_version, - mock.headers, - Some(mock.remote_addr), - mock.body.as_bytes(), - std::io::sink(), - ) - .unwrap() - } -} - -impl Default for TestRequest { - fn default() -> Self { - TestRequest { - body: "", - remote_addr: "127.0.0.1:23456".parse().unwrap(), - secure: false, - method: Method::Get, - path: "/".to_string(), - http_version: HTTPVersion::from((1, 1)), - headers: Vec::new(), - } - } -} - -impl TestRequest { - pub fn new() -> Self { - TestRequest::default() - } - pub fn with_body(mut self, body: &'static str) -> Self { - self.body = body; - self - } - pub fn with_remote_addr(mut self, remote_addr: SocketAddr) -> Self { - self.remote_addr = remote_addr; - self - } - pub fn with_https(mut self) -> Self { - self.secure = true; - self - } - pub fn with_method(mut self, method: Method) -> Self { - self.method = method; - self - } - pub fn with_path(mut self, path: &str) -> Self { - self.path = path.to_string(); - self - } - pub fn with_http_version(mut self, version: HTTPVersion) -> Self { - self.http_version = version; - self - } - pub fn with_header(mut self, header: Header) -> Self { - self.headers.push(header); - self - } -} diff --git a/anneal/vendor/tiny_http/src/util/custom_stream.rs b/anneal/vendor/tiny_http/src/util/custom_stream.rs deleted file mode 100644 index ac00b9efc7..0000000000 --- a/anneal/vendor/tiny_http/src/util/custom_stream.rs +++ /dev/null @@ -1,39 +0,0 @@ -use std::io::Result as IoResult; -use std::io::{Read, Write}; 
- -pub struct CustomStream { - reader: R, - writer: W, -} - -impl CustomStream -where - R: Read, - W: Write, -{ - pub fn new(reader: R, writer: W) -> CustomStream { - CustomStream { reader, writer } - } -} - -impl Read for CustomStream -where - R: Read, -{ - fn read(&mut self, buf: &mut [u8]) -> IoResult { - self.reader.read(buf) - } -} - -impl Write for CustomStream -where - W: Write, -{ - fn write(&mut self, buf: &[u8]) -> IoResult { - self.writer.write(buf) - } - - fn flush(&mut self) -> IoResult<()> { - self.writer.flush() - } -} diff --git a/anneal/vendor/tiny_http/src/util/equal_reader.rs b/anneal/vendor/tiny_http/src/util/equal_reader.rs deleted file mode 100644 index 1305bc5fd7..0000000000 --- a/anneal/vendor/tiny_http/src/util/equal_reader.rs +++ /dev/null @@ -1,131 +0,0 @@ -use std::io::Read; -use std::io::Result as IoResult; -use std::sync::mpsc::channel; -use std::sync::mpsc::{Receiver, Sender}; - -/// A `Reader` that reads exactly the number of bytes from a sub-reader. -/// -/// If the limit is reached, it returns EOF. If the limit is not reached -/// when the destructor is called, the remaining bytes will be read and -/// thrown away. 
-pub struct EqualReader -where - R: Read, -{ - reader: R, - size: usize, - last_read_signal: Sender>, -} - -impl EqualReader -where - R: Read, -{ - pub fn new(reader: R, size: usize) -> (EqualReader, Receiver>) { - let (tx, rx) = channel(); - - let r = EqualReader { - reader, - size, - last_read_signal: tx, - }; - - (r, rx) - } -} - -impl Read for EqualReader -where - R: Read, -{ - fn read(&mut self, buf: &mut [u8]) -> IoResult { - if self.size == 0 { - return Ok(0); - } - - let buf = if buf.len() < self.size { - buf - } else { - &mut buf[..self.size] - }; - - match self.reader.read(buf) { - Ok(len) => { - self.size -= len; - Ok(len) - } - err @ Err(_) => err, - } - } -} - -impl Drop for EqualReader -where - R: Read, -{ - fn drop(&mut self) { - let mut remaining_to_read = self.size; - - while remaining_to_read > 0 { - let mut buf = vec![0; remaining_to_read]; - - match self.reader.read(&mut buf) { - Err(e) => { - self.last_read_signal.send(Err(e)).ok(); - break; - } - Ok(0) => { - self.last_read_signal.send(Ok(())).ok(); - break; - } - Ok(other) => { - remaining_to_read -= other; - } - } - } - } -} - -#[cfg(test)] -mod tests { - use super::EqualReader; - use std::io::Read; - - #[test] - fn test_limit() { - use std::io::Cursor; - - let mut org_reader = Cursor::new("hello world".to_string().into_bytes()); - - { - let (mut equal_reader, _) = EqualReader::new(org_reader.by_ref(), 5); - - let mut string = String::new(); - equal_reader.read_to_string(&mut string).unwrap(); - assert_eq!(string, "hello"); - } - - let mut string = String::new(); - org_reader.read_to_string(&mut string).unwrap(); - assert_eq!(string, " world"); - } - - #[test] - fn test_not_enough() { - use std::io::Cursor; - - let mut org_reader = Cursor::new("hello world".to_string().into_bytes()); - - { - let (mut equal_reader, _) = EqualReader::new(org_reader.by_ref(), 5); - - let mut vec = [0]; - equal_reader.read_exact(&mut vec).unwrap(); - assert_eq!(vec[0], b'h'); - } - - let mut string = 
String::new(); - org_reader.read_to_string(&mut string).unwrap(); - assert_eq!(string, " world"); - } -} diff --git a/anneal/vendor/tiny_http/src/util/fused_reader.rs b/anneal/vendor/tiny_http/src/util/fused_reader.rs deleted file mode 100644 index 6300387946..0000000000 --- a/anneal/vendor/tiny_http/src/util/fused_reader.rs +++ /dev/null @@ -1,48 +0,0 @@ -use std::io::{IoSliceMut, Read, Result as IoResult}; - -/// Wraps another reader and provides "fused" behavior. -/// When the underlying reader reaches EOF, it is dropped -/// and the fused reader becomes an empty stub. -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct FusedReader { - inner: Option, -} - -impl FusedReader { - pub fn new(inner: R) -> Self { - Self { inner: Some(inner) } - } - - #[allow(dead_code)] - pub fn into_inner(self) -> Option { - self.inner - } -} - -impl Read for FusedReader { - fn read(&mut self, buf: &mut [u8]) -> IoResult { - match &mut self.inner { - Some(r) => { - let l = r.read(buf)?; - if l == 0 { - self.inner = None; - } - Ok(l) - } - None => Ok(0), - } - } - - fn read_vectored(&mut self, bufs: &mut [IoSliceMut<'_>]) -> IoResult { - match &mut self.inner { - Some(r) => { - let l = r.read_vectored(bufs)?; - if l == 0 { - self.inner = None; - } - Ok(l) - } - None => Ok(0), - } - } -} diff --git a/anneal/vendor/tiny_http/src/util/messages_queue.rs b/anneal/vendor/tiny_http/src/util/messages_queue.rs deleted file mode 100644 index d94e7dbb74..0000000000 --- a/anneal/vendor/tiny_http/src/util/messages_queue.rs +++ /dev/null @@ -1,96 +0,0 @@ -use std::collections::VecDeque; -use std::sync::{Arc, Condvar, Mutex}; -use std::time::{Duration, Instant}; - -enum Control { - Elem(T), - Unblock, -} - -pub struct MessagesQueue -where - T: Send, -{ - queue: Mutex>>, - condvar: Condvar, -} - -impl MessagesQueue -where - T: Send, -{ - pub fn with_capacity(capacity: usize) -> Arc> { - Arc::new(MessagesQueue { - queue: Mutex::new(VecDeque::with_capacity(capacity)), - condvar: 
Condvar::new(), - }) - } - - /// Pushes an element to the queue. - pub fn push(&self, value: T) { - let mut queue = self.queue.lock().unwrap(); - queue.push_back(Control::Elem(value)); - self.condvar.notify_one(); - } - - /// Unblock one thread stuck in pop loop. - pub fn unblock(&self) { - let mut queue = self.queue.lock().unwrap(); - queue.push_back(Control::Unblock); - self.condvar.notify_one(); - } - - /// Pops an element. Blocks until one is available. - /// Returns None in case unblock() was issued. - pub fn pop(&self) -> Option { - let mut queue = self.queue.lock().unwrap(); - - loop { - match queue.pop_front() { - Some(Control::Elem(value)) => return Some(value), - Some(Control::Unblock) => return None, - None => (), - } - - queue = self.condvar.wait(queue).unwrap(); - } - } - - /// Tries to pop an element without blocking. - pub fn try_pop(&self) -> Option { - let mut queue = self.queue.lock().unwrap(); - match queue.pop_front() { - Some(Control::Elem(value)) => Some(value), - Some(Control::Unblock) | None => None, - } - } - - /// Tries to pop an element without blocking - /// more than the specified timeout duration - /// or unblock() was issued - pub fn pop_timeout(&self, timeout: Duration) -> Option { - let mut queue = self.queue.lock().unwrap(); - let mut duration = timeout; - loop { - match queue.pop_front() { - Some(Control::Elem(value)) => return Some(value), - Some(Control::Unblock) => return None, - None => (), - } - let now = Instant::now(); - let (_queue, result) = self.condvar.wait_timeout(queue, timeout).unwrap(); - queue = _queue; - let sleep_time = now.elapsed(); - duration = if duration > sleep_time { - duration - sleep_time - } else { - Duration::from_millis(0) - }; - if result.timed_out() - || (duration.as_secs() == 0 && duration.subsec_nanos() < 1_000_000) - { - return None; - } - } - } -} diff --git a/anneal/vendor/tiny_http/src/util/mod.rs b/anneal/vendor/tiny_http/src/util/mod.rs deleted file mode 100644 index 4fb2aca5c3..0000000000 
--- a/anneal/vendor/tiny_http/src/util/mod.rs +++ /dev/null @@ -1,64 +0,0 @@ -pub use self::custom_stream::CustomStream; -pub use self::equal_reader::EqualReader; -pub use self::fused_reader::FusedReader; -pub use self::messages_queue::MessagesQueue; -pub use self::refined_tcp_stream::RefinedTcpStream; -pub use self::sequential::{SequentialReader, SequentialReaderBuilder}; -pub use self::sequential::{SequentialWriter, SequentialWriterBuilder}; -pub use self::task_pool::TaskPool; - -use std::str::FromStr; - -mod custom_stream; -mod equal_reader; -mod fused_reader; -mod messages_queue; -pub(crate) mod refined_tcp_stream; -mod sequential; -mod task_pool; - -/// Parses a the value of a header. -/// Suitable for `Accept-*`, `TE`, etc. -/// -/// For example with `text/plain, image/png; q=1.5` this function would -/// return `[ ("text/plain", 1.0), ("image/png", 1.5) ]` -pub fn parse_header_value(input: &str) -> Vec<(&str, f32)> { - input - .split(',') - .filter_map(|elem| { - let mut params = elem.split(';'); - - let t = params.next()?; - - let mut value = 1.0_f32; - - for p in params { - if p.trim_start().starts_with("q=") { - if let Ok(val) = f32::from_str(p.trim_start()[2..].trim()) { - value = val; - break; - } - } - } - - Some((t.trim(), value)) - }) - .collect() -} - -#[cfg(test)] -mod test { - #[test] - #[allow(clippy::float_cmp)] - fn test_parse_header() { - let result = super::parse_header_value("text/html, text/plain; q=1.5 , image/png ; q=2.0"); - - assert_eq!(result.len(), 3); - assert_eq!(result[0].0, "text/html"); - assert_eq!(result[0].1, 1.0); - assert_eq!(result[1].0, "text/plain"); - assert_eq!(result[1].1, 1.5); - assert_eq!(result[2].0, "image/png"); - assert_eq!(result[2].1, 2.0); - } -} diff --git a/anneal/vendor/tiny_http/src/util/refined_tcp_stream.rs b/anneal/vendor/tiny_http/src/util/refined_tcp_stream.rs deleted file mode 100644 index 875fbe2fba..0000000000 --- a/anneal/vendor/tiny_http/src/util/refined_tcp_stream.rs +++ /dev/null @@ -1,152 
+0,0 @@ -use std::io::Result as IoResult; -use std::io::{Read, Write}; -use std::net::{Shutdown, SocketAddr}; - -use crate::connection::Connection; -#[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] -use crate::ssl::SslStream; - -pub(crate) enum Stream { - Http(Connection), - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Https(SslStream), -} - -impl Clone for Stream { - fn clone(&self) -> Self { - match self { - Stream::Http(tcp_stream) => Stream::Http(tcp_stream.try_clone().unwrap()), - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Stream::Https(ssl_stream) => Stream::Https(ssl_stream.clone()), - } - } -} - -impl From for Stream { - fn from(tcp_stream: Connection) -> Self { - Stream::Http(tcp_stream) - } -} - -impl Stream { - fn secure(&self) -> bool { - match self { - Stream::Http(_) => false, - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Stream::Https(_) => true, - } - } - - fn peer_addr(&mut self) -> IoResult> { - match self { - Stream::Http(tcp_stream) => tcp_stream.peer_addr(), - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Stream::Https(ssl_stream) => ssl_stream.peer_addr(), - } - } - - fn shutdown(&mut self, how: Shutdown) -> IoResult<()> { - match self { - Stream::Http(tcp_stream) => tcp_stream.shutdown(how), - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Stream::Https(ssl_stream) => ssl_stream.shutdown(how), - } - } -} - -impl Read for Stream { - fn read(&mut self, buf: &mut [u8]) -> IoResult { - match self { - Stream::Http(tcp_stream) => tcp_stream.read(buf), - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Stream::Https(ssl_stream) => ssl_stream.read(buf), - } - } -} - -impl Write for Stream { - fn write(&mut self, buf: &[u8]) -> IoResult { - match self { - Stream::Http(tcp_stream) => tcp_stream.write(buf), - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Stream::Https(ssl_stream) => ssl_stream.write(buf), - } - } - - fn 
flush(&mut self) -> IoResult<()> { - match self { - Stream::Http(tcp_stream) => tcp_stream.flush(), - #[cfg(any(feature = "ssl-openssl", feature = "ssl-rustls"))] - Stream::Https(ssl_stream) => ssl_stream.flush(), - } - } -} - -pub struct RefinedTcpStream { - stream: Stream, - close_read: bool, - close_write: bool, -} - -impl RefinedTcpStream { - pub(crate) fn new(stream: S) -> (RefinedTcpStream, RefinedTcpStream) - where - S: Into, - { - let stream: Stream = stream.into(); - - let (read, write) = (stream.clone(), stream); - - let read = RefinedTcpStream { - stream: read, - close_read: true, - close_write: false, - }; - - let write = RefinedTcpStream { - stream: write, - close_read: false, - close_write: true, - }; - - (read, write) - } - - /// Returns true if this struct wraps around a secure connection. - #[inline] - pub(crate) fn secure(&self) -> bool { - self.stream.secure() - } - - pub(crate) fn peer_addr(&mut self) -> IoResult> { - self.stream.peer_addr() - } -} - -impl Drop for RefinedTcpStream { - fn drop(&mut self) { - if self.close_read { - self.stream.shutdown(Shutdown::Read).ok(); - } - - if self.close_write { - self.stream.shutdown(Shutdown::Write).ok(); - } - } -} - -impl Read for RefinedTcpStream { - fn read(&mut self, buf: &mut [u8]) -> IoResult { - self.stream.read(buf) - } -} - -impl Write for RefinedTcpStream { - fn write(&mut self, buf: &[u8]) -> IoResult { - self.stream.write(buf) - } - - fn flush(&mut self) -> IoResult<()> { - self.stream.flush() - } -} diff --git a/anneal/vendor/tiny_http/src/util/sequential.rs b/anneal/vendor/tiny_http/src/util/sequential.rs deleted file mode 100644 index 8ecc3b93fd..0000000000 --- a/anneal/vendor/tiny_http/src/util/sequential.rs +++ /dev/null @@ -1,174 +0,0 @@ -use std::io::Result as IoResult; -use std::io::{Read, Write}; - -use std::sync::mpsc::channel; -use std::sync::mpsc::{Receiver, Sender}; -use std::sync::{Arc, Mutex}; - -use std::mem; - -pub struct SequentialReaderBuilder -where - R: Read + Send, -{ 
- inner: SequentialReaderBuilderInner, -} - -enum SequentialReaderBuilderInner -where - R: Read + Send, -{ - First(R), - NotFirst(Receiver), -} - -pub struct SequentialReader -where - R: Read + Send, -{ - inner: SequentialReaderInner, - next: Sender, -} - -enum SequentialReaderInner -where - R: Read + Send, -{ - MyTurn(R), - Waiting(Receiver), - Empty, -} - -pub struct SequentialWriterBuilder -where - W: Write + Send, -{ - writer: Arc>, - next_trigger: Option>, -} - -pub struct SequentialWriter -where - W: Write + Send, -{ - trigger: Option>, - writer: Arc>, - on_finish: Sender<()>, -} - -impl SequentialReaderBuilder { - pub fn new(reader: R) -> SequentialReaderBuilder { - SequentialReaderBuilder { - inner: SequentialReaderBuilderInner::First(reader), - } - } -} - -impl SequentialWriterBuilder { - pub fn new(writer: W) -> SequentialWriterBuilder { - SequentialWriterBuilder { - writer: Arc::new(Mutex::new(writer)), - next_trigger: None, - } - } -} - -impl Iterator for SequentialReaderBuilder { - type Item = SequentialReader; - - fn next(&mut self) -> Option> { - let (tx, rx) = channel(); - - let inner = mem::replace(&mut self.inner, SequentialReaderBuilderInner::NotFirst(rx)); - - match inner { - SequentialReaderBuilderInner::First(reader) => Some(SequentialReader { - inner: SequentialReaderInner::MyTurn(reader), - next: tx, - }), - - SequentialReaderBuilderInner::NotFirst(previous) => Some(SequentialReader { - inner: SequentialReaderInner::Waiting(previous), - next: tx, - }), - } - } -} - -impl Iterator for SequentialWriterBuilder { - type Item = SequentialWriter; - fn next(&mut self) -> Option> { - let (tx, rx) = channel(); - let mut next_next_trigger = Some(rx); - ::std::mem::swap(&mut next_next_trigger, &mut self.next_trigger); - - Some(SequentialWriter { - trigger: next_next_trigger, - writer: self.writer.clone(), - on_finish: tx, - }) - } -} - -impl Read for SequentialReader { - fn read(&mut self, buf: &mut [u8]) -> IoResult { - let mut reader = match 
self.inner { - SequentialReaderInner::MyTurn(ref mut reader) => return reader.read(buf), - SequentialReaderInner::Waiting(ref mut recv) => recv.recv().unwrap(), - SequentialReaderInner::Empty => unreachable!(), - }; - - let result = reader.read(buf); - self.inner = SequentialReaderInner::MyTurn(reader); - result - } -} - -impl Write for SequentialWriter { - fn write(&mut self, buf: &[u8]) -> IoResult { - if let Some(v) = self.trigger.as_mut() { - v.recv().unwrap() - } - self.trigger = None; - - self.writer.lock().unwrap().write(buf) - } - - fn flush(&mut self) -> IoResult<()> { - if let Some(v) = self.trigger.as_mut() { - v.recv().unwrap() - } - self.trigger = None; - - self.writer.lock().unwrap().flush() - } -} - -impl Drop for SequentialReader -where - R: Read + Send, -{ - fn drop(&mut self) { - let inner = mem::replace(&mut self.inner, SequentialReaderInner::Empty); - - match inner { - SequentialReaderInner::MyTurn(reader) => { - self.next.send(reader).ok(); - } - SequentialReaderInner::Waiting(recv) => { - let reader = recv.recv().unwrap(); - self.next.send(reader).ok(); - } - SequentialReaderInner::Empty => (), - } - } -} - -impl Drop for SequentialWriter -where - W: Write + Send, -{ - fn drop(&mut self) { - self.on_finish.send(()).ok(); - } -} diff --git a/anneal/vendor/tiny_http/src/util/task_pool.rs b/anneal/vendor/tiny_http/src/util/task_pool.rs deleted file mode 100644 index 155a34d9cd..0000000000 --- a/anneal/vendor/tiny_http/src/util/task_pool.rs +++ /dev/null @@ -1,137 +0,0 @@ -use std::collections::VecDeque; -use std::sync::atomic::{AtomicUsize, Ordering}; -use std::sync::{Arc, Condvar, Mutex}; -use std::thread; -use std::time::Duration; - -/// Manages a collection of threads. -/// -/// A new thread is created every time all the existing threads are full. -/// Any idle thread will automatically die after a few seconds. 
-pub struct TaskPool { - sharing: Arc, -} - -struct Sharing { - // list of the tasks to be done by worker threads - todo: Mutex>>, - - // condvar that will be notified whenever a task is added to `todo` - condvar: Condvar, - - // number of total worker threads running - active_tasks: AtomicUsize, - - // number of idle worker threads - waiting_tasks: AtomicUsize, -} - -/// Minimum number of active threads. -static MIN_THREADS: usize = 4; - -struct Registration<'a> { - nb: &'a AtomicUsize, -} - -impl<'a> Registration<'a> { - fn new(nb: &'a AtomicUsize) -> Registration<'a> { - nb.fetch_add(1, Ordering::Release); - Registration { nb } - } -} - -impl<'a> Drop for Registration<'a> { - fn drop(&mut self) { - self.nb.fetch_sub(1, Ordering::Release); - } -} - -impl TaskPool { - pub fn new() -> TaskPool { - let pool = TaskPool { - sharing: Arc::new(Sharing { - todo: Mutex::new(VecDeque::new()), - condvar: Condvar::new(), - active_tasks: AtomicUsize::new(0), - waiting_tasks: AtomicUsize::new(0), - }), - }; - - for _ in 0..MIN_THREADS { - pool.add_thread(None) - } - - pool - } - - /// Executes a function in a thread. - /// If no thread is available, spawns a new one. 
- pub fn spawn(&self, code: Box) { - let mut queue = self.sharing.todo.lock().unwrap(); - - if self.sharing.waiting_tasks.load(Ordering::Acquire) == 0 { - self.add_thread(Some(code)); - } else { - queue.push_back(code); - self.sharing.condvar.notify_one(); - } - } - - fn add_thread(&self, initial_fn: Option>) { - let sharing = self.sharing.clone(); - - thread::spawn(move || { - let sharing = sharing; - let _active_guard = Registration::new(&sharing.active_tasks); - - if let Some(mut f) = initial_fn { - f(); - } - - loop { - let mut task: Box = { - let mut todo = sharing.todo.lock().unwrap(); - - let task; - loop { - if let Some(poped_task) = todo.pop_front() { - task = poped_task; - break; - } - let _waiting_guard = Registration::new(&sharing.waiting_tasks); - - let received = - if sharing.active_tasks.load(Ordering::Acquire) <= MIN_THREADS { - todo = sharing.condvar.wait(todo).unwrap(); - true - } else { - let (new_lock, waitres) = sharing - .condvar - .wait_timeout(todo, Duration::from_millis(5000)) - .unwrap(); - todo = new_lock; - !waitres.timed_out() - }; - - if !received && todo.is_empty() { - return; - } - } - - task - }; - - task(); - } - }); - } -} - -impl Drop for TaskPool { - fn drop(&mut self) { - self.sharing - .active_tasks - .store(999_999_999, Ordering::Release); - self.sharing.condvar.notify_all(); - } -} diff --git a/anneal/vendor/tiny_http/tests/input-tests.rs b/anneal/vendor/tiny_http/tests/input-tests.rs deleted file mode 100644 index 10974be78a..0000000000 --- a/anneal/vendor/tiny_http/tests/input-tests.rs +++ /dev/null @@ -1,122 +0,0 @@ -extern crate tiny_http; - -use std::io::{Read, Write}; -use std::net::Shutdown; -use std::sync::mpsc; -use std::thread; - -#[allow(dead_code)] -mod support; - -#[test] -fn basic_string_input() { - let (server, client) = support::new_one_server_one_client(); - - { - let mut client = client; - (write!(client, "GET / HTTP/1.1\r\nHost: localhost\r\nContent-Type: text/plain; charset=utf8\r\nContent-Length: 
5\r\n\r\nhello")).unwrap(); - } - - let mut request = server.recv().unwrap(); - - let mut output = String::new(); - request.as_reader().read_to_string(&mut output).unwrap(); - assert_eq!(output, "hello"); -} - -#[test] -fn wrong_content_length() { - let (server, client) = support::new_one_server_one_client(); - - { - let mut client = client; - (write!(client, "GET / HTTP/1.1\r\nHost: localhost\r\nContent-Type: text/plain; charset=utf8\r\nContent-Length: 3\r\n\r\nhello")).unwrap(); - } - - let mut request = server.recv().unwrap(); - - let mut output = String::new(); - request.as_reader().read_to_string(&mut output).unwrap(); - assert_eq!(output, "hel"); -} - -#[test] -fn expect_100_continue() { - let (server, client) = support::new_one_server_one_client(); - - let mut client = client; - (write!(client, "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\nExpect: 100-continue\r\nContent-Type: text/plain; charset=utf8\r\nContent-Length: 5\r\n\r\n")).unwrap(); - client.flush().unwrap(); - - let (tx, rx) = mpsc::channel(); - - thread::spawn(move || { - let mut request = server.recv().unwrap(); - let mut output = String::new(); - request.as_reader().read_to_string(&mut output).unwrap(); - assert_eq!(output, "hello"); - tx.send(()).unwrap(); - }); - - // client.set_keepalive(Some(3)).unwrap(); FIXME: reenable this - let mut content = vec![0; 12]; - client.read_exact(&mut content).unwrap(); - assert!(&content[9..].starts_with(b"100")); // 100 status code - - (write!(client, "hello")).unwrap(); - client.flush().unwrap(); - client.shutdown(Shutdown::Write).unwrap(); - - rx.recv().unwrap(); -} - -#[test] -fn unsupported_expect_header() { - let mut client = support::new_client_to_hello_world_server(); - - (write!(client, "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\nExpect: 189-dummy\r\nContent-Type: text/plain; charset=utf8\r\n\r\n")).unwrap(); - - // client.set_keepalive(Some(3)).unwrap(); FIXME: reenable this - let mut content = String::new(); - 
client.read_to_string(&mut content).unwrap(); - assert!(&content[9..].starts_with("417")); // 417 status code -} - -#[test] -fn invalid_header_name() { - let mut client = support::new_client_to_hello_world_server(); - - // note the space hidden in the Content-Length, which is invalid - (write!(client, "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\nContent-Type: text/plain; charset=utf8\r\nContent-Length : 5\r\n\r\nhello")).unwrap(); - - let mut content = String::new(); - client.read_to_string(&mut content).unwrap(); - assert!(&content[9..].starts_with("400 Bad Request")); // 400 status code -} - -#[test] -fn custom_content_type_response_header() { - let (server, mut stream) = support::new_one_server_one_client(); - write!( - stream, - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" - ) - .unwrap(); - - let request = server.recv().unwrap(); - request - .respond( - tiny_http::Response::from_string("{\"custom\": \"Content-Type\"}").with_header( - "Content-Type: application/json" - .parse::() - .unwrap(), - ), - ) - .unwrap(); - - let mut content = String::new(); - stream.read_to_string(&mut content).unwrap(); - - assert!(content.ends_with("{\"custom\": \"Content-Type\"}")); - assert_ne!(content.find("Content-Type: application/json"), None); -} diff --git a/anneal/vendor/tiny_http/tests/network.rs b/anneal/vendor/tiny_http/tests/network.rs deleted file mode 100644 index d6c6e0540d..0000000000 --- a/anneal/vendor/tiny_http/tests/network.rs +++ /dev/null @@ -1,222 +0,0 @@ -extern crate tiny_http; - -use std::io::{Read, Write}; -use std::net::{Shutdown, TcpStream}; -use std::thread; -use std::time::Duration; - -#[allow(dead_code)] -mod support; - -#[test] -fn connection_close_header() { - let mut client = support::new_client_to_hello_world_server(); - - (write!(client, "GET / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n")).unwrap(); - thread::sleep(Duration::from_millis(1000)); - - (write!(client, "GET / HTTP/1.1\r\nConnection: 
close\r\n\r\n")).unwrap(); - - // if the connection was not closed, this will err with timeout - // client.set_keepalive(Some(1)).unwrap(); FIXME: reenable this - let mut out = Vec::new(); - client.read_to_end(&mut out).unwrap(); -} - -#[test] -fn http_1_0_connection_close() { - let mut client = support::new_client_to_hello_world_server(); - - (write!(client, "GET / HTTP/1.0\r\nHost: localhost\r\n\r\n")).unwrap(); - - // if the connection was not closed, this will err with timeout - // client.set_keepalive(Some(1)).unwrap(); FIXME: reenable this - let mut out = Vec::new(); - client.read_to_end(&mut out).unwrap(); -} - -#[test] -fn detect_connection_closed() { - let mut client = support::new_client_to_hello_world_server(); - - (write!(client, "GET / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n")).unwrap(); - thread::sleep(Duration::from_millis(1000)); - - client.shutdown(Shutdown::Write).unwrap(); - - // if the connection was not closed, this will err with timeout - // client.set_keepalive(Some(1)).unwrap(); FIXME: reenable this - let mut out = Vec::new(); - client.read_to_end(&mut out).unwrap(); -} - -#[test] -fn poor_network_test() { - let mut client = support::new_client_to_hello_world_server(); - - (write!(client, "G")).unwrap(); - thread::sleep(Duration::from_millis(100)); - (write!(client, "ET /he")).unwrap(); - thread::sleep(Duration::from_millis(100)); - (write!(client, "llo HT")).unwrap(); - thread::sleep(Duration::from_millis(100)); - (write!(client, "TP/1.")).unwrap(); - thread::sleep(Duration::from_millis(100)); - (write!(client, "1\r\nHo")).unwrap(); - thread::sleep(Duration::from_millis(100)); - (write!(client, "st: localho")).unwrap(); - thread::sleep(Duration::from_millis(100)); - (write!(client, "st\r\nConnec")).unwrap(); - thread::sleep(Duration::from_millis(100)); - (write!(client, "tion: close\r")).unwrap(); - thread::sleep(Duration::from_millis(100)); - (write!(client, "\n\r")).unwrap(); - thread::sleep(Duration::from_millis(100)); - 
(writeln!(client)).unwrap(); - - // client.set_keepalive(Some(2)).unwrap(); FIXME: reenable this - let mut data = String::new(); - client.read_to_string(&mut data).unwrap(); - assert!(data.ends_with("hello world")); -} - -#[test] -fn pipelining_test() { - let mut client = support::new_client_to_hello_world_server(); - - (write!(client, "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n")).unwrap(); - (write!(client, "GET /hello HTTP/1.1\r\nHost: localhost\r\n\r\n")).unwrap(); - (write!( - client, - "GET /world HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" - )) - .unwrap(); - - // client.set_keepalive(Some(2)).unwrap(); FIXME: reenable this - let mut data = String::new(); - client.read_to_string(&mut data).unwrap(); - assert_eq!(data.split("hello world").count(), 4); -} - -#[test] -fn server_crash_results_in_response() { - let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - let port = server.server_addr().to_ip().unwrap().port(); - let mut client = TcpStream::connect(("127.0.0.1", port)).unwrap(); - - thread::spawn(move || { - server.recv().unwrap(); - // oops, server crash - }); - - (write!( - client, - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" - )) - .unwrap(); - - // client.set_keepalive(Some(2)).unwrap(); FIXME: reenable this - let mut content = String::new(); - client.read_to_string(&mut content).unwrap(); - assert!(&content[9..].starts_with('5')); // 5xx status code -} - -#[test] -fn responses_reordered() { - let (server, mut client) = support::new_one_server_one_client(); - - (write!(client, "GET / HTTP/1.1\r\nHost: localhost\r\n\r\n")).unwrap(); - (write!( - client, - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" - )) - .unwrap(); - - thread::spawn(move || { - let rq1 = server.recv().unwrap(); - let rq2 = server.recv().unwrap(); - - thread::spawn(move || { - rq2.respond(tiny_http::Response::from_string( - "second request".to_owned(), - )) - .unwrap(); - }); - - thread::sleep(Duration::from_millis(100)); 
- - thread::spawn(move || { - rq1.respond(tiny_http::Response::from_string("first request".to_owned())) - .unwrap(); - }); - }); - - // client.set_keepalive(Some(2)).unwrap(); FIXME: reenable this - let mut content = String::new(); - client.read_to_string(&mut content).unwrap(); - assert!(content.ends_with("second request")); -} - -#[test] -fn no_transfer_encoding_on_204() { - let (server, mut client) = support::new_one_server_one_client(); - - (write!( - client, - "GET / HTTP/1.1\r\nHost: localhost\r\nTE: chunked\r\nConnection: close\r\n\r\n" - )) - .unwrap(); - - thread::spawn(move || { - let rq = server.recv().unwrap(); - - let resp = tiny_http::Response::empty(tiny_http::StatusCode(204)); - rq.respond(resp).unwrap(); - }); - - let mut content = String::new(); - client.read_to_string(&mut content).unwrap(); - - assert!(content.starts_with("HTTP/1.1 204")); - assert!(!content.contains("Transfer-Encoding: chunked")); -} - -/* FIXME: uncomment and fix -#[test] -fn connection_timeout() { - let (server, mut client) = { - let server = tiny_http::ServerBuilder::new() - .with_client_connections_timeout(3000) - .with_random_port().build().unwrap(); - let port = server.server_addr().port(); - let client = TcpStream::connect(("127.0.0.1", port)).unwrap(); - (server, client) - }; - - let (tx_stop, rx_stop) = mpsc::channel(); - - // executing server in parallel - thread::spawn(move || { - loop { - server.try_recv(); - thread::sleep(Duration::from_millis(100)); - if rx_stop.try_recv().is_ok() { break } - } - }); - - // waiting for the 408 response - let mut content = String::new(); - client.read_to_string(&mut content).unwrap(); - assert!(&content[9..].starts_with("408")); - - // stopping server - tx_stop.send(()); -} -*/ - -#[test] -fn chunked_threshold() { - let resp = tiny_http::Response::from_string("test".to_string()); - assert_eq!(resp.chunked_threshold(), 32768); - assert_eq!(resp.with_chunked_threshold(42).chunked_threshold(), 42); -} diff --git 
a/anneal/vendor/tiny_http/tests/non-chunked-buffering.rs b/anneal/vendor/tiny_http/tests/non-chunked-buffering.rs deleted file mode 100644 index 24a5d68002..0000000000 --- a/anneal/vendor/tiny_http/tests/non-chunked-buffering.rs +++ /dev/null @@ -1,103 +0,0 @@ -extern crate tiny_http; - -use std::io::{Cursor, Read, Write}; -use std::sync::{ - atomic::{ - AtomicUsize, - Ordering::{AcqRel, Acquire}, - }, - Arc, -}; - -#[allow(dead_code)] -mod support; - -struct MeteredReader { - inner: T, - position: Arc, -} - -impl Read for MeteredReader -where - T: Read, -{ - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { - match self.inner.read(buf) { - Ok(read) => { - self.position.fetch_add(read, AcqRel); - Ok(read) - } - e => e, - } - } -} - -type Reader = MeteredReader>; - -fn big_response_reader() -> Reader { - let big_body = "ABCDEFGHIJKLMNOPQRSTUVXYZ".repeat(1024 * 1024 * 16); - MeteredReader { - inner: Cursor::new(big_body), - position: Arc::new(AtomicUsize::new(0)), - } -} - -fn identity_served(r: &mut Reader) -> tiny_http::Response<&mut Reader> { - let body_len = r.inner.get_ref().len(); - tiny_http::Response::empty(200) - .with_chunked_threshold(std::usize::MAX) - .with_data(r, Some(body_len)) -} - -/// Checks that a body-Read:er is not called when the client has disconnected -#[test] -fn responding_to_closed_client() { - let (server, mut stream) = support::new_one_server_one_client(); - write!( - stream, - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" - ) - .unwrap(); - - let request = server.recv().unwrap(); - - // Client already disconnected - drop(stream); - - let mut reader = big_response_reader(); - request - .respond(identity_served(&mut reader)) - .expect("Successful"); - - assert!(reader.position.load(Acquire) < 1024 * 1024); -} - -/// Checks that a slow client does not cause data to be consumed and buffered from a reader -#[test] -fn responding_to_non_consuming_client() { - let (server, mut stream) = 
support::new_one_server_one_client(); - write!( - stream, - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" - ) - .unwrap(); - - let request = server.recv().unwrap(); - - let mut reader = big_response_reader(); - let position = reader.position.clone(); - - // Client still connected, but not reading anything - std::thread::spawn(move || { - request - .respond(identity_served(&mut reader)) - .expect("Successful"); - }); - - std::thread::sleep(std::time::Duration::from_millis(100)); - - // It seems the client TCP socket can buffer quite a lot, so we need to be permissive - assert!(position.load(Acquire) < 8 * 1024 * 1024); - - drop(stream); -} diff --git a/anneal/vendor/tiny_http/tests/promptness.rs b/anneal/vendor/tiny_http/tests/promptness.rs deleted file mode 100644 index a621d3e585..0000000000 --- a/anneal/vendor/tiny_http/tests/promptness.rs +++ /dev/null @@ -1,207 +0,0 @@ -extern crate tiny_http; - -use std::io::{copy, Read, Write}; -use std::net::{Shutdown, TcpStream}; -use std::ops::Deref; -use std::sync::mpsc::channel; -use std::sync::Arc; -use std::thread::{sleep, spawn}; -use std::time::Duration; -use tiny_http::{Response, Server}; - -/// Stream that produces bytes very slowly -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -struct SlowByteSrc { - val: u8, - len: usize, -} -impl<'b> Read for SlowByteSrc { - fn read(&mut self, buf: &mut [u8]) -> std::io::Result { - sleep(Duration::from_millis(100)); - let l = self.len.min(buf.len()).min(1000); - for v in buf[..l].iter_mut() { - *v = self.val; - } - self.len -= l; - Ok(l) - } -} - -/// crude impl of http `Transfer-Encoding: chunked` -fn encode_chunked(data: &mut dyn Read, output: &mut dyn Write) { - let mut buf = [0u8; 4096]; - loop { - let l = data.read(&mut buf).unwrap(); - write!(output, "{:X}\r\n", l).unwrap(); - output.write_all(&buf[..l]).unwrap(); - write!(output, "\r\n").unwrap(); - if l == 0 { - break; - } - } -} - -mod prompt_pipelining { - use super::*; - - /// Check that pipelined 
requests on the same connection are received promptly. - fn assert_requests_parsed_promptly( - req_cnt: usize, - req_body: &'static [u8], - timeout: Duration, - req_writer: impl FnOnce(&mut dyn Write) + Send + 'static, - ) { - let resp_body = SlowByteSrc { - val: 42, - len: 1000_000, - }; // very slow response body - - let server = Server::http("0.0.0.0:0").unwrap(); - let mut client = TcpStream::connect(server.server_addr().to_ip().unwrap()).unwrap(); - let (svr_send, svr_rcv) = channel(); - - spawn(move || { - for _ in 0..req_cnt { - let mut req = server.recv().unwrap(); - // read the whole body of the request - let mut body = Vec::new(); - req.as_reader().read_to_end(&mut body).unwrap(); - assert_eq!(req_body, body.as_slice()); - // The next pipelined request should now be available for parsing, - // while we send the (possibly slow) response in another thread - spawn(move || { - req.respond(Response::empty(200).with_data(resp_body, Some(resp_body.len))) - }); - } - svr_send.send(()).unwrap(); - }); - - spawn(move || req_writer(&mut client)); - - // requests must be sent and received quickly (before timeout expires) - svr_rcv - .recv_timeout(timeout) - .expect("Server did not finish reading pipelined requests quickly enough"); - } - - #[test] - fn empty() { - assert_requests_parsed_promptly(5, &[], Duration::from_millis(200), move |wr| { - for _ in 0..5 { - write!(wr, "GET / HTTP/1.1\r\n").unwrap(); - write!(wr, "Connection: keep-alive\r\n\r\n").unwrap(); - } - }); - } - - #[test] - fn content_length_short() { - let body = &[65u8; 100]; // short but not trivial - assert_requests_parsed_promptly(5, body, Duration::from_millis(200), move |wr| { - for _ in 0..5 { - write!(wr, "GET / HTTP/1.1\r\n").unwrap(); - write!(wr, "Connection: keep-alive\r\n").unwrap(); - write!(wr, "Content-Length: {}\r\n\r\n", body.len()).unwrap(); - wr.write_all(body).unwrap(); - } - }); - } - - #[test] - fn content_length_long() { - let body = &[65u8; 10000]; // long enough that it won't 
be buffered - assert_requests_parsed_promptly(5, body, Duration::from_millis(200), move |wr| { - for _ in 0..5 { - write!(wr, "GET / HTTP/1.1\r\n").unwrap(); - write!(wr, "Connection: keep-alive\r\n").unwrap(); - write!(wr, "Content-Length: {}\r\n\r\n", body.len()).unwrap(); - wr.write_all(body).unwrap(); - } - }); - } - - #[test] - fn chunked() { - let body = &[65u8; 10000]; - assert_requests_parsed_promptly(5, body, Duration::from_millis(200), move |wr| { - for _ in 0..5 { - write!(wr, "GET / HTTP/1.1\r\n").unwrap(); - write!(wr, "Connection: keep-alive\r\n").unwrap(); - write!(wr, "Transfer-Encoding: chunked\r\n\r\n").unwrap(); - encode_chunked(&mut &body[..], wr); - } - }); - } -} - -mod prompt_responses { - use super::*; - - /// Check that response is sent promptly without waiting for full request body. - fn assert_responds_promptly( - timeout: Duration, - req_writer: impl FnOnce(&mut dyn Write) + Send + 'static, - ) { - let server = Server::http("0.0.0.0:0").unwrap(); - let client = TcpStream::connect(server.server_addr().to_ip().unwrap()).unwrap(); - - spawn(move || loop { - // server attempts to respond immediately - let req = server.recv().unwrap(); - req.respond(Response::empty(400)).unwrap(); - }); - - let client = Arc::new(client); - let client_write = Arc::clone(&client); - // request written (possibly very slowly) in another thread - spawn(move || req_writer(&mut client_write.deref())); - - // response should arrive quickly (before timeout expires) - client.set_read_timeout(Some(timeout)).unwrap(); - let resp = client.deref().read(&mut [0u8; 4096]); - client.shutdown(Shutdown::Both).unwrap(); - assert!(resp.is_ok(), "Server response was not sent promptly"); - } - - static SLOW_BODY: SlowByteSrc = SlowByteSrc { - val: 65, - len: 1000_000, - }; - - #[test] - fn content_length_http11() { - assert_responds_promptly(Duration::from_millis(200), move |wr| { - write!(wr, "GET / HTTP/1.1\r\n").unwrap(); - write!(wr, "Content-Length: {}\r\n\r\n", 
SLOW_BODY.len).unwrap(); - copy(&mut SLOW_BODY.clone(), wr).unwrap(); - }); - } - - #[test] - fn content_length_http10() { - assert_responds_promptly(Duration::from_millis(200), move |wr| { - write!(wr, "GET / HTTP/1.0\r\n").unwrap(); - write!(wr, "Content-Length: {}\r\n\r\n", SLOW_BODY.len).unwrap(); - copy(&mut SLOW_BODY.clone(), wr).unwrap(); - }); - } - - #[test] - fn expect_continue() { - assert_responds_promptly(Duration::from_millis(200), move |wr| { - write!(wr, "GET / HTTP/1.1\r\n").unwrap(); - write!(wr, "Expect: 100 continue\r\n").unwrap(); - write!(wr, "Content-Length: {}\r\n\r\n", SLOW_BODY.len).unwrap(); - copy(&mut SLOW_BODY.clone(), wr).unwrap(); - }); - } - - #[test] - fn chunked() { - assert_responds_promptly(Duration::from_millis(200), move |wr| { - write!(wr, "GET / HTTP/1.1\r\n").unwrap(); - write!(wr, "Transfer-Encoding: chunked\r\n\r\n").unwrap(); - encode_chunked(&mut SLOW_BODY.clone(), wr); - }); - } -} diff --git a/anneal/vendor/tiny_http/tests/simple-test.rs b/anneal/vendor/tiny_http/tests/simple-test.rs deleted file mode 100644 index 4375109d92..0000000000 --- a/anneal/vendor/tiny_http/tests/simple-test.rs +++ /dev/null @@ -1,29 +0,0 @@ -extern crate tiny_http; - -use std::io::{Read, Write}; - -#[allow(dead_code)] -mod support; - -#[test] -fn basic_handling() { - let (server, mut stream) = support::new_one_server_one_client(); - write!( - stream, - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" - ) - .unwrap(); - - let request = server.recv().unwrap(); - assert!(*request.method() == tiny_http::Method::Get); - //assert!(request.url() == "/"); - request - .respond(tiny_http::Response::from_string("hello world".to_owned())) - .unwrap(); - - server.try_recv().unwrap(); - - let mut content = String::new(); - stream.read_to_string(&mut content).unwrap(); - assert!(content.ends_with("hello world")); -} diff --git a/anneal/vendor/tiny_http/tests/support/mod.rs b/anneal/vendor/tiny_http/tests/support/mod.rs deleted file mode 
100644 index 7a4dc587be..0000000000 --- a/anneal/vendor/tiny_http/tests/support/mod.rs +++ /dev/null @@ -1,40 +0,0 @@ -use std::net::TcpStream; -use std::thread; -use std::time::Duration; - -/// Creates a server and a client connected to the server. -pub fn new_one_server_one_client() -> (tiny_http::Server, TcpStream) { - let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - let port = server.server_addr().to_ip().unwrap().port(); - let client = TcpStream::connect(("127.0.0.1", port)).unwrap(); - (server, client) -} - -/// Creates a "hello world" server with a client connected to the server. -/// -/// The server will automatically close after 3 seconds. -pub fn new_client_to_hello_world_server() -> TcpStream { - let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - let port = server.server_addr().to_ip().unwrap().port(); - let client = TcpStream::connect(("127.0.0.1", port)).unwrap(); - - thread::spawn(move || { - let mut cycles = 3 * 1000 / 20; - - loop { - if let Some(rq) = server.try_recv().unwrap() { - let response = tiny_http::Response::from_string("hello world".to_string()); - rq.respond(response).unwrap(); - } - - thread::sleep(Duration::from_millis(20)); - - cycles -= 1; - if cycles == 0 { - break; - } - } - }); - - client -} diff --git a/anneal/vendor/tiny_http/tests/unblock-test.rs b/anneal/vendor/tiny_http/tests/unblock-test.rs deleted file mode 100644 index 001568a480..0000000000 --- a/anneal/vendor/tiny_http/tests/unblock-test.rs +++ /dev/null @@ -1,34 +0,0 @@ -extern crate tiny_http; - -use std::sync::Arc; -use std::thread; - -#[test] -fn unblock_server() { - let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - let s = Arc::new(server); - - let s1 = s.clone(); - thread::spawn(move || s1.unblock()); - - // Without unblock this would hang forever - for _rq in s.incoming_requests() {} -} - -#[test] -fn unblock_threads() { - let server = tiny_http::Server::http("0.0.0.0:0").unwrap(); - let s = Arc::new(server); - - let s1 = 
s.clone(); - let s2 = s.clone(); - let h1 = thread::spawn(move || for _rq in s1.incoming_requests() {}); - let h2 = thread::spawn(move || for _rq in s2.incoming_requests() {}); - - // Graceful shutdown; removing even one of the - // unblock calls prevents termination - s.unblock(); - s.unblock(); - h1.join().unwrap(); - h2.join().unwrap(); -} diff --git a/anneal/vendor/tiny_http/tests/unix-test.rs b/anneal/vendor/tiny_http/tests/unix-test.rs deleted file mode 100644 index 42d9476d05..0000000000 --- a/anneal/vendor/tiny_http/tests/unix-test.rs +++ /dev/null @@ -1,44 +0,0 @@ -#![cfg(unix)] - -extern crate tiny_http; - -use std::{ - io::{Read, Write}, - os::unix::net::UnixStream, - path::{Path, PathBuf}, -}; - -#[allow(dead_code)] -mod support; - -#[test] -fn unix_basic_handling() { - let server = tiny_http::Server::http_unix(Path::new("/tmp/tiny-http-test.sock")).unwrap(); - let path: PathBuf = server - .server_addr() - .to_unix() - .unwrap() - .as_pathname() - .unwrap() - .into(); - let mut client = UnixStream::connect(path).unwrap(); - - write!( - client, - "GET / HTTP/1.1\r\nHost: localhost\r\nConnection: close\r\n\r\n" - ) - .unwrap(); - - let request = server.recv().unwrap(); - assert!(*request.method() == tiny_http::Method::Get); - //assert!(request.url() == "/"); - request - .respond(tiny_http::Response::from_string("hello world".to_owned())) - .unwrap(); - - server.try_recv().unwrap(); - - let mut content = String::new(); - client.read_to_string(&mut content).unwrap(); - assert!(content.ends_with("hello world")); -} diff --git a/anneal/vendor/toml_const/.cargo-checksum.json b/anneal/vendor/toml_const/.cargo-checksum.json new file mode 100644 index 0000000000..eff3b78082 --- /dev/null +++ b/anneal/vendor/toml_const/.cargo-checksum.json @@ -0,0 +1 @@ 
+{"files":{".cargo_vcs_info.json":"a2498507038fea8354b4dfc5798e93af8c30c3d16cc566902ef895566e848508","Cargo.lock":"4770c57766b8dfd1cf3d7cedf483821d5ad408f3e535ab13bbb0ff7d83a44b46","Cargo.toml":"bef7c2f63b0d479f5b0bb1a430941f234d482265a4726835934e8e6ae3fde87b","Cargo.toml.orig":"1796532b80de2a799a925772d50b06cf845f68e24514c9c60b1f35d27c890a56","README.md":"da28e01d72d0bace098ae07c2c1a525723cce4f177149a59d7619392cba286e0","src/lib.rs":"d931a9d715970adba255901a9c8c5d7c232d7f053931b1c56435c9225b4f8b8e"},"package":"60a93119c23cd286a0e585f25bffcd1c292eaa6b90edfd8d58c442a3d2fe57c1"} \ No newline at end of file diff --git a/anneal/vendor/toml_const/.cargo_vcs_info.json b/anneal/vendor/toml_const/.cargo_vcs_info.json new file mode 100644 index 0000000000..180a0b3341 --- /dev/null +++ b/anneal/vendor/toml_const/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "38dc135cc3b17aacd2e8ff0bdc3a0dc058376f6b" + }, + "path_in_vcs": "toml_const" +} \ No newline at end of file diff --git a/anneal/vendor/toml_const/Cargo.lock b/anneal/vendor/toml_const/Cargo.lock new file mode 100644 index 0000000000..cb46d335d4 --- /dev/null +++ b/anneal/vendor/toml_const/Cargo.lock @@ -0,0 +1,223 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "fastrand" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" + +[[package]] +name = "hashbrown" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" + +[[package]] +name = "indexmap" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "phf" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "913273894cec178f401a31ec4b656318d95473527be05c0752cc41cdc32be8b7" +dependencies = [ + "phf_macros", + "phf_shared", + "serde", +] + +[[package]] +name = "phf_generator" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2cbb1126afed61dd6368748dae63b1ee7dc480191c6262a3b4ff1e29d86a6c5b" +dependencies = [ + "fastrand", + "phf_shared", +] + +[[package]] +name = "phf_macros" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d713258393a82f091ead52047ca779d37e5766226d009de21696c4e667044368" +dependencies = [ + "phf_generator", + "phf_shared", + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "phf_shared" +version = "0.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"06005508882fb681fd97892ecff4b7fd0fee13ef1aa569f8695dae7ab9099981" +dependencies = [ + "siphasher", +] + +[[package]] +name = "proc-macro2" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "siphasher" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" + +[[package]] +name = "syn" +version = "2.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + 
"indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_const" +version = "1.2.1" +dependencies = [ + "phf", + "toml", + "toml_const_macros", +] + +[[package]] +name = "toml_const_macros" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0ca608371311e568b6f918f3cf851640c6811625f39852c188b50ce11e2201b" +dependencies = [ + "indexmap", + "proc-macro2", + "quote", + "syn", + "toml", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "winnow" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd" +dependencies = [ + "memchr", +] diff --git a/anneal/vendor/toml_const/Cargo.toml b/anneal/vendor/toml_const/Cargo.toml new file mode 100644 index 0000000000..5e87132c57 --- /dev/null +++ b/anneal/vendor/toml_const/Cargo.toml @@ -0,0 +1,61 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# 
"normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +rust-version = "1.56" +name = "toml_const" +version = "1.2.1" +build = false +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = "Compile-time constants from TOML" +readme = "README.md" +keywords = [ + "toml", + "config", + "static", +] +categories = [ + "no-std", + "config", + "development-tools::build-utils", +] +license = "MIT" +repository = "https://github.com/facesthe/toml_const" + +[features] +default = ["phf"] +phf = [ + "dep:phf", + "macros/phf", +] + +[lib] +name = "toml_const" +path = "src/lib.rs" + +[dependencies.macros] +version = "1.2.1" +default-features = false +package = "toml_const_macros" + +[dependencies.phf] +version = "0.12" +features = ["macros"] +optional = true + +[dependencies.toml] +version = "0.8" +features = ["preserve_order"] diff --git a/anneal/vendor/toml_const/Cargo.toml.orig b/anneal/vendor/toml_const/Cargo.toml.orig new file mode 100644 index 0000000000..3fb27bb4e6 --- /dev/null +++ b/anneal/vendor/toml_const/Cargo.toml.orig @@ -0,0 +1,26 @@ +[package] +name = "toml_const" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +description = "Compile-time constants from TOML" +readme = "README.md" +repository = "https://github.com/facesthe/toml_const" +license = "MIT" +keywords = ["toml", "config", "static"] +categories = ["no-std", "config", "development-tools::build-utils"] + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[lib] +name = "toml_const" +path = "src/lib.rs" + +[features] +default = ["phf"] +phf = 
["dep:phf", "macros/phf"] + +[dependencies] +toml = { workspace = true } +macros = { path = "../toml_const_macros", package = "toml_const_macros", version = "1.2.1", default-features = false } +phf = { version = "0.12", features = ["macros"], optional = true } diff --git a/anneal/vendor/toml_const/README.md b/anneal/vendor/toml_const/README.md new file mode 100644 index 0000000000..24fe771fa3 --- /dev/null +++ b/anneal/vendor/toml_const/README.md @@ -0,0 +1,202 @@ +# toml_const + +
+ +**TOML compile-time constants** + + +![no std](https://img.shields.io/badge/no__std-12a077) +[![crate](https://img.shields.io/crates/v/toml_const.svg)](https://crates.io/crates/toml_const) +[![docs](https://docs.rs/toml_const/badge.svg)](https://docs.rs/toml_const) +[![build status](https://github.com/facesthe/toml_const/actions/workflows/ci.yml/badge.svg)](https://github.com/facesthe/toml_const/actions/workflows/ci.yml) + +
+ +## Getting started + +```rust +use toml_const::{toml_const, toml_const_ws}; + +// workspace root +// ├── example.toml +// ├── normalize.toml +// ├── toml_const <---- you are here +// │   ├── Cargo.toml +// │   └── src +// └── toml_const_macros +// ├── Cargo.toml +// └── src + +// include a TOML file in your project relative to your manifest directory +toml_const! { + /// Docstring for this item + #[derive(PartialEq)] // Clone, Copy, Debug are already derived + pub const EXAMPLE_TOML: "../example.toml"; + // multiple definitions are supported + static CARGO_TOML: "Cargo.toml"; +} + +// include a file relative to your workspace root +toml_const_ws! {static EXAMPLE_TOML_WS: "example.toml";} + +// table keys are capitalized struct fields +const TITLE: &str = EXAMPLE_TOML.title; +assert_eq!(EXAMPLE_TOML.title, EXAMPLE_TOML_WS.title); +``` + +## Table substitution + +File substitution is supported. +The first path that exists and satisfies the following conditions will be used. +These conditions are, in order of precedence: + +- if a substitute path has the `use` keyword prefixed +- iif a toml file contains `use = true` at the root level + +Multiple substitute files can be specified in the macro expression. +The first file containing a `use = true` key will be merged into the parent file. + +These files may contain secrets or other sensitive information that you don't want to check into version control. + +```rust +use toml_const::toml_const; + +toml_const! { + // example.toml is the template/parent file (must exist) + pub static EXAMPLE_TOML: "../example.toml" { + // if Cargo.toml exists, it will be substituted + use "../Cargo.toml"; + // if Cargo.toml does not exist and example.toml contains + // `use = true`, it will be substituted + "../example.toml"; + // files that do not exist are ignored + "non_existent.toml"; + // .. and so on + } +} +``` + +## Normalization + +A TOML file is normalized before it is generated as code. 
This step does not modify the original config file. + +Tables within arrays will have their keys propagated across all elements. Missing keys will be filled with default values. +This means that keys can be omitted from parts of your config as long as it is defined in at least one element. + +Empty arrays will be inferred to be `&'static [&'static str]`. + +```toml +# this table will normalize to ... +[program] +name = "my_library" +versions = [ + { version = "0.1.0", description = "Initial release" }, + { version = "0.2.0" }, # description is omitted + { version = "0.3.0", description = "Added support for arrays of tables", bug_fixes = [ + { issue = "1", description = "Fixed a bug with arrays of tables" }, + { issue = "2", description = "support nested arrays" }, + ] }, +] + +# ... this +[program] +name = "my_library" +versions = [ + { version = "0.1.0", description = "Initial release", bug_fixes = [] }, + { version = "0.2.0", description = "", bug_fixes = [] }, + { version = "0.3.0", description = "Added support for arrays of tables", bug_fixes = [ + { issue = "1", description = "Fixed a bug with arrays of tables" }, + { issue = "2", description = "support nested arrays" }, + ] }, +] +``` + +## Hashmaps + +A table that contains identical keys will implement a `const map()` method that returns `&phf::OrderedMap`. + +This feature is included by default under the feature flag `"phf"`. You can opt to disable it by adding `default-features = false` to this dependency. + +```rust +use toml_const::toml_const; + +toml_const! 
{ + #[derive(PartialEq)] + pub const NORMALIZE_TOML: "../normalize.toml"; +} + +// keys can be accessed through struct fields as usual +let first_value = NORMALIZE_TOML.identical_values.first; +let second_value = NORMALIZE_TOML.identical_values.second; + +let map = NORMALIZE_TOML.identical_values.map(); +for (key, value) in map.into_iter() { + + // they are the same type + let _ = &first_value == value; + + // in this case, the inner value also contains a hashmap + println!("{}: {:?}", key, value); + + for (inner_key, inner_value) in value.map().into_iter() { + // this will print the inner key and value + println!("\t{}: {:?}", inner_key, inner_value); + } +} +``` + +## Unwrapping datetime + +`toml::Datetime` contains fields that point to `Option`s, which need const/runtime checks. +As the toml spec defines [4 datetime formats](https://docs.rs/toml/latest/toml/value/struct.Datetime.html), +non-option types can be used to unwrap datetime values at compile time. + +Datetime values are also normalized to support multiple formats defined for one key. +The union of all formats will be used to generate the final datetime format. + +## Attributes + +Docstrings and derive attributes are supported. +`Clone`, `Copy`, and `Debug` are automatically derived for all types. + +```rust +use toml_const::toml_const; + +toml_const! { + /// # Cargo manifest file + /// + /// This file contains + /// - something + #[derive(PartialEq)] + pub const CARGO_TOML: "Cargo.toml"; +} +``` + +## Limitations + +This library does not support the full TOML specification. 
+ +It **will fail to**: + +- generate arrays with distinct types (arrays containing different types, arrays of tables with conflicting key types) +- create a struct from a table with a blank key `"" = true` +- parse reserved keys (`__map__` is reserved cannot be used as a key) + +It **will modify**: + +- table keys that begin with numbers +- table keys that contain invalid characters for identifiers + +## TOML data types + +All TOML data types are supported. Datetime related structs are re-exported from `toml`. + +| data type | rust type | +| --- | --- | +| boolean | `bool` | +| integer | `i64` | +| float | `f64` | +| string | `&'static str` | +| date | `toml_const::Datetime` | +| array | `&'static [T]` | +| table | auto-generated struct | diff --git a/anneal/vendor/toml_const/src/lib.rs b/anneal/vendor/toml_const/src/lib.rs new file mode 100644 index 0000000000..49a4a52458 --- /dev/null +++ b/anneal/vendor/toml_const/src/lib.rs @@ -0,0 +1,157 @@ +#![doc = include_str!("../README.md")] +#![no_std] + +// re-exports +pub use datetime::*; +pub use macros::*; +pub use toml::value::{Date, Datetime, Offset, Time}; + +#[cfg(feature = "phf")] +#[doc(hidden)] +pub use phf; +#[cfg(feature = "phf")] +pub use phf::phf_ordered_map as phf_map_macro; +#[cfg(feature = "phf")] +pub use phf::OrderedMap as PhfMap; + +/// Destructured datetime structs +mod datetime { + use super::*; + + const DEFAULT_DATE: Date = Date { + year: 1970, + month: 1, + day: 1, + }; + + const DEFAULT_TIME: Time = Time { + hour: 0, + minute: 0, + second: 0, + nanosecond: 0, + }; + + const DEFAULT_OFFSET: Offset = Offset::Z; + + #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord)] + pub struct OffsetDateTime { + pub date: Date, + pub time: Time, + pub offset: Offset, + } + + #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord)] + pub struct LocalDateTime { + pub date: Date, + pub time: Time, + } + + #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord)] + pub struct LocalDate { + pub 
date: Date, + } + + #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, Eq, Ord)] + pub struct LocalTime { + pub time: Time, + } + + impl Default for OffsetDateTime { + fn default() -> Self { + Self { + date: DEFAULT_DATE, + time: DEFAULT_TIME, + offset: DEFAULT_OFFSET, + } + } + } + + impl Default for LocalDateTime { + fn default() -> Self { + Self { + date: DEFAULT_DATE, + time: DEFAULT_TIME, + } + } + } + + impl Default for LocalDate { + fn default() -> Self { + Self { date: DEFAULT_DATE } + } + } + + impl Default for LocalTime { + fn default() -> Self { + Self { time: DEFAULT_TIME } + } + } + + impl From for Datetime { + fn from(value: OffsetDateTime) -> Self { + Self { + date: Some(value.date), + time: Some(value.time), + offset: Some(value.offset), + } + } + } + + impl From for Datetime { + fn from(value: LocalDateTime) -> Self { + Self { + date: Some(value.date), + time: Some(value.time), + offset: None, + } + } + } + + impl From for Datetime { + fn from(value: LocalDate) -> Self { + Self { + date: Some(value.date), + time: None, + offset: None, + } + } + } + + impl From for Datetime { + fn from(value: LocalTime) -> Self { + Self { + date: None, + time: Some(value.time), + offset: None, + } + } + } + + impl core::fmt::Display for OffsetDateTime { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let as_dt = Datetime::from(*self); + write!(f, "{}", as_dt) + } + } + + impl core::fmt::Display for LocalDateTime { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let as_dt = Datetime::from(*self); + write!(f, "{}", as_dt) + } + } + + impl core::fmt::Display for LocalDate { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let as_dt = Datetime::from(*self); + write!(f, "{}", as_dt) + } + } + + impl core::fmt::Display for LocalTime { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + let as_dt = Datetime::from(*self); + write!(f, "{}", as_dt) + } + } +} diff --git 
a/anneal/vendor/toml_const_macros/.cargo-checksum.json b/anneal/vendor/toml_const_macros/.cargo-checksum.json new file mode 100644 index 0000000000..bb69fd7faa --- /dev/null +++ b/anneal/vendor/toml_const_macros/.cargo-checksum.json @@ -0,0 +1 @@ +{"files":{".cargo_vcs_info.json":"9dbf536c95ec014b71b5ab4afe236bcd9463ffa430e52ffb24a33b0f27406d2d","Cargo.lock":"af7ad458f1459a3fd4980a89f3400f25f3468f4b95656471567f93cddf5191bf","Cargo.toml":"4859ef445dd62fb9d6fdecc1401591862a3acc57edc17ea8c8a7f31a32692f94","Cargo.toml.orig":"feee744cc2a5ef18aa0aee0387902f5dd6d2a8277456cb668208c6e3ad706e78","src/check.rs":"ff006342a2553732e489c53156623c8c36c63f27950e5b5f176b1964d6775500","src/instantiate.rs":"dac76b628daed69f8f927fc1992bbc52b3c7e177dc7a4af805543837524a7a0c","src/lib.rs":"554f0e7a4a69b73e77018c262be69d673e564d076ab833630a5dc3a83b0a1769","src/normalize.rs":"42e2612f5666f2b6c251965f4d6bf85fd24ba5dbe07281e5a456c10e1e547b55","src/parse.rs":"ae800bf2d4b7c49fdb74874d18bfea854680ca9ed7a603a6d34e046c96ff3337"},"package":"a0ca608371311e568b6f918f3cf851640c6811625f39852c188b50ce11e2201b"} \ No newline at end of file diff --git a/anneal/vendor/toml_const_macros/.cargo_vcs_info.json b/anneal/vendor/toml_const_macros/.cargo_vcs_info.json new file mode 100644 index 0000000000..5b2c522bc7 --- /dev/null +++ b/anneal/vendor/toml_const_macros/.cargo_vcs_info.json @@ -0,0 +1,6 @@ +{ + "git": { + "sha1": "38dc135cc3b17aacd2e8ff0bdc3a0dc058376f6b" + }, + "path_in_vcs": "toml_const_macros" +} \ No newline at end of file diff --git a/anneal/vendor/toml_const_macros/Cargo.lock b/anneal/vendor/toml_const_macros/Cargo.lock new file mode 100644 index 0000000000..da402fa0e9 --- /dev/null +++ b/anneal/vendor/toml_const_macros/Cargo.lock @@ -0,0 +1,157 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. 
+version = 3 + +[[package]] +name = "equivalent" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" + +[[package]] +name = "hashbrown" +version = "0.15.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5971ac85611da7067dbfcabef3c70ebb5606018acd9e2a3903a0da507521e0d5" + +[[package]] +name = "indexmap" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cea70ddb795996207ad57735b50c5982d8844f38ba9ee5f1aedcfb708a2aa11e" +dependencies = [ + "equivalent", + "hashbrown", +] + +[[package]] +name = "memchr" +version = "2.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" + +[[package]] +name = "proc-macro2" +version = "1.0.95" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02b3e5e68a3a1a02aad3ec490a98007cbc13c37cbe84a3cd7b8e406d76e7f778" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "quote" +version = "1.0.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "serde" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde_derive" +version = "1.0.219" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "serde_spanned" +version = "0.6.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"bf41e0cfaf7226dca15e8197172c295a782857fcb97fad1808a166870dee75a3" +dependencies = [ + "serde", +] + +[[package]] +name = "syn" +version = "2.0.104" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17b6f705963418cdb9927482fa304bc562ece2fdd4f616084c50b7023b435a40" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "toml" +version = "0.8.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "toml_edit", +] + +[[package]] +name = "toml_const_macros" +version = "1.2.1" +dependencies = [ + "indexmap", + "proc-macro2", + "quote", + "syn", + "toml", +] + +[[package]] +name = "toml_datetime" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +dependencies = [ + "serde", +] + +[[package]] +name = "toml_edit" +version = "0.22.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +dependencies = [ + "indexmap", + "serde", + "serde_spanned", + "toml_datetime", + "toml_write", + "winnow", +] + +[[package]] +name = "toml_write" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" + +[[package]] +name = "unicode-ident" +version = "1.0.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" + +[[package]] +name = "winnow" +version = "0.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74c7b26e3480b707944fc872477815d29a8e429d2f93a1ce000f5fa84a15cbcd" +dependencies = [ + "memchr", +] diff --git 
a/anneal/vendor/toml_const_macros/Cargo.toml b/anneal/vendor/toml_const_macros/Cargo.toml new file mode 100644 index 0000000000..4441597747 --- /dev/null +++ b/anneal/vendor/toml_const_macros/Cargo.toml @@ -0,0 +1,53 @@ +# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO +# +# When uploading crates to the registry Cargo will automatically +# "normalize" Cargo.toml files for maximal compatibility +# with all versions of Cargo and also rewrite `path` dependencies +# to registry (e.g., crates.io) dependencies. +# +# If you are reading this file be aware that the original Cargo.toml +# will likely look very different (and much more reasonable). +# See Cargo.toml.orig for the original contents. + +[package] +edition = "2021" +rust-version = "1.56" +name = "toml_const_macros" +version = "1.2.1" +build = false +autolib = false +autobins = false +autoexamples = false +autotests = false +autobenches = false +description = "proc-macros for toml_const" +readme = false +keywords = [] +categories = ["development-tools::build-utils"] +license = "MIT" +repository = "https://github.com/facesthe/toml_const" + +[features] +phf = [] + +[lib] +name = "toml_const_macros" +path = "src/lib.rs" +proc-macro = true + +[dependencies.indexmap] +version = "2" + +[dependencies.proc-macro2] +version = "1" + +[dependencies.quote] +version = "1" + +[dependencies.syn] +version = "2" +features = ["full"] + +[dependencies.toml] +version = "0.8" +features = ["preserve_order"] diff --git a/anneal/vendor/toml_const_macros/Cargo.toml.orig b/anneal/vendor/toml_const_macros/Cargo.toml.orig new file mode 100644 index 0000000000..3296865734 --- /dev/null +++ b/anneal/vendor/toml_const_macros/Cargo.toml.orig @@ -0,0 +1,23 @@ +[package] +name = "toml_const_macros" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +description = "proc-macros for toml_const" +repository = "https://github.com/facesthe/toml_const" +license = "MIT" +keywords = [] +categories = 
["development-tools::build-utils"] + +[features] +phf = [] + +[lib] +proc-macro = true + +[dependencies] +syn = { version = "2", features = ["full"] } +quote = "1" +proc-macro2 = "1" +toml = { workspace = true } +indexmap = { workspace = true } diff --git a/anneal/vendor/toml_const_macros/src/check.rs b/anneal/vendor/toml_const_macros/src/check.rs new file mode 100644 index 0000000000..f29b2f71ff --- /dev/null +++ b/anneal/vendor/toml_const_macros/src/check.rs @@ -0,0 +1,439 @@ +//! Checks performed for parsed toml inputs + +use std::collections::HashSet; + +// use proc_macro::Span; +use proc_macro2::{self as pm2, Span}; + +use crate::MAP_FIELD; + +/// Various ways checks can be mismatched +#[derive(Clone, Debug)] +pub enum CheckError { + /// Key that is in one table but not the other. + KeyMismatch { + /// Sequence of keys in reverse order that leads to this mismatch. + path: Vec, + a_diff: Option, + b_diff: Option, + }, + /// A mismatch in value types. + /// + /// Sequence of keys in reverse order that leads to this mismatch. 
+ ValueMismatch(Vec), +} + +impl std::fmt::Display for CheckError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + CheckError::KeyMismatch { + path: table_path, + a_diff, + b_diff, + } => { + let table_path = table_path + .iter() + .rev() + .cloned() + .collect::>() + .join("::"); + + let desc = match (a_diff, b_diff) { + (None, None) => unimplemented!("cannot have both None"), + (None, Some(key)) | (Some(key), None) => format!( + "{} contains at least one additional key: {}", + table_path, key + ), + (Some(key_a), Some(key_b)) => format!( + "{} has at least 2 keys that differ: {}, {}", + table_path, key_a, key_b + ), + }; + + write!(f, "{}", desc) + } + CheckError::ValueMismatch(items) => { + let key_path = items.iter().rev().cloned().collect::>().join("::"); + + write!(f, "type mismatch for key: {}", key_path) + } + } + + // todo!() + } +} + +impl std::error::Error for CheckError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + None + } + + fn description(&self) -> &str { + "description() is deprecated; use Display" + } + + fn cause(&self) -> Option<&dyn std::error::Error> { + self.source() + } +} + +/// Check that this table and all child items do not contain prohibited keys. +pub fn check_unauthorized_keys(input: &toml::Table) -> Result<(), pm2::TokenStream> { + for (key, value) in input.iter() { + if key.is_empty() { + return Err( + syn::Error::new(Span::call_site(), "empty quoted keys cannot be used") + .to_compile_error(), + ); + } + + if key == MAP_FIELD { + return Err(syn::Error::new( + Span::call_site(), + format!("\"{MAP_FIELD}\" is a reserved key"), + ) + .to_compile_error()); + } + + match value { + toml::Value::Table(sub_table) => check_unauthorized_keys(sub_table)?, + toml::Value::Array(arr) => { + for item in arr.iter() { + if let toml::Value::Table(sub_table) = item { + check_unauthorized_keys(sub_table)? 
+ } + } + } + _ => (), + } + } + + Ok(()) +} + +/// Main check entry point +#[allow(unused)] +fn check(table: &toml::Table) -> Result<(), CheckError> { + // check that all arrays are consistent + for (key, value) in table.iter() { + match value { + toml::Value::Array(arr) => match check_array_schema(arr) { + Ok(_) => (), + Err(e) => return Err(propagate_check_error(key, e)), + }, + toml::Value::Table(sub_table) => match check(sub_table) { + Ok(_) => (), + Err(e) => return Err(propagate_check_error(key, e)), + }, + _ => (), + } + } + + Ok(()) +} + +/// When receiving an error when performing some op on key+values, this function accumulates current key to the error. +#[allow(unused)] +fn propagate_check_error(key: &str, err: CheckError) -> CheckError { + match err { + CheckError::KeyMismatch { + path: mut tp, + a_diff, + b_diff, + } => { + tp.push(key.to_string()); + + CheckError::KeyMismatch { + path: tp, + a_diff, + b_diff, + } + } + CheckError::ValueMismatch(mut items) => { + items.push(key.to_string()); + CheckError::ValueMismatch(items) + } + } +} + +#[allow(unused)] +fn compare_value( + key: Option<&str>, + val_a: &toml::Value, + val_b: &toml::Value, +) -> Result<(), CheckError> { + match (val_a, val_b) { + (toml::Value::Boolean(_), toml::Value::Boolean(_)) + | (toml::Value::Datetime(_), toml::Value::Datetime(_)) + | (toml::Value::Float(_), toml::Value::Float(_)) + | (toml::Value::Integer(_), toml::Value::Integer(_)) + | (toml::Value::String(_), toml::Value::String(_)) => Ok(()), + + (toml::Value::Array(arr_a), toml::Value::Array(arr_b)) => { + compare_array_schema(key, arr_a, arr_b) + } + (toml::Value::Table(a_table), toml::Value::Table(b_table)) => { + match compare_table_schema(a_table, b_table) { + Ok(_) => Ok(()), + Err(e) => match key { + Some(k) => Err(propagate_check_error(k, e)), + None => Err(e), + }, + } + } + + _ => Err(CheckError::ValueMismatch(if let Some(k) = key { + vec![k.to_string()] + } else { + vec![] + })), + } +} + +#[allow(unused)] +fn 
check_array_schema(arr: &toml::value::Array) -> Result<(), CheckError> { + match arr.len() { + 0..2 => (), + _ => { + let mut arr_iter = arr.iter(); + let first = arr_iter.next().unwrap(); + + for elem in arr_iter { + // arrays do not propagate their key downwards + compare_value(None, first, elem)?; + } + } + } + + Ok(()) +} + +#[allow(unused)] +fn compare_array_schema( + key: Option<&str>, + arr_a: &toml::value::Array, + arr_b: &toml::value::Array, +) -> Result<(), CheckError> { + check_array_schema(arr_a)?; + check_array_schema(arr_b)?; + + match (arr_a.len(), arr_b.len()) { + (0, 0) | (0, _) | (_, 0) => Ok(()), + _ => compare_value(key, &arr_a[0], &arr_b[0]), + } +} + +/// Check that both tables match exactly in keys and types. +#[allow(unused)] +pub fn compare_table_schema( + table_a: &toml::Table, + table_b: &toml::Table, +) -> Result<(), CheckError> { + // check that both tables have the same keys + let a_keys = table_a.keys().collect::>(); + let b_keys = table_b.keys().collect::>(); + + match ( + a_keys.difference(&b_keys).next(), + b_keys.difference(&a_keys).next(), + ) { + (None, None) => (), + (None, Some(b)) => { + return Err(CheckError::KeyMismatch { + path: vec![], + a_diff: None, + b_diff: Some(b.to_string()), + }); + } + (Some(a), None) => { + return Err(CheckError::KeyMismatch { + path: vec![], + a_diff: Some(a.to_string()), + b_diff: None, + }); + } + (Some(a), Some(b)) => { + return Err(CheckError::KeyMismatch { + path: vec![], + a_diff: Some(a.to_string()), + b_diff: Some(b.to_string()), + }); + } + } + + for (key, a_val) in table_a.iter() { + let b_val = table_b.get(key).expect("already checked in previous step"); + + match (a_val, b_val) { + (toml::Value::Boolean(_), toml::Value::Boolean(_)) + | (toml::Value::Datetime(_), toml::Value::Datetime(_)) + | (toml::Value::Float(_), toml::Value::Float(_)) + | (toml::Value::Integer(_), toml::Value::Integer(_)) + | (toml::Value::String(_), toml::Value::String(_)) => (), + + // more checks + 
(toml::Value::Array(a_arr), toml::Value::Array(b_arr)) => { + compare_array_schema(Some(key), a_arr, b_arr)?; + } + (toml::Value::Table(a_table), toml::Value::Table(b_table)) => { + match compare_table_schema(a_table, b_table) { + Ok(_) => (), + Err(e) => return Err(propagate_check_error(key, e)), + } + } + + _ => return Err(CheckError::ValueMismatch(vec![key.to_string()])), + } + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + + use super::*; + use toml::de::from_str; + + #[test] + fn test_check_unauthorized_keys() { + let toml_str = r#" + [a] + key1 = "value1" + key2 = 42 + key3 = true + + [[b]] + key1 = "value1" + + [[b]] + key1 = "value2" + + [c] + subtable.item = "value3" + subtable.otheritem = "value4" + "#; + + let table: toml::Table = from_str(toml_str).unwrap(); + assert!(check_unauthorized_keys(&table).is_ok()); + + // Check with an empty key + let toml_str_with_empty_key = r#" + [a] + "" = "value1" + key2 = 42 + key3 = true + + [[b]] + key1 = "value1" + + [[b]] + key1 = "value2" + + [c] + subtable.item = "value3" + subtable.otheritem = "value4" + "#; + + let table_with_empty_key: toml::Table = from_str(toml_str_with_empty_key).unwrap(); + let res = check_unauthorized_keys(&table_with_empty_key); + assert!(res.is_err()); + } + + #[test] + fn test_check_matching_schema() { + let toml_a = r#" + [a] + key1 = "value1" + key2 = 42 + key3 = true + + [[b]] + key1 = "value1" + + [[b]] + key1 = "value2" + + [c] + subtable.item = "value3" + subtable.otheritem = "value4" + "#; + + let toml_b = r#" + [a] + key1 = "value2" + key2 = 24 + key3 = false + + [[b]] + key1 = "value3" + + [c] + subtable.item = "value3" + subtable.otheritem = "value4" + "#; + + let table_a: toml::Table = from_str(toml_a).unwrap(); + let table_b: toml::Table = from_str(toml_b).unwrap(); + + assert!(compare_table_schema(&table_a, &table_b).is_ok()); + } + + /// Return an error pointing to the key that does not have the correct data type + #[test] + fn test_key_type_mismatch() { + let toml_a = r#" 
+ [a] + a_inner.key1 = "value1" + key2 = 42 + "#; + + let toml_b = r#" + [a] + a_inner.key1 = true + key2 = 24 + "#; + + let table_a: toml::Table = from_str(toml_a).unwrap(); + let table_b: toml::Table = from_str(toml_b).unwrap(); + + let res = compare_table_schema(&table_a, &table_b); + assert!(res.is_err()); + + if let CheckError::ValueMismatch(items) = res.clone().unwrap_err() { + assert_eq!(items, vec!["key1", "a_inner", "a"]); + } else { + panic!("Expected ValueMismatch error, got {:?}", res); + } + } + + /// Return an error showing one or two keys that + #[test] + fn test_key_mismatch() { + let toml_a = r#" + [a] + key1 = "value1" + key2 = 42 + "#; + + let toml_b = r#" + [a] + key1 = "value2" + key3 = 24 + "#; + + let table_a: toml::Table = from_str(toml_a).unwrap(); + let table_b: toml::Table = from_str(toml_b).unwrap(); + + let res = compare_table_schema(&table_a, &table_b); + println!("{:?}", res); + assert!(res.is_err()); + + if let CheckError::KeyMismatch { a_diff, b_diff, .. } = res.clone().unwrap_err() { + assert_eq!(a_diff, Some("key2".to_string())); + assert_eq!(b_diff, Some("key3".to_string())); + } else { + panic!("Expected KeyMismatch error, got {:?}", res); + } + } +} diff --git a/anneal/vendor/toml_const_macros/src/instantiate.rs b/anneal/vendor/toml_const_macros/src/instantiate.rs new file mode 100644 index 0000000000..3436e381ee --- /dev/null +++ b/anneal/vendor/toml_const_macros/src/instantiate.rs @@ -0,0 +1,404 @@ +//! Struct instantiation crate. +//! +//! A toml table is converted to a custom struct. +//! The identifier of the struct is used as the struct's type. + +use proc_macro2::{self as pm2, Span}; +use quote::quote; +use syn::{punctuated::Punctuated, Ident}; + +use crate::TomlValue; + +/// Chars to replace when converting to an identifier. +const REPLACE_CHARS: &[char] = &[' ', '-', '_', ':', '.', '/', '\\', '"']; + +/// Generate the instantiation of an item. This can be a custom struct or a simple value. 
+/// If a key is provided, the instantiation will be in a field-value pair. +/// +/// Keys are not provided if: +/// - the table is the root table +/// - the value is defined as an element in an array +/// +/// This is basically a wrapper around [quote::ToTokens]. +pub trait Instantiate { + fn instantiate( + &self, + key: &str, + toml_value: &TomlValue, + parents: Vec<&Ident>, + ) -> pm2::TokenStream; +} + +/// Create identifiers for variables and types from a string. +pub trait ConstIdentDef { + /// Create a valid variable identifier, formatted as SCREAMING_SNAKE_CASE. + fn to_variable_ident(&self) -> syn::Ident; + + /// Create a valid module identifier, formatted as snake_case. + fn to_module_ident(&self) -> syn::Ident { + syn::Ident::new_raw( + &self.to_variable_ident().to_string().to_lowercase(), + Span::call_site(), + ) + } + + /// Create a valid type identifier, formatted as PascalCase. + fn to_type_ident(&self) -> syn::Ident; + + // /// Create an array type identifier formatted as PascalCase. 
+ // fn to_array_type_ident(&self) -> String { + // format!("{}Item", self.to_type_ident()) + // } +} + +impl ConstIdentDef for T +where + T: AsRef, +{ + fn to_variable_ident(&self) -> syn::Ident { + let self_ref = self.as_ref(); + + let inter = self_ref.replace(REPLACE_CHARS, "_"); + + let inter = inter + .split('_') + .map(|item| item.to_uppercase()) + .collect::>() + .join("_"); + + let inter = match inter.starts_with(char::is_numeric) { + true => format!("_{}", inter), + false => inter, + }; + + syn::Ident::new(&inter, Span::call_site()) + } + + fn to_type_ident(&self) -> syn::Ident { + let inter = self.as_ref().replace(REPLACE_CHARS, "_"); + + let inter = match inter.contains("_") { + true => inter + .split('_') + .map(|item| { + let mut chars = item.chars(); + + match chars.next() { + Some(c) => { + let first_char = c.to_ascii_uppercase(); + let rest = chars.collect::().to_ascii_lowercase(); + format!("{}{}", first_char, rest) + } + None => String::new(), + } + }) + .collect::(), + false => { + // split at a capital letter, but preserve the letter + let inter = inter.chars().fold(String::new(), |mut acc, c| { + if c.is_uppercase() && !acc.is_empty() { + acc.push('_'); + } + acc.push(c); + acc + }); + + inter + .split("_") + .map(|item| { + let mut chars = item.chars(); + + match chars.next() { + Some(c) => { + let first_char = c.to_ascii_uppercase(); + let rest = chars.collect::().to_ascii_lowercase(); + format!("{}{}", first_char, rest) + } + None => String::new(), + } + }) + .collect::() + + // todo!() + } + }; + + let inter = match inter.starts_with(char::is_numeric) { + true => format!("_{}", inter), + false => inter, + }; + + syn::Ident::new(&inter, Span::call_site()) + } +} + +impl Instantiate for toml::Value { + fn instantiate( + &self, + key: &str, + toml_value: &TomlValue, + parents: Vec<&Ident>, + ) -> proc_macro2::TokenStream { + use toml::Value::*; + + match self { + // cases when items are instantiated as fields in an array + String(val) => 
quote! { #val }, + Integer(val) => quote! { #val }, + Float(val) => quote! { #val }, + Boolean(val) => quote! { #val }, + + // items with inner impls + Datetime(datetime) => datetime.instantiate(key, toml_value, vec![]), + Array(values) => values.instantiate(key, toml_value, parents), + Table(map) => map.instantiate(key, toml_value, parents), + } + } +} + +impl Instantiate for toml::Table { + fn instantiate( + &self, + key: &str, + toml_value: &TomlValue, + parents: Vec<&Ident>, + ) -> proc_macro2::TokenStream { + let table_type = key.to_type_ident(); + let table_mod = key.to_module_ident(); + + let table_ty = match parents.len() { + 0 => { + quote! { #table_type } + } + _ => { + let p = parents.iter().collect::>(); + quote! { #p :: #table_type } + } + }; + + let mut parents = parents.clone(); + parents.push(&table_mod); + + let new_params = match toml_value { + TomlValue::Table(tab) => tab + .iter() + .map(|(key, val)| { + let inner_val = self.get(key).expect("key should exist in table"); + + inner_val.instantiate(key, val, parents.clone()) + }) + .collect::>(), + TomlValue::TableMap { + keys, + first, + value_type, + } => { + let map_vals = keys + .iter() + .map(|k| { + let key_lit = syn::LitStr::new(k, Span::call_site()); + + let value = self.get(k).expect("key should exist in table"); + let value = value.instantiate(first, value_type, parents.clone()); + + quote! {#key_lit => #value} + }) + .collect::>(); + + let map_value = quote! {{ + use toml_const::phf; + &toml_const::phf_map_macro! { + #map_vals + } + }}; + + self.iter() + .map(|(_, f_val)| f_val.instantiate(first, value_type, parents.clone())) + .chain([map_value]) + .collect::>() + } + _ => unimplemented!("expected a table or table map"), + }; + + quote! 
{ + #table_ty::new( + #new_params + ) + } + } +} + +impl Instantiate for toml::value::Array { + fn instantiate( + &self, + key: &str, + toml_value: &TomlValue, + parents: Vec<&Ident>, + ) -> proc_macro2::TokenStream { + let arr = if let TomlValue::Array(arr) = toml_value { + arr + } else { + unimplemented!("expected a toml array value"); + }; + + let val = match arr.first() { + Some(v) => v, + None => return quote! { &[] }, + }; + + let elements = self + .iter() + .map(|elem| elem.instantiate(key, val, parents.clone())) + .collect::>(); + + quote! { + &[ #elements ] + } + } +} + +// datetime structs do not require a key, as they are already defined. +impl Instantiate for toml::value::Datetime { + fn instantiate(&self, k: &str, _: &TomlValue, _: Vec<&Ident>) -> proc_macro2::TokenStream { + match (self.date, self.time, self.offset) { + (Some(d), Some(t), Some(o)) => { + let d = d.instantiate(k, &TomlValue::Boolean, vec![]); + let t = t.instantiate(k, &TomlValue::Boolean, vec![]); + let o = o.instantiate(k, &TomlValue::Boolean, vec![]); + + quote! { + toml_const::OffsetDateTime { + date: #d, + time: #t, + offset: #o + } + } + } + (Some(d), Some(t), None) => { + let d = d.instantiate(k, &TomlValue::Boolean, vec![]); + let t = t.instantiate(k, &TomlValue::Boolean, vec![]); + + quote! { + toml_const::LocalDateTime { + date: #d, + time: #t + } + } + } + (Some(d), None, None) => { + let d = d.instantiate(k, &TomlValue::Boolean, vec![]); + + quote! { + toml_const::LocalDate { + date: #d + } + } + } + (None, Some(t), None) => { + let t = t.instantiate(k, &TomlValue::Boolean, vec![]); + + quote! { + toml_const::LocalTime { + time: #t + } + } + } + + _ => unimplemented!("unsupported datetime combination"), + } + } +} + +// sub structs do not require key, they implement `Key::Element`. 
+impl Instantiate for toml::value::Date { + fn instantiate(&self, _: &str, _: &TomlValue, _: Vec<&Ident>) -> proc_macro2::TokenStream { + let year = self.year; + let month = self.month; + let day = self.day; + + quote! { + toml_const::Date { + year: #year, + month: #month, + day: #day + } + } + } +} + +impl Instantiate for toml::value::Time { + fn instantiate(&self, _: &str, _: &TomlValue, _: Vec<&Ident>) -> proc_macro2::TokenStream { + let hour = self.hour; + let minute = self.minute; + let second = self.second; + let nanosecond = self.nanosecond; + + quote! { + toml_const::Time { + hour: #hour, + minute: #minute, + second: #second, + nanosecond: #nanosecond + } + } + } +} + +impl Instantiate for toml::value::Offset { + fn instantiate(&self, _: &str, _: &TomlValue, _: Vec<&Ident>) -> proc_macro2::TokenStream { + match self { + toml::value::Offset::Z => quote! { toml_const::Offset::Z }, + toml::value::Offset::Custom { minutes } => quote! { + toml_const::Offset::Custom { + minutes: #minutes + } + }, + } + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use super::*; + + #[test] + fn test_instantiation() { + let cargo_manifest = include_str!("../Cargo.toml"); + let toml: toml::Table = toml::Table::from_str(cargo_manifest).unwrap(); + let value: TomlValue = toml.clone().into(); + + let root_ident = Ident::new("ROOT_TABLE", Span::call_site()); + let instantiation = toml.instantiate(&root_ident.to_string(), &value, vec![]); + + println!("Table instantiation: {}", instantiation); + } + + #[test] + fn test_split_pascal_case() { + let inter = "PascalCase"; + + let inter = inter.chars().fold(String::new(), |mut acc, c| { + if c.is_uppercase() && !acc.is_empty() { + acc.push('_'); + } + acc.push(c); + acc + }); + + println!("inter: {inter}"); + + let inter = "Pascal"; + + let inter = inter.chars().fold(String::new(), |mut acc, c| { + if c.is_uppercase() && !acc.is_empty() { + acc.push('_'); + } + acc.push(c); + acc + }); + + println!("inter: {inter}"); + } +} 
diff --git a/anneal/vendor/toml_const_macros/src/lib.rs b/anneal/vendor/toml_const_macros/src/lib.rs new file mode 100644 index 0000000000..df8c6a5628 --- /dev/null +++ b/anneal/vendor/toml_const_macros/src/lib.rs @@ -0,0 +1,202 @@ +mod check; +mod instantiate; +mod normalize; +mod parse; + +use std::path::PathBuf; + +use instantiate::Instantiate; +use proc_macro as pm; +use proc_macro2::{self as pm2, Span}; + +use parse::{MacroInput, MultipleMacroInput}; +use quote::{quote, ToTokens}; +use syn::parse_macro_input; + +use crate::{instantiate::ConstIdentDef, normalize::TomlValue}; + +/// Private map field for tables that can be represented as hashmaps +const MAP_FIELD: &str = "__map__"; + +/// Instantiate a const definition of the contents from a TOML file. +/// +/// This macro resolves paths relative to the first parent directory containing a `Cargo.toml` file. +#[proc_macro] +pub fn toml_const(input: pm::TokenStream) -> pm::TokenStream { + let input: MultipleMacroInput = parse_macro_input!(input); + + let manifest_path = + std::env::var("CARGO_MANIFEST_DIR").expect("manifest dir variable must exist"); + let manifest_path = PathBuf::from(manifest_path); + assert!(manifest_path.is_dir()); + let abs_manifest_path = manifest_path + .canonicalize() + .expect("path must canonicalize"); + + let const_defs = input + .0 + .iter() + .map(|i| i.to_const_defs(&abs_manifest_path)) + .collect::(); + + let inner_calls = input + .0 + .iter() + .map(|i| { + let absolute = i.to_abs_path(&abs_manifest_path); + quote! { + toml_const::toml_const_inner! { + #absolute + } + } + }) + .collect::(); + + quote! { + #const_defs + + #inner_calls + } + .into() +} + +/// Instantiate a const definition of the contents from a TOML file. +/// +/// If this macro is used in a workspace, it will resolve paths relative to the workspace's `Cargo.toml`. +/// +/// If this macro is used in a crate, it will resolve paths relative to the crate's `Cargo.toml`. 
+#[proc_macro] +pub fn toml_const_ws(input: pm::TokenStream) -> pm::TokenStream { + let input: MultipleMacroInput = parse_macro_input!(input); + + let ws_dir = std::env::current_dir() + .expect("current directory must exist") + .to_string_lossy() + .to_string(); + + let ws_path = PathBuf::from(ws_dir); + assert!(ws_path.is_dir()); + let abs_ws_path = ws_path.canonicalize().expect("path must canonicalize"); + + let const_defs = input + .0 + .iter() + .map(|i| i.to_const_defs(&abs_ws_path)) + .collect::(); + + // let collected = input.to_const_defs(&abs_ws_path); + let inner_calls = input + .0 + .iter() + .map(|i| { + let absolute = i.to_abs_path(&abs_ws_path); + quote! { + toml_const::toml_const_inner! { + #absolute + } + } + }) + .collect::(); + + quote! { + #const_defs + + #inner_calls + } + .into() +} + +/// TODO: working title +#[proc_macro_attribute] +pub fn unwrap_datetime(_attr: pm::TokenStream, item: pm::TokenStream) -> pm::TokenStream { + let syn::ItemMacro { + attrs, + ident, + mac, + semi_token, + } = parse_macro_input!(item); + + syn::ItemMacro { + attrs, + ident, + mac, + semi_token, + } + .to_token_stream() + .into() +} + +/// Inner method call generated by public macros +#[doc(hidden)] +#[proc_macro] +pub fn toml_const_inner(input: pm::TokenStream) -> pm::TokenStream { + let input: MacroInput = parse_macro_input!(input); + + let toml_table = match input.generate_toml_table() { + Ok(tt) => tt, + Err(e) => return e.into(), + }; + + match check::check_unauthorized_keys(&toml_table) { + Ok(_) => (), + Err(e) => return e.into(), + } + + // perform normalization + let toml_val_table = TomlValue::from(toml_table.clone()); + let toml_val_table = match toml_val_table.normalize() { + Ok(n) => n, + Err(e) => { + return syn::Error::new(Span::call_site(), e.to_string()) + .to_compile_error() + .into() + } + }; + let toml_val_table = toml_val_table.reduce(); + + let mut toml_table_val = toml::Value::Table(toml_table); + toml_val_table.normalize_toml(&mut 
toml_table_val); + let toml_table = toml_table_val + .as_table() + .expect("conversion back to table must not fail"); + + let derive_attrs = input + .attrs + .iter() + .filter(|attr| attr.path().is_ident("derive")) + .cloned() + .collect::>(); + + let table_definitions = toml_val_table.definition(&input.item_ident.to_string(), &derive_attrs); + + let instantiation = + toml_table.instantiate(&input.item_ident.to_string(), &toml_val_table, vec![]); + + let pub_token = if input.is_pub { + quote! {pub} + } else { + quote! {} + }; + + let static_const_token = match input.static_const { + true => quote! {const}, + false => quote! {static}, + }; + + let item_ident = &input.item_ident; + let item_ty = input.item_ident.to_string().to_type_ident(); + + let doc_attrs = input + .doc_attrs() + .into_iter() + .map(|a| a.to_token_stream()) + .collect::(); + + quote! { + #table_definitions + + #doc_attrs + #pub_token #static_const_token #item_ident: #item_ty = #instantiation; + } + .into() +} diff --git a/anneal/vendor/toml_const_macros/src/normalize.rs b/anneal/vendor/toml_const_macros/src/normalize.rs new file mode 100644 index 0000000000..e7939049e8 --- /dev/null +++ b/anneal/vendor/toml_const_macros/src/normalize.rs @@ -0,0 +1,820 @@ +//! Normalizing module, aka field inference. +//! +//! Normalization is the process of inferring missing fields in a TOML table inside arrays. +//! A user can define only the fields they care about, and the rest are initialized with default values. +//! +//! Steps to perform normalization are: +//! - Derive a normalized "schema" from the input TOML table. Arrays are reduced to 0/1 elements +//! - Using this normalized table, visit the original table and populate missing fields +//! +//! Empty fields are populated with default values: +//! - primitive types are set to their defaults +//! - arrays are empty +//! 
- dates are set to `1970-01-01T00:00:00Z` + +use indexmap::IndexMap; +use proc_macro2 as pm2; +use proc_macro2::Span; +use quote::{quote, ToTokens}; +use syn::{punctuated::Punctuated, Ident}; +use toml::value::{Date, Datetime}; + +use crate::{instantiate::ConstIdentDef, MAP_FIELD}; + +const DEFAULT_DATE: Date = Date { + year: 1970, + month: 1, + day: 1, +}; +const DEFAULT_TIME: toml::value::Time = toml::value::Time { + hour: 0, + minute: 0, + second: 0, + nanosecond: 0, +}; +const DEFAULT_OFFSET: toml::value::Offset = toml::value::Offset::Z; + +#[derive(Clone, Debug)] +pub enum NormalizationError { + /// A mismatch in value types. + /// + /// Sequence of keys in reverse order that leads to this mismatch. + ValueMismatch { + /// Reverse key path leading to the mismatch + path: Vec, + + /// Conflicting value types + value_types: Box<(TomlValue, TomlValue)>, + }, +} + +/// Working intermediate representation - contains only key and type information +#[derive(Clone, Debug, PartialEq)] +pub enum TomlValue { + String, + Integer, + Float, + Boolean, + Datetime { + date: bool, + time: bool, + offset: bool, + }, + Array(Vec), + Table(IndexMap), + + /// A table map is a subset of a table that contains identical values for all keys. 
+ TableMap { + keys: Vec, + /// The key that table and array types inherit + first: String, + value_type: Box, + }, +} + +impl std::error::Error for NormalizationError { + fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { + None + } + + fn description(&self) -> &str { + "description() is deprecated; use Display" + } + + fn cause(&self) -> Option<&dyn std::error::Error> { + self.source() + } +} + +impl std::fmt::Display for NormalizationError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + NormalizationError::ValueMismatch { path, value_types } => { + let path = path + .iter() + .rev() + .map(|s| s.as_str()) + .collect::>() + .join("::"); + + write!( + f, + "Value mismatch at {} - found: {:?} and {:?}", + path, value_types.0, value_types.1 + ) + } + } + } +} + +impl From for toml::Value { + fn from(value: TomlValue) -> Self { + match value { + TomlValue::String => toml::Value::String(Default::default()), + TomlValue::Integer => toml::Value::Integer(Default::default()), + TomlValue::Float => toml::Value::Float(Default::default()), + TomlValue::Boolean => toml::Value::Boolean(Default::default()), + TomlValue::Datetime { date, time, offset } => { + toml::Value::Datetime(toml::value::Datetime { + date: if date { Some(DEFAULT_DATE) } else { None }, + time: if time { Some(DEFAULT_TIME) } else { None }, + offset: if offset { Some(DEFAULT_OFFSET) } else { None }, + }) + } + TomlValue::Array(elements) => { + toml::Value::Array(elements.into_iter().map(|e| e.into()).collect()) + } + TomlValue::Table(sub_table) => { + toml::Value::Table(sub_table.into_iter().map(|(k, v)| (k, v.into())).collect()) + } + TomlValue::TableMap { + keys, value_type, .. 
+ } => toml::Value::Table( + keys.into_iter() + .map(|k| (k, (*value_type.clone()).into())) + .collect(), + ), + } + } +} + +impl From for TomlValue { + fn from(value: toml::Value) -> Self { + match value { + toml::Value::String(_) => Self::String, + toml::Value::Integer(_) => Self::Integer, + toml::Value::Float(_) => Self::Float, + toml::Value::Boolean(_) => Self::Boolean, + toml::Value::Datetime(datetime) => Self::Datetime { + date: datetime.date.is_some(), + time: datetime.time.is_some(), + offset: datetime.offset.is_some(), + }, + toml::Value::Array(values) => { + Self::Array(values.into_iter().map(|v| v.into()).collect()) + } + toml::Value::Table(map) => map.into(), + } + } +} + +impl From for TomlValue { + fn from(value: toml::Table) -> Self { + Self::Table(value.into_iter().map(|(k, v)| (k, v.into())).collect()) + } +} + +impl TomlValue { + /// This method assumes that [TomlValue::normalize] is already called. + /// + /// This will recursively visit and normalize all items in a [toml::Value]. 
+ pub fn normalize_toml(&self, toml: &mut toml::Value) { + match (self, toml) { + (TomlValue::String, toml::Value::String(_)) + | (TomlValue::Integer, toml::Value::Integer(_)) + | (TomlValue::Float, toml::Value::Float(_)) + | (TomlValue::Boolean, toml::Value::Boolean(_)) => (), + + ( + TomlValue::Datetime { + date: tv_date, + time: tv_time, + offset: tv_offset, + }, + toml::Value::Datetime(Datetime { date, time, offset }), + ) => { + if *tv_date && date.is_none() { + *date = Some(DEFAULT_DATE) + } + + if *tv_time && time.is_none() { + *time = Some(DEFAULT_TIME) + } + + if *tv_offset && offset.is_none() { + *offset = Some(DEFAULT_OFFSET) + } + } + (TomlValue::Array(toml_values), toml::Value::Array(values)) => { + if let Some(toml_value) = toml_values.first() { + for val in values { + toml_value.normalize_toml(val); + } + } + } + (TomlValue::Table(hash_map), toml::Value::Table(map)) => { + for (key, value) in hash_map { + match (map.get_mut(key), value) { + (Some(toml_value), _) => { + value.normalize_toml(toml_value); + } + // for missing keys that point to arrays, we initialize them as empty arrays + (None, TomlValue::Array(_)) => { + map.insert(key.to_owned(), toml::Value::Array(vec![])); + } + (None, _) => { + map.insert(key.to_owned(), value.clone().into()); + } + } + } + } + ( + TomlValue::TableMap { + keys, value_type, .. + }, + toml::Value::Table(map), + ) => { + for key in keys { + match (map.get_mut(key), value_type.as_ref()) { + (Some(toml_value), _) => { + value_type.normalize_toml(toml_value); + } + (None, TomlValue::Array(_)) => { + map.insert(key.to_owned(), toml::Value::Array(vec![])); + } + (None, _) => { + map.insert(key.to_owned(), (*value_type.clone()).into()); + } + } + } + } + _ => unimplemented!("normalizing different types cannot be done"), + } + } + + /// Derive a normalized version of [Self]. + /// + /// At this point, the schema of [Self] will be superset of the original. 
+ pub fn normalize(self) -> Result { + match self { + TomlValue::Array(toml_values) => match toml_values.first() { + Some(first) => { + let first_val = first.clone(); + let normalized = toml_values.into_iter().try_fold(first_val, |acc, item| { + let inter = item.normalize()?; + acc.union(&inter) + })?; + + Ok(TomlValue::Array(vec![normalized])) + } + None => Ok(TomlValue::Array(vec![])), + }, + + TomlValue::Table(toml_table) => { + let norm_table = toml_table + .into_iter() + .map(|(k, v)| { + let normalized_value = v.normalize(); + match normalized_value { + Ok(nv) => Ok((k.clone(), nv)), + Err(e) => Err(e.propagate(&k)), + } + }) + .collect::, NormalizationError>>()?; + + Ok(TomlValue::Table(norm_table)) + } + + TomlValue::Datetime { date, time, offset } => { + Ok(Self::resolve_date_time_offset(date, time, offset)) + } + + // everything else is already normalized + other => Ok(other), + } + } + + /// Transform tables with identical values to table maps + #[cfg(feature = "phf")] + pub fn reduce(self) -> Self { + match self { + TomlValue::Table(tab) => { + match tab.len() { + 0 => TomlValue::Table(tab), + _ => { + // reduce inner first + let reduced_inner = tab + .into_iter() + .map(|(k, v)| (k, v.reduce())) + .collect::>(); + + // let mut key_values = reduced_inner.iter(); + // let (first_key, first_value) = + // key_values.next().expect("already checked for empty table"); + + let (keys, values): (Vec<_>, Vec<_>) = reduced_inner.into_iter().unzip(); + let first_val = &values[0]; + let first_key = keys[0].to_string(); + + if values.iter().all(|v| first_val == v) { + TomlValue::TableMap { + keys, + first: first_key, + value_type: Box::new(first_val.clone()), + } + } else { + TomlValue::Table((keys.into_iter()).zip(values).collect()) + } + } + } + } + + TomlValue::Array(arr) => { + TomlValue::Array(arr.into_iter().map(|a| a.reduce()).collect()) + } + // no need to reduce primitive types + other => other, + } + } + + #[cfg(not(feature = "phf"))] + pub fn reduce(self) 
-> Self { + self + } + + /// Calculate the union of two [TomlValue] types. + /// + /// This will first check if both types are the same, and then merge table and array types. + /// Arrays will be reduced to lengths 1 or 0. + fn union(&self, other: &Self) -> Result { + match (self, other) { + (TomlValue::String, TomlValue::String) => Ok(TomlValue::String), + (TomlValue::Integer, TomlValue::Integer) => Ok(TomlValue::Integer), + (TomlValue::Float, TomlValue::Float) => Ok(TomlValue::Float), + (TomlValue::Boolean, TomlValue::Boolean) => Ok(TomlValue::Boolean), + ( + TomlValue::Datetime { + date: ld, + time: lt, + offset: lo, + }, + TomlValue::Datetime { + date: rd, + time: rt, + offset: ro, + }, + ) => Ok(TomlValue::Datetime { + date: *ld || *rd, + time: *lt || *rt, + offset: *lo || *ro, + }), + + (TomlValue::Array(arr_self), TomlValue::Array(arr_other)) => { + let mut chained = arr_self.iter().chain(arr_other.iter()); + + match chained.next() { + Some(first) => { + let merged = arr_self + .iter() + .chain(arr_other.iter()) + .try_fold(first.to_owned(), |acc, item| acc.union(item))?; + + Ok(TomlValue::Array(vec![merged])) + } + None => Ok(TomlValue::Array(vec![])), + } + } + + (TomlValue::Table(tab_self), TomlValue::Table(tab_other)) => { + let mut merged = tab_self.clone(); + + for (key, value) in tab_other { + match merged.get_mut(key) { + Some(existing_val) => { + match existing_val.union(value) { + Ok(u) => *existing_val = u, + Err(e) => Err(e.propagate(key))?, + }; + } + None => { + merged.insert(key.to_string(), value.clone()); + } + } + } + + Ok(TomlValue::Table(merged)) + } + + err_other => Err(NormalizationError::ValueMismatch { + path: vec![], + value_types: Box::new((err_other.0.clone(), err_other.1.clone())), + }), + } + } + + /// Some date-time combinations are not valid + fn resolve_date_time_offset(date: bool, time: bool, offset: bool) -> TomlValue { + match (date, time, offset) { + // offset date time - anything containing offsets is promoted to offset 
date time + (_, _, true) => TomlValue::Datetime { + date: true, + time: true, + offset: true, + }, + // local date time + (true, true, false) => TomlValue::Datetime { + date: true, + time: true, + offset: false, + }, + // local date + (true, false, false) => TomlValue::Datetime { + date: true, + time: false, + offset: false, + }, + // local time + (false, true, false) => TomlValue::Datetime { + date: false, + time: true, + offset: false, + }, + (false, false, false) => { + unimplemented!("datetime cannot be constructed without any components") + } + } + } + + /// Return the type of a value. + /// Arrays will descend and return their inner type. + fn ty(&self, key: &str, parent_mod: Option<&Ident>) -> pm2::TokenStream { + match self { + TomlValue::String => quote! {&'static str}, + TomlValue::Integer => quote! {i64}, + TomlValue::Float => quote! {f64}, + TomlValue::Boolean => quote! {bool}, + TomlValue::Datetime { date, time, offset } => { + let dt_ident = date_time_struct_ident(*date, *time, *offset); + quote! { toml_const :: #dt_ident } + } + TomlValue::Array(toml_values) => { + match toml_values.first() { + Some(inner) => { + let inner_type = inner.ty(key, parent_mod); + + quote! { &'static [#inner_type] } + } + // default to string array + None => quote! { &'static [&'static str] }, + } + } + TomlValue::Table(_) | TomlValue::TableMap { .. } => { + let self_type = key.to_type_ident(); + + match parent_mod { + Some(parent) => quote! { #parent :: #self_type }, + None => quote! { #self_type }, + } + } // TomlValue::TableMap { keys, value_type } => { + // // &value_type.ty(key, parent_mod) + + // todo!() + // } + } + } + + /// Recursively define array and table types. + /// + /// `Self` should be normalized and reduced first. 
+ pub fn definition(&self, key: &str, derive_attrs: &[syn::Attribute]) -> pm2::TokenStream { + match self { + // do not need to define primitive/provided types + TomlValue::String + | TomlValue::Integer + | TomlValue::Float + | TomlValue::Boolean + | TomlValue::Datetime { .. } => quote! {}, + + TomlValue::Array(arr) => match arr.len() { + 0 => quote! {}, // instantiated as bool array + 1 => { + let inner_value = &arr[0]; + + inner_value.definition(key, derive_attrs) + } + _ => unimplemented!("normalized array should have 0 or 1 elements"), + }, + TomlValue::Table(tab) => { + let self_ident = key.to_type_ident(); + let self_mod = key.to_module_ident(); + + // // we make the identifier in all values the same type, if all values in the table are the same. + // let same_val_type = match tab.len() { + // 0 => None, + // _ => { + // let mut key_vals = tab.iter(); + // let (first_key, first_val) = + // key_vals.next().expect("already checked for empty table"); + + // match key_vals.all(|(_, v)| v == first_val) { + // true => Some(first_val.ty(first_key, Some(&self_mod))), + // false => None, + // } + // } + // }; + + // let mut x = 0; + + let constructor_fields = tab + .iter() + .map(|(k, v)| { + // x += 1; + // let field_ident = k.to_variable_ident(); + let field_ident = k.to_module_ident(); + + let field_type = v.ty(k, Some(&self_mod)); + + quote! { + #field_ident: #field_type + } + }) + .collect::>(); + + let struct_fields = constructor_fields + .iter() + .map(|k| { + quote! {pub #k} + }) + .collect::>(); + + let inner_definitions = tab + .iter() + .filter(|(_, v)| { + matches!( + v, + TomlValue::Array(_) | TomlValue::Table(_) | TomlValue::TableMap { .. } + ) + }) + .map(|(k, v)| v.definition(k, derive_attrs)) + .collect::(); + + let shorthand_init_fields = tab + .iter() + .map(|(k, _)| k.to_module_ident().to_token_stream()) + .collect::>(); + + let derives = derive_attrs + .iter() + .map(|attr| quote! { #attr }) + .collect::(); + + quote! 
{ + #[derive(Clone, Copy, Debug)] + #derives + pub struct #self_ident { + #struct_fields + } + + impl #self_ident { + #[doc(hidden)] + #[allow(clippy::too_many_arguments)] + pub const fn new( + #constructor_fields + ) -> Self { + Self { + #shorthand_init_fields + } + } + } + + pub mod #self_mod { + #inner_definitions + } + } + } + TomlValue::TableMap { + keys, + first, + value_type, + } => { + let self_ident = key.to_type_ident(); + let self_mod = key.to_module_ident(); + let all_field_type = value_type.ty(first, Some(&self_mod)); + + let map_field_ident = MAP_FIELD.to_module_ident(); + let phf_map_type = quote! {::toml_const::PhfMap<&'static str, #all_field_type>}; + + // final map field type + let map_field = quote! { + #map_field_ident: &'static #phf_map_type + }; + + let constructor_fields = keys + .iter() + .map(|k| { + let field_ident = k.to_module_ident(); + quote! { + #field_ident: #all_field_type + } + }) + .collect::>(); + + let struct_fields = constructor_fields + .iter() + .map(|k| { + quote! {pub #k} + }) + .chain([map_field.clone()]) + .collect::>(); + + let constructor_fields = constructor_fields + .into_iter() + .chain([map_field]) + .collect::>(); + + let derives = derive_attrs + .iter() + .map(|attr| quote! { #attr }) + .collect::(); + + let shorthand_init_fields = keys + .iter() + .map(|k| k.to_module_ident().to_token_stream()) + .chain([map_field_ident.to_token_stream()]) + .collect::>(); + + let inner_definitions = value_type.definition(first, derive_attrs); + + quote! 
{ + #[derive(Clone, Copy, Debug)] + #derives + pub struct #self_ident { + #struct_fields + } + + impl #self_ident { + #[doc(hidden)] + #[allow(clippy::too_many_arguments)] + pub const fn new( + #constructor_fields + ) -> Self { + Self { + #shorthand_init_fields + } + } + + pub const fn map(&'static self) -> &'static #phf_map_type { + self.#map_field_ident + } + } + + pub mod #self_mod { + #inner_definitions + } + } + } + } + } +} + +fn date_time_struct_ident(date: bool, time: bool, offset: bool) -> syn::Ident { + match (date, time, offset) { + (_, _, true) => syn::Ident::new("OffsetDateTime", Span::call_site()), + (true, true, false) => syn::Ident::new("LocalDateTime", Span::call_site()), + (true, false, false) => syn::Ident::new("LocalDate", Span::call_site()), + (false, true, false) => syn::Ident::new("LocalTime", Span::call_site()), + (false, false, false) => { + unimplemented!("datetime cannot be constructed without any components") + } + } +} + +impl NormalizationError { + /// When receiving an error when performing some op on key+values, this function accumulates current key to the error. 
+ pub fn propagate(self, key: &str) -> NormalizationError { + match self { + // NormalizationError::KeyMismatch { + // path: mut tp, + // a_diff, + // b_diff, + // } => { + // tp.push(key.to_string()); + + // NormalizationError::KeyMismatch { + // path: tp, + // a_diff, + // b_diff, + // } + // } + NormalizationError::ValueMismatch { + mut path, + value_types, + } => { + path.push(key.to_string()); + NormalizationError::ValueMismatch { path, value_types } + } + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use std::str::FromStr; + + #[test] + fn test_parse_toml_toml_value() { + const NORMALIZE_FILE: &str = include_str!("../Cargo.toml"); + + let parsed = toml::Table::from_str(NORMALIZE_FILE).expect("must parse"); + let toml_val = TomlValue::from(parsed.clone()); + + println!("original: {:#?}", toml_val); + + let normalized = match toml_val.normalize() { + Ok(n) => n, + Err(e) => panic!("{}", e), + }; + println!("normalized: {:#?}", normalized); + + let mut og_value = toml::Value::Table(parsed.clone()); + normalized.normalize_toml(&mut og_value); + let norm_table = og_value.as_table().unwrap(); + + println!("norm table: {:#?}", norm_table); + + println!( + "definition: {}", + normalized.definition("TOP_LEVEL_TABLE", &[]) + ); + } + + #[test] + fn test_normalize_error_value_mismatch() { + let toml = r#" + [[array]] + key1 = "value1" + key2 = 42 + + [[array]] + key1 = "value2" + key2 = "invalid value" + "#; + + let parsed = toml::Table::from_str(toml).expect("must parse"); + let toml_val = TomlValue::from(parsed.clone()); + match toml_val.normalize() { + Ok(n) => { + panic!("Normalization should have failed, but succeeded: {:#?}", n); + } + Err(e) => match e { + NormalizationError::ValueMismatch { path, value_types } => { + assert!(path == ["key2".to_string(), "array".to_string()]); + assert!(matches!(value_types.0, TomlValue::Integer)); + assert!(matches!(value_types.1, TomlValue::String)); + } + }, + }; + + let toml = r#" + [[array]] + [[array.table]] + 
key2 = "false" + key1 = "value1" + [[array.table.inner]] + item = "name" + + [[array]] + [[array.table]] + key1 = "value1" + [[array.table.inner]] + item = false + "#; + + let parsed = toml::Table::from_str(toml).expect("must parse"); + let toml_val = TomlValue::from(parsed.clone()); + match toml_val.normalize() { + Ok(n) => { + panic!("Normalization should have failed, but succeeded: {:#?}", n); + } + Err(e) => match e { + NormalizationError::ValueMismatch { path, value_types } => { + assert!( + path == [ + "item".to_string(), + "inner".to_string(), + "table".to_string(), + "array".to_string() + ] + ); + assert!(matches!(value_types.0, TomlValue::String)); + assert!(matches!(value_types.1, TomlValue::Boolean)); + } + }, + }; + } + + #[test] + fn test_show_tablemap_normalize() { + let normalize_toml = include_str!("../../normalize.toml"); + let parsed = toml::Table::from_str(normalize_toml).expect("must parse"); + + let toml_val = TomlValue::from(parsed.clone()); + let normalized = toml_val.normalize().expect("must normalize"); + + let reduced = normalized.reduce(); + println!("reduced: {:#?}", reduced); + + // println!("normalized: {:#?}", normalized); + } +} diff --git a/anneal/vendor/toml_const_macros/src/parse.rs b/anneal/vendor/toml_const_macros/src/parse.rs new file mode 100644 index 0000000000..5bb415932a --- /dev/null +++ b/anneal/vendor/toml_const_macros/src/parse.rs @@ -0,0 +1,523 @@ +//! 
Custom input syntax for proc-macro inputs
+
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use proc_macro2 as pm2;
+use proc_macro2::{Delimiter, Group};
+use quote::{quote, ToTokens, TokenStreamExt};
+use syn::Ident;
+use syn::{braced, parse::Parse, punctuated::Punctuated, LitStr};
+
+#[derive(Clone)]
+pub struct MultipleMacroInput(pub Vec<MacroInput>);
+
+/// Input to [toml_const!](crate::toml_const)
+#[derive(Clone)]
+pub struct MacroInput {
+    pub attrs: Vec<syn::Attribute>,
+
+    // pub destructure_datetime: bool,
+    /// Whether the static variable is public
+    pub is_pub: bool,
+
+    /// `false` if static, `true` if const
+    pub static_const: bool,
+
+    /// Static item identifier
+    pub item_ident: Ident,
+
+    /// `final` marks if the input file can be substituted
+    pub is_final: bool,
+
+    /// Path to the template file, mandatory
+    pub path: LitStr,
+
+    /// Any optional paths to substitute over the first path
+    pub sub_paths: Option<Vec<UsePath>>,
+}
+
+/// A litstring path, with an optional use override keyword
+#[derive(Clone)]
+pub struct UsePath {
+    pub path: LitStr,
+    /// Manual use override in macro input
+    pub is_used: bool,
+}
+
+impl Parse for MultipleMacroInput {
+    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
+        let mut macro_inputs = Vec::new();
+        while !input.is_empty() {
+            let macro_input: MacroInput = input.parse()?;
+            macro_inputs.push(macro_input);
+        }
+
+        Ok(Self(macro_inputs))
+    }
+}
+
+impl Parse for MacroInput {
+    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
+        // parse docstring and datetime attr
+        let attrs = input.call(syn::Attribute::parse_outer).unwrap_or_default();
+
+        let is_pub: bool = {
+            let lookahead = input.lookahead1();
+            match lookahead.peek(syn::Token![pub]) {
+                true => {
+                    let _: syn::Token![pub] = input.parse()?;
+                    true
+                }
+                false => false,
+            }
+        };
+
+        let static_const = {
+            let lookahead = input.lookahead1();
+
+            if lookahead.peek(syn::Token![const]) {
+                let _: syn::Token![const] = input.parse()?;
+                true
+            } else if
lookahead.peek(syn::Token![static]) {
+                let _: syn::Token![static] = input.parse()?;
+                false
+            } else {
+                return Err(syn::Error::new(
+                    input.span(),
+                    "expected `static` or `const`",
+                ));
+            }
+        };
+
+        let item_ident: syn::Ident = input.parse()?;
+        let _: syn::Token![:] = input.parse()?;
+
+        let is_final = {
+            let lookahead = input.lookahead1();
+
+            match lookahead.peek(syn::Token![final]) {
+                true => {
+                    let _: syn::Token![final] = input.parse()?;
+                    true
+                }
+                false => false,
+            }
+        };
+
+        let template: LitStr = input.parse()?;
+
+        let lookahead = input.lookahead1();
+        let sub_paths = match lookahead.peek(syn::Token![;]) {
+            true => {
+                let _: syn::Token![;] = input.parse()?;
+                None
+            }
+            false => match lookahead.peek(syn::token::Brace) {
+                true => {
+                    let content;
+                    braced!(content in input);
+
+                    let lit_str_vec =
+                        Punctuated::<UsePath, syn::Token![;]>::parse_terminated(&content)?;
+
+                    let res = lit_str_vec.into_iter().collect::<Vec<_>>();
+                    Some(res)
+                }
+                false => return Err(syn::Error::new(input.span(), "expected {} or ;")),
+            },
+        };
+
+        match is_final && sub_paths.is_some() {
+            true => Err(syn::Error::new(
+                template.span(),
+                "final inputs cannot accept substitutions",
+            )),
+            false => Ok(Self {
+                attrs,
+                is_pub,
+                static_const,
+                item_ident,
+                is_final,
+                path: template,
+                sub_paths,
+            }),
+        }
+    }
+}
+
+impl ToTokens for MacroInput {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        for attr in &self.attrs {
+            attr.to_tokens(tokens);
+        }
+
+        if self.is_pub {
+            quote! {pub}.to_tokens(tokens);
+        }
+
+        match self.static_const {
+            true => quote! {const}.to_tokens(tokens),
+            false => quote! {static}.to_tokens(tokens),
+        }
+
+        self.item_ident.to_tokens(tokens);
+        quote! {:}.to_tokens(tokens);
+
+        if self.is_final {
+            quote! {final}.to_tokens(tokens);
+        }
+
+        self.path.to_tokens(tokens);
+
+        match &self.sub_paths {
+            Some(sub) => {
+                let subs = sub.iter().collect::<Vec<_>>();
+
+                let subs = match subs.len() {
+                    0 => quote! {#(#subs)*},
+                    _ => quote! {#(#subs;)*},
+                };
+
+                tokens.append(Group::new(Delimiter::Brace, subs.to_token_stream()));
+            }
+            None => quote! {;}.to_tokens(tokens),
+        }
+    }
+}
+
+impl Parse for UsePath {
+    fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
+        let is_used = {
+            let lookahead = input.lookahead1();
+            match lookahead.peek(syn::Token![use]) {
+                true => {
+                    let _: syn::Token![use] = input.parse()?;
+                    true
+                }
+                false => false,
+            }
+        };
+
+        let path: LitStr = input.parse()?;
+
+        Ok(Self { path, is_used })
+    }
+}
+
+impl ToTokens for UsePath {
+    fn to_tokens(&self, tokens: &mut proc_macro2::TokenStream) {
+        if self.is_used {
+            quote! {use}.to_tokens(tokens);
+        }
+
+        self.path.to_tokens(tokens);
+    }
+}
+
+impl MacroInput {
+    /// Return one or more const definitions to an underscore expression (`_`).
+    /// If the path does not point to a file, it will not be included.
+    ///
+    /// These are calls to [include_str!] containing absolute paths.
+    pub fn to_const_defs(&self, base_path: &Path) -> pm2::TokenStream {
+        let mut template_path = base_path.to_path_buf();
+        template_path.push(PathBuf::from(&self.path.value()));
+        let template_path = pathbuf_to_str(&template_path);
+
+        let mut const_defs = vec![quote! {const _: &'static str = include_str!(#template_path);}];
+
+        if let Some(sp) = &self.sub_paths {
+            let additions = sp.iter().map(|sub_path| {
+                let mut abs_sub_path = base_path.to_path_buf();
+                abs_sub_path.push(PathBuf::from(sub_path.path.value()));
+
+                match abs_sub_path.exists() {
+                    true => match abs_sub_path.is_file() {
+                        true => {
+                            let sub_path = pathbuf_to_str(&abs_sub_path);
+
+                            quote! {
+                                const _: &'static str = include_str!(#sub_path);
+                            }
+                        }
+                        false => syn::Error::new(
+                            sub_path.path.span(),
+                            format!("path {} is not a file", abs_sub_path.display()),
+                        )
+                        .to_compile_error()
+                        .to_token_stream(),
+                    },
+                    false => quote!
{},
+                }
+            });
+
+            const_defs.extend(additions);
+        }
+
+        const_defs.into_iter().collect::<pm2::TokenStream>()
+    }
+
+    /// Create a clone of `self` with all inner paths turned to absolute paths.
+    ///
+    /// The input base path must be absolute.
+    pub fn to_abs_path(&self, base_path: &Path) -> Self {
+        let mut abs_base_path = base_path.to_path_buf();
+
+        abs_base_path.push(PathBuf::from(self.path.value()));
+        let abs_base_path = LitStr::new(pathbuf_to_str(&abs_base_path), self.path.span());
+
+        let sub_paths = self.sub_paths.clone();
+        let sub_paths = sub_paths.map(|sp| {
+            sp.into_iter()
+                .map(|p| {
+                    let mut abs_sub_path = base_path.to_path_buf();
+                    abs_sub_path.push(PathBuf::from(p.path.value()));
+                    let new_path = LitStr::new(pathbuf_to_str(&abs_sub_path), p.path.span());
+
+                    UsePath {
+                        path: new_path,
+                        ..p
+                    }
+                })
+                .collect::<Vec<_>>()
+        });
+
+        Self {
+            path: abs_base_path,
+            sub_paths,
+            ..self.clone()
+        }
+    }
+
+    /// With the data in `self`, read in the template file and apply any substitutions
+    pub fn generate_toml_table(&self) -> Result<toml::Table, pm2::TokenStream> {
+        let template_toml = read_litstr_to_toml(&self.path)?.ok_or(
+            syn::Error::new(
+                self.path.span(),
+                format!("unable to read template file: {}", self.path.value()),
+            )
+            .to_compile_error(),
+        )?;
+
+        let substitute_file = match &self.sub_paths {
+            Some(paths) => {
+                let mut res_sub = None;
+
+                for sub_path in paths.iter() {
+                    let sub_toml = read_litstr_to_toml(&sub_path.path)?;
+                    let sub_toml = match sub_toml {
+                        Some(st) => st,
+                        None => continue,
+                    };
+
+                    match (sub_path.is_used, sub_toml.contains_key("use")) {
+                        // macro-level override
+                        (true, _) => {
+                            res_sub = Some(sub_toml);
+                            break;
+                        }
+                        // toml-level override
+                        (false, true) => {
+                            let use_val = sub_toml.get("use").expect("already checked");
+                            if let toml::Value::Boolean(true) = use_val {
+                                res_sub = Some(sub_toml);
+                                break;
+                            }
+                        }
+                        (false, false) => continue,
+                    }
+                }
+
+                res_sub
+            }
+            None => None,
+        };
+
+        let merged = match substitute_file {
+            Some(sf) =>
merge_tables(&template_toml, &sf), + None => template_toml, + }; + + Ok(merged) + } + + pub fn doc_attrs(&self) -> Vec<&syn::Attribute> { + self.attrs + .iter() + .filter(|a| match a.meta.require_name_value() { + Ok(nv) => nv.path.is_ident("doc"), + Err(_) => false, + }) + .collect() + } +} + +/// Merge a toml template with a changes table. Changes will set/overwrite values in the template. +/// If both values are tables, merge recursively. If both are arrays, merge arrays element-wise. +/// Otherwise, the value from `changes` overrides the value from `template`. +fn merge_tables(template: &toml::Table, changes: &toml::Table) -> toml::Table { + let mut merged_table = template.clone(); + + for (key, value) in changes.iter() { + match (merged_table.get(key), value) { + (Some(toml::Value::Table(orig)), toml::Value::Table(chg)) => { + merged_table.insert(key.clone(), toml::Value::Table(merge_tables(orig, chg))); + } + (Some(toml::Value::Array(orig)), toml::Value::Array(chg)) => { + let mut merged_array = orig.clone(); + let min_len = merged_array.len().min(chg.len()); + // Overwrite elements in orig with those in chg, element-wise + for i in 0..min_len { + merged_array[i] = match (&merged_array[i], &chg[i]) { + (toml::Value::Table(orig_t), toml::Value::Table(chg_t)) => { + toml::Value::Table(merge_tables(orig_t, chg_t)) + } + (toml::Value::Array(orig_a), toml::Value::Array(chg_a)) => { + // Recursively merge arrays + let merged = merge_arrays(orig_a, chg_a); + toml::Value::Array(merged) + } + (_, chg_v) => chg_v.clone(), + }; + } + // If chg is longer, append the extra elements + if chg.len() > merged_array.len() { + merged_array.extend_from_slice(&chg[merged_array.len()..]); + } + merged_table.insert(key.clone(), toml::Value::Array(merged_array)); + } + // Otherwise, just override + _ => { + merged_table.insert(key.clone(), value.clone()); + } + } + } + + merged_table +} + +/// Merge two TOML arrays element-wise, recursively merging tables/arrays, otherwise replacing. 
+fn merge_arrays(orig: &[toml::Value], chg: &[toml::Value]) -> Vec<toml::Value> {
+    let mut merged = orig.to_vec();
+    let min_len = orig.len().min(chg.len());
+    for i in 0..min_len {
+        merged[i] = match (&orig[i], &chg[i]) {
+            (toml::Value::Table(orig_t), toml::Value::Table(chg_t)) => {
+                toml::Value::Table(merge_tables(orig_t, chg_t))
+            }
+            (toml::Value::Array(orig_a), toml::Value::Array(chg_a)) => {
+                toml::Value::Array(merge_arrays(orig_a, chg_a))
+            }
+            (_, chg_v) => chg_v.clone(),
+        };
+    }
+    if chg.len() > orig.len() {
+        merged.extend_from_slice(&chg[orig.len()..]);
+    }
+    merged
+}
+
+fn pathbuf_to_str(input: &Path) -> &str {
+    input.to_str().expect("failed to convert path to str")
+}
+
+/// Read in a litstr path to a toml file, return an error tokenstream if it fails.
+fn read_litstr_to_toml(litstr: &LitStr) -> Result<Option<toml::Table>, pm2::TokenStream> {
+    let path = PathBuf::from(litstr.value());
+
+    // we allow paths that do not resolve to a file
+    if !path.exists() {
+        return Ok(None);
+    }
+
+    let file = match fs::read_to_string(path) {
+        Ok(tf) => tf,
+        Err(e) => {
+            return Err(syn::Error::new(litstr.span(), e.to_string())
+                .to_compile_error()
+                .to_token_stream());
+        }
+    };
+
+    let template_toml: toml::Table = match toml::from_str(&file) {
+        Ok(tt) => tt,
+        Err(e) => {
+            return Err(syn::Error::new(litstr.span(), e.to_string())
+                .to_compile_error()
+                .to_token_stream());
+        }
+    };
+
+    Ok(Some(template_toml))
+}
+
+#[cfg(test)]
+mod tests {
+
+    use super::*;
+
+    /// Test parsing of some syntax, as well as checking that the re-generated token stream
+    /// is the same as the input.
+    macro_rules! test_parse {
+        ($data_type: ident: $test_fn: ident {$($tokens: tt)*}) => {
+            #[test]
+            fn $test_fn() {
+                let tokens = quote::quote!
{ + $($tokens)* + }; + let input: $data_type = syn::parse2(tokens.clone()).expect("failed to parse input from tokenstream"); + + let output = input.to_token_stream(); + assert_eq!(tokens.to_string(), output.to_string(), "generated tokenstream and original tokenstream do not match"); + } + }; + } + + test_parse!(MacroInput: test_parse_template_new { + const X: "some_file_path.toml"; + }); + + test_parse!(MacroInput: test_parse_template_empty_brace { + const X: "some_file_path.toml" {} + }); + + test_parse!(MacroInput: test_parse_template_and_subs { + pub const X: "some_file_path.toml" { + "some_sub_file_path.toml"; + "some_other_sub_file_path.toml"; + } + }); + + test_parse!(MacroInput: test_parse_public_static { + pub static X: "some_file_path.toml" { + "some_sub_file_path.toml"; + "some_other_sub_file_path.toml"; + } + }); + + test_parse!(MacroInput: test_parse_template_use_subs { + pub const X: "some_file_path.toml" { + use "some_sub_file_path.toml"; + "some_other_sub_file_path.toml"; + } + }); + + test_parse!(MacroInput: test_parse_template_final { + pub const X: final "some_file_path.toml"; + }); + + test_parse!(MacroInput: test_parse_template_with_attributes { + /// Docstring = #[doc = "Docstring"] + /// Another docstring line + pub const X: final "some_file_path.toml"; + }); + + test_parse!(UsePath: test_parse_use_path_used { + use "some_file_path.toml" + }); + + test_parse!(UsePath: test_parse_use_path_unused { + "some_file_path.toml" + }); +}