cargo vendor

This commit is contained in:
Сергей Конев 2024-10-31 18:29:13 +03:00
parent 98cad74cd7
commit a0ff6a2c2e
177 changed files with 6258 additions and 82540 deletions

155
pve-rs/Cargo.lock generated
View File

@ -61,14 +61,14 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.91"
version = "1.0.93"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"
checksum = "4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"
[[package]]
name = "apt-pkg-native"
version = "0.3.2"
source = "git+https://gitea.basealt.ru/konevsa/apt-pkg-native#510a48ecb3a4904eccc6bc58f1001c8e21cbd6fa"
source = "git+https://gitea.basealt.ru/konevsa/apt-pkg-native#3f5a452133d633718fc81812c5bb0cf7474197ad"
dependencies = [
"cc",
"lazy_static",
@ -155,9 +155,9 @@ checksum = "9ac0150caa2ae65ca5bd83f25c7de183dea78d4d366469f148435e2acfbad0da"
[[package]]
name = "cc"
version = "1.1.31"
version = "1.1.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f"
checksum = "baee610e9452a8f6f0a1b6194ec09ff9e2d85dea54432acdae41aa0761c95d70"
dependencies = [
"shlex",
]
@ -310,7 +310,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -686,7 +686,7 @@ checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -697,24 +697,23 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39"
[[package]]
name = "idna"
version = "0.5.0"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"
dependencies = [
"unicode-bidi",
"unicode-normalization",
"idna_adapter",
"smallvec",
"utf8_iter",
]
[[package]]
name = "idna"
version = "1.0.2"
name = "idna_adapter"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd69211b9b519e98303c015e21a007e293db403b6c85b9b124e133d25e242cdd"
checksum = "daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71"
dependencies = [
"icu_normalizer",
"icu_properties",
"smallvec",
"utf8_iter",
]
[[package]]
@ -761,7 +760,7 @@ dependencies = [
"futures-util",
"hostname",
"httpdate",
"idna 1.0.2",
"idna",
"mime",
"native-tls",
"nom",
@ -1006,7 +1005,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -1067,7 +1066,7 @@ source = "git+git://git.proxmox.com/git/perlmod.git#3ddf67eb0412e240b88b69192ba9
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -1101,7 +1100,7 @@ dependencies = [
"pest_meta",
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -1163,18 +1162,18 @@ dependencies = [
[[package]]
name = "proxmox-api-macro"
version = "1.2.1"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
name = "proxmox-apt"
version = "0.11.3"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"apt-pkg-native",
@ -1195,7 +1194,7 @@ dependencies = [
[[package]]
name = "proxmox-apt-api-types"
version = "1.0.1"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"proxmox-config-digest",
"proxmox-schema",
@ -1206,7 +1205,7 @@ dependencies = [
[[package]]
name = "proxmox-config-digest"
version = "0.1.0"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"hex",
@ -1219,7 +1218,7 @@ dependencies = [
[[package]]
name = "proxmox-http"
version = "0.9.3"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"base64 0.13.1",
@ -1233,7 +1232,7 @@ dependencies = [
[[package]]
name = "proxmox-http-error"
version = "0.1.0"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"http",
@ -1243,7 +1242,7 @@ dependencies = [
[[package]]
name = "proxmox-human-byte"
version = "0.1.3"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"proxmox-schema",
@ -1254,7 +1253,7 @@ dependencies = [
[[package]]
name = "proxmox-io"
version = "1.1.0"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"endian_trait",
]
@ -1262,12 +1261,12 @@ dependencies = [
[[package]]
name = "proxmox-lang"
version = "1.4.0"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
[[package]]
name = "proxmox-log"
version = "0.2.5"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"nix",
@ -1283,7 +1282,7 @@ dependencies = [
[[package]]
name = "proxmox-notify"
version = "0.4.2"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"base64 0.13.1",
@ -1309,7 +1308,7 @@ dependencies = [
[[package]]
name = "proxmox-openid"
version = "0.10.3"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"http",
@ -1337,7 +1336,7 @@ dependencies = [
[[package]]
name = "proxmox-schema"
version = "3.2.0"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"const_format",
@ -1351,7 +1350,7 @@ dependencies = [
[[package]]
name = "proxmox-section-config"
version = "2.1.1"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"hex",
@ -1364,7 +1363,7 @@ dependencies = [
[[package]]
name = "proxmox-serde"
version = "0.1.2"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"base64 0.13.1",
@ -1376,7 +1375,7 @@ dependencies = [
[[package]]
name = "proxmox-shared-cache"
version = "0.1.0"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"nix",
@ -1388,7 +1387,7 @@ dependencies = [
[[package]]
name = "proxmox-subscription"
version = "0.4.6"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"base64 0.13.1",
@ -1406,7 +1405,7 @@ dependencies = [
[[package]]
name = "proxmox-sys"
version = "0.6.4"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"libc",
@ -1422,7 +1421,7 @@ dependencies = [
[[package]]
name = "proxmox-tfa"
version = "5.0.1"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"base32",
@ -1444,7 +1443,7 @@ dependencies = [
[[package]]
name = "proxmox-time"
version = "2.0.2"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"anyhow",
"bitflags 2.6.0",
@ -1456,7 +1455,7 @@ dependencies = [
[[package]]
name = "proxmox-uuid"
version = "1.0.3"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#b945c8f6b7c5c9b6ea02f9305bae85c4364572e7"
source = "git+https://gitea.basealt.ru/konevsa/proxmox.newest.git#5a02867d44c9e6e1fef12ce6650b6a486a64a7fb"
dependencies = [
"js-sys",
"serde",
@ -1645,9 +1644,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
[[package]]
name = "rustix"
version = "0.38.38"
version = "0.38.39"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aa260229e6538e52293eeb577aabd09945a09d6d9cc0fc550ed7529056c2e32a"
checksum = "375116bee2be9ed569afe2154ea6a99dfdffd257f533f187498c2a8f5feaf4ee"
dependencies = [
"bitflags 2.6.0",
"errno",
@ -1794,7 +1793,7 @@ checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -1974,9 +1973,9 @@ dependencies = [
[[package]]
name = "syn"
version = "2.0.85"
version = "2.0.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5023162dfcd14ef8f32034d8bcd4cc5ddc61ef7a247c024a33e24e1f24d21b56"
checksum = "25aa4ce346d03a6dcd68dd8b4010bcb74e54e62c90c573f394c46eae99aba32d"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
@ -1991,7 +1990,7 @@ checksum = "c8af7666ab7b6390ab78131fb5b0fce11d6b7a6951602017c35fa82800708971"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -2020,22 +2019,22 @@ dependencies = [
[[package]]
name = "thiserror"
version = "1.0.65"
version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5d11abd9594d9b38965ef50805c5e469ca9cc6f197f883f717e0269a3057b3d5"
checksum = "02dd99dc800bbb97186339685293e1cc5d9df1f8fae2d0aecd9ff1c77efea892"
dependencies = [
"thiserror-impl",
]
[[package]]
name = "thiserror-impl"
version = "1.0.65"
version = "1.0.68"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602"
checksum = "a7c61ec9a6f64d2793d8a45faba21efbe3ced62a886d44c36a009b2b519b4c7e"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -2058,21 +2057,6 @@ dependencies = [
"zerovec",
]
[[package]]
name = "tinyvec"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938"
dependencies = [
"tinyvec_macros",
]
[[package]]
name = "tinyvec_macros"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.41.0"
@ -2106,7 +2090,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -2167,12 +2151,6 @@ version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971"
[[package]]
name = "unicode-bidi"
version = "0.3.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893"
[[package]]
name = "unicode-ident"
version = "1.0.13"
@ -2185,15 +2163,6 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f"
[[package]]
name = "unicode-normalization"
version = "0.1.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956"
dependencies = [
"tinyvec",
]
[[package]]
name = "unicode-segmentation"
version = "1.12.0"
@ -2250,12 +2219,12 @@ dependencies = [
[[package]]
name = "url"
version = "2.5.2"
version = "2.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c"
checksum = "8d157f1b96d14500ffdc1f10ba712e780825526c03d9a49b4d0324b0d9113ada"
dependencies = [
"form_urlencoded",
"idna 0.5.0",
"idna",
"percent-encoding",
"serde",
]
@ -2318,7 +2287,7 @@ dependencies = [
"once_cell",
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
"wasm-bindgen-shared",
]
@ -2340,7 +2309,7 @@ checksum = "26c6ab57572f7a24a4985830b120de1594465e5d500f24afe89e16b4e833ef68"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
@ -2544,7 +2513,7 @@ checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
"synstructure",
]
@ -2566,7 +2535,7 @@ checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]
[[package]]
@ -2586,7 +2555,7 @@ checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
"synstructure",
]
@ -2615,5 +2584,5 @@ checksum = "6eafa6dfb17584ea3e2bd6e76e0cc15ad7af12b09abdd1ca55961bed9b1063c6"
dependencies = [
"proc-macro2 1.0.89",
"quote 1.0.37",
"syn 2.0.85",
"syn 2.0.87",
]

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"b9c67d404778b8bc5bbd695cd3afb809a9c7cc4655be31efa5d809c1b0e1a5a8","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"136d2b44cc4060192516f18e43ec3eafb65693c1819a80a867e7a00c60a45ee6","build.rs":"27bcb3380f8b9b52a12d1aedad324858a94de974f17efa86ff93aa7e556b3798","build/probe.rs":"ee0a4518493c0b3cca121ed2e937b1779eb7e8313a5c4d5fc5aea28ff015366b","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/backtrace.rs":"bbaa0e0e228475c9c9532786e305cf04f53729f386c48adb1d93bb8ce07f37ad","src/chain.rs":"85af447405f075633fab186b7f1c94d7f33a36474f239c50a961b2d6197d5426","src/context.rs":"1be432c32752778041e8acf0e7d98d4f6291ce53fd7df5bbb0167824bbea57f7","src/ensure.rs":"9763f418b5397764549866c111ec6db3a7bdc4c30ad95c3bbfc56c5434ea8c09","src/error.rs":"274c175ec92f4aa8bf479d39cf3023d1ead9865a242a0a63ad3998aea57219a6","src/fmt.rs":"adf4be906b29900153bfb4b767a6049d58697dc3bcce7dfbb85ca773f5de5b33","src/kind.rs":"d8cc91e73653049ca0b5593f36aee8632fcc85847b36005b90ecd9a6f0de13cb","src/lib.rs":"0534dd480fff1f6fa19e3e202dcd981047597eeb693406d0a7bc6f795cb4faf0","src/macros.rs":"875797636fde708dcb9c82e0cb3107cf38334086274aaada267fb5bfd60547a9","src/ptr.rs":"4cb31d2f815b178daf951bfb94a1930383e056c0ca68d494603f45d8eea35d50","src/wrapper.rs":"d4e45caee3c2d861d4609a8141310d5c901af59a57d5f0a0de30251347dbd23c","tests/common/mod.rs":"f9088c2d7afafa64ff730b629272045b776bfafc2f5957508242da630635f2e1","tests/compiletest.rs":"4e381aa8ca3eabb7ac14d1e0c3700b3223e47640547a6988cfa13ad68255f60f","tests/drop/mod.rs":"08c3e553c1cc0d2dbd936fc45f4b5b1105057186affd6865e8d261e05f0f0646","tests/test_autotrait.rs":"ba9bc18416115cb48fd08675a3e7fc89584de7926dad6b2be6645dc13d5931df","tests/test_backtrace.rs":"60afdd7ee5850dc22625ff486fe41c47fd322db874a93c4871ddfed2bf603930","tests/test_boxed.rs":"6b26db0e2eb72afe9af7352ea820837aab9
0f8d486294616dd5dc34c1b94038c","tests/test_chain.rs":"3a8a8d7569913bd98c0e27c69d0bda35101e7fde7c056ed57cdd8ed018e4cbcb","tests/test_context.rs":"8409c53b328562c11e822bd6c3cd17e0d4d50b9bbb8fc3617333fd77303a6a33","tests/test_convert.rs":"7e7a8b4772a427a911014ac4d1083f9519000e786177f898808980dd9bdfde61","tests/test_downcast.rs":"797e69a72d125758c4c4897e5dc776d549d52cc9a6a633e0a33193f588a62b88","tests/test_ensure.rs":"4014ead6596793f5eecd55cbaafa49286b75cee7b7092a8b9b8286fcd813a6da","tests/test_ffi.rs":"d0cb4c1d6d9154090982dee72ae3ebe05a5981f976058c3250f1c9da5a45edef","tests/test_fmt.rs":"0e49b48f08e4faaf03e2f202e1efc5250018876c4e1b01b8379d7a38ae8df870","tests/test_macros.rs":"68673942662a43bceee62aaed69c25d7ddbc55e25d62d528e13033c3e2e756cd","tests/test_repr.rs":"034dee888abd08741e11ac2e95ef4fcb2ab3943d0a76e8e976db404658e1a252","tests/test_source.rs":"b80723cf635a4f8c4df21891b34bfab9ed2b2aa407e7a2f826d24e334cd5f88e","tests/ui/chained-comparison.rs":"6504b03d95b5acc232a7f4defc9f343b2be6733bf475fa0992e8e6545b912bd4","tests/ui/chained-comparison.stderr":"7f1d0a8c251b0ede2d30b3087ec157fc660945c97a642c4a5acf5a14ec58de34","tests/ui/empty-ensure.rs":"ab5bf37c846a0d689f26ce9257a27228411ed64154f9c950f1602d88a355d94b","tests/ui/empty-ensure.stderr":"315782f5f4246290fe190e3767b22c3dcaffaabc19c5ace0373537d53e765278","tests/ui/ensure-nonbool.rs":"7e57cb93fbcd82959b36586ed6bd2ad978b051fe5facd5274651fde6b1600905","tests/ui/ensure-nonbool.stderr":"0b4d1611e3bb65081bf38c1e49b1f12e5096738f276608661016e68f1fe13f7c","tests/ui/must-use.rs":"fb59860b43f673bf4a430a6036ba463e95028844d8dd4243cfe5ebc7f2be582f","tests/ui/must-use.stderr":"c2848c5f254b4c061eea6714d9baf709924aba06619eaf2a8b3aee1266b75f9e","tests/ui/no-impl.rs":"fab6cbf2f6ea510b86f567dfb3b7c31250a9fd71ae5d110dbb9188be569ec593","tests/ui/no-impl.stderr":"0d8ed712d25de898eae18cfdffc575a47f4d5596346058cf6cd50d016c4f8ce8","tests/ui/temporary-value.rs":"4dcc96271b2403e6372cf4cfc813445e5ce4365fc6e156b6bc38274098499a70","tests/ui/temporary
-value.stderr":"171f6c1c962503855480696e5d39e68946ec2a027b61a6f36ca1ad1b40265c5d","tests/ui/wrong-interpolation.rs":"9c44d4674c2dccd27b9dedd03341346ec02d993b41793ee89b5755202e7e367e","tests/ui/wrong-interpolation.stderr":"301e60e2eb9401782c7dc0b3580613a4cb2aafd4cc8065734a630a62e1161aa5"},"package":"c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"}
{"files":{"Cargo.toml":"bb6771224d2fb910cecd79173e851377e4cbc5d375e5908c627e7657a78ff2b3","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"136d2b44cc4060192516f18e43ec3eafb65693c1819a80a867e7a00c60a45ee6","build.rs":"27bcb3380f8b9b52a12d1aedad324858a94de974f17efa86ff93aa7e556b3798","build/probe.rs":"ee0a4518493c0b3cca121ed2e937b1779eb7e8313a5c4d5fc5aea28ff015366b","rust-toolchain.toml":"6bbb61302978c736b2da03e4fb40e3beab908f85d533ab46fd541e637b5f3e0f","src/backtrace.rs":"bbaa0e0e228475c9c9532786e305cf04f53729f386c48adb1d93bb8ce07f37ad","src/chain.rs":"85af447405f075633fab186b7f1c94d7f33a36474f239c50a961b2d6197d5426","src/context.rs":"1be432c32752778041e8acf0e7d98d4f6291ce53fd7df5bbb0167824bbea57f7","src/ensure.rs":"709524741c8f9365e01cb36fc2349a8724513de02cf9e253809a93af8d6a7b6e","src/error.rs":"274c175ec92f4aa8bf479d39cf3023d1ead9865a242a0a63ad3998aea57219a6","src/fmt.rs":"adf4be906b29900153bfb4b767a6049d58697dc3bcce7dfbb85ca773f5de5b33","src/kind.rs":"d8cc91e73653049ca0b5593f36aee8632fcc85847b36005b90ecd9a6f0de13cb","src/lib.rs":"765ceb385be268343353be24483f93b5af6db1bd76b82bca7335244b2fdaa550","src/macros.rs":"875797636fde708dcb9c82e0cb3107cf38334086274aaada267fb5bfd60547a9","src/ptr.rs":"4cb31d2f815b178daf951bfb94a1930383e056c0ca68d494603f45d8eea35d50","src/wrapper.rs":"d4e45caee3c2d861d4609a8141310d5c901af59a57d5f0a0de30251347dbd23c","tests/common/mod.rs":"f9088c2d7afafa64ff730b629272045b776bfafc2f5957508242da630635f2e1","tests/compiletest.rs":"4e381aa8ca3eabb7ac14d1e0c3700b3223e47640547a6988cfa13ad68255f60f","tests/drop/mod.rs":"08c3e553c1cc0d2dbd936fc45f4b5b1105057186affd6865e8d261e05f0f0646","tests/test_autotrait.rs":"ba9bc18416115cb48fd08675a3e7fc89584de7926dad6b2be6645dc13d5931df","tests/test_backtrace.rs":"60afdd7ee5850dc22625ff486fe41c47fd322db874a93c4871ddfed2bf603930","tests/test_boxed.rs":"6b26db0e2eb72afe9af7352ea820837aab9
0f8d486294616dd5dc34c1b94038c","tests/test_chain.rs":"3a8a8d7569913bd98c0e27c69d0bda35101e7fde7c056ed57cdd8ed018e4cbcb","tests/test_context.rs":"8409c53b328562c11e822bd6c3cd17e0d4d50b9bbb8fc3617333fd77303a6a33","tests/test_convert.rs":"7e7a8b4772a427a911014ac4d1083f9519000e786177f898808980dd9bdfde61","tests/test_downcast.rs":"797e69a72d125758c4c4897e5dc776d549d52cc9a6a633e0a33193f588a62b88","tests/test_ensure.rs":"5ac28daf6889e957134309c0a0fbdec89b6bd974596e2205fbc4e8c1bb02ee5b","tests/test_ffi.rs":"d0cb4c1d6d9154090982dee72ae3ebe05a5981f976058c3250f1c9da5a45edef","tests/test_fmt.rs":"0e49b48f08e4faaf03e2f202e1efc5250018876c4e1b01b8379d7a38ae8df870","tests/test_macros.rs":"68673942662a43bceee62aaed69c25d7ddbc55e25d62d528e13033c3e2e756cd","tests/test_repr.rs":"034dee888abd08741e11ac2e95ef4fcb2ab3943d0a76e8e976db404658e1a252","tests/test_source.rs":"b80723cf635a4f8c4df21891b34bfab9ed2b2aa407e7a2f826d24e334cd5f88e","tests/ui/chained-comparison.rs":"6504b03d95b5acc232a7f4defc9f343b2be6733bf475fa0992e8e6545b912bd4","tests/ui/chained-comparison.stderr":"7f1d0a8c251b0ede2d30b3087ec157fc660945c97a642c4a5acf5a14ec58de34","tests/ui/empty-ensure.rs":"ab5bf37c846a0d689f26ce9257a27228411ed64154f9c950f1602d88a355d94b","tests/ui/empty-ensure.stderr":"315782f5f4246290fe190e3767b22c3dcaffaabc19c5ace0373537d53e765278","tests/ui/ensure-nonbool.rs":"7e57cb93fbcd82959b36586ed6bd2ad978b051fe5facd5274651fde6b1600905","tests/ui/ensure-nonbool.stderr":"0b4d1611e3bb65081bf38c1e49b1f12e5096738f276608661016e68f1fe13f7c","tests/ui/must-use.rs":"fb59860b43f673bf4a430a6036ba463e95028844d8dd4243cfe5ebc7f2be582f","tests/ui/must-use.stderr":"c2848c5f254b4c061eea6714d9baf709924aba06619eaf2a8b3aee1266b75f9e","tests/ui/no-impl.rs":"fab6cbf2f6ea510b86f567dfb3b7c31250a9fd71ae5d110dbb9188be569ec593","tests/ui/no-impl.stderr":"0d8ed712d25de898eae18cfdffc575a47f4d5596346058cf6cd50d016c4f8ce8","tests/ui/temporary-value.rs":"4dcc96271b2403e6372cf4cfc813445e5ce4365fc6e156b6bc38274098499a70","tests/ui/temporary
-value.stderr":"171f6c1c962503855480696e5d39e68946ec2a027b61a6f36ca1ad1b40265c5d","tests/ui/wrong-interpolation.rs":"9c44d4674c2dccd27b9dedd03341346ec02d993b41793ee89b5755202e7e367e","tests/ui/wrong-interpolation.stderr":"301e60e2eb9401782c7dc0b3580613a4cb2aafd4cc8065734a630a62e1161aa5"},"package":"4c95c10ba0b00a02636238b814946408b1322d5ac4760326e6fb8ec956d85775"}

View File

@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.39"
name = "anyhow"
version = "1.0.91"
version = "1.0.93"
authors = ["David Tolnay <dtolnay@gmail.com>"]
build = "build.rs"
autolib = false
@ -116,7 +116,7 @@ version = "2.0"
features = ["full"]
[dev-dependencies.thiserror]
version = "1.0.45"
version = "2"
[dev-dependencies.trybuild]
version = "1.0.66"

View File

@ -155,6 +155,14 @@ macro_rules! __parse_ensure {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $and $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $raw:tt $mut:tt $($dup:tt)*) &raw mut $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $and $raw $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $raw:tt $const:tt $($dup:tt)*) &raw const $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $and $raw $const) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($and:tt $($dup:tt)*) & $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $and) $($parse)*} ($($rest)*) $($rest)*)
};
@ -163,6 +171,14 @@ macro_rules! __parse_ensure {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $andand $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($andand:tt $raw:tt $mut:tt $($dup:tt)*) &&raw mut $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $andand $raw $mut) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($andand:tt $raw:tt $const:tt $($dup:tt)*) &&raw const $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $andand $raw $const) $($parse)*} ($($rest)*) $($rest)*)
};
(0 $stack:tt $bail:tt (~$($fuel:tt)*) {($($buf:tt)*) $($parse:tt)*} ($andand:tt $($dup:tt)*) && $($rest:tt)*) => {
$crate::__parse_ensure!(0 $stack $bail ($($fuel)*) {($($buf)* $andand) $($parse)*} ($($rest)*) $($rest)*)
};

View File

@ -206,7 +206,7 @@
//! function that returns Anyhow's error type, as the trait that `?`-based error
//! conversions are defined by is only available in std in those old versions.
#![doc(html_root_url = "https://docs.rs/anyhow/1.0.91")]
#![doc(html_root_url = "https://docs.rs/anyhow/1.0.93")]
#![cfg_attr(error_generic_member_access, feature(error_generic_member_access))]
#![no_std]
#![deny(dead_code, unused_imports, unused_mut)]

View File

@ -186,6 +186,17 @@ fn test_unary() {
assert_err(test, "Condition failed: `&mut x == *&&mut &2` (1 vs 2)");
}
#[rustversion::since(1.82)]
#[test]
fn test_raw_addr() {
let mut x = 1;
let test = || Ok(ensure!(S + &raw const x != S + &raw mut x));
assert_err(
test,
"Condition failed: `S + &raw const x != S + &raw mut x` (false vs false)",
);
}
#[test]
fn test_if() {
#[rustfmt::skip]

View File

@ -1 +1 @@
{"files":{".cargo/config.toml":"77e9219c27274120197571fd165cbe4121963b5ad3bc0b20b383c86ef0ce6c2b",".gear/apt-pkg-native.spec":"f51204bd4903cc45b54c2066cc3b4775317460627182fd3180323f48ffe3aec4",".gear/rules":"25289e6d0b29990912683a33fa413644f829ff93fd77f8d982651c5cad1a7e3e",".gear/upstream/remotes":"be2df2cc7562819711aaac7d934e0c908096271d1c4175ecbc0a86eeabf5195b",".travis.yml":"c3fdf6173e81b6bf7e1b0399a1767ef55ea9867d0c86b12c3d91f6e717a8d1db","Cargo.lock":"f4c20f001b8884426eadb198b99556bbd62e3a0da8fb40d46444b5e988866113","Cargo.toml":"f25eb040338f607a7fee7b3eaedbea965491d17c50606bdcb3ceb6b684813dd0","LICENSE.MIT":"54e17baced243561c869877e68c2ac8736f17a2c1266062ad9fb1ebf3ed1967c","README.md":"d44dc795397fe707f6cc605d1f801c1cb8490e804861f1ac938945eb6f294cd3","apt-pkg-c/.gitignore":"659109c3f137e0b4f610d98f017adf80a01551689187a6778dc8fa0d54ea26a4","apt-pkg-c/Makefile":"a8472bde3198669e9af22dd1bfc187baa30a3b2b25490ef6ff3185ec3c0d1fc0","apt-pkg-c/lib.cpp":"baeae62600f975c1294c2d68f157e871c329f2c7d8534409f7f39e8100e71f21","build.rs":"e4cf0afcb5cf6887ea1d971eb3de9224d4186cb58ffff4f9592c4f4d73362946","examples/epochs.rs":"be32926cbad6eede80f7530178bb827eb02c958f6be82b30f5c6f5f21dc399d1","examples/list.rs":"0c20b92819ec94291a36a3805315ec52a2777147b211be0962c287127fbe515c","examples/on-sid/.dockerignore":"25ca4d0088686695559d7c5c7666166a6cb731b76fff8ebb1b90d598325c107c","examples/on-sid/Dockerfile":"dbc6cc6a7cb82854662308e27a7fced5256e4aac4b672bc5e307b55bf357e380","examples/on-sid/Makefile":"12353f6701bf4b685a60a9000021bbb41ff536843deb056810d52e33d00bfa12","examples/on-sid/sources.list":"050b38c58e546cd9c98ea4731cf58317b5611c734d372814f92b905a7c7767a0","examples/on-trusty/.dockerignore":"25ca4d0088686695559d7c5c7666166a6cb731b76fff8ebb1b90d598325c107c","examples/on-trusty/Dockerfile":"12f2231aa631687c868534ad8b0fb80b8ef1225791515ec44391e7e512f34d84","examples/on-trusty/Makefile":"5db72b4ffff06c351a81626d8b93e5d3cf222840b7a38cf927ccbecc57bd21aa","examples/on-trusty/uprust.sh":
"22aa1f7f4c4b9be99a9d7e13ad45b2aec6714165a0578dd5ef81ca11f55ea24e","examples/policy.rs":"c7afd85c85e79acb2d5d10c362992a48a982f68222d724d11422ca90df2a09fe","examples/sources.rs":"9fb1f5bf21679a77b2c985945543da8ed748e786a9977a1d755be68b2f82b16f","src/citer.rs":"e03662241b7da607171a48870e97939d7c8309bc4a4d7c33697a851d8cc12178","src/lib.rs":"3fcd694e85be9d770baa2a92e02befc4f62a610044d55200fb441806286b9a68","src/raw.rs":"5c0060d052062885f9f7142e1b0274411199aa28696d5078a54ed23c23504606","src/sane.rs":"5e96611ee81f8cd02f20b8254a58f0dc4096f8e01c7b49b387e39b93ba15898f","src/simple.rs":"177994ecd9e106345c4efebe1ec38bb521a970332f12da9d9eda4b3563af5e2b"},"package":null}
{"files":{".cargo/config.toml":"77e9219c27274120197571fd165cbe4121963b5ad3bc0b20b383c86ef0ce6c2b",".gear/apt-pkg-native.spec":"f51204bd4903cc45b54c2066cc3b4775317460627182fd3180323f48ffe3aec4",".gear/rules":"25289e6d0b29990912683a33fa413644f829ff93fd77f8d982651c5cad1a7e3e",".gear/upstream/remotes":"be2df2cc7562819711aaac7d934e0c908096271d1c4175ecbc0a86eeabf5195b",".travis.yml":"c3fdf6173e81b6bf7e1b0399a1767ef55ea9867d0c86b12c3d91f6e717a8d1db","Cargo.lock":"f4c20f001b8884426eadb198b99556bbd62e3a0da8fb40d46444b5e988866113","Cargo.toml":"f25eb040338f607a7fee7b3eaedbea965491d17c50606bdcb3ceb6b684813dd0","LICENSE.MIT":"54e17baced243561c869877e68c2ac8736f17a2c1266062ad9fb1ebf3ed1967c","README.md":"d44dc795397fe707f6cc605d1f801c1cb8490e804861f1ac938945eb6f294cd3","apt-pkg-c/.gitignore":"659109c3f137e0b4f610d98f017adf80a01551689187a6778dc8fa0d54ea26a4","apt-pkg-c/Makefile":"a8472bde3198669e9af22dd1bfc187baa30a3b2b25490ef6ff3185ec3c0d1fc0","apt-pkg-c/lib.cpp":"22b567fa96289f3a91c520f1f9a3b7c226a29bf7b54427e2fecd07f9cdffc5dc","build.rs":"e4cf0afcb5cf6887ea1d971eb3de9224d4186cb58ffff4f9592c4f4d73362946","examples/epochs.rs":"be32926cbad6eede80f7530178bb827eb02c958f6be82b30f5c6f5f21dc399d1","examples/list.rs":"0c20b92819ec94291a36a3805315ec52a2777147b211be0962c287127fbe515c","examples/on-sid/.dockerignore":"25ca4d0088686695559d7c5c7666166a6cb731b76fff8ebb1b90d598325c107c","examples/on-sid/Dockerfile":"dbc6cc6a7cb82854662308e27a7fced5256e4aac4b672bc5e307b55bf357e380","examples/on-sid/Makefile":"12353f6701bf4b685a60a9000021bbb41ff536843deb056810d52e33d00bfa12","examples/on-sid/sources.list":"050b38c58e546cd9c98ea4731cf58317b5611c734d372814f92b905a7c7767a0","examples/on-trusty/.dockerignore":"25ca4d0088686695559d7c5c7666166a6cb731b76fff8ebb1b90d598325c107c","examples/on-trusty/Dockerfile":"12f2231aa631687c868534ad8b0fb80b8ef1225791515ec44391e7e512f34d84","examples/on-trusty/Makefile":"5db72b4ffff06c351a81626d8b93e5d3cf222840b7a38cf927ccbecc57bd21aa","examples/on-trusty/uprust.sh":
"22aa1f7f4c4b9be99a9d7e13ad45b2aec6714165a0578dd5ef81ca11f55ea24e","examples/policy.rs":"c7afd85c85e79acb2d5d10c362992a48a982f68222d724d11422ca90df2a09fe","examples/sources.rs":"9fb1f5bf21679a77b2c985945543da8ed748e786a9977a1d755be68b2f82b16f","src/citer.rs":"e03662241b7da607171a48870e97939d7c8309bc4a4d7c33697a851d8cc12178","src/lib.rs":"3fcd694e85be9d770baa2a92e02befc4f62a610044d55200fb441806286b9a68","src/raw.rs":"5c0060d052062885f9f7142e1b0274411199aa28696d5078a54ed23c23504606","src/sane.rs":"5e96611ee81f8cd02f20b8254a58f0dc4096f8e01c7b49b387e39b93ba15898f","src/simple.rs":"177994ecd9e106345c4efebe1ec38bb521a970332f12da9d9eda4b3563af5e2b"},"package":null}

View File

@ -201,12 +201,6 @@ PCache *pkg_cache_create() {
#else
const char *to_c_string(std::string s);
#include <unistd.h>
bool can_commit() {
// Allow user with effective SU to acquire lock
return geteuid() == 0;
}
// See ALT's apt/cmdline
// Used to print out the progress
@ -214,12 +208,9 @@ bool can_commit() {
ostream progress_stream(0);
PCache *pkg_cache_create() {
// Maybe should be set to false sometimes
// (Set to can_commit() for now)
// We should lock for installation, etc.
// For read-only access no locking needed
// In apt-shell, WithLock is always set according to (geteuid() == 0).
const bool WithLock = can_commit();
const bool WithLock = false;
CacheFile *cache_file = new CacheFile(progress_stream, WithLock);
cache_file->Open();

View File

@ -1 +1 @@
{"files":{"CHANGELOG.md":"4c2d7e4367b23e9232a1a08f93c6cd739d9c54394d8f6a58b1674101e7fd70ed","Cargo.toml":"8531bf0801327dae7536893352efc21e1864af5cbb854547c3850a4905a88f36","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"f1ddbede208a5b78333a25dac0a7598e678e9b601a7d99a791069bddaf180dfe","clippy.toml":"aa7850db4350883c8f373bd0d6b4d19bf3b75f13c1c238e24368c109cb52fb1d","src/command_helpers.rs":"63742844930bd693e029fa93b734d21c64453c1d9c58f792b3363b28a4c0e86d","src/detect_compiler_family.c":"97ca4b021495611e828becea6187add37414186a16dfedd26c2947cbce6e8b2f","src/lib.rs":"68e35fadafc880de1f59c327f5bc1681837c1009089dcdf6815798e76bbcc39f","src/parallel/async_executor.rs":"4ce24435fff6b6555b43fee042c16bd65d4150d0346567f246b9190d85b45983","src/parallel/job_token.rs":"f4ed0a03d89a42bfd5527133d12b267af519b745f3f2b997ed293df15a2641b8","src/parallel/mod.rs":"55fb4c2d15e66677b2ed5ffa6d65ea161bcf1a1e1dc7910ee3bde06f2f67ab14","src/parallel/once_lock.rs":"d13e4cb82d6bca3297ca8671d83a40dd5affd7ac89191d733dd451867181bb02","src/parallel/stderr.rs":"74384d41198740a6fce0877f144262db09fb091225fa8fbfa771314bb11487c6","src/target_info.rs":"447d3083f24e10fe4c449925b349b3d14ab2ff103c0d9f942ea9b581873442e1","src/tempfile.rs":"ebafb5b0e5d08b0706916ed911d4245240e60c3e2d0c9a1630c520842988a2b3","src/tool.rs":"2e6550062e021f2b394388172bbb01e86fe6a94d2395bcb3c85a9e86690da1a9","src/utilities.rs":"a13bb0a351fcef72823485b1b5dc4f514c533fa4feac95deb66ed9e5fbfe7b53","src/windows/com.rs":"a2800ddb81215fff2bf618336f5c4ff8e8bdb746dd18b795873c7304b3f2a5e3","src/windows/find_tools.rs":"dd6b2450909cd8334a2aa2ce856bcc72a9654d92422267d6345d5fabfcbf57c5","src/windows/mod.rs":"34cfa201cfbcac7ccaa3ea5295d3e4200439af3cc5c6433baf81502596040a89","src/windows/registry.rs":"c521b72c825e8095843e73482ffa810ed066ad8bb9f86e6db0c5c143c171aba1","src/windows/setup_config.rs":"754439cbab492afd44c9755abcb
ec1a41c9b2c358131cee2df13c0e996dbbec8","src/windows/vs_instances.rs":"946527cf8fd32c3472f6a2884dcdec290763101097334c7478f9c24c3950db6b","src/windows/winapi.rs":"250d51c1826d1a2329e9889dd9f058cfce253dbf2a678b076147c6cdb5db046c","src/windows/windows_sys.rs":"e2714c8307bfa083b9745eb0e46cadd7f98d7b88abf45a7637172019324e34b8","src/windows/windows_targets.rs":"5b4648ebc22b028caca9f4b4bf8881fe2d094b7bec217264ba2e6e2c49d1ccee"},"package":"c2e7962b54006dcfcc61cb72735f4d89bb97061dd6a7ed882ec6b8ee53714c6f"}
{"files":{"CHANGELOG.md":"f564d232d3258fea87d151172c28eb3b3b70b4d1e723c687613f0624d7fb3a88","Cargo.toml":"b69bfe75585b04924aaf37b3ea9dfe45d9927e6e4a0e027678129ba0f003b513","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"378f5840b258e2779c39418f3f2d7b2ba96f1c7917dd6be0713f88305dbda397","README.md":"f1ddbede208a5b78333a25dac0a7598e678e9b601a7d99a791069bddaf180dfe","clippy.toml":"aa7850db4350883c8f373bd0d6b4d19bf3b75f13c1c238e24368c109cb52fb1d","src/command_helpers.rs":"63742844930bd693e029fa93b734d21c64453c1d9c58f792b3363b28a4c0e86d","src/detect_compiler_family.c":"97ca4b021495611e828becea6187add37414186a16dfedd26c2947cbce6e8b2f","src/lib.rs":"045afe722eda4020e39516317467f1422dfa6d9932ad407228274ead0a7250e7","src/parallel/async_executor.rs":"4ce24435fff6b6555b43fee042c16bd65d4150d0346567f246b9190d85b45983","src/parallel/job_token.rs":"018a01cb00182270bbcb68e31e7a7c5c621a95f086e4c68cfa2bf557ac24e5f2","src/parallel/mod.rs":"bd9c1334d17d138c281961c690b8d8118a2d6295a7d6cd7296826255436fa063","src/parallel/stderr.rs":"74384d41198740a6fce0877f144262db09fb091225fa8fbfa771314bb11487c6","src/target.rs":"7313240db078ad85c1209a42a5fa96c3692b5d6e5093c7f366a1b99677247109","src/target/apple.rs":"6afbecac9f66aa72db55694413532f80b2753f28466a6213d1aa901a03c78bcd","src/target/generated.rs":"3c53af166bd4e042abb22a6cb9969d3c8b29ef82a0f197a4e4d59fde82b7537f","src/target/llvm.rs":"e1db4a7fb8b905ee9853781f2fcc64a9d3b48154e4c8b6cb12f5373c5935da6e","src/target/parser.rs":"4f2129a24273d62bf8ab339098e7f758e4e42b0df369dd2b0eb1b20726be91f8","src/tempfile.rs":"ebafb5b0e5d08b0706916ed911d4245240e60c3e2d0c9a1630c520842988a2b3","src/tool.rs":"2e6550062e021f2b394388172bbb01e86fe6a94d2395bcb3c85a9e86690da1a9","src/utilities.rs":"52b30b24a1c31cdefb105309ee5220cfc9fca76eaf4e6d6509c3e19f431448fe","src/windows/com.rs":"a2800ddb81215fff2bf618336f5c4ff8e8bdb746dd18b795873c7304b3f2a5e3","src/windows/find_tools.rs":"d89d1e3ebbfe4807b4fd93a10cb050abe4c1ea884e7a4
abcb34fb84845cf3215","src/windows/mod.rs":"34cfa201cfbcac7ccaa3ea5295d3e4200439af3cc5c6433baf81502596040a89","src/windows/registry.rs":"c521b72c825e8095843e73482ffa810ed066ad8bb9f86e6db0c5c143c171aba1","src/windows/setup_config.rs":"754439cbab492afd44c9755abcbec1a41c9b2c358131cee2df13c0e996dbbec8","src/windows/vs_instances.rs":"946527cf8fd32c3472f6a2884dcdec290763101097334c7478f9c24c3950db6b","src/windows/winapi.rs":"250d51c1826d1a2329e9889dd9f058cfce253dbf2a678b076147c6cdb5db046c","src/windows/windows_sys.rs":"e2714c8307bfa083b9745eb0e46cadd7f98d7b88abf45a7637172019324e34b8","src/windows/windows_targets.rs":"5b4648ebc22b028caca9f4b4bf8881fe2d094b7bec217264ba2e6e2c49d1ccee"},"package":"baee610e9452a8f6f0a1b6194ec09ff9e2d85dea54432acdae41aa0761c95d70"}

View File

@ -6,6 +6,39 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
## [1.1.36](https://github.com/rust-lang/cc-rs/compare/cc-v1.1.35...cc-v1.1.36) - 2024-11-05
### Other
- Fix CUDA build with clang++. ([#1273](https://github.com/rust-lang/cc-rs/pull/1273))
## [1.1.35](https://github.com/rust-lang/cc-rs/compare/cc-v1.1.34...cc-v1.1.35) - 2024-11-04
### Other
- Remove support for FRC ([#1268](https://github.com/rust-lang/cc-rs/pull/1268))
- Do not add -fPIC by default on UEFI targets ([#1263](https://github.com/rust-lang/cc-rs/pull/1263))
- Use -windows-gnu for all UEFI targets ([#1264](https://github.com/rust-lang/cc-rs/pull/1264))
## [1.1.34](https://github.com/rust-lang/cc-rs/compare/cc-v1.1.33...cc-v1.1.34) - 2024-11-02
### Other
- Remove redundant flags ([#1256](https://github.com/rust-lang/cc-rs/pull/1256))
## [1.1.33](https://github.com/rust-lang/cc-rs/compare/cc-v1.1.32...cc-v1.1.33) - 2024-11-02
### Other
- Reduce size of `cc::Build` and size of generated targets ([#1257](https://github.com/rust-lang/cc-rs/pull/1257))
## [1.1.32](https://github.com/rust-lang/cc-rs/compare/cc-v1.1.31...cc-v1.1.32) - 2024-11-02
### Other
- Use `rustc`'s knowledge of LLVM/Clang target triples ([#1252](https://github.com/rust-lang/cc-rs/pull/1252))
- Use Cargo's target information when possible ([#1225](https://github.com/rust-lang/cc-rs/pull/1225))
## [1.1.31](https://github.com/rust-lang/cc-rs/compare/cc-v1.1.30...cc-v1.1.31) - 2024-10-19
### Other

View File

@ -13,7 +13,7 @@
edition = "2018"
rust-version = "1.63"
name = "cc"
version = "1.1.31"
version = "1.1.36"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
build = false
exclude = [

File diff suppressed because it is too large Load Diff

View File

@ -1,8 +1,6 @@
use std::marker::PhantomData;
use crate::Error;
use super::once_lock::OnceLock;
use crate::{utilities::OnceLock, Error};
pub(crate) struct JobToken(PhantomData<()>);

View File

@ -1,4 +1,3 @@
pub(crate) mod async_executor;
pub(crate) mod job_token;
pub(crate) mod once_lock;
pub(crate) mod stderr;

View File

@ -1,47 +0,0 @@
use std::{
cell::UnsafeCell,
marker::PhantomData,
mem::MaybeUninit,
panic::{RefUnwindSafe, UnwindSafe},
sync::Once,
};
pub(crate) struct OnceLock<T> {
once: Once,
value: UnsafeCell<MaybeUninit<T>>,
_marker: PhantomData<T>,
}
impl<T> OnceLock<T> {
pub(crate) const fn new() -> Self {
Self {
once: Once::new(),
value: UnsafeCell::new(MaybeUninit::uninit()),
_marker: PhantomData,
}
}
pub(crate) fn get_or_init(&self, f: impl FnOnce() -> T) -> &T {
self.once.call_once(|| {
unsafe { &mut *self.value.get() }.write(f());
});
unsafe { (&*self.value.get()).assume_init_ref() }
}
}
unsafe impl<T: Sync + Send> Sync for OnceLock<T> {}
unsafe impl<T: Send> Send for OnceLock<T> {}
impl<T: RefUnwindSafe + UnwindSafe> RefUnwindSafe for OnceLock<T> {}
impl<T: UnwindSafe> UnwindSafe for OnceLock<T> {}
impl<T> Drop for OnceLock<T> {
#[inline]
fn drop(&mut self) {
if self.once.is_completed() {
// SAFETY: The cell is initialized and being dropped, so it can't
// be accessed again.
unsafe { self.value.get_mut().assume_init_drop() };
}
}
}

117
pve-rs/vendor/cc/src/target.rs vendored Normal file
View File

@ -0,0 +1,117 @@
//! Very basic parsing of `rustc` target triples.
//!
//! See the `target-lexicon` crate for a more principled approach to this.
use std::str::FromStr;
use crate::{Error, ErrorKind};
mod apple;
mod generated;
mod llvm;
mod parser;
pub(crate) use parser::TargetInfoParser;
/// Information specific to a `rustc` target.
///
/// See <https://doc.rust-lang.org/cargo/appendix/glossary.html#target>.
#[derive(Debug, PartialEq, Clone)]
pub(crate) struct TargetInfo<'a> {
/// The full architecture, including the subarchitecture.
///
/// This differs from `cfg!(target_arch)`, which only specifies the
/// overall architecture, which is too coarse for certain cases.
pub full_arch: &'a str,
/// The overall target architecture.
///
/// This is the same as the value of `cfg!(target_arch)`.
pub arch: &'a str,
/// The target vendor.
///
/// This is the same as the value of `cfg!(target_vendor)`.
pub vendor: &'a str,
/// The operating system, or `none` on bare-metal targets.
///
/// This is the same as the value of `cfg!(target_os)`.
pub os: &'a str,
/// The environment on top of the operating system.
///
/// This is the same as the value of `cfg!(target_env)`.
pub env: &'a str,
/// The ABI on top of the operating system.
///
/// This is the same as the value of `cfg!(target_abi)`.
pub abi: &'a str,
/// The unversioned LLVM/Clang target triple.
unversioned_llvm_target: &'a str,
}
impl FromStr for TargetInfo<'_> {
type Err = Error;
/// This will fail when using a custom target triple unknown to `rustc`.
fn from_str(target_triple: &str) -> Result<Self, Error> {
if let Ok(index) =
generated::LIST.binary_search_by_key(&target_triple, |(target_triple, _)| target_triple)
{
let (_, info) = &generated::LIST[index];
Ok(info.clone())
} else {
Err(Error::new(
ErrorKind::InvalidTarget,
format!("unknown target `{target_triple}`"),
))
}
}
}
#[cfg(test)]
mod tests {
use std::str::FromStr;
use super::TargetInfo;
// Test tier 1 targets
#[test]
fn tier1() {
let targets = [
"aarch64-unknown-linux-gnu",
"aarch64-apple-darwin",
"i686-pc-windows-gnu",
"i686-pc-windows-msvc",
"i686-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-gnu",
"x86_64-pc-windows-msvc",
"x86_64-unknown-linux-gnu",
];
for target in targets {
// Check that it parses
let _ = TargetInfo::from_str(target).unwrap();
}
}
// Various custom target triples not (or no longer) known by `rustc`
#[test]
fn cannot_parse_extra() {
let targets = [
"aarch64-unknown-none-gnu",
"aarch64-uwp-windows-gnu",
"arm-frc-linux-gnueabi",
"arm-unknown-netbsd-eabi",
"armv7neon-unknown-linux-gnueabihf",
"armv7neon-unknown-linux-musleabihf",
"thumbv7-unknown-linux-gnueabihf",
"thumbv7-unknown-linux-musleabihf",
"x86_64-rumprun-netbsd",
"x86_64-unknown-linux",
];
for target in targets {
// Check that it does not parse
let _ = TargetInfo::from_str(target).unwrap_err();
}
}
}

37
pve-rs/vendor/cc/src/target/apple.rs vendored Normal file
View File

@ -0,0 +1,37 @@
use super::TargetInfo;
impl TargetInfo<'_> {
pub(crate) fn apple_sdk_name(&self) -> &'static str {
match (self.os, self.abi) {
("macos", "") => "macosx",
("ios", "") => "iphoneos",
("ios", "sim") => "iphonesimulator",
("ios", "macabi") => "macosx",
("tvos", "") => "appletvos",
("tvos", "sim") => "appletvsimulator",
("watchos", "") => "watchos",
("watchos", "sim") => "watchsimulator",
("visionos", "") => "xros",
("visionos", "sim") => "xrsimulator",
(os, _) => panic!("invalid Apple target OS {}", os),
}
}
pub(crate) fn apple_version_flag(&self, min_version: &str) -> String {
match (self.os, self.abi) {
("macos", "") => format!("-mmacosx-version-min={min_version}"),
("ios", "") => format!("-miphoneos-version-min={min_version}"),
("ios", "sim") => format!("-mios-simulator-version-min={min_version}"),
("ios", "macabi") => format!("-mtargetos=ios{min_version}-macabi"),
("tvos", "") => format!("-mappletvos-version-min={min_version}"),
("tvos", "sim") => format!("-mappletvsimulator-version-min={min_version}"),
("watchos", "") => format!("-mwatchos-version-min={min_version}"),
("watchos", "sim") => format!("-mwatchsimulator-version-min={min_version}"),
// `-mxros-version-min` does not exist
// https://github.com/llvm/llvm-project/issues/88271
("visionos", "") => format!("-mtargetos=xros{min_version}"),
("visionos", "sim") => format!("-mtargetos=xros{min_version}-simulator"),
(os, _) => panic!("invalid Apple target OS {}", os),
}
}
}

3343
pve-rs/vendor/cc/src/target/generated.rs vendored Normal file

File diff suppressed because it is too large Load Diff

89
pve-rs/vendor/cc/src/target/llvm.rs vendored Normal file
View File

@ -0,0 +1,89 @@
use std::borrow::Cow;
use super::TargetInfo;
impl<'a> TargetInfo<'a> {
/// The versioned LLVM/Clang target triple.
pub(crate) fn versioned_llvm_target(&self, version: Option<&str>) -> Cow<'a, str> {
if let Some(version) = version {
// Only support versioned Apple targets for now.
assert_eq!(self.vendor, "apple");
let mut components = self.unversioned_llvm_target.split("-");
let arch = components.next().expect("llvm_target should have arch");
let vendor = components.next().expect("llvm_target should have vendor");
let os = components.next().expect("LLVM target should have os");
let environment = components.next();
assert_eq!(components.next(), None, "too many LLVM target components");
Cow::Owned(if let Some(env) = environment {
format!("{arch}-{vendor}-{os}{version}-{env}")
} else {
format!("{arch}-{vendor}-{os}{version}")
})
} else {
Cow::Borrowed(self.unversioned_llvm_target)
}
}
}
/// Rust and Clang don't really agree on naming, so do a best-effort
/// conversion to support out-of-tree / custom target-spec targets.
pub(crate) fn guess_llvm_target_triple(
full_arch: &str,
vendor: &str,
os: &str,
env: &str,
abi: &str,
) -> String {
let arch = match full_arch {
riscv32 if riscv32.starts_with("riscv32") => "riscv32",
riscv64 if riscv64.starts_with("riscv64") => "riscv64",
arch => arch,
};
let os = match os {
"darwin" => "macosx",
"visionos" => "xros",
"uefi" => "windows",
os => os,
};
let env = match env {
"newlib" | "nto70" | "nto71" | "ohos" | "p1" | "p2" | "relibc" | "sgx" | "uclibc" => "",
env => env,
};
let abi = match abi {
"sim" => "simulator",
"llvm" | "softfloat" | "uwp" | "vec-extabi" => "",
"ilp32" => "_ilp32",
abi => abi,
};
match (env, abi) {
("", "") => format!("{arch}-{vendor}-{os}"),
(env, abi) => format!("{arch}-{vendor}-{os}-{env}{abi}"),
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_basic_llvm_triple_guessing() {
assert_eq!(
guess_llvm_target_triple("aarch64", "unknown", "linux", "", ""),
"aarch64-unknown-linux"
);
assert_eq!(
guess_llvm_target_triple("x86_64", "unknown", "linux", "gnu", ""),
"x86_64-unknown-linux-gnu"
);
assert_eq!(
guess_llvm_target_triple("x86_64", "unknown", "linux", "gnu", "eabi"),
"x86_64-unknown-linux-gnueabi"
);
assert_eq!(
guess_llvm_target_triple("x86_64", "apple", "darwin", "", ""),
"x86_64-apple-macosx"
);
}
}

130
pve-rs/vendor/cc/src/target/parser.rs vendored Normal file
View File

@ -0,0 +1,130 @@
use std::{env, str::FromStr};
use crate::{
target::{llvm, TargetInfo},
utilities::OnceLock,
Error, ErrorKind,
};
#[derive(Debug)]
struct TargetInfoParserInner {
full_arch: Box<str>,
arch: Box<str>,
vendor: Box<str>,
os: Box<str>,
env: Box<str>,
abi: Box<str>,
unversioned_llvm_target: Box<str>,
}
impl TargetInfoParserInner {
fn from_cargo_environment_variables() -> Result<Self, Error> {
// `TARGET` must be present.
//
// No need to emit `rerun-if-env-changed` for this,
// as it is controlled by Cargo itself.
#[allow(clippy::disallowed_methods)]
let target_triple = env::var("TARGET").map_err(|err| {
Error::new(
ErrorKind::EnvVarNotFound,
format!("failed reading TARGET: {err}"),
)
})?;
// Parse the full architecture name from the target triple.
let (full_arch, _rest) = target_triple.split_once('-').ok_or(Error::new(
ErrorKind::InvalidTarget,
format!("target `{target_triple}` had an unknown architecture"),
))?;
let cargo_env = |name, fallback: Option<&str>| -> Result<Box<str>, Error> {
// No need to emit `rerun-if-env-changed` for these,
// as they are controlled by Cargo itself.
#[allow(clippy::disallowed_methods)]
match env::var(name) {
Ok(var) => Ok(var.into_boxed_str()),
Err(err) => match fallback {
Some(fallback) => Ok(fallback.into()),
None => Err(Error::new(
ErrorKind::EnvVarNotFound,
format!("did not find fallback information for target `{target_triple}`, and failed reading {name}: {err}"),
)),
},
}
};
// Prefer to use `CARGO_ENV_*` if set, since these contain the most
// correct information relative to the current `rustc`, and makes it
// possible to support custom target JSON specs unknown to `rustc`.
//
// NOTE: If the user is using an older `rustc`, that data may be older
// than our pre-generated data, but we still prefer Cargo's view of
// the world, since at least `cc` won't differ from `rustc` in that
// case.
//
// These may not be set in case the user depended on being able to
// just set `TARGET` outside of build scripts; in those cases, fall
// back back to data from the known set of target triples instead.
//
// See discussion in #1225 for further details.
let fallback_target = TargetInfo::from_str(&target_triple).ok();
let ft = fallback_target.as_ref();
let arch = cargo_env("CARGO_CFG_TARGET_ARCH", ft.map(|t| t.arch))?;
let vendor = cargo_env("CARGO_CFG_TARGET_VENDOR", ft.map(|t| t.vendor))?;
let os = cargo_env("CARGO_CFG_TARGET_OS", ft.map(|t| t.os))?;
let env = cargo_env("CARGO_CFG_TARGET_ENV", ft.map(|t| t.env))?;
// `target_abi` was stabilized in Rust 1.78, which is higher than our
// MSRV, so it may not always be available; In that case, fall back to
// `""`, which is _probably_ correct for unknown target triples.
let abi = cargo_env("CARGO_CFG_TARGET_ABI", ft.map(|t| t.abi))
.unwrap_or_else(|_| String::default().into_boxed_str());
// Prefer `rustc`'s LLVM target triple information.
let unversioned_llvm_target = match fallback_target {
Some(ft) => ft.unversioned_llvm_target.to_string(),
None => llvm::guess_llvm_target_triple(full_arch, &vendor, &os, &env, &abi),
};
Ok(Self {
full_arch: full_arch.to_string().into_boxed_str(),
arch,
vendor,
os,
env,
abi,
unversioned_llvm_target: unversioned_llvm_target.into_boxed_str(),
})
}
}
/// Parser for [`TargetInfo`], contains cached information.
#[derive(Default, Debug)]
pub(crate) struct TargetInfoParser(OnceLock<Result<TargetInfoParserInner, Error>>);
impl TargetInfoParser {
pub fn parse_from_cargo_environment_variables(&self) -> Result<TargetInfo<'_>, Error> {
match self
.0
.get_or_init(TargetInfoParserInner::from_cargo_environment_variables)
{
Ok(TargetInfoParserInner {
full_arch,
arch,
vendor,
os,
env,
abi,
unversioned_llvm_target,
}) => Ok(TargetInfo {
full_arch,
arch,
vendor,
os,
env,
abi,
unversioned_llvm_target,
}),
Err(e) => Err(e.clone()),
}
}
}

View File

@ -1,29 +0,0 @@
//! This file is generated code. Please edit the generator
//! in dev-tools/gen-target-info if you need to make changes.
pub const RISCV_ARCH_MAPPING: &[(&str, &str)] = &[
("riscv32e", "riscv32"),
("riscv32em", "riscv32"),
("riscv32emc", "riscv32"),
("riscv32gc", "riscv32"),
("riscv32i", "riscv32"),
("riscv32im", "riscv32"),
("riscv32ima", "riscv32"),
("riscv32imac", "riscv32"),
("riscv32imafc", "riscv32"),
("riscv32imc", "riscv32"),
("riscv64gc", "riscv64"),
("riscv64imac", "riscv64"),
];
pub const WINDOWS_TRIPLE_MAPPING: &[(&str, &str)] = &[
("aarch64-pc-windows-gnullvm", "aarch64-pc-windows-gnu"),
("aarch64-uwp-windows-msvc", "aarch64-pc-windows-msvc"),
("i686-pc-windows-gnullvm", "i686-pc-windows-gnu"),
("i686-uwp-windows-gnu", "i686-pc-windows-gnu"),
("i686-uwp-windows-msvc", "i686-pc-windows-msvc"),
("i686-win7-windows-msvc", "i686-pc-windows-msvc"),
("thumbv7a-uwp-windows-msvc", "thumbv7a-pc-windows-msvc"),
("x86_64-pc-windows-gnullvm", "x86_64-pc-windows-gnu"),
("x86_64-uwp-windows-gnu", "x86_64-pc-windows-gnu"),
("x86_64-uwp-windows-msvc", "x86_64-pc-windows-msvc"),
];

View File

@ -1,7 +1,12 @@
use std::{
cell::UnsafeCell,
ffi::OsStr,
fmt::{self, Write},
marker::PhantomData,
mem::MaybeUninit,
panic::{RefUnwindSafe, UnwindSafe},
path::Path,
sync::Once,
};
pub(super) struct JoinOsStrs<'a, T> {
@ -43,3 +48,83 @@ where
}
}
}
pub(crate) struct OnceLock<T> {
once: Once,
value: UnsafeCell<MaybeUninit<T>>,
_marker: PhantomData<T>,
}
impl<T> Default for OnceLock<T> {
fn default() -> Self {
Self::new()
}
}
impl<T> OnceLock<T> {
pub(crate) const fn new() -> Self {
Self {
once: Once::new(),
value: UnsafeCell::new(MaybeUninit::uninit()),
_marker: PhantomData,
}
}
#[inline]
fn is_initialized(&self) -> bool {
self.once.is_completed()
}
unsafe fn get_unchecked(&self) -> &T {
debug_assert!(self.is_initialized());
#[allow(clippy::needless_borrow)]
#[allow(unused_unsafe)]
unsafe {
(&*self.value.get()).assume_init_ref()
}
}
pub(crate) fn get_or_init(&self, f: impl FnOnce() -> T) -> &T {
self.once.call_once(|| {
unsafe { &mut *self.value.get() }.write(f());
});
unsafe { self.get_unchecked() }
}
pub(crate) fn get(&self) -> Option<&T> {
if self.is_initialized() {
// Safe b/c checked is_initialized
Some(unsafe { self.get_unchecked() })
} else {
None
}
}
}
impl<T: fmt::Debug> fmt::Debug for OnceLock<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut d = f.debug_tuple("OnceLock");
match self.get() {
Some(v) => d.field(v),
None => d.field(&format_args!("<uninit>")),
};
d.finish()
}
}
unsafe impl<T: Sync + Send> Sync for OnceLock<T> {}
unsafe impl<T: Send> Send for OnceLock<T> {}
impl<T: RefUnwindSafe + UnwindSafe> RefUnwindSafe for OnceLock<T> {}
impl<T: UnwindSafe> UnwindSafe for OnceLock<T> {}
impl<T> Drop for OnceLock<T> {
#[inline]
fn drop(&mut self) {
if self.once.is_completed() {
// SAFETY: The cell is initialized and being dropped, so it can't
// be accessed again.
unsafe { self.value.get_mut().assume_init_drop() };
}
}
}

View File

@ -23,8 +23,8 @@ use std::{
sync::Arc,
};
use crate::Tool;
use crate::ToolFamily;
use crate::{target::TargetInfo, Tool};
const MSVC_FAMILY: ToolFamily = ToolFamily::Msvc { clang_cl: false };
@ -107,22 +107,22 @@ pub fn find(target: &str, tool: &str) -> Option<Command> {
/// operation (finding a MSVC tool in a local install) but instead returns a
/// `Tool` which may be introspected.
pub fn find_tool(target: &str, tool: &str) -> Option<Tool> {
find_tool_inner(target, tool, &StdEnvGetter)
find_tool_inner(&target.parse().ok()?, tool, &StdEnvGetter)
}
pub(crate) fn find_tool_inner(
target: &str,
target: &TargetInfo,
tool: &str,
env_getter: &dyn EnvGetter,
) -> Option<Tool> {
// This logic is all tailored for MSVC, if we're not that then bail out
// early.
if !target.contains("msvc") {
if target.env != "msvc" {
return None;
}
// Split the target to get the arch.
let target = TargetArch(target.split_once('-')?.0);
// We only need the arch.
let target = TargetArch(target.full_arch);
// Looks like msbuild isn't located in the same location as other tools like
// cl.exe and lib.exe.

View File

@ -1 +0,0 @@
{"files":{"Cargo.toml":"8be30a9748419aed461ce333e260ff4a461bf8166dfc7768307f32fcfc4fbea1","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","benches/all.rs":"e734b9c9092ed66986725f86cfe90f3756cfddb058af308b796ba494f9beefc2","src/IdnaMappingTable.txt":"87d6553a4b86bc49dcade38bf26b745cd81800eb8af295dc3fb99b4729eaea38","src/lib.rs":"e7fd80070a7e52dfd1e9fe785bf092eddc9fb421fd0f9a1ba1c2189b8d40d3ed","src/make_uts46_mapping_table.py":"917055fa841f813de2bcf79cc79b595da3d5551559ee768db8660ab77cb26c34","src/punycode.rs":"3697674a70647d200853ac9d1910ffcb4796534332fe328de16c4bb1283e2ec1","src/uts46.rs":"4eee036b6448489002ac5190f3ac28834a4caa063c7cc77474ea6256199619ae","src/uts46_mapping_table.rs":"942fff78147c61da942f5f3a7ff4e90f9d7a00a29285733ac3fc3357eb2ed06f","tests/IdnaTestV2.txt":"c6f3778b0545fd150c8063286c7f5adc901e16557eddccc3751213646d07593d","tests/bad_punycode_tests.json":"ff0a15479ed2cb08f7b4b39465160da66d1ac7575e5d76990c17e7b76cb5e0f5","tests/punycode.rs":"0b0f315a8b124c1275a423a69169b13b19bcd7e9e6a5158bd0d642d01c6db145","tests/punycode_tests.json":"3d4ac0cf25984c37b9ce197f5df680a0136f728fb8ec82bc76624e42139eb3a8","tests/tests.rs":"d205a2bfb29dfee73e014faebd3207a55ef0d40121e6dbd52f5d611b37ac111e","tests/unit.rs":"be025a7d9bab3bd1ce134c87f9d848269e157b31ca5ba0ea03426c1ac736b69e","tests/uts46.rs":"06c97bf7dc20f5372b542fa46922d6dd63fe15e0aa34d799d08df9e3a241aa21"},"package":"634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"}

View File

@ -1,69 +0,0 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2018"
rust-version = "1.51"
name = "idna"
version = "0.5.0"
authors = ["The rust-url developers"]
autotests = false
description = "IDNA (Internationalizing Domain Names in Applications) and Punycode."
categories = ["no_std"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/servo/rust-url/"
[package.metadata.docs.rs]
rustdoc-args = ["--generate-link-to-definition"]
[lib]
doctest = false
[[test]]
name = "tests"
harness = false
[[test]]
name = "unit"
[[bench]]
name = "all"
harness = false
[dependencies.unicode-bidi]
version = "0.3.10"
features = ["hardcoded-data"]
default-features = false
[dependencies.unicode-normalization]
version = "0.1.22"
default-features = false
[dev-dependencies.assert_matches]
version = "1.3"
[dev-dependencies.bencher]
version = "0.1"
[dev-dependencies.serde_json]
version = "1.0"
[dev-dependencies.tester]
version = "0.9"
[features]
alloc = []
default = ["std"]
std = [
"alloc",
"unicode-bidi/std",
"unicode-normalization/std",
]

View File

@ -1,25 +0,0 @@
Copyright (c) 2013-2022 The rust-url developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View File

@ -1,53 +0,0 @@
#[macro_use]
extern crate bencher;
extern crate idna;
use bencher::{black_box, Bencher};
use idna::Config;
/// Bench ToUnicode on a domain whose second label is Punycode-encoded.
fn to_unicode_puny_label(bench: &mut Bencher) {
    let sample = "abc.xn--mgbcm";
    let cfg = Config::default();
    bench.iter(|| cfg.to_unicode(black_box(sample)));
}
/// Bench ToUnicode on a plain ASCII domain (fast path).
fn to_unicode_ascii(bench: &mut Bencher) {
    let sample = "example.com";
    let cfg = Config::default();
    bench.iter(|| cfg.to_unicode(black_box(sample)));
}
/// Bench ToUnicode on a label mixing ASCII with a Punycode-encoded tail.
fn to_unicode_merged_label(bench: &mut Bencher) {
    let sample = "Beispiel.xn--vermgensberater-ctb";
    let cfg = Config::default();
    bench.iter(|| cfg.to_unicode(black_box(sample)));
}
/// Bench ToASCII on a domain containing a non-ASCII (Arabic) label.
fn to_ascii_puny_label(bench: &mut Bencher) {
    let sample = "abc.ابج";
    let cfg = Config::default();
    bench.iter(|| cfg.to_ascii(black_box(sample)));
}
/// Bench ToASCII on a plain ASCII domain (fast path).
fn to_ascii_simple(bench: &mut Bencher) {
    let sample = "example.com";
    let cfg = Config::default();
    bench.iter(|| cfg.to_ascii(black_box(sample)));
}
/// Bench ToASCII on a label mixing ASCII and non-ASCII characters.
fn to_ascii_merged(bench: &mut Bencher) {
    let sample = "beispiel.vermögensberater";
    let cfg = Config::default();
    bench.iter(|| cfg.to_ascii(black_box(sample)));
}
// Register all benchmarks with the `bencher` harness and generate `main`.
benchmark_group!(
    benches,
    to_unicode_puny_label,
    to_unicode_ascii,
    to_unicode_merged_label,
    to_ascii_puny_label,
    to_ascii_simple,
    to_ascii_merged,
);
benchmark_main!(benches);

File diff suppressed because it is too large Load Diff

View File

@ -1,86 +0,0 @@
// Copyright 2016 The rust-url developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This Rust crate implements IDNA
//! [per the WHATWG URL Standard](https://url.spec.whatwg.org/#idna).
//!
//! It also exposes the underlying algorithms from [*Unicode IDNA Compatibility Processing*
//! (Unicode Technical Standard #46)](http://www.unicode.org/reports/tr46/)
//! and [Punycode (RFC 3492)](https://tools.ietf.org/html/rfc3492).
//!
//! Quoting from [UTS #46s introduction](http://www.unicode.org/reports/tr46/#Introduction):
//!
//! > Initially, domain names were restricted to ASCII characters.
//! > A system was introduced in 2003 for internationalized domain names (IDN).
//! > This system is called Internationalizing Domain Names for Applications,
//! > or IDNA2003 for short.
//! > This mechanism supports IDNs by means of a client software transformation
//! > into a format known as Punycode.
//! > A revision of IDNA was approved in 2010 (IDNA2008).
//! > This revision has a number of incompatibilities with IDNA2003.
//! >
//! > The incompatibilities force implementers of client software,
//! > such as browsers and emailers,
//! > to face difficult choices during the transition period
//! > as registries shift from IDNA2003 to IDNA2008.
//! > This document specifies a mechanism
//! > that minimizes the impact of this transition for client software,
//! > allowing client software to access domains that are valid under either system.
#![no_std]
// For forwards compatibility
#[cfg(feature = "std")]
extern crate std;
extern crate alloc;
#[cfg(not(feature = "alloc"))]
compile_error!("the `alloc` feature must be enabled");
#[cfg(test)]
#[macro_use]
extern crate assert_matches;
use alloc::string::String;
pub mod punycode;
mod uts46;
pub use crate::uts46::{Config, Errors, Idna};
/// The [domain to ASCII](https://url.spec.whatwg.org/#concept-domain-to-ascii) algorithm.
///
/// Returns the ASCII representation of a domain name, lower-casing and
/// otherwise normalizing characters and applying Punycode where needed.
///
/// This process may fail.
pub fn domain_to_ascii(domain: &str) -> Result<String, uts46::Errors> {
    let config = Config::default();
    config.to_ascii(domain)
}
/// The [domain to ASCII](https://url.spec.whatwg.org/#concept-domain-to-ascii) algorithm,
/// with the `beStrict` flag set.
pub fn domain_to_ascii_strict(domain: &str) -> Result<String, uts46::Errors> {
    // `beStrict` maps to UseSTD3ASCIIRules plus VerifyDnsLength.
    let config = Config::default()
        .use_std3_ascii_rules(true)
        .verify_dns_length(true);
    config.to_ascii(domain)
}
/// The [domain to Unicode](https://url.spec.whatwg.org/#concept-domain-to-unicode) algorithm.
///
/// Returns the Unicode representation of a domain name, normalizing
/// characters and decoding Punycode as necessary.
///
/// This may indicate [syntax violations](https://url.spec.whatwg.org/#syntax-violation)
/// but always returns a string for the mapped domain.
pub fn domain_to_unicode(domain: &str) -> (String, Result<(), uts46::Errors>) {
    let config = Config::default();
    config.to_unicode(domain)
}

View File

@ -1,185 +0,0 @@
# Copyright 2013-2014 The rust-url developers.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
# Run as: python make_uts46_mapping_table.py IdnaMappingTable.txt > uts46_mapping_table.rs
# You can get the latest idna table from
# http://www.unicode.org/Public/idna/latest/IdnaMappingTable.txt
import collections
import itertools
# Emit the generated-file header, then open the Unicode mapping data.
print('''\
// Copyright 2013-2020 The rust-url developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Generated by make_idna_table.py
''')
txt = open("IdnaMappingTable.txt")
def escape_char(c):
    """Format the first character of *c* as a Rust escape, e.g. 'a' -> '\\u{61}'."""
    return "\\u{{{0:x}}}".format(ord(c[0]))
def char(s):
    """Convert a hexadecimal code point string (e.g. '0061') to its character."""
    codepoint = int(s, 16)
    return chr(codepoint)
# Interned-string table shared with strtab_slice: maps each replacement
# string to its (byte offset, utf8 length) slice in the emitted table.
strtab = collections.OrderedDict()
strtab_offset = 0
def strtab_slice(s):
    """Intern *s* in the global string table, returning its
    (byte offset, utf8 length) slice; reuses an existing entry if present."""
    global strtab, strtab_offset
    if s in strtab:
        return strtab[s]
    else:
        # Offsets are measured in UTF-8 bytes, matching the Rust &str table.
        utf8_len = len(s.encode('utf8'))
        c = (strtab_offset, utf8_len)
        strtab[s] = c
        strtab_offset += utf8_len
        return c
def rust_slice(s):
    """Render an (offset, length) pair as a Rust StringTableSlice literal.

    The 16-bit start offset is split into lo/hi bytes so the struct packs
    with alignment 1; length and high byte must each fit in a u8.
    """
    start, length = s
    lo = start & 0xFF
    hi = start >> 8
    assert length <= 255
    assert hi <= 255
    template = "(StringTableSlice { byte_start_lo: %d, byte_start_hi: %d, byte_len: %d })"
    return template % (lo, hi, length)
# Parse IdnaMappingTable.txt into (first, last, mapping, unicode_str)
# tuples, where first/last are hex code point strings.
ranges = []
for line in txt:
    # remove comments
    line, _, _ = line.partition('#')
    # skip empty lines
    if len(line.strip()) == 0:
        continue
    fields = line.split(';')
    if fields[0].strip() == 'D800..DFFF':
        continue # Surrogates don't occur in Rust strings.
    first, _, last = fields[0].strip().partition('..')
    if not last:
        last = first
    # Normalize the status name to the Rust enum variant name, e.g.
    # "disallowed_STD3_mapped" -> "DisallowedStd3Mapped".
    mapping = fields[1].strip().replace('_', ' ').title().replace(' ', '')
    unicode_str = None
    if len(fields) > 2:
        if fields[2].strip():
            unicode_str = u''.join(char(c) for c in fields[2].strip().split(' '))
        elif mapping == "Deviation":
            # Deviation with no replacement maps to the empty string.
            unicode_str = u''
    if len(fields) > 3:
        # A fourth field (NV8/XV8) marks code points disallowed by IDNA2008.
        assert fields[3].strip() in ('NV8', 'XV8'), fields[3]
        assert mapping == 'Valid', mapping
        mapping = 'DisallowedIdna2008'
    ranges.append((first, last, mapping, unicode_str))
def mergeable_key(r):
    """Group-by key for ranges: payload-free mappings collapse onto their
    mapping name so adjacent ranges merge; mappings with associated data
    stay unique (keyed by the whole tuple)."""
    mapping = r[2]
    # These types have associated data, so we should not merge them.
    if mapping in {'Mapped', 'Deviation', 'DisallowedStd3Mapped'}:
        return r
    assert mapping in {'Valid', 'Ignored', 'Disallowed', 'DisallowedStd3Valid', 'DisallowedIdna2008'}
    return mapping
# Merge runs of adjacent ranges that share the same payload-free mapping
# into one wider range, sanity-checking contiguity along the way.
grouped_ranges = itertools.groupby(ranges, key=mergeable_key)
optimized_ranges = []
for (k, g) in grouped_ranges:
    group = list(g)
    if len(group) == 1:
        optimized_ranges.append(group[0])
        continue
    # Assert that nothing in the group has an associated unicode string.
    for g in group:
        if g[3] is not None and len(g[3]) > 2:
            assert not g[3][2].strip()
    # Assert that consecutive members of the group don't leave gaps in
    # the codepoint space.
    a, b = itertools.tee(group)
    next(b, None)
    for (g1, g2) in zip(a, b):
        last_char = int(g1[1], 16)
        next_char = int(g2[0], 16)
        if last_char + 1 == next_char:
            continue
        # There's a gap where surrogates would appear, but we don't have to
        # worry about that gap, as surrogates never appear in Rust strings.
        # Assert we're seeing the surrogate case here.
        assert last_char == 0xd7ff
        assert next_char == 0xe000
    # Replace the group with one range spanning first..last.
    optimized_ranges.append((group[0][0], group[-1][1]) + group[0][2:])
def is_single_char_range(r):
    """True when the range covers exactly one code point (first == last)."""
    return r[0] == r[1]
def merge_single_char_ranges(ranges):
    """Group runs of consecutive single-code-point ranges together.

    Yields lists of ranges: a run of adjacent single-char ranges becomes one
    group, every other range is yielded as a singleton group, and the final
    group is always flushed (it is empty only when *ranges* is empty).
    This shrinks the character-range and index tables to roughly 1/4 by
    letting a run share one index entry, with the `Mapping` value retrieved
    by character offset from the start of the run.
    """
    group = []
    for r in ranges:
        # Start a new run, or extend one if both the previous range and the
        # current one cover a single code point each.
        run_continues = not group or (group[-1][0] == group[-1][1] and r[0] == r[1])
        if run_continues:
            group.append(r)
        else:
            completed, group = group, [r]
            yield completed
    yield group
optimized_ranges = list(merge_single_char_ranges(optimized_ranges))
# High bit of a TABLE index marks a block that holds exactly one range.
SINGLE_MARKER = 1 << 15
# Emit TABLE: one (start char, packed index) entry per block.
print("static TABLE: &[(char, u16)] = &[")
offset = 0
for ranges in optimized_ranges:
    assert offset < SINGLE_MARKER
    block_len = len(ranges)
    single = SINGLE_MARKER if block_len == 1 else 0
    index = offset | single
    offset += block_len
    start = escape_char(char(ranges[0][0]))
    print(" ('%s', %s)," % (start, index))
print("];\n")
# Emit MAPPING_TABLE: one Mapping entry per range, in block order.
print("static MAPPING_TABLE: &[Mapping] = &[")
for ranges in optimized_ranges:
    for (first, last, mapping, unicode_str) in ranges:
        if unicode_str is not None:
            # Payload-carrying variants embed their string-table slice.
            mapping += rust_slice(strtab_slice(unicode_str))
        print(" %s," % mapping)
print("];\n")
def escape_str(s):
    """Escape every character of *s* as a Rust \\u{...} sequence list."""
    return ["\\u{%x}" % ord(ch) for ch in s]
# Emit the interned strings as a single escaped Rust string literal, with
# a line-continuation backslash between characters to keep lines short.
print("static STRING_TABLE: &str = \"%s\";"
    % '\\\n '.join(itertools.chain(*[escape_str(s) for s in strtab.keys()])))

View File

@ -1,328 +0,0 @@
// Copyright 2013 The rust-url developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Punycode ([RFC 3492](http://tools.ietf.org/html/rfc3492)) implementation.
//!
//! Since Punycode fundamentally works on unicode code points,
//! `encode` and `decode` take and return slices and vectors of `char`.
//! `encode_str` and `decode_to_string` provide convenience wrappers
//! that convert from and to Rusts UTF-8 based `str` and `String` types.
use alloc::{string::String, vec::Vec};
use core::char;
use core::u32;
// Bootstring parameters for Punycode (RFC 3492, section 5).
static BASE: u32 = 36; // digit alphabet size: 'a'..='z' plus '0'..='9'
static T_MIN: u32 = 1; // minimum digit threshold
static T_MAX: u32 = 26; // maximum digit threshold
static SKEW: u32 = 38; // bias adaptation skew
static DAMP: u32 = 700; // extra damping applied to the first delta
static INITIAL_BIAS: u32 = 72;
static INITIAL_N: u32 = 0x80; // first non-ASCII code point
static DELIMITER: char = '-'; // separates the basic prefix from the encoded tail
/// Bias adaptation function from RFC 3492, section 6.1.
///
/// Rescales `delta` after each encoded/decoded character so that the digit
/// thresholds used for the variable-length integers track the data.
#[inline]
fn adapt(mut delta: u32, num_points: u32, first_time: bool) -> u32 {
    // The very first delta is damped much more strongly.
    delta /= if first_time { DAMP } else { 2 };
    delta += delta / num_points;
    let mut k = 0;
    while delta > ((BASE - T_MIN) * T_MAX) / 2 {
        delta /= BASE - T_MIN;
        k += BASE;
    }
    k + (((BASE - T_MIN + 1) * delta) / (delta + SKEW))
}
/// Convert Punycode to a Unicode `String`.
///
/// This is a convenience wrapper around [`decode`].
#[inline]
pub fn decode_to_string(input: &str) -> Option<String> {
    let chars = decode(input)?;
    Some(chars.into_iter().collect())
}
/// Convert Punycode to Unicode.
///
/// Return None on malformed input or overflow.
/// Overflow can only happen on inputs that take more than
/// 63 encoded bytes, the DNS limit on domain name labels.
pub fn decode(input: &str) -> Option<Vec<char>> {
    let mut decoder = Decoder::default();
    match decoder.decode(input) {
        Ok(decoded) => Some(decoded.collect()),
        Err(()) => None,
    }
}
/// Reusable Punycode decoder state.
#[derive(Default)]
pub(crate) struct Decoder {
    // Scratch list of (position, char) pairs to splice into the basic
    // (ASCII) prefix; kept on the struct so its allocation is reused
    // across calls to `decode`.
    insertions: Vec<(usize, char)>,
}
impl Decoder {
    /// Split the input iterator and return a Vec with insertions of encoded characters
    ///
    /// Decodes one Punycode label. Returns `Err(())` on malformed input or
    /// arithmetic overflow; on success returns an iterator merging the basic
    /// (ASCII) prefix with the decoded insertions.
    pub(crate) fn decode<'a>(&'a mut self, input: &'a str) -> Result<Decode<'a>, ()> {
        self.insertions.clear();
        // Handle "basic" (ASCII) code points.
        // They are encoded as-is before the last delimiter, if any.
        let (base, input) = match input.rfind(DELIMITER) {
            None => ("", input),
            Some(position) => (
                &input[..position],
                // A delimiter at position 0 means there is no basic prefix;
                // the remainder (after the '-') starts at index 1, so keep
                // the full slice only in the position == 0 case.
                if position > 0 {
                    &input[position + 1..]
                } else {
                    input
                },
            ),
        };
        if !base.is_ascii() {
            return Err(());
        }
        let base_len = base.len();
        let mut length = base_len as u32;
        let mut code_point = INITIAL_N;
        let mut bias = INITIAL_BIAS;
        let mut i = 0;
        let mut iter = input.bytes();
        loop {
            let previous_i = i;
            let mut weight = 1;
            let mut k = BASE;
            let mut byte = match iter.next() {
                None => break,
                Some(byte) => byte,
            };
            // Decode a generalized variable-length integer into delta,
            // which gets added to i.
            loop {
                let digit = match byte {
                    byte @ b'0'..=b'9' => byte - b'0' + 26,
                    byte @ b'A'..=b'Z' => byte - b'A',
                    byte @ b'a'..=b'z' => byte - b'a',
                    _ => return Err(()),
                } as u32;
                if digit > (u32::MAX - i) / weight {
                    return Err(()); // Overflow
                }
                i += digit * weight;
                let t = if k <= bias {
                    T_MIN
                } else if k >= bias + T_MAX {
                    T_MAX
                } else {
                    k - bias
                };
                if digit < t {
                    break;
                }
                if weight > u32::MAX / (BASE - t) {
                    return Err(()); // Overflow
                }
                weight *= BASE - t;
                k += BASE;
                byte = match iter.next() {
                    None => return Err(()), // End of input before the end of this delta
                    Some(byte) => byte,
                };
            }
            bias = adapt(i - previous_i, length + 1, previous_i == 0);
            if i / (length + 1) > u32::MAX - code_point {
                return Err(()); // Overflow
            }
            // i was supposed to wrap around from length+1 to 0,
            // incrementing code_point each time.
            code_point += i / (length + 1);
            i %= length + 1;
            let c = match char::from_u32(code_point) {
                Some(c) => c,
                None => return Err(()),
            };
            // Move earlier insertions farther out in the string
            for (idx, _) in &mut self.insertions {
                if *idx >= i as usize {
                    *idx += 1;
                }
            }
            self.insertions.push((i as usize, c));
            length += 1;
            i += 1;
        }
        // Emit insertions in string order so `Decode` can merge them with
        // the basic prefix in a single forward pass.
        self.insertions.sort_by_key(|(i, _)| *i);
        Ok(Decode {
            base: base.chars(),
            insertions: &self.insertions,
            inserted: 0,
            position: 0,
            len: base_len + self.insertions.len(),
        })
    }
}
/// Iterator yielding a decoded Punycode label: the ASCII "basic" prefix
/// with the non-basic characters spliced in at their recorded positions.
pub(crate) struct Decode<'a> {
    base: core::str::Chars<'a>,
    pub(crate) insertions: &'a [(usize, char)],
    // Number of insertions already emitted.
    inserted: usize,
    // Current output position, in characters.
    position: usize,
    // Total number of characters this iterator will produce.
    len: usize,
}
impl<'a> Iterator for Decode<'a> {
    type Item = char;
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // An insertion scheduled for the current position takes
            // precedence over the next basic character.
            match self.insertions.get(self.inserted) {
                Some((pos, c)) if *pos == self.position => {
                    self.inserted += 1;
                    self.position += 1;
                    return Some(*c);
                }
                _ => {}
            }
            if let Some(c) = self.base.next() {
                self.position += 1;
                return Some(c);
            } else if self.inserted >= self.insertions.len() {
                // Both the basic prefix and all insertions are exhausted.
                return None;
            }
            // Basic prefix exhausted but insertions remain; loop so the
            // remaining (tail-position) insertions get emitted.
        }
    }
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact: total output length minus characters already produced.
        let len = self.len - self.position;
        (len, Some(len))
    }
}
impl<'a> ExactSizeIterator for Decode<'a> {
    /// Exact number of characters remaining.
    fn len(&self) -> usize {
        // `size_hint` is exact for this iterator; reuse it.
        let (exact, _) = self.size_hint();
        exact
    }
}
/// Convert a Unicode `str` to Punycode.
///
/// This is a convenience wrapper around `encode_into`.
#[inline]
pub fn encode_str(input: &str) -> Option<String> {
    // Inputs longer than u32::MAX would overflow the delta arithmetic.
    if input.len() > u32::MAX as usize {
        return None;
    }
    let mut encoded = String::with_capacity(input.len());
    match encode_into(input.chars(), &mut encoded) {
        Ok(()) => Some(encoded),
        Err(()) => None,
    }
}
/// Convert Unicode to Punycode.
///
/// Return None on overflow, which can only happen on inputs that would take more than
/// 63 encoded bytes, the DNS limit on domain name labels.
pub fn encode(input: &[char]) -> Option<String> {
    // Inputs longer than u32::MAX would overflow the delta arithmetic.
    if input.len() > u32::MAX as usize {
        return None;
    }
    let mut encoded = String::with_capacity(input.len());
    match encode_into(input.iter().copied(), &mut encoded) {
        Ok(()) => Some(encoded),
        Err(()) => None,
    }
}
/// Encode an iterator of code points as Punycode, appending to `output`.
///
/// Returns `Err(())` on arithmetic overflow of the length/delta counters.
/// The iterator must be `Clone` because the algorithm makes repeated passes
/// over the input (one per distinct non-basic code point).
pub(crate) fn encode_into<I>(input: I, output: &mut String) -> Result<(), ()>
where
    I: Iterator<Item = char> + Clone,
{
    // Handle "basic" (ASCII) code points. They are encoded as-is.
    let (mut input_length, mut basic_length) = (0u32, 0);
    for c in input.clone() {
        input_length = input_length.checked_add(1).ok_or(())?;
        if c.is_ascii() {
            output.push(c);
            basic_length += 1;
        }
    }
    // The delimiter is only emitted when a basic prefix exists.
    if basic_length > 0 {
        output.push('-')
    }
    let mut code_point = INITIAL_N;
    let mut delta = 0;
    let mut bias = INITIAL_BIAS;
    let mut processed = basic_length;
    while processed < input_length {
        // All code points < code_point have been handled already.
        // Find the next larger one.
        let min_code_point = input
            .clone()
            .map(|c| c as u32)
            .filter(|&c| c >= code_point)
            .min()
            .unwrap();
        if min_code_point - code_point > (u32::MAX - delta) / (processed + 1) {
            return Err(()); // Overflow
        }
        // Increase delta to advance the decoder's <code_point,i> state to <min_code_point,0>
        delta += (min_code_point - code_point) * (processed + 1);
        code_point = min_code_point;
        for c in input.clone() {
            let c = c as u32;
            if c < code_point {
                delta = delta.checked_add(1).ok_or(())?;
            }
            if c == code_point {
                // Represent delta as a generalized variable-length integer:
                let mut q = delta;
                let mut k = BASE;
                loop {
                    // Digit threshold for this position (RFC 3492, 6.3).
                    let t = if k <= bias {
                        T_MIN
                    } else if k >= bias + T_MAX {
                        T_MAX
                    } else {
                        k - bias
                    };
                    if q < t {
                        break;
                    }
                    let value = t + ((q - t) % (BASE - t));
                    output.push(value_to_digit(value));
                    q = (q - t) / (BASE - t);
                    k += BASE;
                }
                output.push(value_to_digit(q));
                bias = adapt(delta, processed + 1, processed == basic_length);
                delta = 0;
                processed += 1;
            }
        }
        delta += 1;
        code_point += 1;
    }
    Ok(())
}
/// Map a digit value in `0..=35` to its Punycode character:
/// `0..=25` -> `'a'..='z'`, `26..=35` -> `'0'..='9'`.
/// Panics on any other value.
#[inline]
fn value_to_digit(value: u32) -> char {
    if value < 26 {
        // a..z
        char::from(b'a' + value as u8)
    } else if value < 36 {
        // 0..9
        char::from(b'0' + (value - 26) as u8)
    } else {
        panic!()
    }
}
// Regression test: an input of more than u32::MAX code points must be
// rejected (length-counter overflow) without leaving partial output.
#[test]
#[ignore = "slow"]
#[cfg(target_pointer_width = "64")]
fn huge_encode() {
    let mut buf = String::new();
    assert!(encode_into(std::iter::repeat('ß').take(u32::MAX as usize + 1), &mut buf).is_err());
    assert_eq!(buf.len(), 0);
}

View File

@ -1,741 +0,0 @@
// Copyright 2013-2014 The rust-url developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! [*Unicode IDNA Compatibility Processing*
//! (Unicode Technical Standard #46)](http://www.unicode.org/reports/tr46/)
use self::Mapping::*;
use crate::punycode;
use alloc::string::String;
use core::fmt;
use unicode_bidi::{bidi_class, BidiClass};
use unicode_normalization::char::is_combining_mark;
use unicode_normalization::{is_nfc, UnicodeNormalization};
include!("uts46_mapping_table.rs");
// ACE prefix marking a Punycode-encoded label.
const PUNYCODE_PREFIX: &str = "xn--";
/// A (start, length) reference into the generated `STRING_TABLE`.
#[derive(Debug)]
struct StringTableSlice {
    // Store these as separate fields so the structure will have an
    // alignment of 1 and thus pack better into the Mapping enum, below.
    byte_start_lo: u8,
    byte_start_hi: u8,
    byte_len: u8,
}
/// Resolve a `StringTableSlice` to the substring of `STRING_TABLE` it
/// denotes, reassembling the 16-bit start offset from its two bytes.
fn decode_slice(slice: &StringTableSlice) -> &'static str {
    let start = ((slice.byte_start_hi as usize) << 8) | (slice.byte_start_lo as usize);
    let end = start + slice.byte_len as usize;
    &STRING_TABLE[start..end]
}
// UTS #46 mapping-table statuses (http://www.unicode.org/reports/tr46/#IDNA_Mapping_Table).
// Variants carrying a StringTableSlice reference their replacement text in
// the generated STRING_TABLE.
#[repr(u8)]
#[derive(Debug)]
enum Mapping {
    Valid,
    Ignored,
    Mapped(StringTableSlice),
    Deviation(StringTableSlice),
    Disallowed,
    DisallowedStd3Valid,
    DisallowedStd3Mapped(StringTableSlice),
    DisallowedIdna2008,
}
/// Look up the UTS #46 mapping entry for one code point.
///
/// `TABLE` holds (range start, packed index) pairs sorted by range start.
/// The high bit of the packed index marks a block containing a single
/// range; the low 15 bits are the offset into `MAPPING_TABLE`.
fn find_char(codepoint: char) -> &'static Mapping {
    let idx = match TABLE.binary_search_by_key(&codepoint, |&val| val.0) {
        Ok(idx) => idx,
        // Not exactly a range start: the code point belongs to the
        // preceding range.
        Err(idx) => idx - 1,
    };
    const SINGLE_MARKER: u16 = 1 << 15;
    let (base, x) = TABLE[idx];
    let single = (x & SINGLE_MARKER) != 0;
    let offset = !SINGLE_MARKER & x;
    if single {
        // Block holds one range: all of its code points share one entry.
        &MAPPING_TABLE[offset as usize]
    } else {
        // Block is a run of single-code-point ranges: index by the
        // distance from the run's starting code point.
        &MAPPING_TABLE[(offset + (codepoint as u16 - base as u16)) as usize]
    }
}
/// Iterator adapter applying the UTS #46 "Map" step to a domain's
/// characters, recording any errors it encounters.
struct Mapper<'a> {
    chars: core::str::Chars<'a>,
    config: Config,
    errors: &'a mut Errors,
    // Replacement text currently being emitted for a mapped character.
    slice: Option<core::str::Chars<'static>>,
}
impl<'a> Iterator for Mapper<'a> {
    type Item = char;
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Drain any pending replacement text before consuming input.
            if let Some(s) = &mut self.slice {
                match s.next() {
                    Some(c) => return Some(c),
                    None => {
                        self.slice = None;
                    }
                }
            }
            let codepoint = self.chars.next()?;
            // Fast path: these ASCII characters always map to themselves.
            if let '.' | '-' | 'a'..='z' | '0'..='9' = codepoint {
                return Some(codepoint);
            }
            // `continue` restarts the loop (skipping or switching to
            // replacement text); everything else yields a character.
            return Some(match *find_char(codepoint) {
                Mapping::Valid => codepoint,
                Mapping::Ignored => continue,
                Mapping::Mapped(ref slice) => {
                    self.slice = Some(decode_slice(slice).chars());
                    continue;
                }
                Mapping::Deviation(ref slice) => {
                    if self.config.transitional_processing {
                        self.slice = Some(decode_slice(slice).chars());
                        continue;
                    } else {
                        codepoint
                    }
                }
                Mapping::Disallowed => {
                    self.errors.disallowed_character = true;
                    codepoint
                }
                Mapping::DisallowedStd3Valid => {
                    if self.config.use_std3_ascii_rules {
                        self.errors.disallowed_by_std3_ascii_rules = true;
                    };
                    codepoint
                }
                Mapping::DisallowedStd3Mapped(ref slice) => {
                    if self.config.use_std3_ascii_rules {
                        self.errors.disallowed_mapped_in_std3 = true;
                    };
                    // Mapped even when disallowed-by-STD3 is only a flag.
                    self.slice = Some(decode_slice(slice).chars());
                    continue;
                }
                Mapping::DisallowedIdna2008 => {
                    if self.config.use_idna_2008_rules {
                        self.errors.disallowed_in_idna_2008 = true;
                    }
                    codepoint
                }
            });
        }
    }
}
// http://tools.ietf.org/html/rfc5893#section-2
/// RFC 5893 "Bidi Rule": validates the bidi classes of one label.
/// Returns true when the label is acceptable, or trivially when the domain
/// is not a Bidi domain name at all.
fn passes_bidi(label: &str, is_bidi_domain: bool) -> bool {
    // Rule 0: Bidi Rules apply to Bidi Domain Names: a name with at least one RTL label. A label
    // is RTL if it contains at least one character of bidi class R, AL or AN.
    if !is_bidi_domain {
        return true;
    }
    let mut chars = label.chars();
    let first_char_class = match chars.next() {
        Some(c) => bidi_class(c),
        None => return true, // empty string
    };
    match first_char_class {
        // LTR label
        BidiClass::L => {
            // Rule 5: only these classes may appear in an LTR label.
            for c in chars.by_ref() {
                if !matches!(
                    bidi_class(c),
                    BidiClass::L
                        | BidiClass::EN
                        | BidiClass::ES
                        | BidiClass::CS
                        | BidiClass::ET
                        | BidiClass::ON
                        | BidiClass::BN
                        | BidiClass::NSM
                ) {
                    return false;
                }
            }
            // Rule 6
            // must end in L or EN followed by 0 or more NSM
            let mut rev_chars = label.chars().rev();
            let mut last_non_nsm = rev_chars.next();
            // Skip trailing NSM characters.
            loop {
                match last_non_nsm {
                    Some(c) if bidi_class(c) == BidiClass::NSM => {
                        last_non_nsm = rev_chars.next();
                        continue;
                    }
                    _ => {
                        break;
                    }
                }
            }
            match last_non_nsm {
                Some(c) if bidi_class(c) == BidiClass::L || bidi_class(c) == BidiClass::EN => {}
                Some(_) => {
                    return false;
                }
                _ => {}
            }
        }
        // RTL label
        BidiClass::R | BidiClass::AL => {
            let mut found_en = false;
            let mut found_an = false;
            // Rule 2: only these classes may appear in an RTL label.
            for c in chars {
                let char_class = bidi_class(c);
                if char_class == BidiClass::EN {
                    found_en = true;
                } else if char_class == BidiClass::AN {
                    found_an = true;
                }
                if !matches!(
                    char_class,
                    BidiClass::R
                        | BidiClass::AL
                        | BidiClass::AN
                        | BidiClass::EN
                        | BidiClass::ES
                        | BidiClass::CS
                        | BidiClass::ET
                        | BidiClass::ON
                        | BidiClass::BN
                        | BidiClass::NSM
                ) {
                    return false;
                }
            }
            // Rule 3
            let mut rev_chars = label.chars().rev();
            let mut last = rev_chars.next();
            loop {
                // must end in R, AL, EN or AN followed by 0 or more NSM
                match last {
                    Some(c) if bidi_class(c) == BidiClass::NSM => {
                        last = rev_chars.next();
                        continue;
                    }
                    _ => {
                        break;
                    }
                }
            }
            match last {
                Some(c)
                    if matches!(
                        bidi_class(c),
                        BidiClass::R | BidiClass::AL | BidiClass::EN | BidiClass::AN
                    ) => {}
                _ => {
                    return false;
                }
            }
            // Rule 4: EN and AN may not both appear in the same label.
            if found_an && found_en {
                return false;
            }
        }
        // Rule 1: Should start with L or R/AL
        _ => {
            return false;
        }
    }
    true
}
/// Check the validity criteria for the given label
///
/// V1 (NFC) and V8 (Bidi) are checked inside `processing()` to prevent doing duplicate work.
///
/// http://www.unicode.org/reports/tr46/#Validity_Criteria
fn check_validity(label: &str, config: Config, errors: &mut Errors) {
    let first_char = label.chars().next();
    if first_char.is_none() {
        // Empty string, pass
        return;
    }
    // V2: No U+002D HYPHEN-MINUS in both third and fourth positions.
    //
    // NOTE: Spec says that the label must not contain a HYPHEN-MINUS character in both the
    // third and fourth positions. But nobody follows this criteria. See the spec issue below:
    // https://github.com/whatwg/url/issues/53
    // V3: neither begin nor end with a U+002D HYPHEN-MINUS
    // NOTE: V3 and V5 failures return early, so later criteria are not
    // evaluated for this label.
    if config.check_hyphens && (label.starts_with('-') || label.ends_with('-')) {
        errors.check_hyphens = true;
        return;
    }
    // V4: not contain a U+002E FULL STOP
    //
    // Here, label can't contain '.' since the input is from .split('.')
    // V5: not begin with a GC=Mark
    if is_combining_mark(first_char.unwrap()) {
        errors.start_combining_mark = true;
        return;
    }
    // V6: Check against Mapping Table
    if label.chars().any(|c| match *find_char(c) {
        Mapping::Valid | Mapping::DisallowedIdna2008 => false,
        Mapping::Deviation(_) => config.transitional_processing,
        Mapping::DisallowedStd3Valid => config.use_std3_ascii_rules,
        _ => true,
    }) {
        errors.invalid_mapping = true;
    }
    // V7: ContextJ rules
    //
    // TODO: Implement rules and add *CheckJoiners* flag.
    // V8: Bidi rules are checked inside `processing()`
}
// Fast-path detector: true only for non-empty domains made purely of ASCII
// lowercase letters, digits and dots, where no label starts with a hyphen,
// ends with a hyphen before a dot, or starts with the "xn--" prefix.
// (Any other '-' fails the lowercase/digit check, so hyphenated labels
// always take the full processing path.)
fn is_simple(domain: &str) -> bool {
    if domain.is_empty() {
        return false;
    }
    const PREFIX: [char; 4] = ['x', 'n', '-', '-'];
    let mut prev = '?';
    // How many chars of "xn--" the current label has matched; 5 = no longer
    // possible to match.
    let mut matched = 0usize;
    for c in domain.chars() {
        if c == '.' {
            // A label may not end with '-'.
            if prev == '-' {
                return false;
            }
            matched = 0;
            continue;
        }
        if matched == 0 && c == '-' {
            // A label may not begin with '-'.
            return false;
        }
        if matched < 5 {
            if c == PREFIX[matched] {
                matched += 1;
                if matched == 4 {
                    // Label starts with the Punycode prefix.
                    return false;
                }
            } else {
                matched = 5;
            }
        }
        if !(c.is_ascii_lowercase() || c.is_ascii_digit()) {
            return false;
        }
        prev = c;
    }
    true
}
/// http://www.unicode.org/reports/tr46/#Processing
///
/// Maps and NFC-normalizes `domain` into `normalized`, then validates each
/// label (decoding any Punycode labels) while appending the Unicode form of
/// the domain to `output`. Returns every error encountered.
fn processing(
    domain: &str,
    config: Config,
    normalized: &mut String,
    output: &mut String,
) -> Errors {
    normalized.clear();
    let mut errors = Errors::default();
    let offset = output.len();
    // Steps 1-2: map each character, then NFC-normalize the result.
    let iter = Mapper {
        chars: domain.chars(),
        config,
        errors: &mut errors,
        slice: None,
    };
    normalized.extend(iter.nfc());
    let mut decoder = punycode::Decoder::default();
    // Decoded Punycode labels are always validated non-transitionally.
    let non_transitional = config.transitional_processing(false);
    let (mut first, mut has_bidi_labels) = (true, false);
    for label in normalized.split('.') {
        if !first {
            output.push('.');
        }
        first = false;
        if let Some(remainder) = label.strip_prefix(PUNYCODE_PREFIX) {
            match decoder.decode(remainder) {
                Ok(decode) => {
                    let start = output.len();
                    output.extend(decode);
                    let decoded_label = &output[start..];
                    if !has_bidi_labels {
                        has_bidi_labels |= is_bidi_domain(decoded_label);
                    }
                    if !errors.is_err() {
                        if !is_nfc(decoded_label) {
                            errors.nfc = true;
                        } else {
                            check_validity(decoded_label, non_transitional, &mut errors);
                        }
                    }
                }
                Err(()) => {
                    // Treat undecodable labels pessimistically for bidi.
                    has_bidi_labels = true;
                    errors.punycode = true;
                }
            }
        } else {
            if !has_bidi_labels {
                has_bidi_labels |= is_bidi_domain(label);
            }
            // `normalized` is already `NFC` so we can skip that check
            check_validity(label, config, &mut errors);
            output.push_str(label)
        }
    }
    // Second pass over the labels just written to `output`.
    for label in output[offset..].split('.') {
        // V8: Bidi rules
        //
        // TODO: Add *CheckBidi* flag
        if !passes_bidi(label, has_bidi_labels) {
            errors.check_bidi = true;
            break;
        }
    }
    errors
}
/// Reusable UTS #46 processor: a configuration plus scratch buffers that
/// are kept between calls to avoid reallocation.
#[derive(Default)]
pub struct Idna {
    config: Config,
    normalized: String,
    output: String,
}
impl Idna {
    /// Creates a processor with the given configuration and empty buffers.
    pub fn new(config: Config) -> Self {
        Self {
            config,
            normalized: String::new(),
            output: String::new(),
        }
    }
    /// Shared ToASCII core: process `domain`, then Punycode-encode every
    /// non-ASCII label into `out`. DNS length checks are left to `to_ascii`.
    pub fn to_ascii_inner(&mut self, domain: &str, out: &mut String) -> Errors {
        if is_simple(domain) {
            // Fast path: plain lowercase ASCII needs no processing.
            out.push_str(domain);
            return Errors::default();
        }
        let mut errors = processing(domain, self.config, &mut self.normalized, out);
        // Keep the processed Unicode form in self.output and hand the
        // caller's buffer (fresh, same capacity) back for the ASCII result.
        self.output = core::mem::replace(out, String::with_capacity(out.len()));
        let mut first = true;
        for label in self.output.split('.') {
            if !first {
                out.push('.');
            }
            first = false;
            if label.is_ascii() {
                out.push_str(label);
            } else {
                let offset = out.len();
                out.push_str(PUNYCODE_PREFIX);
                if let Err(()) = punycode::encode_into(label.chars(), out) {
                    errors.punycode = true;
                    // Drop the partially written label.
                    out.truncate(offset);
                }
            }
        }
        errors
    }
    /// http://www.unicode.org/reports/tr46/#ToASCII
    #[allow(clippy::wrong_self_convention)]
    pub fn to_ascii(&mut self, domain: &str, out: &mut String) -> Result<(), Errors> {
        let mut errors = self.to_ascii_inner(domain, out);
        if self.config.verify_dns_length {
            // A single trailing dot (root label) is ignored for the checks.
            let domain = if out.ends_with('.') {
                &out[..out.len() - 1]
            } else {
                &*out
            };
            if domain.is_empty() || domain.split('.').any(|label| label.is_empty()) {
                errors.too_short_for_dns = true;
            }
            // DNS limits: 253 bytes per name, 63 bytes per label.
            if domain.len() > 253 || domain.split('.').any(|label| label.len() > 63) {
                errors.too_long_for_dns = true;
            }
        }
        errors.into()
    }
    /// http://www.unicode.org/reports/tr46/#ToUnicode
    #[allow(clippy::wrong_self_convention)]
    pub fn to_unicode(&mut self, domain: &str, out: &mut String) -> Result<(), Errors> {
        if is_simple(domain) {
            out.push_str(domain);
            return Errors::default().into();
        }
        processing(domain, self.config, &mut self.normalized, out).into()
    }
}
/// UTS #46 processing options; build with `Config::default()` plus the
/// builder methods below.
#[derive(Clone, Copy)]
#[must_use]
pub struct Config {
    use_std3_ascii_rules: bool,
    transitional_processing: bool,
    verify_dns_length: bool,
    check_hyphens: bool,
    use_idna_2008_rules: bool,
}
/// The defaults are that of https://url.spec.whatwg.org/#idna
impl Default for Config {
fn default() -> Self {
Config {
use_std3_ascii_rules: false,
transitional_processing: false,
check_hyphens: false,
// check_bidi: true,
// check_joiners: true,
// Only use for to_ascii, not to_unicode
verify_dns_length: false,
use_idna_2008_rules: false,
}
}
}
impl Config {
    /// UTS #46 *UseSTD3ASCIIRules* flag.
    #[inline]
    pub fn use_std3_ascii_rules(self, value: bool) -> Self {
        Self {
            use_std3_ascii_rules: value,
            ..self
        }
    }
    /// UTS #46 *Transitional_Processing* flag (deviation characters).
    #[inline]
    pub fn transitional_processing(self, value: bool) -> Self {
        Self {
            transitional_processing: value,
            ..self
        }
    }
    /// UTS #46 *VerifyDnsLength* flag; only meaningful for ToASCII.
    #[inline]
    pub fn verify_dns_length(self, value: bool) -> Self {
        Self {
            verify_dns_length: value,
            ..self
        }
    }
    /// UTS #46 *CheckHyphens* flag (validity criterion V3).
    #[inline]
    pub fn check_hyphens(self, value: bool) -> Self {
        Self {
            check_hyphens: value,
            ..self
        }
    }
    /// Report code points that IDNA 2008 disallows (NV8/XV8).
    #[inline]
    pub fn use_idna_2008_rules(self, value: bool) -> Self {
        Self {
            use_idna_2008_rules: value,
            ..self
        }
    }
    /// http://www.unicode.org/reports/tr46/#ToASCII
    pub fn to_ascii(self, domain: &str) -> Result<String, Errors> {
        let mut buffer = String::with_capacity(domain.len());
        Idna::new(self).to_ascii(domain, &mut buffer)?;
        Ok(buffer)
    }
    /// http://www.unicode.org/reports/tr46/#ToUnicode
    pub fn to_unicode(self, domain: &str) -> (String, Result<(), Errors>) {
        let mut buffer = String::with_capacity(domain.len());
        let status = Idna::new(self).to_unicode(domain, &mut buffer);
        (buffer, status)
    }
}
/// Returns true when the string contains a character (outside the
/// ASCII-graphic range, which is skipped) whose bidi class is R, AL, or AN,
/// meaning the IDNA bidi rule applies to the whole domain.
fn is_bidi_domain(s: &str) -> bool {
    s.chars().any(|c| {
        !c.is_ascii_graphic()
            && matches!(bidi_class(c), BidiClass::R | BidiClass::AL | BidiClass::AN)
    })
}
/// Errors recorded during UTS #46 processing.
///
/// This is opaque for now, indicating what types of errors have been encountered at least once.
/// More details may be exposed in the future.
#[derive(Default)]
pub struct Errors {
    // Punycode processing of a label failed.
    punycode: bool,
    // CheckHyphens violation.
    check_hyphens: bool,
    // Bidi rule violation.
    check_bidi: bool,
    // A label starts with a combining mark.
    start_combining_mark: bool,
    // A character's UTS #46 mapping is invalid for this configuration.
    invalid_mapping: bool,
    // Input was not in Unicode Normalization Form C.
    nfc: bool,
    // ASCII character rejected by UseSTD3ASCIIRules.
    disallowed_by_std3_ascii_rules: bool,
    // Character whose mapping is disallowed under STD3 rules.
    disallowed_mapped_in_std3: bool,
    // Character disallowed by UTS #46 outright.
    disallowed_character: bool,
    // Name exceeds 253 bytes or a label exceeds 63 (set in `to_ascii`
    // when `verify_dns_length` is on).
    too_long_for_dns: bool,
    // Empty name or empty label (set in `to_ascii` when
    // `verify_dns_length` is on).
    too_short_for_dns: bool,
    // Character rejected by the IDNA 2008 rules (`use_idna_2008_rules`).
    disallowed_in_idna_2008: bool,
}
impl Errors {
    /// Returns true if at least one error flag is set.
    fn is_err(&self) -> bool {
        // Exhaustive destructuring on purpose: adding a field to `Errors`
        // without listing it here becomes a compile error.
        let Errors {
            punycode,
            check_hyphens,
            check_bidi,
            start_combining_mark,
            invalid_mapping,
            nfc,
            disallowed_by_std3_ascii_rules,
            disallowed_mapped_in_std3,
            disallowed_character,
            too_long_for_dns,
            too_short_for_dns,
            disallowed_in_idna_2008,
        } = *self;
        [
            punycode,
            check_hyphens,
            check_bidi,
            start_combining_mark,
            invalid_mapping,
            nfc,
            disallowed_by_std3_ascii_rules,
            disallowed_mapped_in_std3,
            disallowed_character,
            too_long_for_dns,
            too_short_for_dns,
            disallowed_in_idna_2008,
        ]
        .iter()
        .any(|&flag| flag)
    }
}
impl fmt::Debug for Errors {
    // Prints "Errors { flag_a, flag_b }" listing only the flags that are
    // set, or "Errors { }" when none are.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Exhaustive destructuring so a new field cannot be forgotten here.
        let Errors {
            punycode,
            check_hyphens,
            check_bidi,
            start_combining_mark,
            invalid_mapping,
            nfc,
            disallowed_by_std3_ascii_rules,
            disallowed_mapped_in_std3,
            disallowed_character,
            too_long_for_dns,
            too_short_for_dns,
            disallowed_in_idna_2008,
        } = *self;
        let fields = [
            ("punycode", punycode),
            ("check_hyphens", check_hyphens),
            ("check_bidi", check_bidi),
            ("start_combining_mark", start_combining_mark),
            ("invalid_mapping", invalid_mapping),
            ("nfc", nfc),
            (
                "disallowed_by_std3_ascii_rules",
                disallowed_by_std3_ascii_rules,
            ),
            ("disallowed_mapped_in_std3", disallowed_mapped_in_std3),
            ("disallowed_character", disallowed_character),
            ("too_long_for_dns", too_long_for_dns),
            ("too_short_for_dns", too_short_for_dns),
            ("disallowed_in_idna_2008", disallowed_in_idna_2008),
        ];
        f.write_str("Errors { ")?;
        let mut wrote_any = false;
        for (name, set) in &fields {
            if !*set {
                continue;
            }
            if wrote_any {
                f.write_str(", ")?;
            }
            f.write_str(name)?;
            wrote_any = true;
        }
        // No space before the brace when the list is empty, matching the
        // historical output format exactly.
        f.write_str(if wrote_any { " }" } else { "}" })
    }
}
impl From<Errors> for Result<(), Errors> {
    // An `Errors` value with no flags set converts to `Ok(())`.
    fn from(e: Errors) -> Result<(), Errors> {
        if e.is_err() {
            Err(e)
        } else {
            Ok(())
        }
    }
}
// With the `std` feature, `Errors` also implements the standard error trait.
#[cfg(feature = "std")]
impl std::error::Error for Errors {}
impl fmt::Display for Errors {
    // Display simply reuses the Debug flag listing.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}
#[cfg(test)]
mod tests {
    use super::{find_char, Mapping};
    #[test]
    fn mapping_fast_path() {
        // '-', '.', ASCII digits, and lowercase ASCII letters must all be
        // classified as `Mapping::Valid`.
        for &c in &['-', '.'] {
            assert_matches!(find_char(c), &Mapping::Valid);
        }
        for c in '0'..='9' {
            assert_matches!(find_char(c), &Mapping::Valid);
        }
        for c in 'a'..='z' {
            assert_matches!(find_char(c), &Mapping::Valid);
        }
    }
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

View File

@ -1,114 +0,0 @@
// Copyright 2013 The rust-url developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use crate::test::TestFn;
use idna::punycode::{decode, encode_str};
use serde_json::map::Map;
use serde_json::Value;
use std::panic::catch_unwind;
use std::str::FromStr;
// Asserts a punycode round trip: `encoded` must decode to `decoded`, and
// `decoded` must encode back to `encoded`.
fn one_test(decoded: &str, encoded: &str) {
    if let Some(chars) = decode(encoded) {
        let result: String = chars.into_iter().collect();
        assert!(
            result == decoded,
            "Incorrect decoding of \"{}\":\n \"{}\"\n!= \"{}\"\n",
            encoded,
            result,
            decoded
        );
    } else {
        panic!("Decoding {} failed.", encoded);
    }
    if let Some(result) = encode_str(decoded) {
        assert!(
            result == encoded,
            "Incorrect encoding of \"{}\":\n \"{}\"\n!= \"{}\"\n",
            decoded,
            result,
            encoded
        );
    } else {
        panic!("Encoding {} failed.", decoded);
    }
}
// Encoding a bad input must return `None` cleanly rather than panicking.
fn one_bad_test(encode: &str) {
    let result = catch_unwind(|| encode_str(encode));
    let clean_failure = matches!(&result, Ok(None));
    assert!(
        clean_failure,
        "Should neither panic nor return Some result, but got {:?}",
        result
    )
}
/// Returns the string value for `key` in `map`, or `""` when the key is
/// absent.
///
/// Panics if the key is present but its value is not a JSON string.
fn get_string<'a>(map: &'a Map<String, Value>, key: &str) -> &'a str {
    // `Map::get` looks keys up via `Borrow<str>`, so passing `key` directly
    // avoids allocating a fresh `String` on every call.
    match map.get(key) {
        Some(Value::String(s)) => s,
        None => "",
        _ => panic!("non-string value for key {:?}", key),
    }
}
/// Registers one dynamic test per entry of the two JSON fixtures:
/// round-trip cases from punycode_tests.json and failure cases from
/// bad_punycode_tests.json.
pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) {
    // Good cases: each object carries "decoded"/"encoded" and an optional
    // "description" that is folded into the test name.
    match Value::from_str(include_str!("punycode_tests.json")) {
        Ok(Value::Array(tests)) => {
            for (i, test) in tests.into_iter().enumerate() {
                match test {
                    Value::Object(o) => {
                        let test_name = {
                            let desc = get_string(&o, "description");
                            if desc.is_empty() {
                                format!("Punycode {}", i + 1)
                            } else {
                                format!("Punycode {}: {}", i + 1, desc)
                            }
                        };
                        add_test(
                            test_name,
                            TestFn::DynTestFn(Box::new(move || {
                                one_test(get_string(&o, "decoded"), get_string(&o, "encoded"))
                            })),
                        )
                    }
                    // Any non-object entry means the fixture is malformed.
                    _ => panic!(),
                }
            }
        }
        // The fixture must parse as a JSON array.
        other => panic!("{:?}", other),
    }
    // Bad cases: encoding the "decoded" input must fail without panicking
    // (see `one_bad_test`).
    match Value::from_str(include_str!("bad_punycode_tests.json")) {
        Ok(Value::Array(tests)) => {
            for (i, test) in tests.into_iter().enumerate() {
                match test {
                    Value::Object(o) => {
                        let test_name = {
                            let desc = get_string(&o, "description");
                            if desc.is_empty() {
                                format!("Bad Punycode {}", i + 1)
                            } else {
                                format!("Bad Punycode {}: {}", i + 1, desc)
                            }
                        };
                        add_test(
                            test_name,
                            TestFn::DynTestFn(Box::new(move || {
                                one_bad_test(get_string(&o, "decoded"))
                            })),
                        )
                    }
                    _ => panic!(),
                }
            }
        }
        other => panic!("{:?}", other),
    }
}

View File

@ -1,120 +0,0 @@
[
{
"description": "These tests are copied from https://github.com/bestiejs/punycode.js/blob/master/tests/tests.js , used under the MIT license.",
"decoded": "",
"encoded": ""
},
{
"description": "a single basic code point",
"decoded": "Bach",
"encoded": "Bach-"
},
{
"description": "a single non-ASCII character",
"decoded": "\u00FC",
"encoded": "tda"
},
{
"description": "multiple non-ASCII characters",
"decoded": "\u00FC\u00EB\u00E4\u00F6\u2665",
"encoded": "4can8av2009b"
},
{
"description": "mix of ASCII and non-ASCII characters",
"decoded": "b\u00FCcher",
"encoded": "bcher-kva"
},
{
"description": "long string with both ASCII and non-ASCII characters",
"decoded": "Willst du die Bl\u00FCthe des fr\u00FChen, die Fr\u00FCchte des sp\u00E4teren Jahres",
"encoded": "Willst du die Blthe des frhen, die Frchte des spteren Jahres-x9e96lkal"
},
{
"description": "Arabic (Egyptian)",
"decoded": "\u0644\u064A\u0647\u0645\u0627\u0628\u062A\u0643\u0644\u0645\u0648\u0634\u0639\u0631\u0628\u064A\u061F",
"encoded": "egbpdaj6bu4bxfgehfvwxn"
},
{
"description": "Chinese (simplified)",
"decoded": "\u4ED6\u4EEC\u4E3A\u4EC0\u4E48\u4E0D\u8BF4\u4E2d\u6587",
"encoded": "ihqwcrb4cv8a8dqg056pqjye"
},
{
"description": "Chinese (traditional)",
"decoded": "\u4ED6\u5011\u7232\u4EC0\u9EBD\u4E0D\u8AAA\u4E2D\u6587",
"encoded": "ihqwctvzc91f659drss3x8bo0yb"
},
{
"description": "Czech",
"decoded": "Pro\u010Dprost\u011Bnemluv\u00ED\u010Desky",
"encoded": "Proprostnemluvesky-uyb24dma41a"
},
{
"description": "Hebrew",
"decoded": "\u05DC\u05DE\u05D4\u05D4\u05DD\u05E4\u05E9\u05D5\u05D8\u05DC\u05D0\u05DE\u05D3\u05D1\u05E8\u05D9\u05DD\u05E2\u05D1\u05E8\u05D9\u05EA",
"encoded": "4dbcagdahymbxekheh6e0a7fei0b"
},
{
"description": "Hindi (Devanagari)",
"decoded": "\u092F\u0939\u0932\u094B\u0917\u0939\u093F\u0928\u094D\u0926\u0940\u0915\u094D\u092F\u094B\u0902\u0928\u0939\u0940\u0902\u092C\u094B\u0932\u0938\u0915\u0924\u0947\u0939\u0948\u0902",
"encoded": "i1baa7eci9glrd9b2ae1bj0hfcgg6iyaf8o0a1dig0cd"
},
{
"description": "Japanese (kanji and hiragana)",
"decoded": "\u306A\u305C\u307F\u3093\u306A\u65E5\u672C\u8A9E\u3092\u8A71\u3057\u3066\u304F\u308C\u306A\u3044\u306E\u304B",
"encoded": "n8jok5ay5dzabd5bym9f0cm5685rrjetr6pdxa"
},
{
"description": "Korean (Hangul syllables)",
"decoded": "\uC138\uACC4\uC758\uBAA8\uB4E0\uC0AC\uB78C\uB4E4\uC774\uD55C\uAD6D\uC5B4\uB97C\uC774\uD574\uD55C\uB2E4\uBA74\uC5BC\uB9C8\uB098\uC88B\uC744\uAE4C",
"encoded": "989aomsvi5e83db1d2a355cv1e0vak1dwrv93d5xbh15a0dt30a5jpsd879ccm6fea98c"
},
{
"description": "Russian (Cyrillic)",
"decoded": "\u043F\u043E\u0447\u0435\u043C\u0443\u0436\u0435\u043E\u043D\u0438\u043D\u0435\u0433\u043E\u0432\u043E\u0440\u044F\u0442\u043F\u043E\u0440\u0443\u0441\u0441\u043A\u0438",
"encoded": "b1abfaaepdrnnbgefbadotcwatmq2g4l"
},
{
"description": "Spanish",
"decoded": "Porqu\u00E9nopuedensimplementehablarenEspa\u00F1ol",
"encoded": "PorqunopuedensimplementehablarenEspaol-fmd56a"
},
{
"description": "Vietnamese",
"decoded": "T\u1EA1isaoh\u1ECDkh\u00F4ngth\u1EC3ch\u1EC9n\u00F3iti\u1EBFngVi\u1EC7t",
"encoded": "TisaohkhngthchnitingVit-kjcr8268qyxafd2f1b9g"
},
{
"decoded": "3\u5E74B\u7D44\u91D1\u516B\u5148\u751F",
"encoded": "3B-ww4c5e180e575a65lsy2b"
},
{
"decoded": "\u5B89\u5BA4\u5948\u7F8E\u6075-with-SUPER-MONKEYS",
"encoded": "-with-SUPER-MONKEYS-pc58ag80a8qai00g7n9n"
},
{
"decoded": "Hello-Another-Way-\u305D\u308C\u305E\u308C\u306E\u5834\u6240",
"encoded": "Hello-Another-Way--fc4qua05auwb3674vfr0b"
},
{
"decoded": "\u3072\u3068\u3064\u5C4B\u6839\u306E\u4E0B2",
"encoded": "2-u9tlzr9756bt3uc0v"
},
{
"decoded": "Maji\u3067Koi\u3059\u308B5\u79D2\u524D",
"encoded": "MajiKoi5-783gue6qz075azm5e"
},
{
"decoded": "\u30D1\u30D5\u30A3\u30FCde\u30EB\u30F3\u30D0",
"encoded": "de-jg4avhby1noc0d"
},
{
"decoded": "\u305D\u306E\u30B9\u30D4\u30FC\u30C9\u3067",
"encoded": "d9juau41awczczp"
},
{
"description": "ASCII string that breaks the existing rules for host-name labels (It's not a realistic example for IDNA, because IDNA never encodes pure ASCII labels.)",
"decoded": "-> $1.00 <-",
"encoded": "-> $1.00 <--"
}
]

View File

@ -1,25 +0,0 @@
use tester as test;
mod punycode;
mod uts46;
fn main() {
    // Collect dynamically generated tests from the punycode and uts46
    // suites, then hand the full list to the `tester` harness.
    let mut tests = Vec::new();
    {
        // Adapter closure: wraps a (name, test-fn) pair in the descriptor
        // struct the harness expects.
        let mut add_test = |name, run| {
            tests.push(test::TestDescAndFn {
                desc: test::TestDesc {
                    name: test::DynTestName(name),
                    ignore: false,
                    should_panic: test::ShouldPanic::No,
                    allow_fail: false,
                    test_type: test::TestType::Unknown,
                },
                testfn: run,
            })
        };
        punycode::collect_tests(&mut add_test);
        uts46::collect_tests(&mut add_test);
    }
    // The harness handles CLI filtering/arguments itself.
    test::test_main(&std::env::args().collect::<Vec<_>>(), tests, None)
}

View File

@ -1,139 +0,0 @@
use assert_matches::assert_matches;
use unicode_normalization::char::is_combining_mark;
/// https://github.com/servo/rust-url/issues/373
#[test]
fn test_punycode_prefix_with_length_check() {
    let config = idna::Config::default()
        .verify_dns_length(true)
        .check_hyphens(true)
        .use_std3_ascii_rules(true);
    // With DNS length verification enabled, every one of these bare or
    // empty-label "xn--" forms must be rejected.
    for &input in &[
        "xn--",
        "xn---",
        "xn-----",
        "xn--.",
        "xn--...",
        ".xn--",
        "...xn--",
        "xn--.xn--",
        "xn--.example.org",
    ] {
        assert!(config.to_ascii(input).is_err());
    }
}
/// https://github.com/servo/rust-url/issues/373
#[test]
fn test_punycode_prefix_without_length_check() {
    let config = idna::Config::default()
        .verify_dns_length(false)
        .check_hyphens(true)
        .use_std3_ascii_rules(true);
    // Without DNS length verification, empty labels pass through...
    for &(input, expected) in &[
        ("xn--", ""),
        ("xn--.", "."),
        ("xn--...", "..."),
        (".xn--", "."),
        ("...xn--", "..."),
        ("xn--.xn--", "."),
        ("xn--.example.org", ".example.org"),
    ] {
        assert_eq!(config.to_ascii(input).unwrap(), expected);
    }
    // ...but hyphen-placement violations still fail.
    assert!(config.to_ascii("xn---").is_err());
    assert!(config.to_ascii("xn-----").is_err());
}
// http://www.unicode.org/reports/tr46/#Table_Example_Processing
#[test]
fn test_examples() {
    let mut codec = idna::Idna::default();
    let mut out = String::new();
    // Case folding and NFC normalization of Unicode input.
    assert_matches!(codec.to_unicode("Bloß.de", &mut out), Ok(()));
    assert_eq!(out, "bloß.de");
    out.clear();
    // Punycode labels decode to the same result.
    assert_matches!(codec.to_unicode("xn--blo-7ka.de", &mut out), Ok(()));
    assert_eq!(out, "bloß.de");
    out.clear();
    // Combining sequence u + U+0308 composes to "ü".
    assert_matches!(codec.to_unicode("u\u{308}.com", &mut out), Ok(()));
    assert_eq!(out, "ü.com");
    out.clear();
    assert_matches!(codec.to_unicode("xn--tda.com", &mut out), Ok(()));
    assert_eq!(out, "ü.com");
    out.clear();
    // Invalid punycode and disallowed characters must be rejected.
    assert_matches!(codec.to_unicode("xn--u-ccb.com", &mut out), Err(_));
    out.clear();
    assert_matches!(codec.to_unicode("a⒈com", &mut out), Err(_));
    out.clear();
    assert_matches!(codec.to_unicode("xn--a-ecp.ru", &mut out), Err(_));
    out.clear();
    assert_matches!(codec.to_unicode("xn--0.pt", &mut out), Err(_));
    out.clear();
    // Ideographic full stop maps to ".", and the label is lowercased.
    assert_matches!(codec.to_unicode("日本語。JP", &mut out), Ok(()));
    assert_eq!(out, "日本語.jp");
    out.clear();
    // Emoji are permitted by UTS #46 defaults.
    assert_matches!(codec.to_unicode("☕.us", &mut out), Ok(()));
    assert_eq!(out, "☕.us");
}
#[test]
fn test_v5() {
    let config = idna::Config::default()
        .verify_dns_length(true)
        .use_std3_ascii_rules(true);
    // IdnaTest:784 蔏。𑰺
    // U+11C3A is a combining mark; a label starting with one violates
    // UTS #46 validity criterion V5, so all three inputs must fail.
    assert!(is_combining_mark('\u{11C3A}'));
    assert!(config.to_ascii("\u{11C3A}").is_err());
    assert!(config.to_ascii("\u{850f}.\u{11C3A}").is_err());
    assert!(config.to_ascii("\u{850f}\u{ff61}\u{11C3A}").is_err());
}
#[test]
fn test_v8_bidi_rules() {
    let config = idna::Config::default()
        .verify_dns_length(true)
        .use_std3_ascii_rules(true);
    // Plain LTR names are unaffected by the bidi rule.
    assert_eq!(config.to_ascii("abc").unwrap(), "abc");
    assert_eq!(config.to_ascii("123").unwrap(), "123");
    // RTL (Hebrew/Arabic) labels encode normally when the bidi rule holds.
    assert_eq!(config.to_ascii("אבּג").unwrap(), "xn--kdb3bdf");
    assert_eq!(config.to_ascii("ابج").unwrap(), "xn--mgbcm");
    assert_eq!(config.to_ascii("abc.ابج").unwrap(), "abc.xn--mgbcm");
    assert_eq!(config.to_ascii("אבּג.ابج").unwrap(), "xn--kdb3bdf.xn--mgbcm");
    // Bidi domain names cannot start with digits
    assert!(config.to_ascii("0a.\u{05D0}").is_err());
    assert!(config.to_ascii("0à.\u{05D0}").is_err());
    // Bidi chars may be punycode-encoded
    assert!(config.to_ascii("xn--0ca24w").is_err());
}
#[test]
fn emoji_domains() {
    // HOT BEVERAGE is allowed here...
    let config = idna::Config::default()
        .verify_dns_length(true)
        .use_std3_ascii_rules(true);
    assert_eq!(config.to_ascii("☕.com").unwrap(), "xn--53h.com");
    // ... but not here: IDNA 2008 rules reject emoji, and the failure is
    // reported through the `disallowed_in_idna_2008` error flag.
    let config = idna::Config::default()
        .verify_dns_length(true)
        .use_std3_ascii_rules(true)
        .use_idna_2008_rules(true);
    let error = format!("{:?}", config.to_ascii("☕.com").unwrap_err());
    assert!(error.contains("disallowed_in_idna_2008"));
}
#[test]
fn unicode_before_delimiter() {
    let config = idna::Config::default();
    // A non-ASCII character before the punycode delimiter makes the
    // "xn--" label invalid.
    assert!(config.to_ascii("xn--f\u{34a}-PTP").is_err());
}

View File

@ -1,194 +0,0 @@
// Copyright 2013-2014 The rust-url developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use crate::test::TestFn;
use std::char;
use std::fmt::Write;
use idna::Errors;
/// Registers one dynamic test per data line of IdnaTestV2.txt, checking
/// ToUnicode, non-transitional ToASCII, and transitional ToASCII columns.
pub fn collect_tests<F: FnMut(String, TestFn)>(add_test: &mut F) {
    // https://www.unicode.org/Public/idna/13.0.0/IdnaTestV2.txt
    for (i, line) in include_str!("IdnaTestV2.txt").lines().enumerate() {
        if line.is_empty() || line.starts_with('#') {
            continue;
        }
        // Remove comments
        let line = match line.find('#') {
            Some(index) => &line[0..index],
            None => line,
        };
        // Columns are semicolon-separated; per the file format, an empty
        // column defaults to the value of an earlier column.
        let mut pieces = line.split(';').map(|x| x.trim()).collect::<Vec<&str>>();
        let source = unescape(pieces.remove(0));
        // ToUnicode
        let mut to_unicode = unescape(pieces.remove(0));
        if to_unicode.is_empty() {
            to_unicode = source.clone();
        }
        let to_unicode_status = status(pieces.remove(0));
        // ToAsciiN
        let to_ascii_n = pieces.remove(0);
        let to_ascii_n = if to_ascii_n.is_empty() {
            to_unicode.clone()
        } else {
            to_ascii_n.to_owned()
        };
        let to_ascii_n_status = pieces.remove(0);
        let to_ascii_n_status = if to_ascii_n_status.is_empty() {
            to_unicode_status.clone()
        } else {
            status(to_ascii_n_status)
        };
        // ToAsciiT
        let to_ascii_t = pieces.remove(0);
        let to_ascii_t = if to_ascii_t.is_empty() {
            to_ascii_n.clone()
        } else {
            to_ascii_t.to_owned()
        };
        let to_ascii_t_status = pieces.remove(0);
        let to_ascii_t_status = if to_ascii_t_status.is_empty() {
            to_ascii_n_status.clone()
        } else {
            status(to_ascii_t_status)
        };
        let test_name = format!("UTS #46 line {}", i + 1);
        add_test(
            test_name,
            TestFn::DynTestFn(Box::new(move || {
                let config = idna::Config::default()
                    .use_std3_ascii_rules(true)
                    .verify_dns_length(true)
                    .check_hyphens(true);
                // http://unicode.org/reports/tr46/#Deviations
                // applications that perform IDNA2008 lookup are not required to check
                // for these contexts, so we skip all tests annotated with C*
                // Everybody ignores V2
                // https://github.com/servo/rust-url/pull/240
                // https://github.com/whatwg/url/issues/53#issuecomment-181528158
                // http://www.unicode.org/review/pri317/
                // "The special error codes X3 and X4_2 are now returned where a toASCII error code
                // was formerly being generated in toUnicode due to an empty label."
                // This is not implemented yet, so we skip toUnicode X4_2 tests for now, too.
                let (to_unicode_value, to_unicode_result) =
                    config.transitional_processing(false).to_unicode(&source);
                let to_unicode_result = to_unicode_result.map(|()| to_unicode_value);
                check(
                    &source,
                    (&to_unicode, &to_unicode_status),
                    to_unicode_result,
                    |e| e.starts_with('C') || e == "V2" || e == "X4_2",
                );
                let to_ascii_n_result = config.transitional_processing(false).to_ascii(&source);
                check(
                    &source,
                    (&to_ascii_n, &to_ascii_n_status),
                    to_ascii_n_result,
                    |e| e.starts_with('C') || e == "V2",
                );
                let to_ascii_t_result = config.transitional_processing(true).to_ascii(&source);
                check(
                    &source,
                    (&to_ascii_t, &to_ascii_t_status),
                    to_ascii_t_result,
                    |e| e.starts_with('C') || e == "V2",
                );
            })),
        )
    }
}
#[allow(clippy::redundant_clone)]
/// Compares one conversion result against the expected (value, statuses)
/// pair from the test file.
///
/// When `expected.1` is non-empty and contains at least one status code not
/// excluded by `ignore`, the conversion must have failed; otherwise it must
/// have succeeded and produced exactly `expected.0`.
fn check<F>(source: &str, expected: (&str, &[&str]), actual: Result<String, Errors>, ignore: F)
where
    F: Fn(&str) -> bool,
{
    if !expected.1.is_empty() {
        if !expected.1.iter().copied().any(ignore) {
            // The clone keeps `res` available for the panic message below.
            let res = actual.ok();
            assert_eq!(
                res.clone(),
                None,
                "Expected error {:?}. result: {} | source: {}",
                expected.1,
                res.unwrap(),
                source,
            );
        }
    } else {
        assert!(
            actual.is_ok(),
            "Couldn't parse {} | error: {:?}",
            source,
            actual.err().unwrap(),
        );
        assert_eq!(actual.unwrap(), expected.0, "source: {}", source);
    }
}
/// Expands `\\` and `\uXXXX` escapes in IdnaTestV2.txt test data.
///
/// A `\uXXXX` sequence that does not name a valid Unicode scalar value
/// (e.g. a surrogate) is copied through in `\uXXXX` spelling (uppercase hex).
fn unescape(input: &str) -> String {
    let mut output = String::new();
    let mut chars = input.chars();
    while let Some(c) = chars.next() {
        if c != '\\' {
            output.push(c);
            continue;
        }
        match chars.next().unwrap() {
            '\\' => output.push('\\'),
            'u' => {
                // Exactly four hex digits follow.
                let digits: Vec<u32> = (0..4)
                    .map(|_| chars.next().unwrap().to_digit(16).unwrap())
                    .collect();
                let code = digits.iter().fold(0u32, |acc, d| acc * 16 + d);
                match char::from_u32(code) {
                    Some(unescaped) => output.push(unescaped),
                    None => {
                        write!(
                            &mut output,
                            "\\u{:X}{:X}{:X}{:X}",
                            digits[0], digits[1], digits[2], digits[3]
                        )
                        .expect("Could not write to output");
                    }
                };
            }
            _ => panic!("Invalid test data input"),
        }
    }
    output
}
/// Parses a status column such as "[V2, X4_2]" into its individual codes.
/// An empty column or the literal "[]" yields an empty vector.
fn status(status: &str) -> Vec<&str> {
    if status.is_empty() || status == "[]" {
        return Vec::new();
    }
    let mut codes = status.split(", ").collect::<Vec<_>>();
    // Strip the opening bracket from the first code and the closing
    // bracket from the last (they may be the same element).
    let first = codes.first_mut().unwrap();
    assert!(first.starts_with('['));
    *first = &first[1..];
    let last = codes.last_mut().unwrap();
    assert!(last.ends_with(']'));
    *last = &last[..last.len() - 1];
    codes
}

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"d453ab4fa012a1f5d9233aa29fa03a7d5bcff06008f2197ce0ddac7e7aa28b2b","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","README.md":"dd73e159f3b31a7070f4564f9e68dca14495452e3b30d6fe4ca1d84656b69ee6","benches/all.rs":"53002f41ac38bdd5b1bb0a7ec8d5a9b49ce6cd3d073ce16c1014f9d4e90b762b","src/deprecated.rs":"bdba5a73432d9755c831ec01edf4d512f9390b351dba0eb8ce7b0430fa1073ad","src/lib.rs":"4d30605daf5c18d282d460ee561c7e5218aea76cf33fc072fd79f9617256f04e","src/punycode.rs":"2d9dda9bb6504863ea6f374e9ab4192ccc475a789a43a0fb624b15459a611fbc","src/uts46.rs":"2e719c93954930de20789896b153af7dd84c20e14edba6317f9dd80e3baaccc9","tests/IdnaTestV2.txt":"d668c4ea58d60fe04e6c011df98e0b317da6abaa1273d58f42b581eb0dd7adda","tests/bad_punycode_tests.json":"ff0a15479ed2cb08f7b4b39465160da66d1ac7575e5d76990c17e7b76cb5e0f5","tests/deprecated.rs":"cce256f6616a19314330a06003d6308138aae8257136431d143f062f14ab17c7","tests/punycode.rs":"75fa73b6429ccacaeb5d72fab0b927cdf9f2173a9fc5fb366697bf7002b73921","tests/punycode_tests.json":"50859b828d14d5eeba5ab930de25fb72a35310a0b46f421f65d64c7c3e54d08a","tests/tests.rs":"ecee59f0b0be27ba1e7b24bb449c681024253d0275065f0f0e258e7ec2977d12","tests/unit.rs":"7e450599b52900baa51ea26ff0cb55a830456f60642985abbc87ec671a91b8e1","tests/unitbis.rs":"545259b767cd045aed01c1515c3b092d1b3f6b3366ce88d1593a2c8e3ffcd2af","tests/uts46.rs":"0a1c339708f1ab845d726b1f55dc1be8a423a1304b0399234391d0bd419e3fe0"},"package":"bd69211b9b519e98303c015e21a007e293db403b6c85b9b124e133d25e242cdd"}
{"files":{"Cargo.toml":"4b63930f833beebdd51043e4435625638e39f804f172914ee4da834bfd18ac68","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"76e972ac0f4ddb116e86e10100132a783931a596e7b9872eaa31be15cd4d751d","README.md":"b95f383f35d2677a4aace5f816837357dca4b85f9c8cbed76d106c18faaca7e5","benches/all.rs":"cb99d454de05c3a72df82d2fca230757595ad25493e790eeb93a44ad43725cb8","src/deprecated.rs":"bdba5a73432d9755c831ec01edf4d512f9390b351dba0eb8ce7b0430fa1073ad","src/lib.rs":"6d0a03cd07e652974dd73a57b7505c9840c7b8afd84d7e223926515b99216bb4","src/punycode.rs":"86b8496e0b9fa807b456b74586d2c1bbd7dad832c67c3864e726e442646b34b3","src/uts46.rs":"f96ab9d27140a546f882200febc0bea8678d7ec320d35da9ef757045cf3d7309","tests/IdnaTestV2.txt":"d668c4ea58d60fe04e6c011df98e0b317da6abaa1273d58f42b581eb0dd7adda","tests/bad_punycode_tests.json":"ff0a15479ed2cb08f7b4b39465160da66d1ac7575e5d76990c17e7b76cb5e0f5","tests/deprecated.rs":"cce256f6616a19314330a06003d6308138aae8257136431d143f062f14ab17c7","tests/punycode.rs":"75fa73b6429ccacaeb5d72fab0b927cdf9f2173a9fc5fb366697bf7002b73921","tests/punycode_tests.json":"50859b828d14d5eeba5ab930de25fb72a35310a0b46f421f65d64c7c3e54d08a","tests/tests.rs":"ecee59f0b0be27ba1e7b24bb449c681024253d0275065f0f0e258e7ec2977d12","tests/unit.rs":"7e450599b52900baa51ea26ff0cb55a830456f60642985abbc87ec671a91b8e1","tests/unitbis.rs":"545259b767cd045aed01c1515c3b092d1b3f6b3366ce88d1593a2c8e3ffcd2af","tests/uts46.rs":"0a1c339708f1ab845d726b1f55dc1be8a423a1304b0399234391d0bd419e3fe0"},"package":"686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e"}

View File

@ -11,11 +11,15 @@
[package]
edition = "2018"
rust-version = "1.67"
rust-version = "1.57"
name = "idna"
version = "1.0.2"
version = "1.0.3"
authors = ["The rust-url developers"]
build = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "IDNA (Internationalizing Domain Names in Applications) and Punycode."
readme = "README.md"
keywords = [
@ -30,27 +34,30 @@ repository = "https://github.com/servo/rust-url/"
rustdoc-args = ["--generate-link-to-definition"]
[lib]
name = "idna"
path = "src/lib.rs"
doctest = false
[[test]]
name = "tests"
path = "tests/tests.rs"
harness = false
[[test]]
name = "unit"
path = "tests/unit.rs"
[[test]]
name = "unitbis"
path = "tests/unitbis.rs"
[[bench]]
name = "all"
path = "benches/all.rs"
harness = false
[dependencies.icu_normalizer]
version = "1.4.3"
[dependencies.icu_properties]
version = "1.4.2"
[dependencies.idna_adapter]
version = "1"
[dependencies.smallvec]
version = "1.13.1"
@ -73,10 +80,7 @@ version = "0.9"
[features]
alloc = []
compiled_data = [
"icu_normalizer/compiled_data",
"icu_properties/compiled_data",
]
compiled_data = ["idna_adapter/compiled_data"]
default = [
"std",
"compiled_data",

View File

@ -28,6 +28,10 @@ Apps that need to display host names to the user should use `uts46::Uts46::to_us
* `std` - Adds `impl std::error::Error for Errors {}` (and implies `alloc`).
* By default, all of the above are enabled.
## Alternative Unicode back ends
By default, `idna` uses [ICU4X](https://github.com/unicode-org/icu4x/) as its Unicode back end. If you wish to opt for different tradeoffs between correctness, run-time performance, binary size, compile time, and MSRV, please see the [README of the latest version of the `idna_adapter` crate](https://docs.rs/crate/idna_adapter/latest) for how to opt into a different Unicode back end.
## Breaking changes since 0.5.0
* Stricter IDNA 2008 restrictions are no longer supported. Attempting to enable them panics immediately. UTS 46 allows all the names that IDNA 2008 allows, and when transitional processing is disabled, they resolve the same way. There are additional names that IDNA 2008 disallows but UTS 46 maps to names that IDNA 2008 allows (notably, input is mapped to fold-case output). UTS 46 also allows symbols that were allowed in IDNA 2003 as well as newer symbols that are allowed according to the same principle. (Earlier versions of this crate allowed rejecting such symbols. Rejecting characters that UTS 46 maps to IDNA 2008-permitted characters wasn't supported in earlier versions, either.)

View File

@ -49,6 +49,51 @@ fn to_ascii_merged(bench: &mut Bencher) {
bench.iter(|| config.to_ascii(black_box(encoded)));
}
// Cow-based ToASCII on a plain ASCII domain.
fn to_ascii_cow_plain(bench: &mut Bencher) {
    let encoded = "example.com".as_bytes();
    bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
// Cow-based ToASCII on an ASCII domain containing a hyphen.
fn to_ascii_cow_hyphen(bench: &mut Bencher) {
    let encoded = "hyphenated-example.com".as_bytes();
    bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
// Cow-based ToASCII on an ASCII domain whose first label starts with a digit.
fn to_ascii_cow_leading_digit(bench: &mut Bencher) {
    let encoded = "1test.example".as_bytes();
    bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
// Cow-based ToASCII on a mixed Unicode (Arabic) + ASCII domain.
fn to_ascii_cow_unicode_mixed(bench: &mut Bencher) {
    let encoded = "مثال.example".as_bytes();
    bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
// Cow-based ToASCII on an already-punycoded label plus an ASCII label.
fn to_ascii_cow_punycode_mixed(bench: &mut Bencher) {
    let encoded = "xn--mgbh0fb.example".as_bytes();
    bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
// Cow-based ToASCII on an all-Unicode LTR (Sinhala) domain.
fn to_ascii_cow_unicode_ltr(bench: &mut Bencher) {
    let encoded = "නම.උදාහරණ".as_bytes();
    bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
// Cow-based ToASCII on a fully punycoded LTR domain.
fn to_ascii_cow_punycode_ltr(bench: &mut Bencher) {
    let encoded = "xn--r0co.xn--ozc8dl2c3bxd".as_bytes();
    bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
// Cow-based ToASCII on an all-Unicode RTL (Arabic) domain.
fn to_ascii_cow_unicode_rtl(bench: &mut Bencher) {
    let encoded = "الاسم.مثال".as_bytes();
    bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
// Cow-based ToASCII on a fully punycoded RTL domain.
fn to_ascii_cow_punycode_rtl(bench: &mut Bencher) {
    let encoded = "xn--mgba0b1dh.xn--mgbh0fb".as_bytes();
    bench.iter(|| idna::domain_to_ascii_cow(black_box(encoded), idna::AsciiDenyList::URL));
}
benchmark_group!(
benches,
to_unicode_puny_label,
@ -58,5 +103,14 @@ benchmark_group!(
to_ascii_already_puny_label,
to_ascii_simple,
to_ascii_merged,
to_ascii_cow_plain,
to_ascii_cow_hyphen,
to_ascii_cow_leading_digit,
to_ascii_cow_unicode_mixed,
to_ascii_cow_punycode_mixed,
to_ascii_cow_unicode_ltr,
to_ascii_cow_punycode_ltr,
to_ascii_cow_unicode_rtl,
to_ascii_cow_punycode_rtl,
);
benchmark_main!(benches);

View File

@ -74,6 +74,9 @@ impl From<Errors> for Result<(), Errors> {
#[cfg(feature = "std")]
impl std::error::Error for Errors {}
#[cfg(not(feature = "std"))]
impl core::error::Error for Errors {}
impl core::fmt::Display for Errors {
fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
core::fmt::Debug::fmt(self, f)

View File

@ -469,7 +469,7 @@ fn value_to_digit(value: u32) -> char {
fn huge_encode() {
let mut buf = String::new();
assert!(encode_into::<_, _, ExternalCaller>(
std::iter::repeat('ß').take(u32::MAX as usize + 1),
core::iter::repeat('ß').take(u32::MAX as usize + 1),
&mut buf
)
.is_err());

View File

@ -26,13 +26,7 @@ use crate::punycode::InternalCaller;
use alloc::borrow::Cow;
use alloc::string::String;
use core::fmt::Write;
use icu_normalizer::properties::CanonicalCombiningClassMap;
use icu_normalizer::uts46::Uts46Mapper;
use icu_properties::maps::CodePointMapDataBorrowed;
use icu_properties::BidiClass;
use icu_properties::CanonicalCombiningClass;
use icu_properties::GeneralCategory;
use icu_properties::JoiningType;
use idna_adapter::*;
use smallvec::SmallVec;
use utf8_iter::Utf8CharsEx;
@ -106,79 +100,6 @@ const fn ldh_mask() -> u128 {
accu
}
/// Turns a joining type into a mask for comparing with multiple type at once.
const fn joining_type_to_mask(jt: JoiningType) -> u32 {
1u32 << jt.0
}
/// Mask for checking for both left and dual joining.
const LEFT_OR_DUAL_JOINING_MASK: u32 =
joining_type_to_mask(JoiningType::LeftJoining) | joining_type_to_mask(JoiningType::DualJoining);
/// Mask for checking for both left and dual joining.
const RIGHT_OR_DUAL_JOINING_MASK: u32 = joining_type_to_mask(JoiningType::RightJoining)
| joining_type_to_mask(JoiningType::DualJoining);
/// Turns a bidi class into a mask for comparing with multiple classes at once.
const fn bidi_class_to_mask(bc: BidiClass) -> u32 {
1u32 << bc.0
}
/// Mask for checking if the domain is a bidi domain.
const RTL_MASK: u32 = bidi_class_to_mask(BidiClass::RightToLeft)
| bidi_class_to_mask(BidiClass::ArabicLetter)
| bidi_class_to_mask(BidiClass::ArabicNumber);
/// Mask for allowable bidi classes in the first character of a label
/// (either LTR or RTL) in a bidi domain.
const FIRST_BC_MASK: u32 = bidi_class_to_mask(BidiClass::LeftToRight)
| bidi_class_to_mask(BidiClass::RightToLeft)
| bidi_class_to_mask(BidiClass::ArabicLetter);
// Mask for allowable bidi classes of the last (non-Non-Spacing Mark)
// character in an LTR label in a bidi domain.
const LAST_LTR_MASK: u32 =
bidi_class_to_mask(BidiClass::LeftToRight) | bidi_class_to_mask(BidiClass::EuropeanNumber);
// Mask for allowable bidi classes of the last (non-Non-Spacing Mark)
// character in an RTL label in a bidi domain.
const LAST_RTL_MASK: u32 = bidi_class_to_mask(BidiClass::RightToLeft)
| bidi_class_to_mask(BidiClass::ArabicLetter)
| bidi_class_to_mask(BidiClass::EuropeanNumber)
| bidi_class_to_mask(BidiClass::ArabicNumber);
// Mask for allowable bidi classes of the middle characters in an LTR label in a bidi domain.
const MIDDLE_LTR_MASK: u32 = bidi_class_to_mask(BidiClass::LeftToRight)
| bidi_class_to_mask(BidiClass::EuropeanNumber)
| bidi_class_to_mask(BidiClass::EuropeanSeparator)
| bidi_class_to_mask(BidiClass::CommonSeparator)
| bidi_class_to_mask(BidiClass::EuropeanTerminator)
| bidi_class_to_mask(BidiClass::OtherNeutral)
| bidi_class_to_mask(BidiClass::BoundaryNeutral)
| bidi_class_to_mask(BidiClass::NonspacingMark);
// Mask for allowable bidi classes of the middle characters in an RTL label in a bidi domain.
const MIDDLE_RTL_MASK: u32 = bidi_class_to_mask(BidiClass::RightToLeft)
| bidi_class_to_mask(BidiClass::ArabicLetter)
| bidi_class_to_mask(BidiClass::ArabicNumber)
| bidi_class_to_mask(BidiClass::EuropeanNumber)
| bidi_class_to_mask(BidiClass::EuropeanSeparator)
| bidi_class_to_mask(BidiClass::CommonSeparator)
| bidi_class_to_mask(BidiClass::EuropeanTerminator)
| bidi_class_to_mask(BidiClass::OtherNeutral)
| bidi_class_to_mask(BidiClass::BoundaryNeutral)
| bidi_class_to_mask(BidiClass::NonspacingMark);
/// Turns a general category into a mask for comparing with multiple categories at once.
const fn general_category_to_mask(gc: GeneralCategory) -> u32 {
1 << (gc as u32)
}
/// Mask for the disallowed general categories of the first character in a label.
const MARK_MASK: u32 = general_category_to_mask(GeneralCategory::NonspacingMark)
| general_category_to_mask(GeneralCategory::SpacingMark)
| general_category_to_mask(GeneralCategory::EnclosingMark);
const PUNYCODE_PREFIX: u32 =
((b'-' as u32) << 24) | ((b'-' as u32) << 16) | ((b'N' as u32) << 8) | b'X' as u32;
@ -566,11 +487,7 @@ pub fn verify_dns_length(domain_name: &str, allow_trailing_dot: bool) -> bool {
/// An implementation of UTS #46.
pub struct Uts46 {
mapper: Uts46Mapper,
canonical_combining_class: CanonicalCombiningClassMap,
general_category: CodePointMapDataBorrowed<'static, GeneralCategory>,
bidi_class: CodePointMapDataBorrowed<'static, BidiClass>,
joining_type: CodePointMapDataBorrowed<'static, JoiningType>,
data: idna_adapter::Adapter,
}
#[cfg(feature = "compiled_data")]
@ -585,11 +502,7 @@ impl Uts46 {
#[cfg(feature = "compiled_data")]
pub const fn new() -> Self {
Self {
mapper: Uts46Mapper::new(),
canonical_combining_class: CanonicalCombiningClassMap::new(),
general_category: icu_properties::maps::general_category(),
bidi_class: icu_properties::maps::bidi_class(),
joining_type: icu_properties::maps::joining_type(),
data: idna_adapter::Adapter::new(),
}
}
@ -602,14 +515,14 @@ impl Uts46 {
/// # Arguments
///
/// * `domain_name` - The input domain name as UTF-8 bytes. (The UTF-8ness is checked by
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// * `ascii_deny_list` - What ASCII deny list, if any, to apply. The UTS 46
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// * `hyphens` - The UTS 46 _CheckHyphens_ flag. Most callers are probably the best
/// off by using [`Hyphens::Allow`] here.
/// off by using [`Hyphens::Allow`] here.
/// * `dns_length` - The UTS 46 _VerifyDNSLength_ flag.
pub fn to_ascii<'a>(
&self,
@ -668,14 +581,14 @@ impl Uts46 {
/// # Arguments
///
/// * `domain_name` - The input domain name as UTF-8 bytes. (The UTF-8ness is checked by
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// * `ascii_deny_list` - What ASCII deny list, if any, to apply. The UTS 46
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// * `hyphens` - The UTS 46 _CheckHyphens_ flag. Most callers are probably the best
/// off by using [`Hyphens::Allow`] here.
/// off by using [`Hyphens::Allow`] here.
pub fn to_unicode<'a>(
&self,
domain_name: &'a [u8],
@ -714,23 +627,23 @@ impl Uts46 {
/// # Arguments
///
/// * `domain_name` - The input domain name as UTF-8 bytes. (The UTF-8ness is checked by
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// * `ascii_deny_list` - What ASCII deny list, if any, to apply. The UTS 46
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// * `hyphens` - The UTS 46 _CheckHyphens_ flag. Most callers are probably the best
/// off by using [`Hyphens::Allow`] here.
/// off by using [`Hyphens::Allow`] here.
/// * `output_as_unicode` - A closure for deciding if a label should be output as Unicode
/// (as opposed to Punycode). The first argument is the label for which a decision is
/// needed (always non-empty slice). The second argument is the TLD (potentially empty).
/// The third argument is `true` iff the domain name as a whole is a bidi domain name.
/// Only non-erroneous labels that contain at least one non-ASCII character are passed
/// to the closure as the first argument. The second and third argument values are
/// guaranteed to remain the same during a single call to `process`, and the closure
/// may cache computations derived from the second and third argument (hence the
/// `FnMut` type).
/// (as opposed to Punycode). The first argument is the label for which a decision is
/// needed (always non-empty slice). The second argument is the TLD (potentially empty).
/// The third argument is `true` iff the domain name as a whole is a bidi domain name.
/// Only non-erroneous labels that contain at least one non-ASCII character are passed
/// to the closure as the first argument. The second and third argument values are
/// guaranteed to remain the same during a single call to `process`, and the closure
/// may cache computations derived from the second and third argument (hence the
/// `FnMut` type).
pub fn to_user_interface<'a, OutputUnicode: FnMut(&[char], &[char], bool) -> bool>(
&self,
domain_name: &'a [u8],
@ -766,59 +679,59 @@ impl Uts46 {
/// # Arguments
///
/// * `domain_name` - The input domain name as UTF-8 bytes. (The UTF-8ness is checked by
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// this method and input that is not well-formed UTF-8 is treated as an error. If you
/// already have a `&str`, call `.as_bytes()` on it.)
/// * `ascii_deny_list` - What ASCII deny list, if any, to apply. The UTS 46
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// _UseSTD3ASCIIRules_ flag or the WHATWG URL Standard forbidden domain code point
/// processing is handled via this argument. Most callers are probably the best off
/// by using [`AsciiDenyList::URL`] here.
/// * `hyphens` - The UTS 46 _CheckHyphens_ flag. Most callers are probably the best
/// off by using [`Hyphens::Allow`] here.
/// off by using [`Hyphens::Allow`] here.
/// * `error_policy` - Whether to fail fast or to produce output that may be rendered
/// for the user to examine in case of errors.
/// for the user to examine in case of errors.
/// * `output_as_unicode` - A closure for deciding if a label should be output as Unicode
/// (as opposed to Punycode). The first argument is the label for which a decision is
/// needed (always non-empty slice). The second argument is the TLD (potentially empty).
/// The third argument is `true` iff the domain name as a whole is a bidi domain name.
/// Only non-erroneous labels that contain at least one non-ASCII character are passed
/// to the closure as the first argument. The second and third argument values are
/// guaranteed to remain the same during a single call to `process`, and the closure
/// may cache computations derived from the second and third argument (hence the
/// `FnMut` type). To perform the _ToASCII_ operation, `|_, _, _| false` must be
/// passed as the closure. To perform the _ToUnicode_ operation, `|_, _, _| true` must
/// be passed as the closure. A more complex closure may be used to prepare a domain
/// name for display in a user interface so that labels are converted to the Unicode
/// form in general but potentially misleading labels are converted to the Punycode
/// form.
/// `sink` - The object that receives the output (in the non-passthrough case).
/// `ascii_sink` - A second sink that receives the _ToASCII_ form only if there
/// were no errors and `sink` received at least one character of non-ASCII output.
/// The purpose of this argument is to enable a user interface display form of the
/// domain and the _ToASCII_ form of the domain to be computed efficiently together.
/// This argument is useless when `output_as_unicode` always returns `false`, in
/// which case the _ToASCII_ form ends up in `sink` already. If `ascii_sink` receives
/// no output and the return value is `Ok(ProcessingSuccess::WroteToSink)`, use the
/// output received by `sink` also as the _ToASCII_ result.
/// (as opposed to Punycode). The first argument is the label for which a decision is
/// needed (always non-empty slice). The second argument is the TLD (potentially empty).
/// The third argument is `true` iff the domain name as a whole is a bidi domain name.
/// Only non-erroneous labels that contain at least one non-ASCII character are passed
/// to the closure as the first argument. The second and third argument values are
/// guaranteed to remain the same during a single call to `process`, and the closure
/// may cache computations derived from the second and third argument (hence the
/// `FnMut` type). To perform the _ToASCII_ operation, `|_, _, _| false` must be
/// passed as the closure. To perform the _ToUnicode_ operation, `|_, _, _| true` must
/// be passed as the closure. A more complex closure may be used to prepare a domain
/// name for display in a user interface so that labels are converted to the Unicode
/// form in general but potentially misleading labels are converted to the Punycode
/// form.
/// * `sink` - The object that receives the output (in the non-passthrough case).
/// * `ascii_sink` - A second sink that receives the _ToASCII_ form only if there
/// were no errors and `sink` received at least one character of non-ASCII output.
/// The purpose of this argument is to enable a user interface display form of the
/// domain and the _ToASCII_ form of the domain to be computed efficiently together.
/// This argument is useless when `output_as_unicode` always returns `false`, in
/// which case the _ToASCII_ form ends up in `sink` already. If `ascii_sink` receives
/// no output and the return value is `Ok(ProcessingSuccess::WroteToSink)`, use the
/// output received by `sink` also as the _ToASCII_ result.
///
/// # Return value
///
/// * `Ok(ProcessingSuccess::Passthrough)` - The caller must treat
/// `unsafe { core::str::from_utf8_unchecked(domain_name) }` as the output. (This
/// return value asserts that calling `core::str::from_utf8_unchecked(domain_name)`
/// is safe.)
/// `unsafe { core::str::from_utf8_unchecked(domain_name) }` as the output. (This
/// return value asserts that calling `core::str::from_utf8_unchecked(domain_name)`
/// is safe.)
/// * `Ok(ProcessingSuccess::WroteToSink)` - The caller must treat what was written
/// to `sink` as the output. If another sink was passed as `ascii_sink` but it did
/// not receive output, the caller must treat what was written to `sink` also as
/// the _ToASCII_ output. Otherwise, if `ascii_sink` received output, the caller
/// must treat what was written to `ascii_sink` as the _ToASCII_ output.
/// to `sink` as the output. If another sink was passed as `ascii_sink` but it did
/// not receive output, the caller must treat what was written to `sink` also as
/// the _ToASCII_ output. Otherwise, if `ascii_sink` received output, the caller
/// must treat what was written to `ascii_sink` as the _ToASCII_ output.
/// * `Err(ProcessingError::ValidityError)` - The input was in error and must
/// not be used for DNS lookup or otherwise in a network protocol. If `error_policy`
/// was `ErrorPolicy::MarkErrors`, the output written to `sink` may be displayed
/// to the user as an illustration of where the error was or the errors were.
/// not be used for DNS lookup or otherwise in a network protocol. If `error_policy`
/// was `ErrorPolicy::MarkErrors`, the output written to `sink` may be displayed
/// to the user as an illustration of where the error was or the errors were.
/// * `Err(ProcessingError::SinkError)` - Either `sink` or `ascii_sink` returned
/// [`core::fmt::Error`]. The partial output written to `sink` `ascii_sink` must not
/// be used. If `W` never returns [`core::fmt::Error`], this method never returns
/// `Err(ProcessingError::SinkError)`.
/// [`core::fmt::Error`]. The partial output written to `sink` `ascii_sink` must not
/// be used. If `W` never returns [`core::fmt::Error`], this method never returns
/// `Err(ProcessingError::SinkError)`.
///
/// # Safety-usable invariant
///
@ -1113,9 +1026,8 @@ impl Uts46 {
Ok(ProcessingSuccess::WroteToSink)
}
/// The part of `process` that doesn't need to be generic over the sink and
/// can avoid monomorphizing in the interest of code size.
#[inline(never)]
/// The part of `process` that doesn't need to be generic over the sink.
#[inline(always)]
fn process_inner<'a>(
&self,
domain_name: &'a [u8],
@ -1129,7 +1041,7 @@ impl Uts46 {
// performance.
let mut iter = domain_name.iter();
let mut most_recent_label_start = iter.clone();
let tail = loop {
loop {
if let Some(&b) = iter.next() {
if in_inclusive_range8(b, b'a', b'z') {
continue;
@ -1138,13 +1050,38 @@ impl Uts46 {
most_recent_label_start = iter.clone();
continue;
}
break most_recent_label_start.as_slice();
return self.process_innermost(
domain_name,
ascii_deny_list,
hyphens,
fail_fast,
domain_buffer,
already_punycode,
most_recent_label_start.as_slice(),
);
} else {
// Success! The whole input passes through on the fastest path!
return (domain_name.len(), false, false);
}
};
}
}
/// The part of `process` that doesn't need to be generic over the sink and
/// can avoid monomorphizing in the interest of code size.
/// Separating this into a different stack frame compared to `process_inner`
/// improves performance in the ICU4X case.
#[allow(clippy::too_many_arguments)]
#[inline(never)]
fn process_innermost<'a>(
&self,
domain_name: &'a [u8],
ascii_deny_list: AsciiDenyList,
hyphens: Hyphens,
fail_fast: bool,
domain_buffer: &mut SmallVec<[char; 253]>,
already_punycode: &mut SmallVec<[AlreadyAsciiLabel<'a>; 8]>,
tail: &'a [u8],
) -> (usize, bool, bool) {
let deny_list = ascii_deny_list.bits;
let deny_list_deny_dot = deny_list | DOT_MASK;
@ -1295,7 +1232,7 @@ impl Uts46 {
let mut first_needs_combining_mark_check = ascii.is_empty();
let mut needs_contextj_check = !non_ascii.is_empty();
let mut mapping = self
.mapper
.data
.map_normalize(non_ascii.chars())
.map(|c| apply_ascii_deny_list_to_lower_cased_unicode(c, deny_list));
loop {
@ -1431,8 +1368,8 @@ impl Uts46 {
if is_bidi {
for label in domain_buffer.split_mut(|c| *c == '.') {
if let Some((first, tail)) = label.split_first_mut() {
let first_bc = self.bidi_class.get(*first);
if (FIRST_BC_MASK & bidi_class_to_mask(first_bc)) == 0 {
let first_bc = self.data.bidi_class(*first);
if !FIRST_BC_MASK.intersects(first_bc.to_mask()) {
// Neither RTL label nor LTR label
if fail_fast {
return (0, false, true);
@ -1441,19 +1378,19 @@ impl Uts46 {
*first = '\u{FFFD}';
continue;
}
let is_ltr = first_bc == BidiClass::LeftToRight;
let is_ltr = first_bc.is_ltr();
// Trim NSM
let mut middle = tail;
#[allow(clippy::while_let_loop)]
loop {
if let Some((last, prior)) = middle.split_last_mut() {
let last_bc = self.bidi_class.get(*last);
if last_bc == BidiClass::NonspacingMark {
let last_bc = self.data.bidi_class(*last);
if last_bc.is_nonspacing_mark() {
middle = prior;
continue;
}
let last_mask = if is_ltr { LAST_LTR_MASK } else { LAST_RTL_MASK };
if (bidi_class_to_mask(last_bc) & last_mask) == 0 {
if !last_mask.intersects(last_bc.to_mask()) {
if fail_fast {
return (0, false, true);
}
@ -1462,8 +1399,8 @@ impl Uts46 {
}
if is_ltr {
for c in prior.iter_mut() {
let bc = self.bidi_class.get(*c);
if (bidi_class_to_mask(bc) & MIDDLE_LTR_MASK) == 0 {
let bc = self.data.bidi_class(*c);
if !MIDDLE_LTR_MASK.intersects(bc.to_mask()) {
if fail_fast {
return (0, false, true);
}
@ -1474,8 +1411,8 @@ impl Uts46 {
} else {
let mut numeral_state = RtlNumeralState::Undecided;
for c in prior.iter_mut() {
let bc = self.bidi_class.get(*c);
if (bidi_class_to_mask(bc) & MIDDLE_RTL_MASK) == 0 {
let bc = self.data.bidi_class(*c);
if !MIDDLE_RTL_MASK.intersects(bc.to_mask()) {
if fail_fast {
return (0, false, true);
}
@ -1484,14 +1421,14 @@ impl Uts46 {
} else {
match numeral_state {
RtlNumeralState::Undecided => {
if bc == BidiClass::EuropeanNumber {
if bc.is_european_number() {
numeral_state = RtlNumeralState::European;
} else if bc == BidiClass::ArabicNumber {
} else if bc.is_arabic_number() {
numeral_state = RtlNumeralState::Arabic;
}
}
RtlNumeralState::European => {
if bc == BidiClass::ArabicNumber {
if bc.is_arabic_number() {
if fail_fast {
return (0, false, true);
}
@ -1500,7 +1437,7 @@ impl Uts46 {
}
}
RtlNumeralState::Arabic => {
if bc == BidiClass::EuropeanNumber {
if bc.is_european_number() {
if fail_fast {
return (0, false, true);
}
@ -1512,9 +1449,9 @@ impl Uts46 {
}
}
if (numeral_state == RtlNumeralState::European
&& last_bc == BidiClass::ArabicNumber)
&& last_bc.is_arabic_number())
|| (numeral_state == RtlNumeralState::Arabic
&& last_bc == BidiClass::EuropeanNumber)
&& last_bc.is_european_number())
{
if fail_fast {
return (0, false, true);
@ -1549,7 +1486,7 @@ impl Uts46 {
had_errors: &mut bool,
) -> bool {
for c in self
.mapper
.data
.normalize_validate(label_buffer.iter().copied())
.map(|c| apply_ascii_deny_list_to_lower_cased_unicode(c, deny_list_deny_dot))
{
@ -1606,7 +1543,7 @@ impl Uts46 {
}
if first_needs_combining_mark_check {
if let Some(first) = mut_label.first_mut() {
if (general_category_to_mask(self.general_category.get(*first)) & MARK_MASK) != 0 {
if self.data.is_mark(*first) {
if fail_fast {
return true;
}
@ -1626,9 +1563,7 @@ impl Uts46 {
if let Some((joiner, tail)) = joiner_and_tail.split_first_mut() {
if let Some(previous) = head.last() {
if self.canonical_combining_class.get(*previous)
== CanonicalCombiningClass::Virama
{
if self.data.is_virama(*previous) {
continue;
}
} else {
@ -1686,14 +1621,14 @@ impl Uts46 {
fn has_appropriately_joining_char<I: Iterator<Item = char>>(
&self,
iter: I,
required_mask: u32,
required_mask: JoiningTypeMask,
) -> bool {
for c in iter {
let jt = self.joining_type.get(c);
if (joining_type_to_mask(jt) & required_mask) != 0 {
let jt = self.data.joining_type(c);
if jt.to_mask().intersects(required_mask) {
return true;
}
if jt == JoiningType::Transparent {
if jt.is_transparent() {
continue;
}
return false;
@ -1721,7 +1656,7 @@ impl Uts46 {
if in_inclusive_range_char(c, '\u{11000}', '\u{1E7FF}') {
continue;
}
if (RTL_MASK & bidi_class_to_mask(self.bidi_class.get(c))) != 0 {
if RTL_MASK.intersects(self.data.bidi_class(c).to_mask()) {
return true;
}
}

View File

@ -0,0 +1 @@
{"files":{"Cargo.toml":"6f8d2495f093253add30070ab41e48abb5078fd6cfe5586cf5dae64dc08f8086","LICENSE-APACHE":"a60eea817514531668d7e00765731449fe14d059d3249e0bc93b36de45f759f2","LICENSE-MIT":"8b43ce8accd61e9d370b5ca9e9c4f953279b5c239926c62315b40e24df51b726","README.md":"15500bedc72ff1698c58c7428b15d465ed9f5c1c6f059b9cf4fe366af9dfd811","src/lib.rs":"71feaadef8e68b2c52fcf6196eb6428a349dd61c92fc72dad9273c4229ca79c2"},"package":"daca1df1c957320b2cf139ac61e7bd64fed304c5040df000a745aa1de3b4ef71"}

53
pve-rs/vendor/idna_adapter/Cargo.toml vendored Normal file
View File

@ -0,0 +1,53 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
rust-version = "1.67.0"
name = "idna_adapter"
version = "1.2.0"
authors = ["The rust-url developers"]
build = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Back end adapter for idna"
homepage = "https://docs.rs/crate/idna_adapter/latest"
documentation = "https://docs.rs/idna_adapter/latest/idna_adapter/"
readme = "README.md"
keywords = [
"unicode",
"dns",
"idna",
]
categories = [
"no-std",
"internationalization",
]
license = "Apache-2.0 OR MIT"
repository = "https://github.com/hsivonen/idna_adapter"
[lib]
name = "idna_adapter"
path = "src/lib.rs"
[dependencies.icu_normalizer]
version = "1.4.3"
[dependencies.icu_properties]
version = "1.4.2"
[features]
compiled_data = [
"icu_normalizer/compiled_data",
"icu_properties/compiled_data",
]

View File

@ -1,4 +1,4 @@
Copyright (c) 2015 The Rust Project Developers
Copyright (c) The rust-url developers
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated

29
pve-rs/vendor/idna_adapter/README.md vendored Normal file
View File

@ -0,0 +1,29 @@
# idna_adapter
This crate abstracts over a Unicode back end for the [`idna`](https://docs.rs/crate/idna/latest) crate.
To work around the lack of [`global-features`](https://internals.rust-lang.org/t/pre-rfc-mutually-excusive-global-features/19618) in Cargo, this crate allows the top level `Cargo.lock` to choose an alternative Unicode back end for the `idna` crate by pinning a version of this crate.
`idna` depends on version 1 of this crate. The version stream 1.2.x uses ICU4X, the version stream 1.1.x uses unicode-rs, and the version stream 1.0.x has a stub implementation without an actual Unicode back end.
It is generally a good idea to refer to the [README of the latest version](https://docs.rs/crate/idna_adapter/latest) instead of the guidance below for up-to-date information about what options are available.
## ICU4X as the default
If you take no action, Cargo will choose the 1.2.x version stream i.e. ICU4X.
## Opting to use unicode-rs
To choose unicode-rs, run `cargo update -p idna_adapter --precise 1.1.0` in the top-level directory of your application.
Compared to ICU4X, this makes build times faster, MSRV lower, binary size larger, and run-time performance slower.
## Turning off IDNA support
Since the ability to turn off actual IDNA processing has been requested again and again, an option to have no Unicode back end is provided. Choosing this option obviously breaks the `idna` crate in the sense that it cannot provide a proper implementation of UTS 46 without any Unicode data. Choosing this option makes your application reject non-ASCII domain name inputs and fail to enforce the UTS 46 requirements on domain names that have labels in the Punycode form.
Using this option is not recommended, but to make the `idna` crate not actually support IDNA, run `cargo update -p idna_adapter --precise 1.0.0` in the top-level directory of your application.
## License
Apache-2.0 OR MIT

282
pve-rs/vendor/idna_adapter/src/lib.rs vendored Normal file
View File

@ -0,0 +1,282 @@
// Copyright The rust-url developers.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! This crate abstracts over a Unicode back end for the [`idna`][1]
//! crate.
//!
//! To work around the lack of [`global-features`][2] in Cargo, this
//! crate allows the top level `Cargo.lock` to choose an alternative
//! Unicode back end for the `idna` crate by pinning a version of this
//! crate.
//!
//! See the [README of the latest version][3] for more details.
//!
//! [1]: https://docs.rs/crate/idna/latest
//! [2]: https://internals.rust-lang.org/t/pre-rfc-mutually-excusive-global-features/19618
//! [3]: https://docs.rs/crate/idna_adapter/latest
#![no_std]
use icu_normalizer::properties::CanonicalCombiningClassMap;
use icu_normalizer::uts46::Uts46Mapper;
use icu_properties::maps::CodePointMapDataBorrowed;
use icu_properties::CanonicalCombiningClass;
use icu_properties::GeneralCategory;
/// Turns a joining type into a mask for comparing with multiple types at once.
const fn joining_type_to_mask(jt: icu_properties::JoiningType) -> u32 {
    // `icu_properties::JoiningType` is a newtype over a small integer
    // discriminant (`.0`), so each joining type maps to a distinct bit.
    1u32 << jt.0
}

/// Mask for checking for both left and dual joining.
pub const LEFT_OR_DUAL_JOINING_MASK: JoiningTypeMask = JoiningTypeMask(
    joining_type_to_mask(icu_properties::JoiningType::LeftJoining)
        | joining_type_to_mask(icu_properties::JoiningType::DualJoining),
);

/// Mask for checking for both right and dual joining.
pub const RIGHT_OR_DUAL_JOINING_MASK: JoiningTypeMask = JoiningTypeMask(
    joining_type_to_mask(icu_properties::JoiningType::RightJoining)
        | joining_type_to_mask(icu_properties::JoiningType::DualJoining),
);
/// Turns a bidi class into a mask for comparing with multiple classes at once.
const fn bidi_class_to_mask(bc: icu_properties::BidiClass) -> u32 {
    // `icu_properties::BidiClass` is a newtype over a small integer
    // discriminant (`.0`), so each class maps to a distinct bit.
    1u32 << bc.0
}

/// Mask for checking if the domain is a bidi domain.
pub const RTL_MASK: BidiClassMask = BidiClassMask(
    bidi_class_to_mask(icu_properties::BidiClass::RightToLeft)
        | bidi_class_to_mask(icu_properties::BidiClass::ArabicLetter)
        | bidi_class_to_mask(icu_properties::BidiClass::ArabicNumber),
);

/// Mask for allowable bidi classes in the first character of a label
/// (either LTR or RTL) in a bidi domain.
pub const FIRST_BC_MASK: BidiClassMask = BidiClassMask(
    bidi_class_to_mask(icu_properties::BidiClass::LeftToRight)
        | bidi_class_to_mask(icu_properties::BidiClass::RightToLeft)
        | bidi_class_to_mask(icu_properties::BidiClass::ArabicLetter),
);

/// Mask for allowable bidi classes of the last (non-Non-Spacing Mark)
/// character in an LTR label in a bidi domain.
pub const LAST_LTR_MASK: BidiClassMask = BidiClassMask(
    bidi_class_to_mask(icu_properties::BidiClass::LeftToRight)
        | bidi_class_to_mask(icu_properties::BidiClass::EuropeanNumber),
);

/// Mask for allowable bidi classes of the last (non-Non-Spacing Mark)
/// character in an RTL label in a bidi domain.
pub const LAST_RTL_MASK: BidiClassMask = BidiClassMask(
    bidi_class_to_mask(icu_properties::BidiClass::RightToLeft)
        | bidi_class_to_mask(icu_properties::BidiClass::ArabicLetter)
        | bidi_class_to_mask(icu_properties::BidiClass::EuropeanNumber)
        | bidi_class_to_mask(icu_properties::BidiClass::ArabicNumber),
);

/// Mask for allowable bidi classes of the middle characters in an LTR label in a bidi domain.
pub const MIDDLE_LTR_MASK: BidiClassMask = BidiClassMask(
    bidi_class_to_mask(icu_properties::BidiClass::LeftToRight)
        | bidi_class_to_mask(icu_properties::BidiClass::EuropeanNumber)
        | bidi_class_to_mask(icu_properties::BidiClass::EuropeanSeparator)
        | bidi_class_to_mask(icu_properties::BidiClass::CommonSeparator)
        | bidi_class_to_mask(icu_properties::BidiClass::EuropeanTerminator)
        | bidi_class_to_mask(icu_properties::BidiClass::OtherNeutral)
        | bidi_class_to_mask(icu_properties::BidiClass::BoundaryNeutral)
        | bidi_class_to_mask(icu_properties::BidiClass::NonspacingMark),
);

/// Mask for allowable bidi classes of the middle characters in an RTL label in a bidi domain.
pub const MIDDLE_RTL_MASK: BidiClassMask = BidiClassMask(
    bidi_class_to_mask(icu_properties::BidiClass::RightToLeft)
        | bidi_class_to_mask(icu_properties::BidiClass::ArabicLetter)
        | bidi_class_to_mask(icu_properties::BidiClass::ArabicNumber)
        | bidi_class_to_mask(icu_properties::BidiClass::EuropeanNumber)
        | bidi_class_to_mask(icu_properties::BidiClass::EuropeanSeparator)
        | bidi_class_to_mask(icu_properties::BidiClass::CommonSeparator)
        | bidi_class_to_mask(icu_properties::BidiClass::EuropeanTerminator)
        | bidi_class_to_mask(icu_properties::BidiClass::OtherNeutral)
        | bidi_class_to_mask(icu_properties::BidiClass::BoundaryNeutral)
        | bidi_class_to_mask(icu_properties::BidiClass::NonspacingMark),
);
/// Turns a general category into a mask for comparing with multiple categories at once.
const fn general_category_to_mask(gc: GeneralCategory) -> u32 {
    // `GeneralCategory` is a fieldless enum, so the cast yields a small
    // discriminant suitable for use as a bit position.
    1 << (gc as u32)
}

/// Mask for the disallowed general categories of the first character in a label
/// (the Mark categories: Nonspacing_Mark, Spacing_Mark, Enclosing_Mark).
const MARK_MASK: u32 = general_category_to_mask(GeneralCategory::NonspacingMark)
    | general_category_to_mask(GeneralCategory::SpacingMark)
    | general_category_to_mask(GeneralCategory::EnclosingMark);
/// Value for the Joining_Type Unicode property.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct JoiningType(icu_properties::JoiningType);

impl JoiningType {
    /// Returns the corresponding `JoiningTypeMask` (a mask with only this
    /// joining type's bit set).
    #[inline(always)]
    pub fn to_mask(self) -> JoiningTypeMask {
        JoiningTypeMask(joining_type_to_mask(self.0))
    }

    /// `true` iff this value is the Transparent value.
    #[inline(always)]
    pub fn is_transparent(self) -> bool {
        self.0 == icu_properties::JoiningType::Transparent
    }
}
/// A mask representing potentially multiple `JoiningType`
/// values.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct JoiningTypeMask(u32);

impl JoiningTypeMask {
    /// `true` iff both masks have at least one `JoiningType` in common.
    #[inline(always)]
    pub fn intersects(self, other: JoiningTypeMask) -> bool {
        self.0 & other.0 != 0
    }
}
/// Value for the Bidi_Class Unicode property.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct BidiClass(icu_properties::BidiClass);

impl BidiClass {
    /// Returns the corresponding `BidiClassMask` (a mask with only this
    /// class's bit set).
    #[inline(always)]
    pub fn to_mask(self) -> BidiClassMask {
        BidiClassMask(bidi_class_to_mask(self.0))
    }

    /// `true` iff this value is Left_To_Right.
    #[inline(always)]
    pub fn is_ltr(self) -> bool {
        self.0 == icu_properties::BidiClass::LeftToRight
    }

    /// `true` iff this value is Nonspacing_Mark.
    #[inline(always)]
    pub fn is_nonspacing_mark(self) -> bool {
        self.0 == icu_properties::BidiClass::NonspacingMark
    }

    /// `true` iff this value is European_Number.
    #[inline(always)]
    pub fn is_european_number(self) -> bool {
        self.0 == icu_properties::BidiClass::EuropeanNumber
    }

    /// `true` iff this value is Arabic_Number.
    #[inline(always)]
    pub fn is_arabic_number(self) -> bool {
        self.0 == icu_properties::BidiClass::ArabicNumber
    }
}
/// A mask representing potentially multiple `BidiClass`
/// values.
#[repr(transparent)]
#[derive(Clone, Copy)]
pub struct BidiClassMask(u32);

impl BidiClassMask {
    /// `true` iff both masks have at least one `BidiClass` in common.
    #[inline(always)]
    pub fn intersects(self, other: BidiClassMask) -> bool {
        self.0 & other.0 != 0
    }
}
/// An adapter between a Unicode back end and the `idna` crate.
pub struct Adapter {
    // UTS 46 mapping and normalization (from `icu_normalizer`).
    mapper: Uts46Mapper,
    // Canonical_Combining_Class lookup (used by `is_virama`).
    canonical_combining_class: CanonicalCombiningClassMap,
    // General_Category lookup (used by `is_mark`).
    general_category: CodePointMapDataBorrowed<'static, GeneralCategory>,
    // Bidi_Class lookup (used by `bidi_class`).
    bidi_class: CodePointMapDataBorrowed<'static, icu_properties::BidiClass>,
    // Joining_Type lookup (used by `joining_type`).
    joining_type: CodePointMapDataBorrowed<'static, icu_properties::JoiningType>,
}
#[cfg(feature = "compiled_data")]
impl Default for Adapter {
    // Only available with the `compiled_data` feature, because `new()`
    // constructs from data compiled into the binary.
    fn default() -> Self {
        Self::new()
    }
}
impl Adapter {
    /// Constructor using data compiled into the binary.
    #[cfg(feature = "compiled_data")]
    #[inline(always)]
    pub const fn new() -> Self {
        Self {
            mapper: Uts46Mapper::new(),
            canonical_combining_class: CanonicalCombiningClassMap::new(),
            general_category: icu_properties::maps::general_category(),
            bidi_class: icu_properties::maps::bidi_class(),
            joining_type: icu_properties::maps::joining_type(),
        }
    }

    /// `true` iff the Canonical_Combining_Class of `c` is Virama.
    #[inline(always)]
    pub fn is_virama(&self, c: char) -> bool {
        self.canonical_combining_class.get(c) == CanonicalCombiningClass::Virama
    }

    /// `true` iff the General_Category of `c` is Mark, i.e. any of Nonspacing_Mark,
    /// Spacing_Mark, or Enclosing_Mark.
    #[inline(always)]
    pub fn is_mark(&self, c: char) -> bool {
        // Compare the single-category mask against the combined Mark mask.
        (general_category_to_mask(self.general_category.get(c)) & MARK_MASK) != 0
    }

    /// Returns the Bidi_Class of `c`.
    #[inline(always)]
    pub fn bidi_class(&self, c: char) -> BidiClass {
        BidiClass(self.bidi_class.get(c))
    }

    /// Returns the Joining_Type of `c`.
    #[inline(always)]
    pub fn joining_type(&self, c: char) -> JoiningType {
        JoiningType(self.joining_type.get(c))
    }

    /// See the [method of the same name in `icu_normalizer`][1] for the
    /// exact semantics.
    ///
    /// [1]: https://docs.rs/icu_normalizer/latest/icu_normalizer/uts46/struct.Uts46Mapper.html#method.map_normalize
    #[inline(always)]
    pub fn map_normalize<'delegate, I: Iterator<Item = char> + 'delegate>(
        &'delegate self,
        iter: I,
    ) -> impl Iterator<Item = char> + 'delegate {
        self.mapper.map_normalize(iter)
    }

    /// See the [method of the same name in `icu_normalizer`][1] for the
    /// exact semantics.
    ///
    /// [1]: https://docs.rs/icu_normalizer/latest/icu_normalizer/uts46/struct.Uts46Mapper.html#method.normalize_validate
    #[inline(always)]
    pub fn normalize_validate<'delegate, I: Iterator<Item = char> + 'delegate>(
        &'delegate self,
        iter: I,
    ) -> impl Iterator<Item = char> + 'delegate {
        self.mapper.normalize_validate(iter)
    }
}

File diff suppressed because one or more lines are too long

View File

@ -126,6 +126,12 @@ impl APTRepositoryFileImpl for APTRepositoryFile {
return Ok(None);
}
#[cfg(feature = "alt-linux")]
// ignore files with .sisyphus (used by README.sisyphus), .rpmnew and .rpmsave
if matches!(extension.as_str(), "rpmnew" | "rpmsave" | "sisyphus") {
return Ok(None);
}
let file_type = extension[..]
.parse()
.map_err(|_| new_err("invalid extension"))?;

View File

@ -4,9 +4,7 @@ use std::path::{Path, PathBuf};
use anyhow::{bail, format_err, Error};
use crate::repositories::standard::APTRepositoryHandleImpl;
use proxmox_apt_api_types::{
APTRepository, APTRepositoryFileType, APTRepositoryHandle,
};
use proxmox_apt_api_types::{APTRepository, APTRepositoryFileType, APTRepositoryHandle};
#[cfg(not(feature = "alt-linux"))]
use crate::repositories::APTRepositoryOption;
@ -148,11 +146,23 @@ impl APTRepositoryImpl for APTRepository {
self.components.contains(&component)
};
self.types.contains(&package_type)
#[cfg(not(feature = "alt-linux"))]
{
self.types.contains(&package_type)
&& found_uri
// using contains would require a &String
&& self.suites.iter().any(|self_suite| self_suite == suite)
&& found_component
}
#[cfg(feature = "alt-linux")]
{
self.types.contains(&package_type)
&& found_uri
// ALT's suites looks like `Sisyphus/noarch`
&& self.suites.iter().any(|self_suite| self_suite.to_lowercase().starts_with(suite))
&& found_component
}
}
fn origin_from_uris(&self) -> Option<String> {
@ -284,7 +294,18 @@ fn uri_to_filename(uri: &str) -> String {
/// Get the host part from a given URI.
fn host_from_uri(uri: &str) -> Option<&str> {
#[cfg(not(feature = "alt-linux"))]
let host = uri.strip_prefix("http")?;
#[cfg(feature = "alt-linux")]
let host = if uri.starts_with("http") {
uri.strip_prefix("http")?
} else if uri.starts_with("ftp") {
uri.strip_prefix("ftp")?
} else {
uri.strip_prefix("rsync")?
};
let host = host.strip_prefix('s').unwrap_or(host);
let mut host = host.strip_prefix("://")?;

View File

@ -205,7 +205,11 @@ impl APTRepositoryHandleImpl for APTRepositoryHandle {
),
APTRepositoryHandle::Classic => (
APTRepositoryPackageType::Rpm,
vec!["http://ftp.altlinux.org/pub/distributions/ALTLinux".to_string()],
vec![
"http://ftp.altlinux.org/pub/distributions/ALTLinux".to_string(),
"ftp://ftp.altlinux.org/pub/distributions/ALTLinux".to_string(),
"rsync://ftp.altlinux.org/ALTLinux".to_string(),
],
"classic".to_string(),
),
APTRepositoryHandle::DebugInfo => (

View File

@ -776,6 +776,8 @@ fn test_standard_repositories() -> Result<(), Error> {
let mut file = APTRepositoryFile::new(&alt_list)?.unwrap();
file.parse()?;
expected[1].status = Some(false);
let std_repos = standard_repositories(&[file], "pve", ALTBranchID::Sisyphus);
assert_eq!(std_repos, &expected[0..=3]);

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"f677f75003078881533cd05c14cad6249910413b5fc997a5751e1271fa00dd55","src/boot_mode.rs":"c8cb771c20f8e81ebd50c2719c55aad9afa25038a0c57a9a3733ef540b9b341d","src/command.rs":"c6582d05cf9f15693036c980f76ab2fa76d9faba35ca362219c79de16e8eb681","src/crypt.rs":"705d0c7c8c210a84bd82d6bdfe1e16c17866fee6aee76d71781ca3801e9df0aa","src/error.rs":"f082870ea57aa8a4d874cfaefe521db59e89307a808fff4d9bc24df6802662c8","src/fd.rs":"d0c82e8cee8fa023bceb536eac444972b29be1ae24e92bd80133cf586d2b7da6","src/fs/acl.rs":"4f2a3b13e02b93d0359caa23f8aa7d6039f4984c8baf866d1461c0c62e827860","src/fs/dir.rs":"4e556b5c53d5b786cc3a3062c59db4d8e0219bc7eac5df6e9fe41b7e0dee9ae1","src/fs/file.rs":"2e0e53e3aa38850d18ae8e13038ae0c10a119b428ed49491513bea42a51258e4","src/fs/fsx_attr.rs":"a21d30282aec66c075c4eaa8038ed845b2cbaae5ce52c14531207e03cfb0d13b","src/fs/mod.rs":"a1e7f644cf36805313e5829a7be9452624c6c67b57aca4b03e62d4f510a3fc8b","src/fs/read_dir.rs":"91e6499893b19539945c6d5a62a9fb152f72a34b14f244f8e0b4909dd7618269","src/fs/xattr.rs":"6e7999b8173506527c7af7f1d957e09c45928a1adcd2d4c52a7c8fadb6b3fa4a","src/lib.rs":"959f1a72959bb09857027ef2699539082062d3ae9d8b24c9b6330497eeae65d7","src/linux/magic.rs":"1316000dc334c2eb29419548517bee455ff3f736c139bc6eaa5c48bbd5e22124","src/linux/mod.rs":"6e8a190e1c0b5c46a40edbb4aa8ad437070ef4dfe6b052f42982965c9cdea259","src/linux/pid.rs":"f4cbf84d71175ce3291f73e03026c56888069ff5a728fa931f06236319b2c662","src/linux/procfs/mod.rs":"1cbbc5b52b2ef5b474aa2917d88f43f675fb3fd12e3621f2fc1fdc2f8d58cb03","src/linux/procfs/mountinfo.rs":"6b00d34338aa659e07fecb8bb682951c1918d16976d9bbd4d06494a8407b0427","src/linux/socket.rs":"ab704b7f3d3177ef963cb57939cc436af0ddd42ab91ed2365a4fc84182118f2b","src/linux/timer.rs":"15fe55e863cf86ebabb0e1d4fb07b136f4534b95fe0db0a176885607338c8f39","src/linux/tty.rs":"51552572ac8d823f703595242d4d0971da40b72d0097ce9465d10de40efbe18b","src/logrotate.rs":"611866c2ab4f6a2c40f565be7e606b5fa407255c53ab54123a256fbefc278cd3","src/macros.rs":"4462f
f84a473bfea445a71f22484038e27a8733969d6cb0d8a232538f12dce43","src/mmap.rs":"c4ac4ed11f08aca2fe36068a846d012da14dabebc4c6610c412383ea2fb422cb","src/process_locker.rs":"bb7f1f519bf138ba921886acd39c413f34a9d156c57afdfb1360e004020f1b53","src/systemd.rs":"52ab7d19c5d52ca1c335b486f9ab81b5b04943ff652dd94f4e78f0d2aafa017d","tests/xattr.rs":"e51134acba88b05682115455f6d9b8a551b391d153ced65b8642605c614a03bb"},"package":null}
{"files":{"Cargo.toml":"f677f75003078881533cd05c14cad6249910413b5fc997a5751e1271fa00dd55","src/boot_mode.rs":"c8cb771c20f8e81ebd50c2719c55aad9afa25038a0c57a9a3733ef540b9b341d","src/command.rs":"c6582d05cf9f15693036c980f76ab2fa76d9faba35ca362219c79de16e8eb681","src/crypt.rs":"705d0c7c8c210a84bd82d6bdfe1e16c17866fee6aee76d71781ca3801e9df0aa","src/error.rs":"f082870ea57aa8a4d874cfaefe521db59e89307a808fff4d9bc24df6802662c8","src/fd.rs":"d0c82e8cee8fa023bceb536eac444972b29be1ae24e92bd80133cf586d2b7da6","src/fs/acl.rs":"4f2a3b13e02b93d0359caa23f8aa7d6039f4984c8baf866d1461c0c62e827860","src/fs/dir.rs":"28c28d463b8d37110d28e9f0fc1644c0e8829e336dbb9952151d4353b31e63c0","src/fs/file.rs":"2e0e53e3aa38850d18ae8e13038ae0c10a119b428ed49491513bea42a51258e4","src/fs/fsx_attr.rs":"a21d30282aec66c075c4eaa8038ed845b2cbaae5ce52c14531207e03cfb0d13b","src/fs/mod.rs":"a1e7f644cf36805313e5829a7be9452624c6c67b57aca4b03e62d4f510a3fc8b","src/fs/read_dir.rs":"91e6499893b19539945c6d5a62a9fb152f72a34b14f244f8e0b4909dd7618269","src/fs/xattr.rs":"6e7999b8173506527c7af7f1d957e09c45928a1adcd2d4c52a7c8fadb6b3fa4a","src/lib.rs":"959f1a72959bb09857027ef2699539082062d3ae9d8b24c9b6330497eeae65d7","src/linux/magic.rs":"1316000dc334c2eb29419548517bee455ff3f736c139bc6eaa5c48bbd5e22124","src/linux/mod.rs":"6e8a190e1c0b5c46a40edbb4aa8ad437070ef4dfe6b052f42982965c9cdea259","src/linux/pid.rs":"f4cbf84d71175ce3291f73e03026c56888069ff5a728fa931f06236319b2c662","src/linux/procfs/mod.rs":"1cbbc5b52b2ef5b474aa2917d88f43f675fb3fd12e3621f2fc1fdc2f8d58cb03","src/linux/procfs/mountinfo.rs":"6b00d34338aa659e07fecb8bb682951c1918d16976d9bbd4d06494a8407b0427","src/linux/socket.rs":"ab704b7f3d3177ef963cb57939cc436af0ddd42ab91ed2365a4fc84182118f2b","src/linux/timer.rs":"15fe55e863cf86ebabb0e1d4fb07b136f4534b95fe0db0a176885607338c8f39","src/linux/tty.rs":"51552572ac8d823f703595242d4d0971da40b72d0097ce9465d10de40efbe18b","src/logrotate.rs":"611866c2ab4f6a2c40f565be7e606b5fa407255c53ab54123a256fbefc278cd3","src/macros.rs":"4462f
f84a473bfea445a71f22484038e27a8733969d6cb0d8a232538f12dce43","src/mmap.rs":"c4ac4ed11f08aca2fe36068a846d012da14dabebc4c6610c412383ea2fb422cb","src/process_locker.rs":"bb7f1f519bf138ba921886acd39c413f34a9d156c57afdfb1360e004020f1b53","src/systemd.rs":"52ab7d19c5d52ca1c335b486f9ab81b5b04943ff652dd94f4e78f0d2aafa017d","tests/xattr.rs":"e51134acba88b05682115455f6d9b8a551b391d153ced65b8642605c614a03bb"},"package":null}

View File

@ -208,7 +208,7 @@ pub fn make_tmp_dir<P: AsRef<Path>>(
// Push NULL byte so that we have a proper NULL-terminated string
template.push(0);
let returned_buffer = unsafe { libc::mkdtemp(template.as_mut_ptr() as *mut i8) };
let returned_buffer = unsafe { libc::mkdtemp(template.as_mut_ptr() as *mut libc::c_char) };
// Check errno immediately, so that nothing else can overwrite it.
let err = std::io::Error::last_os_error();

File diff suppressed because one or more lines are too long

View File

@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.63"
name = "rustix"
version = "0.38.38"
version = "0.38.39"
authors = [
"Dan Gohman <dev@sunfishcode.online>",
"Jakub Konka <kubkon@jakubkonka.com>",

View File

@ -205,7 +205,6 @@ fn has_feature(feature: &str) -> bool {
fn can_compile<T: AsRef<str>>(test: T) -> bool {
use std::process::Stdio;
let out_dir = var("OUT_DIR").unwrap();
let rustc = var("RUSTC").unwrap();
let target = var("TARGET").unwrap();
@ -229,8 +228,9 @@ fn can_compile<T: AsRef<str>>(test: T) -> bool {
.arg("--emit=metadata") // Do as little as possible but still parse.
.arg("--target")
.arg(target)
.arg("--out-dir")
.arg(out_dir); // Put the output somewhere inconsequential.
.arg("-o")
.arg("-")
.stdout(Stdio::null()); // We don't care about the output (only whether it builds or not)
// If Cargo wants to set RUSTFLAGS, use that.
if let Ok(rustflags) = var("CARGO_ENCODED_RUSTFLAGS") {

View File

@ -70,7 +70,11 @@ pub(super) fn ret_c_int(raw: c::c_int) -> io::Result<c::c_int> {
}
}
#[cfg(any(linux_kernel, all(target_os = "redox", feature = "event")))]
#[cfg(any(
linux_kernel,
all(solarish, feature = "event"),
all(target_os = "redox", feature = "event")
))]
#[inline]
pub(super) fn ret_u32(raw: c::c_int) -> io::Result<u32> {
if raw == -1 {

View File

@ -5,5 +5,5 @@ pub(crate) mod types;
#[cfg_attr(windows, path = "windows_syscalls.rs")]
pub(crate) mod syscalls;
#[cfg(any(linux_kernel, target_os = "redox"))]
#[cfg(any(linux_kernel, solarish, target_os = "redox"))]
pub mod epoll;

View File

@ -40,10 +40,14 @@ bitflags! {
#[cfg(not(target_os = "espidf"))]
const NVAL = c::POLLNVAL;
/// `POLLRDHUP`
#[cfg(all(
linux_kernel,
not(any(target_arch = "sparc", target_arch = "sparc64"))),
)]
#[cfg(any(
target_os = "freebsd",
target_os = "illumos",
all(
linux_kernel,
not(any(target_arch = "sparc", target_arch = "sparc64"))
),
))]
const RDHUP = c::POLLRDHUP;
/// <https://docs.rs/bitflags/*/bitflags/#externally-defined-flags>

View File

@ -5,7 +5,7 @@ use crate::backend::c;
use crate::backend::conv::ret;
use crate::backend::conv::ret_c_int;
#[cfg(feature = "alloc")]
#[cfg(any(linux_kernel, target_os = "redox"))]
#[cfg(any(linux_kernel, solarish, target_os = "redox"))]
use crate::backend::conv::ret_u32;
#[cfg(solarish)]
use crate::event::port::Event;
@ -22,7 +22,7 @@ use crate::event::PollFd;
use crate::io;
#[cfg(solarish)]
use crate::utils::as_mut_ptr;
#[cfg(any(linux_kernel, target_os = "redox"))]
#[cfg(any(linux_kernel, solarish, target_os = "redox"))]
use crate::utils::as_ptr;
#[cfg(any(
all(feature = "alloc", bsd),
@ -351,13 +351,13 @@ pub(crate) fn pause() {
}
#[inline]
#[cfg(any(linux_kernel, target_os = "redox"))]
#[cfg(any(linux_kernel, solarish, target_os = "redox"))]
pub(crate) fn epoll_create(flags: super::epoll::CreateFlags) -> io::Result<OwnedFd> {
unsafe { ret_owned_fd(c::epoll_create1(bitflags_bits!(flags))) }
}
#[inline]
#[cfg(any(linux_kernel, target_os = "redox"))]
#[cfg(any(linux_kernel, solarish, target_os = "redox"))]
pub(crate) fn epoll_add(
epoll: BorrowedFd<'_>,
source: BorrowedFd<'_>,
@ -378,7 +378,7 @@ pub(crate) fn epoll_add(
}
#[inline]
#[cfg(any(linux_kernel, target_os = "redox"))]
#[cfg(any(linux_kernel, solarish, target_os = "redox"))]
pub(crate) fn epoll_mod(
epoll: BorrowedFd<'_>,
source: BorrowedFd<'_>,
@ -396,7 +396,7 @@ pub(crate) fn epoll_mod(
}
#[inline]
#[cfg(any(linux_kernel, target_os = "redox"))]
#[cfg(any(linux_kernel, solarish, target_os = "redox"))]
pub(crate) fn epoll_del(epoll: BorrowedFd<'_>, source: BorrowedFd<'_>) -> io::Result<()> {
unsafe {
ret(c::epoll_ctl(
@ -410,7 +410,7 @@ pub(crate) fn epoll_del(epoll: BorrowedFd<'_>, source: BorrowedFd<'_>) -> io::Re
#[inline]
#[cfg(feature = "alloc")]
#[cfg(any(linux_kernel, target_os = "redox"))]
#[cfg(any(linux_kernel, solarish, target_os = "redox"))]
pub(crate) fn epoll_wait(
epoll: BorrowedFd<'_>,
events: &mut [MaybeUninit<crate::event::epoll::Event>],

View File

@ -40,7 +40,7 @@ bitflags! {
const SYMLINK_NOFOLLOW = bitcast!(c::AT_SYMLINK_NOFOLLOW);
/// `AT_EACCESS`
#[cfg(not(any(target_os = "emscripten", target_os = "android")))]
#[cfg(not(target_os = "android"))]
const EACCESS = bitcast!(c::AT_EACCESS);
/// `AT_REMOVEDIR`

View File

@ -6,7 +6,13 @@ use crate::fd::BorrowedFd;
use crate::io;
#[cfg(all(
feature = "alloc",
any(apple, linux_like, target_os = "freebsd", target_os = "fuchsia")
any(
apple,
linux_like,
target_os = "freebsd",
target_os = "fuchsia",
target_os = "illumos"
)
))]
use {
crate::ffi::{CStr, CString},
@ -26,7 +32,13 @@ pub(crate) fn openpt(flags: OpenptFlags) -> io::Result<OwnedFd> {
#[cfg(all(
feature = "alloc",
any(apple, linux_like, target_os = "freebsd", target_os = "fuchsia")
any(
apple,
linux_like,
target_os = "freebsd",
target_os = "fuchsia",
target_os = "illumos"
)
))]
#[inline]
pub(crate) fn ptsname(fd: BorrowedFd<'_>, mut buffer: Vec<u8>) -> io::Result<CString> {
@ -38,7 +50,7 @@ pub(crate) fn ptsname(fd: BorrowedFd<'_>, mut buffer: Vec<u8>) -> io::Result<CSt
loop {
// On platforms with `ptsname_r`, use it.
#[cfg(any(linux_like, target_os = "fuchsia"))]
#[cfg(any(linux_like, target_os = "fuchsia", target_os = "illumos"))]
let r = unsafe { c::ptsname_r(borrowed_fd(fd), buffer.as_mut_ptr().cast(), buffer.len()) };
// FreeBSD 12 doesn't have `ptsname_r`.

View File

@ -1,6 +1,6 @@
//! Event operations.
#[cfg(any(linux_kernel, target_os = "redox"))]
#[cfg(any(linux_kernel, solarish, target_os = "redox"))]
pub mod epoll;
#[cfg(any(
linux_kernel,

View File

@ -1,3 +1,6 @@
// wasip2 conditionally gates stdlib APIs.
// https://github.com/rust-lang/rust/issues/130323
#![cfg_attr(all(target_os = "wasi", target_env = "p2"), feature(wasip2))]
//! `rustix` provides efficient memory-safe and [I/O-safe] wrappers to
//! POSIX-like, Unix-like, Linux, and Winsock syscall-like APIs, with
//! configurable backends.

View File

@ -13,7 +13,13 @@ use crate::fs::OFlags;
use crate::{backend, io};
#[cfg(all(
feature = "alloc",
any(apple, linux_like, target_os = "freebsd", target_os = "fuchsia")
any(
apple,
linux_like,
target_os = "freebsd",
target_os = "fuchsia",
target_os = "illumos"
)
))]
use {crate::ffi::CString, alloc::vec::Vec};
@ -115,7 +121,13 @@ pub fn openpt(flags: OpenptFlags) -> io::Result<OwnedFd> {
/// [glibc]: https://sourceware.org/glibc/manual/latest/html_node/Allocation.html#index-ptsname
#[cfg(all(
feature = "alloc",
any(apple, linux_like, target_os = "freebsd", target_os = "fuchsia")
any(
apple,
linux_like,
target_os = "freebsd",
target_os = "fuchsia",
target_os = "illumos"
)
))]
#[inline]
#[doc(alias = "ptsname_r")]

File diff suppressed because one or more lines are too long

View File

@ -13,7 +13,7 @@
edition = "2021"
rust-version = "1.61"
name = "syn"
version = "2.0.85"
version = "2.0.87"
authors = ["David Tolnay <dtolnay@gmail.com>"]
build = false
include = [

View File

@ -183,52 +183,6 @@ impl<'a> Cursor<'a> {
self.ptr == self.scope
}
/// If the cursor is pointing at a `Group` with the given delimiter, returns
/// a cursor into that group and one pointing to the next `TokenTree`.
pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
// If we're not trying to enter a none-delimited group, we want to
// ignore them. We have to make sure to _not_ ignore them when we want
// to enter them, of course. For obvious reasons.
if delim != Delimiter::None {
self.ignore_none();
}
if let Entry::Group(group, end_offset) = self.entry() {
if group.delimiter() == delim {
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, span, after_group));
}
}
None
}
pub(crate) fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let delimiter = group.delimiter();
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, delimiter, span, after_group));
}
None
}
pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((group.clone(), after_group));
}
None
}
/// If the cursor is pointing at a `Ident`, returns it along with a cursor
/// pointing at the next `TokenTree`.
pub fn ident(mut self) -> Option<(Ident, Cursor<'a>)> {
@ -279,6 +233,54 @@ impl<'a> Cursor<'a> {
}
}
/// If the cursor is pointing at a `Group` with the given delimiter, returns
/// a cursor into that group and one pointing to the next `TokenTree`.
pub fn group(mut self, delim: Delimiter) -> Option<(Cursor<'a>, DelimSpan, Cursor<'a>)> {
// If we're not trying to enter a none-delimited group, we want to
// ignore them. We have to make sure to _not_ ignore them when we want
// to enter them, of course. For obvious reasons.
if delim != Delimiter::None {
self.ignore_none();
}
if let Entry::Group(group, end_offset) = self.entry() {
if group.delimiter() == delim {
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, span, after_group));
}
}
None
}
/// If the cursor is pointing at a `Group`, returns a cursor into the group
/// and one pointing to the next `TokenTree`.
pub fn any_group(self) -> Option<(Cursor<'a>, Delimiter, DelimSpan, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let delimiter = group.delimiter();
let span = group.delim_span();
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let inside_of_group = unsafe { Cursor::create(self.ptr.add(1), end_of_group) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((inside_of_group, delimiter, span, after_group));
}
None
}
pub(crate) fn any_group_token(self) -> Option<(Group, Cursor<'a>)> {
if let Entry::Group(group, end_offset) = self.entry() {
let end_of_group = unsafe { self.ptr.add(*end_offset) };
let after_group = unsafe { Cursor::create(end_of_group, self.scope) };
return Some((group.clone(), after_group));
}
None
}
/// Copies all remaining tokens visible from this cursor into a
/// `TokenStream`.
pub fn token_stream(self) -> TokenStream {

View File

@ -248,6 +248,8 @@ pub(crate) mod parsing {
use crate::parse::discouraged::Speculative as _;
use crate::parse::{Parse, ParseStream};
use crate::restriction::{FieldMutability, Visibility};
#[cfg(not(feature = "full"))]
use crate::scan_expr::scan_expr;
use crate::token;
use crate::ty::Type;
use crate::verbatim;
@ -276,7 +278,7 @@ pub(crate) mod parsing {
let mut discriminant: Result<Expr> = ahead.parse();
if discriminant.is_ok() {
input.advance_to(&ahead);
} else if scan_lenient_discriminant(input).is_ok() {
} else if scan_expr(input).is_ok() {
discriminant = Ok(Expr::Verbatim(verbatim::between(&begin, input)));
}
discriminant?
@ -294,85 +296,6 @@ pub(crate) mod parsing {
}
}
#[cfg(not(feature = "full"))]
pub(crate) fn scan_lenient_discriminant(input: ParseStream) -> Result<()> {
use crate::expr::Member;
use crate::lifetime::Lifetime;
use crate::lit::Lit;
use crate::lit::LitFloat;
use crate::op::{BinOp, UnOp};
use crate::path::{self, AngleBracketedGenericArguments};
use proc_macro2::Delimiter::{self, Brace, Bracket, Parenthesis};
let consume = |delimiter: Delimiter| {
Result::unwrap(input.step(|cursor| match cursor.group(delimiter) {
Some((_inside, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
}))
};
macro_rules! consume {
[$token:tt] => {
input.parse::<Option<Token![$token]>>().unwrap().is_some()
};
}
let mut initial = true;
let mut depth = 0usize;
loop {
if initial {
if consume![&] {
input.parse::<Option<Token![mut]>>()?;
} else if consume![if] || consume![match] || consume![while] {
depth += 1;
} else if input.parse::<Option<Lit>>()?.is_some()
|| (consume(Brace) || consume(Bracket) || consume(Parenthesis))
|| (consume![async] || consume![const] || consume![loop] || consume![unsafe])
&& (consume(Brace) || break)
{
initial = false;
} else if consume![let] {
while !consume![=] {
if !((consume![|] || consume![ref] || consume![mut] || consume![@])
|| (consume![!] || input.parse::<Option<Lit>>()?.is_some())
|| (consume![..=] || consume![..] || consume![&] || consume![_])
|| (consume(Brace) || consume(Bracket) || consume(Parenthesis)))
{
path::parsing::qpath(input, true)?;
}
}
} else if input.parse::<Option<Lifetime>>()?.is_some() && !consume![:] {
break;
} else if input.parse::<UnOp>().is_err() {
path::parsing::qpath(input, true)?;
initial = consume![!] || depth == 0 && input.peek(token::Brace);
}
} else if input.is_empty() || input.peek(Token![,]) {
return Ok(());
} else if depth > 0 && consume(Brace) {
if consume![else] && !consume(Brace) {
initial = consume![if] || break;
} else {
depth -= 1;
}
} else if input.parse::<BinOp>().is_ok() || (consume![..] | consume![=]) {
initial = true;
} else if consume![.] {
if input.parse::<Option<LitFloat>>()?.is_none()
&& (input.parse::<Member>()?.is_named() && consume![::])
{
AngleBracketedGenericArguments::do_parse(None, input)?;
}
} else if consume![as] {
input.parse::<Type>()?;
} else if !(consume(Brace) || consume(Bracket) || consume(Parenthesis)) {
break;
}
}
Err(input.error("unsupported expression"))
}
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
impl Parse for FieldsNamed {
fn parse(input: ParseStream) -> Result<Self> {

View File

@ -1,15 +1,17 @@
use crate::attr::Attribute;
#[cfg(all(feature = "parsing", feature = "full"))]
use crate::error::Result;
#[cfg(feature = "parsing")]
use crate::ext::IdentExt as _;
#[cfg(feature = "full")]
use crate::generics::BoundLifetimes;
use crate::ident::Ident;
#[cfg(feature = "full")]
#[cfg(any(feature = "parsing", feature = "full"))]
use crate::lifetime::Lifetime;
use crate::lit::Lit;
use crate::mac::Macro;
use crate::op::{BinOp, UnOp};
#[cfg(all(feature = "parsing", feature = "full"))]
#[cfg(feature = "parsing")]
use crate::parse::ParseStream;
#[cfg(feature = "full")]
use crate::pat::Pat;
@ -889,6 +891,36 @@ impl Expr {
parsing::parse_with_earlier_boundary_rule(input)
}
/// Returns whether the next token in the parse stream is one that might
/// possibly form the beginning of an expr.
///
/// This classification is a load-bearing part of the grammar of some Rust
/// expressions, notably `return` and `break`. For example `return < …` will
/// never parse `<` as a binary operator regardless of what comes after,
/// because `<` is a legal starting token for an expression and so it's
/// required to be continued as a return value, such as `return <Struct as
/// Trait>::CONST`. Meanwhile `return > …` treats the `>` as a binary
/// operator because it cannot be a starting token for any Rust expression.
#[cfg(feature = "parsing")]
#[cfg_attr(docsrs, doc(cfg(feature = "parsing")))]
pub fn peek(input: ParseStream) -> bool {
input.peek(Ident::peek_any) // value name or keyword
|| input.peek(token::Paren) // tuple
|| input.peek(token::Bracket) // array
|| input.peek(token::Brace) // block
|| input.peek(Lit) // literal
|| input.peek(Token![!]) && !input.peek(Token![!=]) // operator not
|| input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus
|| input.peek(Token![*]) && !input.peek(Token![*=]) // dereference
|| input.peek(Token![|]) && !input.peek(Token![|=]) // closure
|| input.peek(Token![&]) && !input.peek(Token![&=]) // reference
|| input.peek(Token![..]) // range
|| input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path
|| input.peek(Token![::]) // absolute path
|| input.peek(Lifetime) // labeled loop
|| input.peek(Token![#]) // expression attributes
}
#[cfg(all(feature = "parsing", feature = "full"))]
pub(crate) fn replace_attrs(&mut self, new: Vec<Attribute>) -> Vec<Attribute> {
match self {
@ -1147,8 +1179,6 @@ pub(crate) mod parsing {
FieldValue, Index, Member,
};
#[cfg(feature = "full")]
use crate::ext::IdentExt as _;
#[cfg(feature = "full")]
use crate::generics::BoundLifetimes;
use crate::ident::Ident;
#[cfg(feature = "full")]
@ -1266,25 +1296,6 @@ pub(crate) mod parsing {
}
}
#[cfg(feature = "full")]
fn can_begin_expr(input: ParseStream) -> bool {
input.peek(Ident::peek_any) // value name or keyword
|| input.peek(token::Paren) // tuple
|| input.peek(token::Bracket) // array
|| input.peek(token::Brace) // block
|| input.peek(Lit) // literal
|| input.peek(Token![!]) && !input.peek(Token![!=]) // operator not
|| input.peek(Token![-]) && !input.peek(Token![-=]) && !input.peek(Token![->]) // unary minus
|| input.peek(Token![*]) && !input.peek(Token![*=]) // dereference
|| input.peek(Token![|]) && !input.peek(Token![|=]) // closure
|| input.peek(Token![&]) && !input.peek(Token![&=]) // reference
|| input.peek(Token![..]) // range notation
|| input.peek(Token![<]) && !input.peek(Token![<=]) && !input.peek(Token![<<=]) // associated path
|| input.peek(Token![::]) // global path
|| input.peek(Lifetime) // labeled loop
|| input.peek(Token![#]) // expression attributes
}
#[cfg(feature = "full")]
fn parse_expr(
input: ParseStream,
@ -2439,7 +2450,7 @@ pub(crate) mod parsing {
attrs: Vec::new(),
return_token: input.parse()?,
expr: {
if can_begin_expr(input) {
if Expr::peek(input) {
Some(input.parse()?)
} else {
None
@ -2477,7 +2488,7 @@ pub(crate) mod parsing {
attrs: Vec::new(),
yield_token: input.parse()?,
expr: {
if can_begin_expr(input) {
if Expr::peek(input) {
Some(input.parse()?)
} else {
None
@ -2690,7 +2701,7 @@ pub(crate) mod parsing {
}
input.advance_to(&ahead);
let expr = if can_begin_expr(input) && (allow_struct.0 || !input.peek(token::Brace)) {
let expr = if Expr::peek(input) && (allow_struct.0 || !input.peek(token::Brace)) {
Some(input.parse()?)
} else {
None

View File

@ -249,7 +249,7 @@
//! dynamic library libproc_macro from rustc toolchain.
// Syn types in rustdoc of other crates get linked to here.
#![doc(html_root_url = "https://docs.rs/syn/2.0.85")]
#![doc(html_root_url = "https://docs.rs/syn/2.0.87")]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![deny(unsafe_op_in_unsafe_fn)]
#![allow(non_camel_case_types)]
@ -264,6 +264,7 @@
clippy::derivable_impls,
clippy::diverging_sub_expression,
clippy::doc_markdown,
clippy::enum_glob_use,
clippy::expl_impl_clone_on_copy,
clippy::explicit_auto_deref,
clippy::if_not_else,
@ -307,6 +308,8 @@
clippy::wildcard_imports,
)]
extern crate self as syn;
#[cfg(feature = "proc-macro")]
extern crate proc_macro;
@ -509,6 +512,9 @@ pub use crate::restriction::{FieldMutability, VisRestricted, Visibility};
mod sealed;
#[cfg(all(feature = "parsing", feature = "derive", not(feature = "full")))]
mod scan_expr;
mod span;
#[cfg(all(feature = "parsing", feature = "printing"))]

View File

@ -2,8 +2,8 @@ use crate::buffer::Cursor;
use crate::error::{self, Error};
use crate::sealed::lookahead::Sealed;
use crate::span::IntoSpans;
use crate::token::Token;
use proc_macro2::Span;
use crate::token::{CustomToken, Token};
use proc_macro2::{Delimiter, Span};
use std::cell::RefCell;
/// Support for checking the next token in a stream to decide how to parse.
@ -110,7 +110,18 @@ impl<'a> Lookahead1<'a> {
/// The error message will identify all of the expected token types that
/// have been peeked against this lookahead instance.
pub fn error(self) -> Error {
let comparisons = self.comparisons.into_inner();
let mut comparisons = self.comparisons.into_inner();
comparisons.retain_mut(|display| {
if *display == "`)`" {
*display = match self.cursor.scope_delimiter() {
Delimiter::Parenthesis => "`)`",
Delimiter::Brace => "`}`",
Delimiter::Bracket => "`]`",
Delimiter::None => return false,
}
}
true
});
match comparisons.len() {
0 => {
if self.cursor.eof() {
@ -150,6 +161,160 @@ pub trait Peek: Sealed {
type Token: Token;
}
/// Pseudo-token used for peeking the end of a parse stream.
///
/// This type is only useful as an argument to one of the following functions:
///
/// - [`ParseStream::peek`][crate::parse::ParseBuffer::peek]
/// - [`ParseStream::peek2`][crate::parse::ParseBuffer::peek2]
/// - [`ParseStream::peek3`][crate::parse::ParseBuffer::peek3]
/// - [`Lookahead1::peek`]
///
/// The peek will return `true` if there are no remaining tokens after that
/// point in the parse stream.
///
/// # Example
///
/// Suppose we are parsing attributes containing core::fmt inspired formatting
/// arguments:
///
/// - `#[fmt("simple example")]`
/// - `#[fmt("interpolation e{}ample", self.x)]`
/// - `#[fmt("interpolation e{x}ample")]`
///
/// and we want to recognize the cases where no interpolation occurs so that
/// more efficient code can be generated.
///
/// The following implementation uses `input.peek(Token![,]) &&
/// input.peek2(End)` to recognize the case of a trailing comma without
/// consuming the comma from the parse stream, because if it isn't a trailing
/// comma, that same comma needs to be parsed as part of `args`.
///
/// ```
/// use proc_macro2::TokenStream;
/// use quote::quote;
/// use syn::parse::{End, Parse, ParseStream, Result};
/// use syn::{parse_quote, Attribute, LitStr, Token};
///
/// struct FormatArgs {
/// template: LitStr, // "...{}..."
/// args: TokenStream, // , self.x
/// }
///
/// impl Parse for FormatArgs {
/// fn parse(input: ParseStream) -> Result<Self> {
/// let template: LitStr = input.parse()?;
///
/// let args = if input.is_empty()
/// || input.peek(Token![,]) && input.peek2(End)
/// {
/// input.parse::<Option<Token![,]>>()?;
/// TokenStream::new()
/// } else {
/// input.parse()?
/// };
///
/// Ok(FormatArgs {
/// template,
/// args,
/// })
/// }
/// }
///
/// fn main() -> Result<()> {
/// let attrs: Vec<Attribute> = parse_quote! {
/// #[fmt("simple example")]
/// #[fmt("interpolation e{}ample", self.x)]
/// #[fmt("interpolation e{x}ample")]
/// };
///
/// for attr in &attrs {
/// let FormatArgs { template, args } = attr.parse_args()?;
/// let requires_fmt_machinery =
/// !args.is_empty() || template.value().contains(['{', '}']);
/// let out = if requires_fmt_machinery {
/// quote! {
/// ::core::write!(__formatter, #template #args)
/// }
/// } else {
/// quote! {
/// __formatter.write_str(#template)
/// }
/// };
/// println!("{}", out);
/// }
/// Ok(())
/// }
/// ```
///
/// Implementing this parsing logic without `peek2(End)` is more clumsy because
/// we'd need a parse stream actually advanced past the comma before being able
/// to find out whether there is anything after it. It would look something
/// like:
///
/// ```
/// # use proc_macro2::TokenStream;
/// # use syn::parse::{ParseStream, Result};
/// # use syn::Token;
/// #
/// # fn parse(input: ParseStream) -> Result<()> {
/// use syn::parse::discouraged::Speculative as _;
///
/// let ahead = input.fork();
/// ahead.parse::<Option<Token![,]>>()?;
/// let args = if ahead.is_empty() {
/// input.advance_to(&ahead);
/// TokenStream::new()
/// } else {
/// input.parse()?
/// };
/// # Ok(())
/// # }
/// ```
///
/// or:
///
/// ```
/// # use proc_macro2::TokenStream;
/// # use syn::parse::{ParseStream, Result};
/// # use syn::Token;
/// #
/// # fn parse(input: ParseStream) -> Result<()> {
/// use quote::ToTokens as _;
///
/// let comma: Option<Token![,]> = input.parse()?;
/// let mut args = TokenStream::new();
/// if !input.is_empty() {
/// comma.to_tokens(&mut args);
/// input.parse::<TokenStream>()?.to_tokens(&mut args);
/// }
/// # Ok(())
/// # }
/// ```
pub struct End;
impl Copy for End {}
impl Clone for End {
fn clone(&self) -> Self {
*self
}
}
impl Peek for End {
type Token = Self;
}
impl CustomToken for End {
fn peek(cursor: Cursor) -> bool {
cursor.eof()
}
fn display() -> &'static str {
"`)`" // Lookahead1 error message will fill in the expected close delimiter
}
}
impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Peek for F {
type Token = T;
}
@ -163,3 +328,5 @@ impl<S> IntoSpans<S> for TokenMarker {
}
impl<F: Copy + FnOnce(TokenMarker) -> T, T: Token> Sealed for F {}
impl Sealed for End {}

View File

@ -202,7 +202,7 @@ use std::rc::Rc;
use std::str::FromStr;
pub use crate::error::{Error, Result};
pub use crate::lookahead::{Lookahead1, Peek};
pub use crate::lookahead::{End, Lookahead1, Peek};
/// Parsing interface implemented by all types that can be parsed in a default
/// way from a token stream.
@ -751,6 +751,11 @@ impl<'a> ParseBuffer<'a> {
/// set of delimiters, as well as at the end of the tokens provided to the
/// outermost parsing entry point.
///
/// This is equivalent to
/// <code>.<a href="#method.peek">peek</a>(<a href="struct.End.html">syn::parse::End</a>)</code>.
/// Use `.peek2(End)` or `.peek3(End)` to look for the end of a parse stream
/// further ahead than the current position.
///
/// # Example
///
/// ```

View File

@ -153,6 +153,17 @@ impl ParseQuote for Attribute {
}
}
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Vec<Attribute> {
fn parse(input: ParseStream) -> Result<Self> {
let mut attrs = Vec::new();
while !input.is_empty() {
attrs.push(ParseQuote::parse(input)?);
}
Ok(attrs)
}
}
#[cfg(any(feature = "full", feature = "derive"))]
impl ParseQuote for Field {
fn parse(input: ParseStream) -> Result<Self> {

264
pve-rs/vendor/syn/src/scan_expr.rs vendored Normal file
View File

@ -0,0 +1,264 @@
use self::{Action::*, Input::*};
use proc_macro2::{Delimiter, Ident, Spacing, TokenTree};
use syn::parse::{ParseStream, Result};
use syn::{AngleBracketedGenericArguments, BinOp, Expr, ExprPath, Lifetime, Lit, Token, Type};
enum Input {
Keyword(&'static str),
Punct(&'static str),
ConsumeAny,
ConsumeBinOp,
ConsumeBrace,
ConsumeDelimiter,
ConsumeIdent,
ConsumeLifetime,
ConsumeLiteral,
ConsumeNestedBrace,
ExpectPath,
ExpectTurbofish,
ExpectType,
CanBeginExpr,
Otherwise,
Empty,
}
enum Action {
SetState(&'static [(Input, Action)]),
IncDepth,
DecDepth,
Finish,
}
static INIT: [(Input, Action); 28] = [
(ConsumeDelimiter, SetState(&POSTFIX)),
(Keyword("async"), SetState(&ASYNC)),
(Keyword("break"), SetState(&BREAK_LABEL)),
(Keyword("const"), SetState(&CONST)),
(Keyword("continue"), SetState(&CONTINUE)),
(Keyword("for"), SetState(&FOR)),
(Keyword("if"), IncDepth),
(Keyword("let"), SetState(&PATTERN)),
(Keyword("loop"), SetState(&BLOCK)),
(Keyword("match"), IncDepth),
(Keyword("move"), SetState(&CLOSURE)),
(Keyword("return"), SetState(&RETURN)),
(Keyword("static"), SetState(&CLOSURE)),
(Keyword("unsafe"), SetState(&BLOCK)),
(Keyword("while"), IncDepth),
(Keyword("yield"), SetState(&RETURN)),
(Keyword("_"), SetState(&POSTFIX)),
(Punct("!"), SetState(&INIT)),
(Punct("#"), SetState(&[(ConsumeDelimiter, SetState(&INIT))])),
(Punct("&"), SetState(&REFERENCE)),
(Punct("*"), SetState(&INIT)),
(Punct("-"), SetState(&INIT)),
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeLifetime, SetState(&[(Punct(":"), SetState(&INIT))])),
(ConsumeLiteral, SetState(&POSTFIX)),
(ExpectPath, SetState(&PATH)),
];
static POSTFIX: [(Input, Action); 10] = [
(Keyword("as"), SetState(&[(ExpectType, SetState(&POSTFIX))])),
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("."), SetState(&DOT)),
(Punct("?"), SetState(&POSTFIX)),
(ConsumeBinOp, SetState(&INIT)),
(Punct("="), SetState(&INIT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(ConsumeDelimiter, SetState(&POSTFIX)),
(Empty, Finish),
];
static ASYNC: [(Input, Action); 3] = [
(Keyword("move"), SetState(&ASYNC)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeBrace, SetState(&POSTFIX)),
];
static BLOCK: [(Input, Action); 1] = [(ConsumeBrace, SetState(&POSTFIX))];
static BREAK_LABEL: [(Input, Action); 2] = [
(ConsumeLifetime, SetState(&BREAK_VALUE)),
(Otherwise, SetState(&BREAK_VALUE)),
];
static BREAK_VALUE: [(Input, Action); 3] = [
(ConsumeNestedBrace, SetState(&IF_THEN)),
(CanBeginExpr, SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
static CLOSURE: [(Input, Action); 6] = [
(Keyword("async"), SetState(&CLOSURE)),
(Keyword("move"), SetState(&CLOSURE)),
(Punct(","), SetState(&CLOSURE)),
(Punct(">"), SetState(&CLOSURE)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeLifetime, SetState(&CLOSURE)),
];
static CLOSURE_ARGS: [(Input, Action); 2] = [
(Punct("|"), SetState(&CLOSURE_RET)),
(ConsumeAny, SetState(&CLOSURE_ARGS)),
];
static CLOSURE_RET: [(Input, Action); 2] = [
(Punct("->"), SetState(&[(ExpectType, SetState(&BLOCK))])),
(Otherwise, SetState(&INIT)),
];
static CONST: [(Input, Action); 2] = [
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeBrace, SetState(&POSTFIX)),
];
static CONTINUE: [(Input, Action); 2] = [
(ConsumeLifetime, SetState(&POSTFIX)),
(Otherwise, SetState(&POSTFIX)),
];
static DOT: [(Input, Action); 3] = [
(Keyword("await"), SetState(&POSTFIX)),
(ConsumeIdent, SetState(&METHOD)),
(ConsumeLiteral, SetState(&POSTFIX)),
];
static FOR: [(Input, Action); 2] = [
(Punct("<"), SetState(&CLOSURE)),
(Otherwise, SetState(&PATTERN)),
];
static IF_ELSE: [(Input, Action); 2] = [(Keyword("if"), SetState(&INIT)), (ConsumeBrace, DecDepth)];
static IF_THEN: [(Input, Action); 2] =
[(Keyword("else"), SetState(&IF_ELSE)), (Otherwise, DecDepth)];
static METHOD: [(Input, Action); 1] = [(ExpectTurbofish, SetState(&POSTFIX))];
static PATH: [(Input, Action); 4] = [
(Punct("!="), SetState(&INIT)),
(Punct("!"), SetState(&INIT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(Otherwise, SetState(&POSTFIX)),
];
static PATTERN: [(Input, Action); 15] = [
(ConsumeDelimiter, SetState(&PATTERN)),
(Keyword("box"), SetState(&PATTERN)),
(Keyword("in"), IncDepth),
(Keyword("mut"), SetState(&PATTERN)),
(Keyword("ref"), SetState(&PATTERN)),
(Keyword("_"), SetState(&PATTERN)),
(Punct("!"), SetState(&PATTERN)),
(Punct("&"), SetState(&PATTERN)),
(Punct("..="), SetState(&PATTERN)),
(Punct(".."), SetState(&PATTERN)),
(Punct("="), SetState(&INIT)),
(Punct("@"), SetState(&PATTERN)),
(Punct("|"), SetState(&PATTERN)),
(ConsumeLiteral, SetState(&PATTERN)),
(ExpectPath, SetState(&PATTERN)),
];
static RANGE: [(Input, Action); 6] = [
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("."), SetState(&DOT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(Empty, Finish),
(Otherwise, SetState(&INIT)),
];
static RAW: [(Input, Action); 3] = [
(Keyword("const"), SetState(&INIT)),
(Keyword("mut"), SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
static REFERENCE: [(Input, Action); 3] = [
(Keyword("mut"), SetState(&INIT)),
(Keyword("raw"), SetState(&RAW)),
(Otherwise, SetState(&INIT)),
];
static RETURN: [(Input, Action); 2] = [
(CanBeginExpr, SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
pub(crate) fn scan_expr(input: ParseStream) -> Result<()> {
let mut state = INIT.as_slice();
let mut depth = 0usize;
'table: loop {
for rule in state {
if match rule.0 {
Input::Keyword(expected) => input.step(|cursor| match cursor.ident() {
Some((ident, rest)) if ident == expected => Ok((true, rest)),
_ => Ok((false, *cursor)),
})?,
Input::Punct(expected) => input.step(|cursor| {
let begin = *cursor;
let mut cursor = begin;
for (i, ch) in expected.chars().enumerate() {
match cursor.punct() {
Some((punct, _)) if punct.as_char() != ch => break,
Some((_, rest)) if i == expected.len() - 1 => {
return Ok((true, rest));
}
Some((punct, rest)) if punct.spacing() == Spacing::Joint => {
cursor = rest;
}
_ => break,
}
}
Ok((false, begin))
})?,
Input::ConsumeAny => input.parse::<Option<TokenTree>>()?.is_some(),
Input::ConsumeBinOp => input.parse::<BinOp>().is_ok(),
Input::ConsumeBrace | Input::ConsumeNestedBrace => {
(matches!(rule.0, Input::ConsumeBrace) || depth > 0)
&& input.step(|cursor| match cursor.group(Delimiter::Brace) {
Some((_inside, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
})?
}
Input::ConsumeDelimiter => input.step(|cursor| match cursor.any_group() {
Some((_inside, _delimiter, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
})?,
Input::ConsumeIdent => input.parse::<Option<Ident>>()?.is_some(),
Input::ConsumeLifetime => input.parse::<Option<Lifetime>>()?.is_some(),
Input::ConsumeLiteral => input.parse::<Option<Lit>>()?.is_some(),
Input::ExpectPath => {
input.parse::<ExprPath>()?;
true
}
Input::ExpectTurbofish => {
if input.peek(Token![::]) {
input.parse::<AngleBracketedGenericArguments>()?;
}
true
}
Input::ExpectType => {
Type::without_plus(input)?;
true
}
Input::CanBeginExpr => Expr::peek(input),
Input::Otherwise => true,
Input::Empty => input.is_empty() || input.peek(Token![,]),
} {
state = match rule.1 {
Action::SetState(next) => next,
Action::IncDepth => (depth += 1, &INIT).1,
Action::DecDepth => (depth -= 1, &POSTFIX).1,
Action::Finish => return if depth == 0 { Ok(()) } else { break },
};
continue 'table;
}
}
return Err(input.error("unsupported expression"));
}
}

View File

@ -498,7 +498,7 @@ spanless_eq_struct!(Fn; defaultness generics sig body);
spanless_eq_struct!(FnDecl; inputs output);
spanless_eq_struct!(FnHeader; constness coroutine_kind safety ext);
spanless_eq_struct!(FnSig; header decl span);
spanless_eq_struct!(ForeignMod; safety abi items);
spanless_eq_struct!(ForeignMod; extern_span safety abi items);
spanless_eq_struct!(FormatArgPosition; index kind span);
spanless_eq_struct!(FormatArgs; span template arguments);
spanless_eq_struct!(FormatArgument; kind expr);

View File

@ -49,6 +49,7 @@ use std::fs;
use std::path::Path;
use std::process;
use std::sync::atomic::{AtomicUsize, Ordering};
use syn::parse::Parser as _;
#[macro_use]
mod macros;
@ -56,6 +57,9 @@ mod macros;
mod common;
mod repo;
#[path = "../src/scan_expr.rs"]
mod scan_expr;
#[test]
fn test_rustc_precedence() {
repo::rayon_init();
@ -115,7 +119,8 @@ fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (us
rustc_span::create_session_if_not_set_then(edition, |_| {
for expr in exprs {
let source_code = expr.to_token_stream().to_string();
let expr_tokens = expr.to_token_stream();
let source_code = expr_tokens.to_string();
let librustc_ast = if let Some(e) = librustc_parse_and_rewrite(&source_code) {
e
} else {
@ -173,6 +178,16 @@ fn test_expressions(path: &Path, edition: Edition, exprs: Vec<syn::Expr>) -> (us
continue;
}
if scan_expr::scan_expr.parse2(expr_tokens).is_err() {
failed += 1;
errorf!(
"\nFAIL {} - failed to scan expr\n{}\n",
path.display(),
source_code,
);
continue;
}
passed += 1;
}
});

View File

@ -1 +1 @@
{"files":{"Cargo.toml":"8609de4a292528ff7fed321708b95adf5fed4a53d9213fc3d6a0fd3d143c4398","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","src/ast.rs":"9b6cd6b1553483c99cd7e36aa422d37f4353c99b15da55534d28822f7fa7fd08","src/attr.rs":"d662f37c1a892aac6ab652ba6111335121748df728e49399aff736b654d5dd1c","src/expand.rs":"50c30146e65a28ac4f6768e5e9d173bde0162b7ad7c5adc39e4eab6e69650371","src/fmt.rs":"5d1cefc012403c2d4ff7ab2513c0ec559166df4271d5983a6463939b5ec8c3e1","src/generics.rs":"2076cde22271be355a8131a77add4b93f83ab0af4317cd2df5471fffa4f95c66","src/lib.rs":"5eea86c771e643328ad9bc3b881cce4bf9d50adae1b33e0d07645bdd9044003d","src/prop.rs":"5ba613e38430831259f20b258f33d57dcb783fbaeeb49e5faffa7b2a7be99e67","src/span.rs":"430460a4fa0d1fa9c627c1ddd575d2b101778fea84217591e1a93a5f6a2a0132","src/valid.rs":"ac95253944fd360d3578d0643a7baabb2cfa6bf9fbced7a6ce1f7b0529a3bb98"},"package":"ae71770322cbd277e69d762a16c444af02aa0575ac0d174f0b9562d3b37f8602"}
{"files":{"Cargo.toml":"d180d6115d56268eafb6cdfb2a6eb59e0ab11447024232bb644798012c53dc23","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","src/ast.rs":"9b6cd6b1553483c99cd7e36aa422d37f4353c99b15da55534d28822f7fa7fd08","src/attr.rs":"1201dee8b1da10c4dcf5a673412bbd77cda31776deb70b3a423354eca83b917f","src/expand.rs":"50c30146e65a28ac4f6768e5e9d173bde0162b7ad7c5adc39e4eab6e69650371","src/fmt.rs":"63b7d8184308cb1ae1ed0f96980f086a4b255928b05ad9fb44ddcd9ee54c1250","src/generics.rs":"ac493703c9955400d4fab22cbbdbbc4bf4f6f72c112b34be8b784142142ff74f","src/lib.rs":"e114c846bdae34674d3921a344316b33934c467713f593d943c119e5ce73dd9a","src/prop.rs":"5ba613e38430831259f20b258f33d57dcb783fbaeeb49e5faffa7b2a7be99e67","src/scan_expr.rs":"d46ae7a3eaaa6476553db3164676ec71aa82bcd8a2e4cad59deb07893c79f1c8","src/span.rs":"430460a4fa0d1fa9c627c1ddd575d2b101778fea84217591e1a93a5f6a2a0132","src/valid.rs":"ac95253944fd360d3578d0643a7baabb2cfa6bf9fbced7a6ce1f7b0529a3bb98"},"package":"a7c61ec9a6f64d2793d8a45faba21efbe3ced62a886d44c36a009b2b519b4c7e"}

View File

@ -11,9 +11,9 @@
[package]
edition = "2021"
rust-version = "1.56"
rust-version = "1.61"
name = "thiserror-impl"
version = "1.0.65"
version = "1.0.68"
authors = ["David Tolnay <dtolnay@gmail.com>"]
build = false
autolib = false
@ -42,4 +42,4 @@ version = "1.0.74"
version = "1.0.35"
[dependencies.syn]
version = "2.0.46"
version = "2.0.87"

View File

@ -2,7 +2,7 @@ use proc_macro2::{Delimiter, Group, Literal, Punct, Spacing, Span, TokenStream,
use quote::{format_ident, quote, ToTokens};
use std::collections::BTreeSet as Set;
use syn::parse::discouraged::Speculative;
use syn::parse::ParseStream;
use syn::parse::{End, ParseStream};
use syn::{
braced, bracketed, parenthesized, token, Attribute, Error, Ident, Index, LitFloat, LitInt,
LitStr, Meta, Result, Token,
@ -91,7 +91,11 @@ fn parse_error_attribute<'a>(attrs: &mut Attrs<'a>, attr: &'a Attribute) -> Resu
syn::custom_keyword!(transparent);
attr.parse_args_with(|input: ParseStream| {
if let Some(kw) = input.parse::<Option<transparent>>()? {
let lookahead = input.lookahead1();
let fmt = if lookahead.peek(LitStr) {
input.parse::<LitStr>()?
} else if lookahead.peek(transparent) {
let kw: transparent = input.parse()?;
if attrs.transparent.is_some() {
return Err(Error::new_spanned(
attr,
@ -103,14 +107,12 @@ fn parse_error_attribute<'a>(attrs: &mut Attrs<'a>, attr: &'a Attribute) -> Resu
span: kw.span,
});
return Ok(());
}
} else {
return Err(lookahead.error());
};
let fmt: LitStr = input.parse()?;
let ahead = input.fork();
ahead.parse::<Option<Token![,]>>()?;
let args = if ahead.is_empty() {
input.advance_to(&ahead);
let args = if input.is_empty() || input.peek(Token![,]) && input.peek2(End) {
input.parse::<Option<Token![,]>>()?;
TokenStream::new()
} else {
parse_token_expr(input, false)?
@ -140,6 +142,13 @@ fn parse_error_attribute<'a>(attrs: &mut Attrs<'a>, attr: &'a Attribute) -> Resu
fn parse_token_expr(input: ParseStream, mut begin_expr: bool) -> Result<TokenStream> {
let mut tokens = Vec::new();
while !input.is_empty() {
if input.peek(token::Group) {
let group: TokenTree = input.parse()?;
tokens.push(group);
begin_expr = false;
continue;
}
if begin_expr && input.peek(Token![.]) {
if input.peek2(Ident) {
input.parse::<Token![.]>()?;

View File

@ -1,17 +1,19 @@
use crate::ast::Field;
use crate::attr::{Display, Trait};
use proc_macro2::TokenTree;
use quote::{format_ident, quote_spanned};
use crate::scan_expr::scan_expr;
use proc_macro2::{TokenStream, TokenTree};
use quote::{format_ident, quote, quote_spanned};
use std::collections::{BTreeSet as Set, HashMap as Map};
use syn::ext::IdentExt;
use syn::parse::discouraged::Speculative;
use syn::parse::{ParseStream, Parser};
use syn::{Ident, Index, LitStr, Member, Result, Token};
use syn::{Expr, Ident, Index, LitStr, Member, Result, Token};
impl Display<'_> {
// Transform `"error {var}"` to `"error {}", var`.
pub fn expand_shorthand(&mut self, fields: &[Field]) {
let raw_args = self.args.clone();
let mut named_args = explicit_named_args.parse2(raw_args).unwrap();
let mut named_args = explicit_named_args.parse2(raw_args).unwrap().named;
let mut member_index = Map::new();
for (i, field) in fields.iter().enumerate() {
member_index.insert(&field.member, i);
@ -93,11 +95,6 @@ impl Display<'_> {
if formatvar.to_string().starts_with("r#") {
formatvar = format_ident!("r_{}", formatvar);
}
if formatvar.to_string().starts_with('_') {
// Work around leading underscore being rejected by 1.40 and
// older compilers. https://github.com/rust-lang/rust/pull/66847
formatvar = format_ident!("field_{}", formatvar);
}
out += &formatvar.to_string();
if !named_args.insert(formatvar.clone()) {
// Already specified in the format argument list.
@ -122,21 +119,102 @@ impl Display<'_> {
}
}
fn explicit_named_args(input: ParseStream) -> Result<Set<Ident>> {
let mut named_args = Set::new();
struct FmtArguments {
named: Set<Ident>,
unnamed: bool,
}
#[allow(clippy::unnecessary_wraps)]
fn explicit_named_args(input: ParseStream) -> Result<FmtArguments> {
let ahead = input.fork();
if let Ok(set) = try_explicit_named_args(&ahead) {
input.advance_to(&ahead);
return Ok(set);
}
let ahead = input.fork();
if let Ok(set) = fallback_explicit_named_args(&ahead) {
input.advance_to(&ahead);
return Ok(set);
}
input.parse::<TokenStream>().unwrap();
Ok(FmtArguments {
named: Set::new(),
unnamed: false,
})
}
fn try_explicit_named_args(input: ParseStream) -> Result<FmtArguments> {
let mut syn_full = None;
let mut args = FmtArguments {
named: Set::new(),
unnamed: false,
};
while !input.is_empty() {
if input.peek(Token![,]) && input.peek2(Ident::peek_any) && input.peek3(Token![=]) {
input.parse::<Token![,]>()?;
if input.is_empty() {
break;
}
if input.peek(Ident::peek_any) && input.peek2(Token![=]) && !input.peek2(Token![==]) {
let ident = input.call(Ident::parse_any)?;
input.parse::<Token![=]>()?;
args.named.insert(ident);
} else {
args.unnamed = true;
}
if *syn_full.get_or_insert_with(is_syn_full) {
let ahead = input.fork();
if ahead.parse::<Expr>().is_ok() {
input.advance_to(&ahead);
continue;
}
}
scan_expr(input)?;
}
Ok(args)
}
fn fallback_explicit_named_args(input: ParseStream) -> Result<FmtArguments> {
let mut args = FmtArguments {
named: Set::new(),
unnamed: false,
};
while !input.is_empty() {
if input.peek(Token![,])
&& input.peek2(Ident::peek_any)
&& input.peek3(Token![=])
&& !input.peek3(Token![==])
{
input.parse::<Token![,]>()?;
let ident = input.call(Ident::parse_any)?;
input.parse::<Token![=]>()?;
named_args.insert(ident);
} else {
input.parse::<TokenTree>()?;
args.named.insert(ident);
}
}
Ok(named_args)
Ok(args)
}
fn is_syn_full() -> bool {
// Expr::Block contains syn::Block which contains Vec<syn::Stmt>. In the
// current version of Syn, syn::Stmt is exhaustive and could only plausibly
// represent `trait Trait {}` in Stmt::Item which contains syn::Item. Most
// of the point of syn's non-"full" mode is to avoid compiling Item and the
// entire expansive syntax tree it comprises. So the following expression
// being parsed to Expr::Block is a reliable indication that "full" is
// enabled.
let test = quote!({
trait Trait {}
});
match syn::parse2(test) {
Ok(Expr::Verbatim(_)) | Err(_) => false,
Ok(Expr::Block(_)) => true,
Ok(_) => unreachable!(),
}
}
fn take_int(read: &mut &str) -> String {

View File

@ -57,7 +57,6 @@ impl InferredBounds {
}
}
#[allow(clippy::type_repetition_in_bounds, clippy::trait_duplication_in_bounds)] // clippy bug: https://github.com/rust-lang/rust-clippy/issues/8771
pub fn insert(&mut self, ty: impl ToTokens, bound: impl ToTokens) {
let ty = ty.to_token_stream();
let bound = bound.to_token_stream();

View File

@ -2,6 +2,7 @@
clippy::blocks_in_conditions,
clippy::cast_lossless,
clippy::cast_possible_truncation,
clippy::enum_glob_use,
clippy::manual_find,
clippy::manual_let_else,
clippy::manual_map,
@ -23,6 +24,7 @@ mod expand;
mod fmt;
mod generics;
mod prop;
mod scan_expr;
mod span;
mod valid;

View File

@ -0,0 +1,264 @@
use self::{Action::*, Input::*};
use proc_macro2::{Delimiter, Ident, Spacing, TokenTree};
use syn::parse::{ParseStream, Result};
use syn::{AngleBracketedGenericArguments, BinOp, Expr, ExprPath, Lifetime, Lit, Token, Type};
enum Input {
Keyword(&'static str),
Punct(&'static str),
ConsumeAny,
ConsumeBinOp,
ConsumeBrace,
ConsumeDelimiter,
ConsumeIdent,
ConsumeLifetime,
ConsumeLiteral,
ConsumeNestedBrace,
ExpectPath,
ExpectTurbofish,
ExpectType,
CanBeginExpr,
Otherwise,
Empty,
}
enum Action {
SetState(&'static [(Input, Action)]),
IncDepth,
DecDepth,
Finish,
}
static INIT: [(Input, Action); 28] = [
(ConsumeDelimiter, SetState(&POSTFIX)),
(Keyword("async"), SetState(&ASYNC)),
(Keyword("break"), SetState(&BREAK_LABEL)),
(Keyword("const"), SetState(&CONST)),
(Keyword("continue"), SetState(&CONTINUE)),
(Keyword("for"), SetState(&FOR)),
(Keyword("if"), IncDepth),
(Keyword("let"), SetState(&PATTERN)),
(Keyword("loop"), SetState(&BLOCK)),
(Keyword("match"), IncDepth),
(Keyword("move"), SetState(&CLOSURE)),
(Keyword("return"), SetState(&RETURN)),
(Keyword("static"), SetState(&CLOSURE)),
(Keyword("unsafe"), SetState(&BLOCK)),
(Keyword("while"), IncDepth),
(Keyword("yield"), SetState(&RETURN)),
(Keyword("_"), SetState(&POSTFIX)),
(Punct("!"), SetState(&INIT)),
(Punct("#"), SetState(&[(ConsumeDelimiter, SetState(&INIT))])),
(Punct("&"), SetState(&REFERENCE)),
(Punct("*"), SetState(&INIT)),
(Punct("-"), SetState(&INIT)),
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeLifetime, SetState(&[(Punct(":"), SetState(&INIT))])),
(ConsumeLiteral, SetState(&POSTFIX)),
(ExpectPath, SetState(&PATH)),
];
static POSTFIX: [(Input, Action); 10] = [
(Keyword("as"), SetState(&[(ExpectType, SetState(&POSTFIX))])),
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("."), SetState(&DOT)),
(Punct("?"), SetState(&POSTFIX)),
(ConsumeBinOp, SetState(&INIT)),
(Punct("="), SetState(&INIT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(ConsumeDelimiter, SetState(&POSTFIX)),
(Empty, Finish),
];
static ASYNC: [(Input, Action); 3] = [
(Keyword("move"), SetState(&ASYNC)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeBrace, SetState(&POSTFIX)),
];
static BLOCK: [(Input, Action); 1] = [(ConsumeBrace, SetState(&POSTFIX))];
static BREAK_LABEL: [(Input, Action); 2] = [
(ConsumeLifetime, SetState(&BREAK_VALUE)),
(Otherwise, SetState(&BREAK_VALUE)),
];
static BREAK_VALUE: [(Input, Action); 3] = [
(ConsumeNestedBrace, SetState(&IF_THEN)),
(CanBeginExpr, SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
static CLOSURE: [(Input, Action); 6] = [
(Keyword("async"), SetState(&CLOSURE)),
(Keyword("move"), SetState(&CLOSURE)),
(Punct(","), SetState(&CLOSURE)),
(Punct(">"), SetState(&CLOSURE)),
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeLifetime, SetState(&CLOSURE)),
];
static CLOSURE_ARGS: [(Input, Action); 2] = [
(Punct("|"), SetState(&CLOSURE_RET)),
(ConsumeAny, SetState(&CLOSURE_ARGS)),
];
static CLOSURE_RET: [(Input, Action); 2] = [
(Punct("->"), SetState(&[(ExpectType, SetState(&BLOCK))])),
(Otherwise, SetState(&INIT)),
];
static CONST: [(Input, Action); 2] = [
(Punct("|"), SetState(&CLOSURE_ARGS)),
(ConsumeBrace, SetState(&POSTFIX)),
];
static CONTINUE: [(Input, Action); 2] = [
(ConsumeLifetime, SetState(&POSTFIX)),
(Otherwise, SetState(&POSTFIX)),
];
static DOT: [(Input, Action); 3] = [
(Keyword("await"), SetState(&POSTFIX)),
(ConsumeIdent, SetState(&METHOD)),
(ConsumeLiteral, SetState(&POSTFIX)),
];
static FOR: [(Input, Action); 2] = [
(Punct("<"), SetState(&CLOSURE)),
(Otherwise, SetState(&PATTERN)),
];
static IF_ELSE: [(Input, Action); 2] = [(Keyword("if"), SetState(&INIT)), (ConsumeBrace, DecDepth)];
static IF_THEN: [(Input, Action); 2] =
[(Keyword("else"), SetState(&IF_ELSE)), (Otherwise, DecDepth)];
static METHOD: [(Input, Action); 1] = [(ExpectTurbofish, SetState(&POSTFIX))];
static PATH: [(Input, Action); 4] = [
(Punct("!="), SetState(&INIT)),
(Punct("!"), SetState(&INIT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(Otherwise, SetState(&POSTFIX)),
];
static PATTERN: [(Input, Action); 15] = [
(ConsumeDelimiter, SetState(&PATTERN)),
(Keyword("box"), SetState(&PATTERN)),
(Keyword("in"), IncDepth),
(Keyword("mut"), SetState(&PATTERN)),
(Keyword("ref"), SetState(&PATTERN)),
(Keyword("_"), SetState(&PATTERN)),
(Punct("!"), SetState(&PATTERN)),
(Punct("&"), SetState(&PATTERN)),
(Punct("..="), SetState(&PATTERN)),
(Punct(".."), SetState(&PATTERN)),
(Punct("="), SetState(&INIT)),
(Punct("@"), SetState(&PATTERN)),
(Punct("|"), SetState(&PATTERN)),
(ConsumeLiteral, SetState(&PATTERN)),
(ExpectPath, SetState(&PATTERN)),
];
static RANGE: [(Input, Action); 6] = [
(Punct("..="), SetState(&INIT)),
(Punct(".."), SetState(&RANGE)),
(Punct("."), SetState(&DOT)),
(ConsumeNestedBrace, SetState(&IF_THEN)),
(Empty, Finish),
(Otherwise, SetState(&INIT)),
];
static RAW: [(Input, Action); 3] = [
(Keyword("const"), SetState(&INIT)),
(Keyword("mut"), SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
static REFERENCE: [(Input, Action); 3] = [
(Keyword("mut"), SetState(&INIT)),
(Keyword("raw"), SetState(&RAW)),
(Otherwise, SetState(&INIT)),
];
static RETURN: [(Input, Action); 2] = [
(CanBeginExpr, SetState(&INIT)),
(Otherwise, SetState(&POSTFIX)),
];
pub(crate) fn scan_expr(input: ParseStream) -> Result<()> {
let mut state = INIT.as_slice();
let mut depth = 0usize;
'table: loop {
for rule in state {
if match rule.0 {
Input::Keyword(expected) => input.step(|cursor| match cursor.ident() {
Some((ident, rest)) if ident == expected => Ok((true, rest)),
_ => Ok((false, *cursor)),
})?,
Input::Punct(expected) => input.step(|cursor| {
let begin = *cursor;
let mut cursor = begin;
for (i, ch) in expected.chars().enumerate() {
match cursor.punct() {
Some((punct, _)) if punct.as_char() != ch => break,
Some((_, rest)) if i == expected.len() - 1 => {
return Ok((true, rest));
}
Some((punct, rest)) if punct.spacing() == Spacing::Joint => {
cursor = rest;
}
_ => break,
}
}
Ok((false, begin))
})?,
Input::ConsumeAny => input.parse::<Option<TokenTree>>()?.is_some(),
Input::ConsumeBinOp => input.parse::<BinOp>().is_ok(),
Input::ConsumeBrace | Input::ConsumeNestedBrace => {
(matches!(rule.0, Input::ConsumeBrace) || depth > 0)
&& input.step(|cursor| match cursor.group(Delimiter::Brace) {
Some((_inside, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
})?
}
Input::ConsumeDelimiter => input.step(|cursor| match cursor.any_group() {
Some((_inside, _delimiter, _span, rest)) => Ok((true, rest)),
None => Ok((false, *cursor)),
})?,
Input::ConsumeIdent => input.parse::<Option<Ident>>()?.is_some(),
Input::ConsumeLifetime => input.parse::<Option<Lifetime>>()?.is_some(),
Input::ConsumeLiteral => input.parse::<Option<Lit>>()?.is_some(),
Input::ExpectPath => {
input.parse::<ExprPath>()?;
true
}
Input::ExpectTurbofish => {
if input.peek(Token![::]) {
input.parse::<AngleBracketedGenericArguments>()?;
}
true
}
Input::ExpectType => {
Type::without_plus(input)?;
true
}
Input::CanBeginExpr => Expr::peek(input),
Input::Otherwise => true,
Input::Empty => input.is_empty() || input.peek(Token![,]),
} {
state = match rule.1 {
Action::SetState(next) => next,
Action::IncDepth => (depth += 1, &INIT).1,
Action::DecDepth => (depth -= 1, &POSTFIX).1,
Action::Finish => return if depth == 0 { Ok(()) } else { break },
};
continue 'table;
}
}
return Err(input.error("unsupported expression"));
}
}

File diff suppressed because one or more lines are too long

View File

@ -11,9 +11,9 @@
[package]
edition = "2021"
rust-version = "1.56"
rust-version = "1.61"
name = "thiserror"
version = "1.0.65"
version = "1.0.68"
authors = ["David Tolnay <dtolnay@gmail.com>"]
build = "build.rs"
autolib = false
@ -94,7 +94,7 @@ name = "test_transparent"
path = "tests/test_transparent.rs"
[dependencies.thiserror-impl]
version = "=1.0.65"
version = "=1.0.68"
[dev-dependencies.anyhow]
version = "1.0.73"

View File

@ -16,7 +16,7 @@ This library provides a convenient derive macro for the standard library's
thiserror = "1.0"
```
*Compiler support: requires rustc 1.56+*
*Compiler support: requires rustc 1.61+*
<br>

View File

@ -137,10 +137,7 @@ fn compile_probe(rustc_bootstrap: bool) -> bool {
fn cargo_env_var(key: &str) -> OsString {
env::var_os(key).unwrap_or_else(|| {
eprintln!(
"Environment variable ${} is not set during execution of build script",
key,
);
eprintln!("Environment variable ${key} is not set during execution of build script");
process::exit(1);
})
}

View File

@ -2,7 +2,7 @@ use core::fmt::Display;
use std::path::{self, Path, PathBuf};
#[doc(hidden)]
pub trait AsDisplay<'a> {
pub trait AsDisplay<'a>: Sealed {
// TODO: convert to generic associated type.
// https://github.com/dtolnay/thiserror/pull/253
type Target: Display;
@ -38,3 +38,9 @@ impl<'a> AsDisplay<'a> for PathBuf {
self.display()
}
}
#[doc(hidden)]
pub trait Sealed {}
impl<T: Display> Sealed for &T {}
impl Sealed for Path {}
impl Sealed for PathBuf {}

View File

@ -258,7 +258,7 @@
//!
//! [`anyhow`]: https://github.com/dtolnay/anyhow
#![doc(html_root_url = "https://docs.rs/thiserror/1.0.65")]
#![doc(html_root_url = "https://docs.rs/thiserror/1.0.68")]
#![allow(
clippy::module_name_repetitions,
clippy::needless_lifetimes,

View File

@ -1,6 +1,7 @@
#![allow(clippy::iter_cloned_collect, clippy::uninlined_format_args)]
use core::fmt::Display;
use std::path::PathBuf;
use thiserror::Error;
// Some of the elaborate cases from the rcc codebase, which is a C compiler in
@ -50,6 +51,7 @@ pub enum RustupError {
},
}
#[track_caller]
fn assert<T: Display>(expected: &str, value: T) {
assert_eq!(expected, value.to_string());
}
@ -86,3 +88,29 @@ fn test_rustup() {
},
);
}
// Regression test for https://github.com/dtolnay/thiserror/issues/335
#[test]
#[allow(non_snake_case)]
fn test_assoc_type_equality_constraint() {
pub trait Trait<T>: Display {
type A;
}
impl<T> Trait<T> for i32 {
type A = i32;
}
#[derive(Error, Debug)]
#[error("{A} {b}", b = &0 as &dyn Trait<i32, A = i32>)]
pub struct Error {
pub A: PathBuf,
}
assert(
"... 0",
Error {
A: PathBuf::from("..."),
},
);
}

View File

@ -1,4 +1,4 @@
error: expected string literal
error: expected string literal or `transparent`
--> tests/ui/concat-display.rs:8:17
|
8 | #[error(concat!("invalid ", $what))]

View File

@ -1 +0,0 @@
{"files":{"CHANGELOG.md":"a5906792b5a2953db3e47ec6c64eef004fb40849795bca69f0430a843d996e5b","Cargo.toml":"f69b2a2493b02d8278fe23017ac62f8fb20dba2175428179bfa5bf074b015d32","LICENSE-APACHE.md":"cfc7749b96f63bd31c3c42b5c471bf756814053e847c10f3eb003417bc523d30","LICENSE-MIT.md":"fd80a26fbb3f644af1fa994134446702932968519797227e07a1368dea80f0bc","LICENSE-ZLIB.md":"84b34dd7608f7fb9b17bd588a6bf392bf7de504e2716f024a77d89f1b145a151","README.md":"3e1fb710657302adceca0f3a5ca37670503fa376fdc60f1abfd441a023d1489b","benches/macros.rs":"5a5a6f260dec23ccd52de69b6ec5996db66c5deb67a47398cdf1f06a777580b3","benches/smallvec.rs":"7db69f165cf6ca2da1b4f306eb912aa50d7218b3120489a25fa1f0317a7f9fda","debug_metadata/README.md":"28d60ba44609690415f49d3dfddb199f04f2e7404bfbfcc1655b723447428c52","debug_metadata/tinyvec.natvis":"8161c399a4aeec63a1077b4f99ced085e8815c4dda548e93441f17ebedb8be11","rustfmt.toml":"c41ed86ba202d525e9163deeb2d9bb8fc7e3a8fbeab720def088317445add5ef","src/array.rs":"3ce5e89c628a0690e912f0c2cfb18d86d6316726071d5f70429bc1f1c7a66e65","src/array/const_generic_impl.rs":"3972d876f1b08dedd7915c62e8f60259168b2d7cd29b01b8834812bab96f47fb","src/array/generated_impl.rs":"e80c770c3702a7ff3efeb2a7bafd56102a3f5d8847180085de980f58693c8019","src/arrayvec.rs":"d9fd821108e020323cdf2150f0b839361cbfff0bb423a8e476f2342f4180e442","src/arrayvec_drain.rs":"dce67509b43f0e35cf0e20920983b04b17d0eb20272ed07c37b0d1b764d35094","src/lib.rs":"0e2c6f8a54b56804252898f72c366979093b30743acb1a48cb021dc2c91c3d6f","src/slicevec.rs":"98d8b19b731d0f91a75f7e5b514c7b9384b63fd4a7ff9abda698d217eb425b62","src/tinyvec.rs":"ad0815f95a69254c1d676d3d42ec2d152d41f043903d274acdb990761c547438","tests/arrayvec.rs":"2c90c7ee6da97b18d826fa685642e5a84320bb9b8b7393ad8d6014cfabd76655","tests/debugger_visualizer.rs":"d4de73d4875a6febaa317f103886eab0a95780124f746b218885559a0269beb0","tests/tinyvec.rs":"856c61421b1164b79f0e2560191555280d3153c7e91341359ab2074bfa9479fb"},"package":"445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f
2e9471b938"}

Some files were not shown because too many files have changed in this diff Show More