Bump dependencies

Laurenz 2022-06-10 23:53:20 +02:00
parent 6aff11057b
commit ed6550fdb0
22 changed files with 309 additions and 1417 deletions

Cargo.lock generated
View File

@ -23,15 +23,6 @@ dependencies = [
"memchr",
]
[[package]]
name = "approx"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0e60b75072ecd4168020818c0107f2857bb6c4e64252d8d3983f6263b40a5c3"
dependencies = [
"num-traits",
]
[[package]]
name = "arrayref"
version = "0.3.6"
@ -92,12 +83,6 @@ version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
[[package]]
name = "bumpalo"
version = "3.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899"
[[package]]
name = "bytemuck"
version = "1.9.1"
@ -150,28 +135,6 @@ dependencies = [
"matches",
]
[[package]]
name = "decorum"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "281759d3c8a14f5c3f0c49363be56810fcd7f910422f97f2db850c2920fde5cf"
dependencies = [
"approx",
"num-traits",
"serde",
"serde_derive",
]
[[package]]
name = "deflate"
version = "0.8.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73770f8e1fe7d64df17ca66ad28994a0a623ea497fa69486e14984e715c5d174"
dependencies = [
"adler32",
"byteorder",
]
[[package]]
name = "deflate"
version = "1.0.0"
@ -219,14 +182,12 @@ dependencies = [
[[package]]
name = "flate2"
version = "1.0.23"
version = "1.0.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b39522e96686d38f4bc984b9198e3a0613264abaebaff2c5c918bfa6b6da09af"
checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6"
dependencies = [
"cfg-if",
"crc32fast",
"libc",
"miniz_oxide 0.5.1",
"miniz_oxide",
]
[[package]]
@ -241,25 +202,6 @@ version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
[[package]]
name = "font"
version = "0.1.0"
source = "git+https://github.com/pdf-rs/font#f0ea791543140ccbe0d4fb20bac363ab66c53f68"
dependencies = [
"decorum",
"indexmap",
"itertools",
"log",
"nom",
"pathfinder_color",
"pathfinder_content",
"pathfinder_geometry",
"pdf_encoding",
"rand 0.7.3",
"slotmap",
"tuple",
]
[[package]]
name = "fxhash"
version = "0.2.1"
@ -269,19 +211,6 @@ dependencies = [
"byteorder",
]
[[package]]
name = "getrandom"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"wasi 0.9.0+wasi-snapshot-preview1",
"wasm-bindgen",
]
[[package]]
name = "getrandom"
version = "0.2.6"
@ -290,14 +219,18 @@ checksum = "9be70c98951c83b8d2f8f60d7065fa6d5146873094452a1008da8c2f1e4205ad"
dependencies = [
"cfg-if",
"libc",
"wasi 0.10.2+wasi-snapshot-preview1",
"wasi",
]
[[package]]
name = "hashbrown"
version = "0.11.2"
name = "gif"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab5ef0d4909ef3724cc8cce6ccc8572c5c817592e9285f5464f8e86f8bd3726e"
checksum = "c3a7187e78088aead22ceedeee99779455b23fc231fe13ec443f99bb71694e5b"
dependencies = [
"color_quant",
"weezl",
]
[[package]]
name = "hypher"
@ -315,28 +248,19 @@ dependencies = [
[[package]]
name = "image"
version = "0.23.14"
version = "0.24.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24ffcb7e7244a9bf19d35bf2883b9c080c4ced3c07a9895572178cdb8f13f6a1"
checksum = "28edd9d7bc256be2502e325ac0628bde30b7001b9b52e0abe31a1a9dc2701212"
dependencies = [
"bytemuck",
"byteorder",
"color_quant",
"gif",
"jpeg-decoder",
"num-iter",
"num-rational",
"num-traits",
"png 0.16.8",
]
[[package]]
name = "indexmap"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f647032dfaa1f8b6dc29bd3edb7bbef4861b8b8007ebb118d6db284fd59f6ee"
dependencies = [
"autocfg",
"hashbrown",
"png",
]
[[package]]
@ -348,12 +272,6 @@ dependencies = [
"either",
]
[[package]]
name = "itoa"
version = "0.4.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
[[package]]
name = "itoa"
version = "1.0.2"
@ -362,18 +280,9 @@ checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d"
[[package]]
name = "jpeg-decoder"
version = "0.1.22"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "229d53d58899083193af11e15917b5640cd40b29ff475a1fe4ef725deb02d0f2"
[[package]]
name = "js-sys"
version = "0.3.57"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "671a26f820db17c2a2750743f1dd03bafd15b98c9f30c7c2628c024c05d73397"
dependencies = [
"wasm-bindgen",
]
checksum = "9478aa10f73e7528198d75109c8be5cd7d15fb530238040148d5f9a22d4c5b3b"
[[package]]
name = "kurbo"
@ -390,12 +299,6 @@ version = "1.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
[[package]]
name = "lazycell"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
[[package]]
name = "lexical-core"
version = "0.7.6"
@ -415,22 +318,13 @@ version = "0.2.126"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
[[package]]
name = "line-wrap"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9"
dependencies = [
"safemem",
]
[[package]]
name = "lipsum"
version = "0.8.0"
source = "git+https://github.com/reknih/lipsum#c97ce95ba01ed2cce1d1b0b230b6b78295b0720b"
version = "0.8.2"
source = "git+https://github.com/reknih/lipsum#d6d8f2cba12f8dee8c8ed4af62858cdb061c0801"
dependencies = [
"rand 0.8.5",
"rand_chacha 0.3.1",
"rand",
"rand_chacha",
]
[[package]]
@ -456,37 +350,18 @@ checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d"
[[package]]
name = "memmap2"
version = "0.5.3"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "057a3db23999c867821a7a59feb06a578fcb03685e983dff90daf9e7d24ac08f"
checksum = "d5172b50c23043ff43dd53e51392f36519d9b35a8f3a410d30ece5d1aedd58ae"
dependencies = [
"libc",
]
[[package]]
name = "miniz_oxide"
version = "0.3.7"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "791daaae1ed6889560f8c4359194f56648355540573244a5448a83ba1ecc7435"
dependencies = [
"adler32",
]
[[package]]
name = "miniz_oxide"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a92518e98c078586bc6c934028adcca4c92a53d6a958196de835170a01d84e4b"
dependencies = [
"adler",
"autocfg",
]
[[package]]
name = "miniz_oxide"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082"
checksum = "6f5c75688da582b8ffc1f1799e9db273f32133c49e048f614d22ec3256773ccc"
dependencies = [
"adler",
]
@ -525,9 +400,9 @@ dependencies = [
[[package]]
name = "num-rational"
version = "0.3.2"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12ac428b1cb17fce6f731001d307d351ec70a6d202fc2e60f7d4c5e42d8f4f07"
checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a"
dependencies = [
"autocfg",
"num-integer",
@ -543,90 +418,23 @@ dependencies = [
"autocfg",
]
[[package]]
name = "num_threads"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2819ce041d2ee131036f4fc9d6ae7ae125a3a40e97ba64d04fe799ad9dabbb44"
dependencies = [
"libc",
]
[[package]]
name = "once_cell"
version = "1.10.0"
version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f3e037eac156d1775da914196f0f37741a274155e34a0b7e427c35d2a2ecb9"
[[package]]
name = "pathfinder_color"
version = "0.5.0"
source = "git+https://github.com/servo/pathfinder/#038a3476d803fd77a6e66a74117b5b8803a2cb49"
dependencies = [
"pathfinder_simd",
]
[[package]]
name = "pathfinder_content"
version = "0.5.0"
source = "git+https://github.com/servo/pathfinder/#038a3476d803fd77a6e66a74117b5b8803a2cb49"
dependencies = [
"arrayvec 0.5.2",
"bitflags",
"image",
"log",
"pathfinder_color",
"pathfinder_geometry",
"pathfinder_simd",
"smallvec",
]
[[package]]
name = "pathfinder_geometry"
version = "0.5.1"
source = "git+https://github.com/servo/pathfinder/#038a3476d803fd77a6e66a74117b5b8803a2cb49"
dependencies = [
"log",
"pathfinder_simd",
]
[[package]]
name = "pathfinder_simd"
version = "0.5.1"
source = "git+https://github.com/servo/pathfinder/#038a3476d803fd77a6e66a74117b5b8803a2cb49"
dependencies = [
"rustc_version",
]
checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225"
[[package]]
name = "pdf-writer"
version = "0.4.1"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36d760a6f2ac90811cba1006a298e8a7e5ce2c922bb5dc7f7000911a4a6b60f4"
checksum = "249f9b33a3192626f2cd9f4b0cd66c1ec32d65968d58cf4d8239977feddddead"
dependencies = [
"bitflags",
"itoa 0.4.8",
"itoa",
"ryu",
]
[[package]]
name = "pdf_encoding"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed7468173909bb32dbc74ca454c82dfdfe994ad1133ddf78d6c31715c9b88c40"
dependencies = [
"lazy_static",
]
[[package]]
name = "pest"
version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "10f4872ae94d7b90ae48754df22fd42ad52ce740b8f370b03da4835417403e53"
dependencies = [
"ucd-trie",
]
[[package]]
name = "pico-args"
version = "0.4.2"
@ -636,37 +444,11 @@ checksum = "db8bcd96cb740d03149cbad5518db9fd87126a10ab519c011893b1754134c468"
[[package]]
name = "pixglyph"
version = "0.1.0"
source = "git+https://github.com/typst/pixglyph#8ee0d4517d887125e9184916780ac230e40a042a"
source = "git+https://github.com/typst/pixglyph#f1aae13ae622f4640a1cfac231525f02978fc305"
dependencies = [
"ttf-parser",
]
[[package]]
name = "plist"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd39bc6cdc9355ad1dc5eeedefee696bb35c34caf21768741e81826c0bbd7225"
dependencies = [
"base64",
"indexmap",
"line-wrap",
"serde",
"time",
"xml-rs",
]
[[package]]
name = "png"
version = "0.16.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c3287920cb847dee3de33d301c463fba14dda99db24214ddf93f83d3021f4c6"
dependencies = [
"bitflags",
"crc32fast",
"deflate 0.8.6",
"miniz_oxide 0.3.7",
]
[[package]]
name = "png"
version = "0.17.5"
@ -675,8 +457,8 @@ checksum = "dc38c0ad57efb786dd57b9864e5b18bae478c00c824dc55a38bbc9da95dde3ba"
dependencies = [
"bitflags",
"crc32fast",
"deflate 1.0.0",
"miniz_oxide 0.5.1",
"deflate",
"miniz_oxide",
]
[[package]]
@ -703,36 +485,13 @@ dependencies = [
"proc-macro2",
]
[[package]]
name = "rand"
version = "0.7.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03"
dependencies = [
"getrandom 0.1.16",
"libc",
"rand_chacha 0.2.2",
"rand_core 0.5.1",
"rand_hc",
]
[[package]]
name = "rand"
version = "0.8.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404"
dependencies = [
"rand_core 0.6.3",
]
[[package]]
name = "rand_chacha"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402"
dependencies = [
"ppv-lite86",
"rand_core 0.5.1",
"rand_core",
]
[[package]]
@ -742,16 +501,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88"
dependencies = [
"ppv-lite86",
"rand_core 0.6.3",
]
[[package]]
name = "rand_core"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19"
dependencies = [
"getrandom 0.1.16",
"rand_core",
]
[[package]]
@ -760,15 +510,6 @@ version = "0.6.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7"
[[package]]
name = "rand_hc"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
dependencies = [
"rand_core 0.5.1",
]
[[package]]
name = "rctree"
version = "0.4.0"
@ -790,16 +531,16 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b"
dependencies = [
"getrandom 0.2.6",
"getrandom",
"redox_syscall",
"thiserror",
]
[[package]]
name = "regex"
version = "1.5.5"
version = "1.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a11647b6b25ff05a515cb92c365cec08801e83423a235b51e231e1808747286"
checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1"
dependencies = [
"aho-corasick",
"memchr",
@ -808,20 +549,21 @@ dependencies = [
[[package]]
name = "regex-syntax"
version = "0.6.25"
version = "0.6.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f497285884f3fcff424ffc933e56d7cbca511def0c9831a7f9b5f6153e3cc89b"
checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64"
[[package]]
name = "resvg"
version = "0.20.0"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d94a32ca845cdda27237a40beba9bd3d3858ac8fc5356eb9442bdeecfe34d9e0"
checksum = "2e702d1e8e00a3a0717b96244cba840f34f542d8f23097c8903266c4e2975658"
dependencies = [
"gif",
"jpeg-decoder",
"log",
"pico-args",
"png 0.17.5",
"png",
"rgb",
"svgtypes",
"tiny-skia",
@ -831,9 +573,11 @@ dependencies = [
[[package]]
name = "rex"
version = "0.1.2"
source = "git+https://github.com/laurmaedje/ReX#7362b0cbb229211d6206198d80382a9b23eda993"
source = "git+https://github.com/laurmaedje/ReX#6e4bada20d2b1685940e2d752630cb9ad3d797b0"
dependencies = [
"font",
"itertools",
"nom",
"ttf-parser",
"unicode-math",
]
@ -855,20 +599,11 @@ dependencies = [
"xmlparser",
]
[[package]]
name = "rustc_version"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0dfe2087c51c460008730de8b57e6a320782fbfb312e1f4d520e6c6fae155ee"
dependencies = [
"semver",
]
[[package]]
name = "rustybuzz"
version = "0.4.0"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44561062e583c4873162861261f16fd1d85fe927c4904d71329a4fe43dc355ef"
checksum = "25ff94f20221325d000e552781713e53b0d85c1d9551b6f420d12daf5a08eace"
dependencies = [
"bitflags",
"bytemuck",
@ -895,12 +630,6 @@ dependencies = [
"bytemuck",
]
[[package]]
name = "safemem"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
[[package]]
name = "same-file"
version = "1.0.6"
@ -910,24 +639,6 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "semver"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f301af10236f6df4160f7c3f04eec6dbc70ace82d23326abad5edee88801c6b6"
dependencies = [
"semver-parser",
]
[[package]]
name = "semver-parser"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b0bef5b7f9e0df16536d3961cfb6e84331c065b4066afb39768d0e319411f7"
dependencies = [
"pest",
]
[[package]]
name = "serde"
version = "1.0.137"
@ -954,7 +665,7 @@ version = "1.0.81"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c"
dependencies = [
"itoa 1.0.2",
"itoa",
"ryu",
"serde",
]
@ -974,12 +685,6 @@ version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7bd3e3206899af3f8b12af284fafc038cc1dc2b41d1b89dd17297221c5d225de"
[[package]]
name = "slotmap"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bf34684c5767b87de9119790e92e9a1d60056be2ceeaf16a8e6ef13082aeab1"
[[package]]
name = "smallvec"
version = "1.8.0"
@ -992,14 +697,19 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
[[package]]
name = "subsetter"
version = "0.1.0"
source = "git+https://github.com/typst/subsetter#51c0eaf166631917bb29b0a1bddac276472c8e6c"
[[package]]
name = "svg2pdf"
version = "0.2.1"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b7feae49dae1a460ecd13b50e4389204672daac9c7133fd830132f44486ab84d"
checksum = "2f7148684e1eb24d211bf6a5bd6dad43957d665b1fcfc77daf9a72961fe41d33"
dependencies = [
"image",
"miniz_oxide 0.4.4",
"miniz_oxide",
"pdf-writer",
"usvg",
]
@ -1015,9 +725,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.95"
version = "1.0.96"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fbaf6116ab8924f39d52792136fb74fd60a80194cf1b1c6ffa6453eef1c3f942"
checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf"
dependencies = [
"proc-macro2",
"quote",
@ -1026,9 +736,9 @@ dependencies = [
[[package]]
name = "syntect"
version = "4.6.0"
version = "5.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b20815bbe80ee0be06e6957450a841185fcf690fe0178f14d77a05ce2caa031"
checksum = "c6c454c27d9d7d9a84c7803aaa3c50cd088d2906fe3c6e42da3209aa623576a8"
dependencies = [
"bincode",
"bitflags",
@ -1036,12 +746,12 @@ dependencies = [
"flate2",
"fnv",
"lazy_static",
"lazycell",
"plist",
"once_cell",
"regex-syntax",
"serde",
"serde_derive",
"serde_json",
"thiserror",
"walkdir",
]
@ -1074,45 +784,25 @@ dependencies = [
"syn",
]
[[package]]
name = "time"
version = "0.3.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd"
dependencies = [
"itoa 1.0.2",
"libc",
"num_threads",
]
[[package]]
name = "tiny-skia"
version = "0.6.3"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1bcfd4339bdd4545eabed74b208f2f1555f2e6540fb58135c01f46c0940aa138"
checksum = "78b3e1db967020dd509b49cecc024025beba2b1b6cf204618610ba266269d6b9"
dependencies = [
"arrayref",
"arrayvec 0.5.2",
"bytemuck",
"cfg-if",
"png 0.17.5",
"png",
"safe_arch",
]
[[package]]
name = "ttf-parser"
version = "0.12.3"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ae2f58a822f08abdaf668897e96a5656fe72f5a9ce66422423e8849384872e6"
[[package]]
name = "tuple"
version = "0.5.1"
source = "git+https://github.com/s3bk/tuple/#fdf8b4400ffb10506c711018a3cb918412a3c8c1"
dependencies = [
"num-traits",
"serde",
]
checksum = "42d4b50cba812f0f04f0707bb6a0eaa5fae4ae05d90fc2a377998d2f21e77a1c"
[[package]]
name = "typed-arena"
@ -1127,7 +817,6 @@ dependencies = [
"bytemuck",
"codespan-reporting",
"dirs",
"either",
"flate2",
"fxhash",
"hypher",
@ -1136,7 +825,7 @@ dependencies = [
"kurbo",
"lipsum",
"memmap2",
"miniz_oxide 0.4.4",
"miniz_oxide",
"once_cell",
"pdf-writer",
"pico-args",
@ -1148,6 +837,7 @@ dependencies = [
"rustybuzz",
"same-file",
"serde",
"subsetter",
"svg2pdf",
"syntect",
"tiny-skia",
@ -1173,12 +863,6 @@ dependencies = [
"syn",
]
[[package]]
name = "ucd-trie"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
[[package]]
name = "unicode-bidi"
version = "0.3.8"
@ -1244,13 +928,14 @@ checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04"
[[package]]
name = "unscanny"
version = "0.1.0"
source = "git+https://github.com/typst/unscanny#168fa7a05fe2931f86a788e26d7bfb67185767b5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9df2af067a7953e9c3831320f35c1cc0600c30d44d9f7a12b01db1cd88d6b47"
[[package]]
name = "usvg"
version = "0.20.0"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00f064d38f79ff69e3160e2fba884e4ede897061c15178041a3976371c68cab1"
checksum = "a261d60a7215fa339482047cc3dafd4e22e2bf34396aaebef2b707355bbb39c0"
dependencies = [
"base64",
"data-url",
@ -1283,12 +968,6 @@ dependencies = [
"winapi-util",
]
[[package]]
name = "wasi"
version = "0.9.0+wasi-snapshot-preview1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
[[package]]
name = "wasi"
version = "0.10.2+wasi-snapshot-preview1"
@ -1296,58 +975,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
[[package]]
name = "wasm-bindgen"
version = "0.2.80"
name = "weezl"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27370197c907c55e3f1a9fbe26f44e937fe6451368324e009cba39e139dc08ad"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
]
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53e04185bfa3a779273da532f5025e33398409573f348985af9a1cbf3774d3f4"
dependencies = [
"bumpalo",
"lazy_static",
"log",
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17cae7ff784d7e83a2fe7611cfe766ecf034111b49deb850a3dc7699c08251f5"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
]
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99ec0dc7a4756fffc231aab1b9f2f578d23cd391390ab27f952ae0c9b3ece20b"
dependencies = [
"proc-macro2",
"quote",
"syn",
"wasm-bindgen-backend",
"wasm-bindgen-shared",
]
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.80"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d554b7f530dee5964d9a9468d95c1f8b8acae4f282807e7d27d4b03099a46744"
checksum = "9c97e489d8f836838d497091de568cf16b117486d529ec5579233521065bd5e4"
[[package]]
name = "winapi"
@ -1386,12 +1017,6 @@ version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a67300977d3dc3f8034dae89778f502b6ba20b269527b3223ba59c0cf393bb8a"
[[package]]
name = "xml-rs"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2d7d3948613f75c98fd9328cfdcc45acc4d360655289d0a7d4ec931392200a3"
[[package]]
name = "xmlparser"
version = "0.13.3"

View File

@ -15,20 +15,19 @@ typst-macros = { path = "./macros" }
# Utilities
bytemuck = "1"
either = "1"
fxhash = "0.2"
lipsum = { git = "https://github.com/reknih/lipsum", default-features = false }
once_cell = "1"
serde = { version = "1", features = ["derive"] }
typed-arena = "2"
unscanny = { git = "https://github.com/typst/unscanny" }
unscanny = "0.1"
regex = "1"
# Text and font handling
hypher = "0.1"
kurbo = "0.8"
ttf-parser = "0.12"
rustybuzz = "0.4"
ttf-parser = "0.15"
rustybuzz = "0.5"
unicode-bidi = "0.3.5"
unicode-segmentation = "1"
unicode-xid = "0.2"
@ -36,22 +35,23 @@ unicode-script = "0.5"
xi-unicode = "0.3"
# Raster and vector graphics handling
image = { version = "0.23", default-features = false, features = ["png", "jpeg"] }
usvg = { version = "0.20", default-features = false }
image = { version = "0.24", default-features = false, features = ["png", "jpeg", "gif"] }
usvg = { version = "0.22", default-features = false }
# External implementation of user-facing features
syntect = { version = "4.6", default-features = false, features = ["dump-load", "parsing", "regex-fancy", "assets"] }
syntect = { version = "5", default-features = false, features = ["default-syntaxes", "regex-fancy"] }
rex = { git = "https://github.com/laurmaedje/ReX" }
# PDF export
miniz_oxide = "0.4"
pdf-writer = "0.4"
svg2pdf = "0.2"
miniz_oxide = "0.5"
pdf-writer = "0.6"
subsetter = { git = "https://github.com/typst/subsetter" }
svg2pdf = "0.4"
# Raster export / rendering
tiny-skia = "0.6.2"
pixglyph = { git = "https://github.com/typst/pixglyph" }
resvg = { version = "0.20", default-features = false }
resvg = { version = "0.22", default-features = false }
roxmltree = "0.14"
flate2 = "1"

View File

@ -1,9 +1,9 @@
//! Diagnostics.
use std::fmt::{self, Display, Formatter};
use std::ops::Range;
use crate::syntax::{Span, Spanned};
use crate::Context;
/// Early-return with a [`TypError`].
#[macro_export]
@ -39,8 +39,6 @@ pub type StrResult<T> = Result<T, String>;
pub struct Error {
/// The erroneous node in the source code.
pub span: Span,
/// Where in the node the error should be annotated.
pub pos: ErrorPos,
/// A diagnostic message describing the problem.
pub message: String,
/// The trace of function calls leading to the error.
@ -52,35 +50,12 @@ impl Error {
pub fn new(span: Span, message: impl Into<String>) -> Self {
Self {
span,
pos: ErrorPos::Full,
trace: vec![],
message: message.into(),
}
}
}
/// Where in a node an error should be annotated.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum ErrorPos {
/// At the start of the node.
Start,
/// Over the full width of the node.
Full,
/// At the end of the node.
End,
}
impl ErrorPos {
/// Apply this to a node's byte range.
pub fn apply(self, range: Range<usize>) -> Range<usize> {
match self {
ErrorPos::Start => range.start .. range.start,
ErrorPos::Full => range,
ErrorPos::End => range.end .. range.end,
}
}
}
/// A part of an error's [trace](Error::trace).
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum Tracepoint {
@ -124,18 +99,25 @@ where
/// Enrich a [`TypResult`] with a tracepoint.
pub trait Trace<T> {
/// Add the tracepoint to all errors that lie outside the `span`.
fn trace<F>(self, make_point: F, span: Span) -> Self
fn trace<F>(self, ctx: &Context, make_point: F, span: Span) -> Self
where
F: Fn() -> Tracepoint;
}
impl<T> Trace<T> for TypResult<T> {
fn trace<F>(self, make_point: F, span: Span) -> Self
fn trace<F>(self, ctx: &Context, make_point: F, span: Span) -> Self
where
F: Fn() -> Tracepoint,
{
self.map_err(|mut errors| {
let range = ctx.sources.range(span);
for error in errors.iter_mut() {
// Skip traces that surround the error.
let error_range = ctx.sources.range(error.span);
if range.start <= error_range.start && range.end >= error_range.end {
continue;
}
error.trace.push(Spanned::new(make_point(), span));
}
errors
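
A minimal standalone sketch of the containment test above (should_attach is an illustrative name, not part of the codebase): a tracepoint is attached only when its resolved byte range does not fully enclose the error's range.

use std::ops::Range;

/// Whether a tracepoint covering `trace` should be attached to an error
/// covering `error`. Traces that fully surround the error are skipped.
fn should_attach(trace: &Range<usize>, error: &Range<usize>) -> bool {
    !(trace.start <= error.start && trace.end >= error.end)
}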

View File

@ -606,7 +606,7 @@ impl Eval for FuncCall {
Value::Dict(dict) => dict.get(&args.into_key()?).at(self.span())?.clone(),
Value::Func(func) => {
let point = || Tracepoint::Call(func.name().map(ToString::to_string));
func.call(vm, args).trace(point, self.span())?
func.call(vm, args).trace(vm.ctx, point, self.span())?
}
v => bail!(
@ -629,12 +629,13 @@ impl Eval for MethodCall {
Ok(if methods::is_mutating(&method) {
let args = self.args().eval(vm)?;
let mut value = self.receiver().access(vm)?;
methods::call_mut(&mut value, &method, args, span).trace(point, span)?;
methods::call_mut(&mut value, &method, args, span)
.trace(vm.ctx, point, span)?;
Value::None
} else {
let value = self.receiver().eval(vm)?;
let args = self.args().eval(vm)?;
methods::call(vm, value, &method, args, span).trace(point, span)?
methods::call(vm, value, &method, args, span).trace(vm.ctx, point, span)?
})
}
}
@ -980,7 +981,9 @@ fn import(vm: &mut Machine, path: &str, span: Span) -> TypResult<Module> {
// Evaluate the file.
let route = vm.route.clone();
let module = evaluate(vm.ctx, id, route).trace(|| Tracepoint::Import, span)?;
let module =
evaluate(vm.ctx, id, route).trace(vm.ctx, || Tracepoint::Import, span)?;
vm.deps.extend(module.deps.iter().cloned());
Ok(module)

View File

@ -2,7 +2,6 @@
mod pdf;
mod render;
mod subset;
pub use pdf::*;
pub use render::*;

View File

@ -3,6 +3,7 @@
use std::cmp::Eq;
use std::collections::{BTreeMap, HashMap, HashSet};
use std::hash::Hash;
use std::io::Cursor;
use std::sync::Arc;
use image::{DynamicImage, GenericImageView, ImageFormat, ImageResult, Rgba};
@ -14,7 +15,6 @@ use pdf_writer::writers::ColorSpace;
use pdf_writer::{Content, Filter, Finish, Name, PdfWriter, Rect, Ref, Str, TextStr};
use ttf_parser::{name_id, GlyphId, Tag};
use super::subset::subset;
use crate::font::{find_name, FaceId, FontStore};
use crate::frame::{Destination, Element, Frame, Group, Role, Text};
use crate::geom::{
@ -24,6 +24,7 @@ use crate::geom::{
use crate::image::{Image, ImageId, ImageStore, RasterImage};
use crate::library::prelude::EcoString;
use crate::library::text::Lang;
use crate::util::SliceExt;
use crate::Context;
/// Export a collection of frames into a PDF file.
@ -39,7 +40,7 @@ pub fn pdf(ctx: &Context, frames: &[Arc<Frame>]) -> Vec<u8> {
/// Identifies the color space definitions.
const SRGB: Name<'static> = Name(b"srgb");
const SRGB_GRAY: Name<'static> = Name(b"srgbgray");
const D65_GRAY: Name<'static> = Name(b"d65gray");
/// An exporter for a whole PDF document.
struct PdfExporter<'a> {
@ -155,23 +156,37 @@ impl<'a> PdfExporter<'a> {
// Write the CID font referencing the font descriptor.
let mut cid = self.writer.cid_font(cid_ref);
cid.subtype(subtype)
.base_font(base_font)
.system_info(system_info)
.font_descriptor(descriptor_ref)
.widths()
.consecutive(0, {
let num_glyphs = ttf.number_of_glyphs();
(0 .. num_glyphs).map(|g| {
let x = ttf.glyph_hor_advance(GlyphId(g)).unwrap_or(0);
face.to_em(x).to_font_units()
})
});
cid.subtype(subtype);
cid.base_font(base_font);
cid.system_info(system_info);
cid.font_descriptor(descriptor_ref);
cid.default_width(0.0);
if subtype == CidFontType::Type2 {
cid.cid_to_gid_map_predefined(Name(b"Identity"));
}
// Extract the widths of all glyphs.
let num_glyphs = ttf.number_of_glyphs();
let mut widths = vec![0.0; num_glyphs as usize];
for &g in glyphs {
let x = ttf.glyph_hor_advance(GlyphId(g)).unwrap_or(0);
widths[g as usize] = face.to_em(x).to_font_units();
}
// Write all non-zero glyph widths.
let mut first = 0;
let mut width_writer = cid.widths();
for (w, group) in widths.group_by_key(|&w| w) {
let end = first + group.len();
if w != 0.0 {
let last = end - 1;
width_writer.same(first as u16, last as u16, w);
}
first = end;
}
width_writer.finish();
cid.finish();
let mut flags = FontFlags::empty();
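
As an aside, the run-length grouping above can be sketched without the SliceExt::group_by_key helper. The function below is illustrative only, not part of the exporter: it collects (first, last, width) runs of consecutive equal advances and skips zero-width runs, which is what feeds the CID font's /W array while zero widths fall back to the default width set via default_width(0.0).

/// Collect runs of consecutive equal widths as (first gid, last gid, width).
/// Runs with width zero are omitted; they are covered by the default width.
fn width_runs(widths: &[f32]) -> Vec<(u16, u16, f32)> {
    let mut runs = Vec::new();
    let mut first = 0usize;
    while first < widths.len() {
        let w = widths[first];
        let mut end = first + 1;
        while end < widths.len() && widths[end] == w {
            end += 1;
        }
        if w != 0.0 {
            runs.push((first as u16, (end - 1) as u16, w));
        }
        first = end;
    }
    runs
}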
@ -217,7 +232,9 @@ impl<'a> PdfExporter<'a> {
// Compute a reverse mapping from glyphs to unicode.
let cmap = {
let mut mapping = BTreeMap::new();
for subtable in ttf.character_mapping_subtables() {
for subtable in
ttf.tables().cmap.into_iter().flat_map(|table| table.subtables)
{
if subtable.is_unicode() {
subtable.codepoints(|n| {
if let Some(c) = std::char::from_u32(n) {
@ -245,16 +262,24 @@ impl<'a> PdfExporter<'a> {
.filter(Filter::FlateDecode);
// Subset and write the face's bytes.
let buffer = face.buffer();
let subsetted = subset(buffer, face.index(), glyphs);
let data = deflate(subsetted.as_deref().unwrap_or(buffer));
let mut font_stream = self.writer.stream(data_ref, &data);
let data = face.buffer();
let subsetted = {
let glyphs: Vec<_> = glyphs.iter().copied().collect();
let profile = subsetter::Profile::pdf(&glyphs);
subsetter::subset(data, face.index(), profile)
};
// Compress and write the face's bytes.
let data = subsetted.as_deref().unwrap_or(data);
let data = deflate(data);
let mut stream = self.writer.stream(data_ref, &data);
stream.filter(Filter::FlateDecode);
if subtype == CidFontType::Type0 {
font_stream.pair(Name(b"Subtype"), Name(b"OpenType"));
stream.pair(Name(b"Subtype"), Name(b"OpenType"));
}
font_stream.filter(Filter::FlateDecode).finish();
stream.finish();
}
}
@ -346,13 +371,15 @@ impl<'a> PdfExporter<'a> {
.uri(Str(uri.as_str().as_bytes()));
}
Destination::Internal(loc) => {
let index = loc.page - 1;
let height = self.page_heights[index];
link.action()
.action_type(ActionType::GoTo)
.destination_direct()
.page(self.page_refs[index])
.xyz(loc.pos.x.to_f32(), height - loc.pos.y.to_f32(), None);
if (1 ..= self.page_heights.len()).contains(&loc.page) {
let index = loc.page - 1;
let height = self.page_heights[index];
link.action()
.action_type(ActionType::GoTo)
.destination_direct()
.page(self.page_refs[index])
.xyz(loc.pos.x.to_f32(), height - loc.pos.y.to_f32(), None);
}
}
}
}
@ -360,9 +387,9 @@ impl<'a> PdfExporter<'a> {
annotations.finish();
page_writer.finish();
self.writer
.stream(content_id, &deflate(&page.content.finish()))
.filter(Filter::FlateDecode);
let data = page.content.finish();
let data = deflate(&data);
self.writer.stream(content_id, &data).filter(Filter::FlateDecode);
}
fn write_page_tree(&mut self) {
@ -374,7 +401,7 @@ impl<'a> PdfExporter<'a> {
let mut resources = pages.resources();
let mut spaces = resources.color_spaces();
spaces.insert(SRGB).start::<ColorSpace>().srgb();
spaces.insert(SRGB_GRAY).start::<ColorSpace>().srgb_gray();
spaces.insert(D65_GRAY).start::<ColorSpace>().d65_gray();
spaces.finish();
let mut fonts = resources.fonts();
@ -855,7 +882,7 @@ impl<'a, 'b> PageExporter<'a, 'b> {
let Paint::Solid(color) = fill;
match color {
Color::Luma(c) => {
self.set_fill_color_space(SRGB_GRAY);
self.set_fill_color_space(D65_GRAY);
self.content.set_fill_gray(f(c.0));
}
Color::Rgba(c) => {
@ -883,7 +910,7 @@ impl<'a, 'b> PageExporter<'a, 'b> {
let Paint::Solid(color) = stroke.paint;
match color {
Color::Luma(c) => {
self.set_stroke_color_space(SRGB_GRAY);
self.set_stroke_color_space(D65_GRAY);
self.content.set_stroke_gray(f(c.0));
}
Color::Rgba(c) => {
@ -916,16 +943,16 @@ fn encode_image(img: &RasterImage) -> ImageResult<(Vec<u8>, Filter, bool)> {
Ok(match (img.format, &img.buf) {
// 8-bit gray JPEG.
(ImageFormat::Jpeg, DynamicImage::ImageLuma8(_)) => {
let mut data = vec![];
let mut data = Cursor::new(vec![]);
img.buf.write_to(&mut data, img.format)?;
(data, Filter::DctDecode, false)
(data.into_inner(), Filter::DctDecode, false)
}
// 8-bit Rgb JPEG (Cmyk JPEGs get converted to Rgb earlier).
(ImageFormat::Jpeg, DynamicImage::ImageRgb8(_)) => {
let mut data = vec![];
let mut data = Cursor::new(vec![]);
img.buf.write_to(&mut data, img.format)?;
(data, Filter::DctDecode, true)
(data.into_inner(), Filter::DctDecode, true)
}
// TODO: Encode flate streams with PNG-predictor?
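
The Cursor wrapper above is needed because image 0.24's write_to expects a writer implementing both Write and Seek, which a bare Vec<u8> does not provide. A minimal sketch of the pattern (encode_jpeg is a hypothetical helper, not part of the exporter):

use std::io::Cursor;
use image::{DynamicImage, ImageFormat, ImageResult};

/// Encode an image into an in-memory JPEG buffer.
fn encode_jpeg(img: &DynamicImage) -> ImageResult<Vec<u8>> {
    // Cursor<Vec<u8>> supplies the Seek implementation that write_to requires.
    let mut data = Cursor::new(Vec::new());
    img.write_to(&mut data, ImageFormat::Jpeg)?;
    Ok(data.into_inner())
}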

View File

@ -1,814 +0,0 @@
//! OpenType font subsetting.
use std::borrow::Cow;
use std::collections::HashSet;
use std::iter;
use std::ops::Range;
use ttf_parser::parser::{
FromData, LazyArray16, LazyArray32, Offset, Offset16, Offset32, Stream, F2DOT14,
};
use ttf_parser::Tag;
/// Subset a font face for PDF embedding.
///
/// This will remove the outlines of all glyphs that are not part of the given
/// slice. Furthermore, all character mapping and layout tables are dropped as
/// shaping has already happened.
///
/// Returns `None` if the font data is fatally broken (in which case
/// `ttf-parser` would probably already have rejected the font, so this
/// shouldn't happen if the font data has already passed through `ttf-parser`).
pub fn subset(data: &[u8], index: u32, glyphs: &HashSet<u16>) -> Option<Vec<u8>> {
Some(Subsetter::new(data, index, glyphs)?.subset())
}
struct Subsetter<'a> {
data: &'a [u8],
magic: Magic,
records: LazyArray16<'a, TableRecord>,
num_glyphs: u16,
glyphs: &'a HashSet<u16>,
tables: Vec<(Tag, Cow<'a, [u8]>)>,
}
impl<'a> Subsetter<'a> {
/// Parse the font header and create a new subsetter.
fn new(data: &'a [u8], index: u32, glyphs: &'a HashSet<u16>) -> Option<Self> {
let mut s = Stream::new(data);
let mut magic = s.read::<Magic>()?;
if magic == Magic::Collection {
// Parse font collection header if necessary.
s.skip::<u32>();
let num_faces = s.read::<u32>()?;
let offsets = s.read_array32::<Offset32>(num_faces)?;
let offset = offsets.get(index)?.to_usize();
s = Stream::new_at(data, offset)?;
magic = s.read::<Magic>()?;
if magic == Magic::Collection {
return None;
}
}
// Read number of table records.
let count = s.read::<u16>()?;
// Skip boring parts of header.
s.skip::<u16>();
s.skip::<u16>();
s.skip::<u16>();
// Read the table records.
let records = s.read_array16::<TableRecord>(count)?;
let mut subsetter = Self {
data,
magic,
records,
num_glyphs: 0,
glyphs,
tables: vec![],
};
// Find out number of glyphs.
let maxp = subsetter.table_data(MAXP)?;
subsetter.num_glyphs = Stream::read_at::<u16>(maxp, 4)?;
Some(subsetter)
}
/// Encode the subsetted font file.
fn subset(mut self) -> Vec<u8> {
self.subset_tables();
// Start writing a brand new font.
let mut w = Vec::new();
w.write(self.magic);
// Write table directory.
let count = self.tables.len() as u16;
let entry_selector = (count as f32).log2().floor() as u16;
let search_range = 2u16.pow(u32::from(entry_selector)) * 16;
let range_shift = count * 16 - search_range;
w.write(count);
w.write(search_range);
w.write(entry_selector);
w.write(range_shift);
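// Example: with count = 10 tables, entry_selector = floor(log2(10)) = 3,
// search_range = 2^3 * 16 = 128, and range_shift = 10 * 16 - 128 = 32,
// matching the binary-search fields of the OpenType table directory.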
// Tables shall be sorted by tag.
self.tables.sort_by_key(|&(tag, _)| tag);
// This variable will hold the offset to the checksum adjustment field
// in the head table, which we'll have to write in the end (after
// checksumming the whole font).
let mut checksum_adjustment_offset = None;
// Write table records.
let mut offset = 12 + self.tables.len() * TableRecord::SIZE;
for (tag, data) in &mut self.tables {
if *tag == HEAD {
// Zero out checksum field in head table.
data.to_mut()[8 .. 12].fill(0);
checksum_adjustment_offset = Some(offset + 8);
}
let len = data.len();
w.write(TableRecord {
tag: *tag,
checksum: checksum(data),
offset: offset as u32,
length: len as u32,
});
// Increase offset, plus padding zeros to align to 4 bytes.
offset += len;
while offset % 4 != 0 {
offset += 1;
}
}
// Write tables.
for (_, data) in &self.tables {
// Write data plus padding zeros to align to 4 bytes.
w.extend(data.as_ref());
while w.len() % 4 != 0 {
w.push(0);
}
}
// Write checksumAdjustment field in head table.
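// Per the OpenType spec, checkSumAdjustment is 0xB1B0AFBA minus the checksum
// of the entire font, computed with the field itself zeroed (as done above).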
if let Some(i) = checksum_adjustment_offset {
let sum = checksum(&w);
let val = 0xB1B0AFBA_u32.wrapping_sub(sum);
w[i .. i + 4].copy_from_slice(&val.to_be_bytes());
}
w
}
/// Subset, drop and copy tables.
fn subset_tables(&mut self) {
// Remove unnecessary name information.
let handled_post = post::subset(self).is_some();
// Remove unnecessary glyph outlines.
let handled_glyf_loca = glyf::subset(self).is_some();
let handled_cff1 = cff::subset_v1(self).is_some();
for record in self.records {
// If `handled` is true, we don't take any further action, if it's
// false, we copy the table.
#[rustfmt::skip]
let handled = match &record.tag.to_bytes() {
// Drop: Glyphs are already mapped.
b"cmap" => true,
// Drop: Layout is already finished.
b"GPOS" | b"GSUB" | b"BASE" | b"JSTF" | b"MATH" |
b"ankr" | b"kern" | b"kerx" | b"mort" | b"morx" |
b"trak" | b"bsln" | b"just" | b"feat" | b"prop" => true,
// Drop: They don't render in PDF viewers anyway.
// TODO: We probably have to convert fonts with one of these
// tables into Type 3 fonts where glyphs are described by either
// PDF graphics operators or XObject images.
b"CBDT" | b"CBLC" | b"COLR" | b"CPAL" | b"sbix" | b"SVG " => true,
// Subsetted: Subsetting happens outside the loop, but if it
// failed, we simply copy the affected table(s).
b"post" => handled_post,
b"loca" | b"glyf" => handled_glyf_loca,
b"CFF " => handled_cff1,
// Copy: All other tables are simply copied.
_ => false,
};
if !handled {
if let Some(data) = self.table_data(record.tag) {
self.push_table(record.tag, data);
}
}
}
}
/// Retrieve the table data for a table.
fn table_data(&mut self, tag: Tag) -> Option<&'a [u8]> {
let (_, record) = self.records.binary_search_by(|record| record.tag.cmp(&tag))?;
let start = record.offset as usize;
let end = start + (record.length as usize);
self.data.get(start .. end)
}
/// Push a new table.
fn push_table(&mut self, tag: Tag, data: impl Into<Cow<'a, [u8]>>) {
self.tables.push((tag, data.into()));
}
}
// Some common tags.
const HEAD: Tag = Tag::from_bytes(b"head");
const MAXP: Tag = Tag::from_bytes(b"maxp");
const POST: Tag = Tag::from_bytes(b"post");
const LOCA: Tag = Tag::from_bytes(b"loca");
const GLYF: Tag = Tag::from_bytes(b"glyf");
const CFF1: Tag = Tag::from_bytes(b"CFF ");
/// Calculate a checksum over the sliced data as a sum of u32s. If the data
/// length is not a multiple of four, it is treated as if padded with zero to a
/// length that is a multiple of four.
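/// For example, checksum(&[0, 0, 0, 1, 0, 0, 0, 2, 0xFF]) sums the u32s 1, 2,
/// and 0xFF000000 (the trailing byte is zero-padded), yielding 0xFF000003.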
fn checksum(data: &[u8]) -> u32 {
let mut sum = 0u32;
for chunk in data.chunks(4) {
let mut bytes = [0; 4];
bytes[.. chunk.len()].copy_from_slice(chunk);
sum = sum.wrapping_add(u32::from_be_bytes(bytes));
}
sum
}
/// Zero all bytes in a slice.
fn memzero(slice: &mut [u8]) {
for byte in slice {
*byte = 0;
}
}
/// Convenience trait for writing into a byte buffer.
trait BufExt {
fn write<T: ToData>(&mut self, v: T);
}
impl BufExt for Vec<u8> {
fn write<T: ToData>(&mut self, v: T) {
v.write(self);
}
}
/// A trait for writing raw binary data.
trait ToData {
fn write(&self, data: &mut Vec<u8>);
}
impl ToData for u8 {
fn write(&self, data: &mut Vec<u8>) {
data.push(*self);
}
}
impl ToData for u16 {
fn write(&self, data: &mut Vec<u8>) {
data.extend(&self.to_be_bytes());
}
}
impl ToData for Offset16 {
fn write(&self, data: &mut Vec<u8>) {
self.0.write(data);
}
}
impl ToData for u32 {
fn write(&self, data: &mut Vec<u8>) {
data.extend(&self.to_be_bytes());
}
}
impl ToData for Offset32 {
fn write(&self, data: &mut Vec<u8>) {
self.0.write(data);
}
}
impl ToData for Tag {
fn write(&self, data: &mut Vec<u8>) {
self.as_u32().write(data);
}
}
/// Font magic number.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
enum Magic {
TrueType,
OpenType,
Collection,
}
impl FromData for Magic {
const SIZE: usize = 4;
fn parse(data: &[u8]) -> Option<Self> {
match u32::parse(data)? {
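// 0x00010000 and 0x74727565 ("true") mark TrueType outlines, 0x4F54544F
// ("OTTO") marks CFF outlines, and 0x74746366 ("ttcf") marks a collection.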
0x00010000 | 0x74727565 => Some(Magic::TrueType),
0x4F54544F => Some(Magic::OpenType),
0x74746366 => Some(Magic::Collection),
_ => None,
}
}
}
impl ToData for Magic {
fn write(&self, data: &mut Vec<u8>) {
let value: u32 = match self {
Magic::TrueType => 0x00010000,
Magic::OpenType => 0x4F54544F,
Magic::Collection => 0x74746366,
};
value.write(data);
}
}
/// Locates a table in the font file.
#[derive(Debug, Copy, Clone, Eq, PartialEq)]
struct TableRecord {
tag: Tag,
checksum: u32,
offset: u32,
length: u32,
}
impl FromData for TableRecord {
const SIZE: usize = 16;
fn parse(data: &[u8]) -> Option<Self> {
let mut s = Stream::new(data);
Some(TableRecord {
tag: s.read::<Tag>()?,
checksum: s.read::<u32>()?,
offset: s.read::<u32>()?,
length: s.read::<u32>()?,
})
}
}
impl ToData for TableRecord {
fn write(&self, data: &mut Vec<u8>) {
self.tag.write(data);
self.checksum.write(data);
self.offset.write(data);
self.length.write(data);
}
}
mod post {
use super::*;
/// Subset the post table by removing the name information.
pub(super) fn subset(subsetter: &mut Subsetter) -> Option<()> {
// Table version three is the one without names.
let mut new = 0x00030000_u32.to_be_bytes().to_vec();
new.extend(subsetter.table_data(POST)?.get(4 .. 32)?);
subsetter.push_table(POST, new);
Some(())
}
}
mod glyf {
use super::*;
/// Subset the glyf and loca tables by clearing out glyph data for
/// unused glyphs.
pub(super) fn subset(subsetter: &mut Subsetter) -> Option<()> {
let head = subsetter.table_data(HEAD)?;
let short = Stream::read_at::<i16>(head, 50)? == 0;
if short {
subset_impl::<Offset16>(subsetter)
} else {
subset_impl::<Offset32>(subsetter)
}
}
fn subset_impl<T>(subsetter: &mut Subsetter) -> Option<()>
where
T: LocaOffset,
{
let loca = subsetter.table_data(LOCA)?;
let glyf = subsetter.table_data(GLYF)?;
let offsets = LazyArray32::<T>::new(loca);
let glyph_data = |id: u16| {
let from = offsets.get(u32::from(id))?.loca_to_usize();
let to = offsets.get(u32::from(id) + 1)?.loca_to_usize();
glyf.get(from .. to)
};
// The set of all glyphs we will include in the subset.
let mut subset = HashSet::new();
// Because glyphs may depend on other glyphs as components (also with
// multiple layers of nesting), we have to process all glyphs to find
// their components. For notdef and all requested glyphs we simply use
// an iterator, but to track other glyphs that need processing we create
// a work stack.
let mut iter = iter::once(0).chain(subsetter.glyphs.iter().copied());
let mut work = vec![];
// Find composite glyph descriptions.
while let Some(id) = work.pop().or_else(|| iter.next()) {
if subset.insert(id) {
let mut s = Stream::new(glyph_data(id)?);
if let Some(num_contours) = s.read::<i16>() {
// Negative means this is a composite glyph.
if num_contours < 0 {
// Skip min/max metrics.
s.read::<i16>();
s.read::<i16>();
s.read::<i16>();
s.read::<i16>();
// Read component glyphs.
work.extend(component_glyphs(s));
}
}
}
}
let mut sub_loca = vec![];
let mut sub_glyf = vec![];
for id in 0 .. subsetter.num_glyphs {
// If the glyph shouldn't be contained in the subset, it will
// still get a loca entry, but the glyf data is simply empty.
sub_loca.write(T::usize_to_loca(sub_glyf.len())?);
if subset.contains(&id) {
sub_glyf.extend(glyph_data(id)?);
}
}
sub_loca.write(T::usize_to_loca(sub_glyf.len())?);
subsetter.push_table(LOCA, sub_loca);
subsetter.push_table(GLYF, sub_glyf);
Some(())
}
trait LocaOffset: Sized + FromData + ToData {
fn loca_to_usize(self) -> usize;
fn usize_to_loca(offset: usize) -> Option<Self>;
}
impl LocaOffset for Offset16 {
fn loca_to_usize(self) -> usize {
2 * usize::from(self.0)
}
fn usize_to_loca(offset: usize) -> Option<Self> {
if offset % 2 == 0 {
(offset / 2).try_into().ok().map(Self)
} else {
None
}
}
}
impl LocaOffset for Offset32 {
fn loca_to_usize(self) -> usize {
self.0 as usize
}
fn usize_to_loca(offset: usize) -> Option<Self> {
offset.try_into().ok().map(Self)
}
}
/// Returns an iterator over the component glyphs referenced by the given
/// `glyf` table composite glyph description.
fn component_glyphs(mut s: Stream) -> impl Iterator<Item = u16> + '_ {
const ARG_1_AND_2_ARE_WORDS: u16 = 0x0001;
const WE_HAVE_A_SCALE: u16 = 0x0008;
const MORE_COMPONENTS: u16 = 0x0020;
const WE_HAVE_AN_X_AND_Y_SCALE: u16 = 0x0040;
const WE_HAVE_A_TWO_BY_TWO: u16 = 0x0080;
let mut done = false;
iter::from_fn(move || {
if done {
return None;
}
let flags = s.read::<u16>()?;
let component = s.read::<u16>()?;
if flags & ARG_1_AND_2_ARE_WORDS != 0 {
s.skip::<i16>();
s.skip::<i16>();
} else {
s.skip::<u16>();
}
if flags & WE_HAVE_A_SCALE != 0 {
s.skip::<F2DOT14>();
} else if flags & WE_HAVE_AN_X_AND_Y_SCALE != 0 {
s.skip::<F2DOT14>();
s.skip::<F2DOT14>();
} else if flags & WE_HAVE_A_TWO_BY_TWO != 0 {
s.skip::<F2DOT14>();
s.skip::<F2DOT14>();
s.skip::<F2DOT14>();
s.skip::<F2DOT14>();
}
done = flags & MORE_COMPONENTS == 0;
Some(component)
})
}
}
mod cff {
use super::*;
/// Subset the CFF table by zeroing glyph data for unused glyphs.
pub(super) fn subset_v1(subsetter: &mut Subsetter) -> Option<()> {
let cff = subsetter.table_data(CFF1)?;
let mut s = Stream::new(cff);
let (major, _) = (s.read::<u8>()?, s.skip::<u8>());
if major != 1 {
return None;
}
let header_size = s.read::<u8>()?;
s = Stream::new_at(cff, usize::from(header_size))?;
// Skip the name index.
Index::parse_stream(&mut s);
// Read the top dict. The index should contain only one item.
let top_dict_index = Index::parse_stream(&mut s)?;
let top_dict = Dict::parse(top_dict_index.get(0)?);
let mut sub_cff = cff.to_vec();
// Because completely rebuilding the CFF structure would be pretty
// complex, for now, we employ a peculiar strategy for CFF subsetting:
// We simply replace unused data with zeros. This way, the font
// structure and offsets can stay the same. And while the CFF table
// itself doesn't shrink, the actual embedded font is compressed and
// greatly benefits from the repeated zeros.
zero_char_strings(subsetter, cff, &top_dict, &mut sub_cff);
zero_subr_indices(subsetter, cff, &top_dict, &mut sub_cff);
subsetter.push_table(CFF1, sub_cff);
Some(())
}
/// Zero unused char strings.
fn zero_char_strings(
subsetter: &Subsetter,
cff: &[u8],
top_dict: &Dict,
sub_cff: &mut [u8],
) -> Option<()> {
let char_strings_offset = top_dict.get_offset(Op::CHAR_STRINGS)?;
let char_strings = Index::parse(cff.get(char_strings_offset ..)?)?;
for (id, _, range) in char_strings.iter() {
if !subsetter.glyphs.contains(&id) {
let start = char_strings_offset + range.start;
let end = char_strings_offset + range.end;
memzero(sub_cff.get_mut(start .. end)?);
}
}
Some(())
}
/// Zero unused local subroutine indices. We don't currently remove
/// individual subroutines because finding out which ones are used is
/// complicated.
fn zero_subr_indices(
subsetter: &Subsetter,
cff: &[u8],
top_dict: &Dict,
sub_cff: &mut [u8],
) -> Option<()> {
// Parse FD Select data structure, which maps from glyph ids to font
// dict indices.
let fd_select_offset = top_dict.get_offset(Op::FD_SELECT)?;
let fd_select =
parse_fd_select(cff.get(fd_select_offset ..)?, subsetter.num_glyphs)?;
// Clear local subrs from unused font dicts.
let fd_array_offset = top_dict.get_offset(Op::FD_ARRAY)?;
let fd_array = Index::parse(cff.get(fd_array_offset ..)?)?;
// Determine which font dict's subrs to keep.
let mut sub_fds = HashSet::new();
for &glyph in subsetter.glyphs {
sub_fds.insert(fd_select.get(usize::from(glyph))?);
}
for (i, data, _) in fd_array.iter() {
if !sub_fds.contains(&(i as u8)) {
let font_dict = Dict::parse(data);
if let Some(private_range) = font_dict.get_range(Op::PRIVATE) {
let private_dict = Dict::parse(cff.get(private_range.clone())?);
if let Some(subrs_offset) = private_dict.get_offset(Op::SUBRS) {
let start = private_range.start + subrs_offset;
let index = Index::parse(cff.get(start ..)?)?;
let end = start + index.data.len();
memzero(sub_cff.get_mut(start .. end)?);
}
}
}
}
Some(())
}
/// Returns the font dict index for each glyph.
fn parse_fd_select(data: &[u8], num_glyphs: u16) -> Option<Cow<'_, [u8]>> {
let mut s = Stream::new(data);
let format = s.read::<u8>()?;
Some(match format {
0 => Cow::Borrowed(s.read_bytes(usize::from(num_glyphs))?),
3 => {
let count = usize::from(s.read::<u16>()?);
let mut fds = vec![];
let mut start = s.read::<u16>()?;
for _ in 0 .. count {
let fd = s.read::<u8>()?;
let end = s.read::<u16>()?;
for _ in start .. end {
fds.push(fd);
}
start = end;
}
Cow::Owned(fds)
}
_ => Cow::Borrowed(&[]),
})
}
struct Index<'a> {
/// The data of the whole index (including its header).
data: &'a [u8],
/// The data ranges for the actual items.
items: Vec<Range<usize>>,
}
impl<'a> Index<'a> {
fn parse(data: &'a [u8]) -> Option<Self> {
let mut s = Stream::new(data);
let count = usize::from(s.read::<u16>()?);
let mut items = Vec::with_capacity(count);
let mut len = 2;
if count > 0 {
let offsize = usize::from(s.read::<u8>()?);
if !matches!(offsize, 1 ..= 4) {
return None;
}
// Read an offset and transform it to be relative to the start
// of the index.
let data_offset = 3 + offsize * (count + 1);
let mut read_offset = || {
let mut bytes = [0u8; 4];
bytes[4 - offsize .. 4].copy_from_slice(s.read_bytes(offsize)?);
Some(data_offset - 1 + u32::from_be_bytes(bytes) as usize)
};
let mut last = read_offset()?;
for _ in 0 .. count {
let offset = read_offset()?;
data.get(last .. offset)?;
items.push(last .. offset);
last = offset;
}
len = last;
}
Some(Self { data: data.get(.. len)?, items })
}
fn parse_stream(s: &'a mut Stream) -> Option<Self> {
let index = Index::parse(s.tail()?)?;
s.advance(index.data.len());
Some(index)
}
fn get(&self, idx: usize) -> Option<&'a [u8]> {
self.data.get(self.items.get(idx)?.clone())
}
fn iter(&self) -> impl Iterator<Item = (u16, &'a [u8], Range<usize>)> + '_ {
self.items
.iter()
.enumerate()
.map(move |(i, item)| (i as u16, &self.data[item.clone()], item.clone()))
}
}
struct Dict<'a>(Vec<Pair<'a>>);
impl<'a> Dict<'a> {
fn parse(data: &'a [u8]) -> Self {
let mut s = Stream::new(data);
Self(iter::from_fn(|| Pair::parse(&mut s)).collect())
}
fn get(&self, op: Op) -> Option<&[Operand<'a>]> {
self.0
.iter()
.find(|pair| pair.op == op)
.map(|pair| pair.operands.as_slice())
}
fn get_offset(&self, op: Op) -> Option<usize> {
match self.get(op)? {
&[Operand::Int(offset)] if offset > 0 => usize::try_from(offset).ok(),
_ => None,
}
}
fn get_range(&self, op: Op) -> Option<Range<usize>> {
match self.get(op)? {
&[Operand::Int(len), Operand::Int(offset)] if offset > 0 => {
let offset = usize::try_from(offset).ok()?;
let len = usize::try_from(len).ok()?;
Some(offset .. offset + len)
}
_ => None,
}
}
}
#[derive(Debug)]
struct Pair<'a> {
operands: Vec<Operand<'a>>,
op: Op,
}
impl<'a> Pair<'a> {
fn parse(s: &mut Stream<'a>) -> Option<Self> {
let mut operands = vec![];
while s.clone().read::<u8>()? > 21 {
operands.push(Operand::parse(s)?);
}
Some(Self { operands, op: Op::parse(s)? })
}
}
#[derive(Debug, Eq, PartialEq)]
struct Op(u8, u8);
impl Op {
const CHAR_STRINGS: Self = Self(17, 0);
const PRIVATE: Self = Self(18, 0);
const SUBRS: Self = Self(19, 0);
const FD_ARRAY: Self = Self(12, 36);
const FD_SELECT: Self = Self(12, 37);
fn parse(s: &mut Stream) -> Option<Self> {
let b0 = s.read::<u8>()?;
match b0 {
12 => Some(Self(b0, s.read::<u8>()?)),
0 ..= 21 => Some(Self(b0, 0)),
_ => None,
}
}
}
#[derive(Debug)]
enum Operand<'a> {
Int(i32),
Real(&'a [u8]),
}
impl<'a> Operand<'a> {
fn parse(s: &mut Stream<'a>) -> Option<Self> {
let b0 = i32::from(s.read::<u8>()?);
Some(match b0 {
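// CFF DICT operand encoding: 32..=246 encode b0 - 139 directly, 247..=254
// begin two-byte integers, 28 and 29 are 16- and 32-bit integers, and 30
// begins a real number terminated by a nibble with value 0xf.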
30 => {
let mut len = 0;
for &byte in s.tail()? {
len += 1;
if byte & 0x0f == 0x0f {
break;
}
}
Self::Real(s.read_bytes(len)?)
}
32 ..= 246 => Self::Int(b0 - 139),
247 ..= 250 => {
let b1 = i32::from(s.read::<u8>()?);
Self::Int((b0 - 247) * 256 + b1 + 108)
}
251 ..= 254 => {
let b1 = i32::from(s.read::<u8>()?);
Self::Int(-(b0 - 251) * 256 - b1 - 108)
}
28 => Self::Int(i32::from(s.read::<i16>()?)),
29 => Self::Int(s.read::<i32>()?),
_ => return None,
})
}
}
}

View File

@ -7,7 +7,7 @@ use std::path::{Path, PathBuf};
use std::sync::Arc;
use once_cell::sync::OnceCell;
use rex::font::MathFont;
use rex::font::MathHeader;
use serde::{Deserialize, Serialize};
use ttf_parser::{name_id, GlyphId, PlatformId, Tag};
use unicode_segmentation::UnicodeSegmentation;
@ -254,7 +254,7 @@ pub struct Face {
/// The face's metrics.
metrics: FaceMetrics,
/// The parsed ReX math font.
math: OnceCell<Option<MathFont>>,
math: OnceCell<Option<MathHeader>>,
}
impl Face {
@ -308,9 +308,14 @@ impl Face {
&self.metrics
}
/// Access the math font, if any.
pub fn math(&self) -> Option<&MathFont> {
self.math.get_or_init(|| MathFont::parse(self.buffer()).ok()).as_ref()
/// Access the math header, if any.
pub fn math(&self) -> Option<&MathHeader> {
self.math
.get_or_init(|| {
let data = self.ttf().table_data(Tag::from_bytes(b"MATH"))?;
MathHeader::parse(data).ok()
})
.as_ref()
}
/// Convert from font units to an em length.
@ -350,7 +355,7 @@ pub struct FaceMetrics {
impl FaceMetrics {
/// Extract the face's metrics.
pub fn from_ttf(ttf: &ttf_parser::Face) -> Self {
let units_per_em = f64::from(ttf.units_per_em().unwrap_or(0));
let units_per_em = f64::from(ttf.units_per_em());
let to_em = |units| Em::from_units(units, units_per_em);
let ascender = to_em(ttf.typographic_ascender().unwrap_or(ttf.ascender()));
@ -517,7 +522,7 @@ impl FaceInfo {
// Determine the unicode coverage.
let mut codepoints = vec![];
for subtable in ttf.character_mapping_subtables() {
for subtable in ttf.tables().cmap.into_iter().flat_map(|table| table.subtables) {
if subtable.is_unicode() {
subtable.codepoints(|c| codepoints.push(c));
}
@ -550,14 +555,14 @@ impl FaceInfo {
/// Try to find and decode the name with the given id.
pub fn find_name(ttf: &ttf_parser::Face, name_id: u16) -> Option<String> {
ttf.names().find_map(|entry| {
if entry.name_id() == name_id {
ttf.names().into_iter().find_map(|entry| {
if entry.name_id == name_id {
if let Some(string) = entry.to_string() {
return Some(string);
}
if entry.platform_id() == PlatformId::Macintosh && entry.encoding_id() == 0 {
return Some(decode_mac_roman(entry.name()));
if entry.platform_id == PlatformId::Macintosh && entry.encoding_id == 0 {
return Some(decode_mac_roman(entry.name));
}
}

View File

@ -181,8 +181,14 @@ impl FromStr for RgbaColor {
}
impl From<SynColor> for RgbaColor {
fn from(color: SynColor) -> Self {
Self::new(color.r, color.g, color.b, color.a)
fn from(SynColor { r, g, b, a }: SynColor) -> Self {
Self { r, g, b, a }
}
}
impl From<RgbaColor> for SynColor {
fn from(RgbaColor { r, g, b, a }: RgbaColor) -> Self {
Self { r, g, b, a }
}
}

View File

@ -8,7 +8,7 @@ use std::path::Path;
use std::sync::Arc;
use image::io::Reader as ImageReader;
use image::{DynamicImage, GenericImageView, ImageFormat};
use image::{DynamicImage, ImageFormat};
use crate::loading::{FileHash, Loader};
@ -97,7 +97,7 @@ impl Image {
match RasterImage::parse(data) {
Ok(raster) => return Ok(Self::Raster(raster)),
Err(err) if matches!(ext, "png" | "jpg" | "jpeg") => return Err(err),
Err(err) if matches!(ext, "png" | "jpg" | "jpeg" | "gif") => return Err(err),
Err(_) => {}
}

View File

@ -201,7 +201,10 @@ impl Cast<Spanned<Value>> for Marginal {
Value::Str(v) => Ok(Self::Content(Content::Text(v))),
Value::Content(v) => Ok(Self::Content(v)),
Value::Func(v) => Ok(Self::Func(v, value.span)),
_ => Err("expected none, content or function")?,
v => Err(format!(
"expected none, content or function, found {}",
v.type_name(),
)),
}
}
}

View File

@ -1,5 +1,5 @@
use rex::error::{Error, LayoutError};
use rex::font::{FontContext, MathFont};
use rex::font::FontContext;
use rex::layout::{LayoutSettings, Style};
use rex::parser::color::RGBA;
use rex::render::{Backend, Cursor, Renderer};
@ -38,7 +38,7 @@ impl Layout for RexNode {
let face = ctx.fonts.get(face_id);
let ctx = face
.math()
.and_then(FontContext::new)
.map(|math| FontContext::new(face.ttf(), math))
.ok_or("font is not suitable for math")
.at(span)?;
@ -110,7 +110,7 @@ impl FrameBackend {
}
impl Backend for FrameBackend {
fn symbol(&mut self, pos: Cursor, gid: u16, scale: f64, _: &MathFont) {
fn symbol(&mut self, pos: Cursor, gid: u16, scale: f64) {
self.frame.push(
self.transform(pos),
Element::Text(Text {

View File

@ -238,6 +238,7 @@ impl Cast<Spanned<Value>> for Label {
fn cast(value: Spanned<Value>) -> StrResult<Self> {
match value.v {
Value::None => Ok(Self::Content(Content::Empty)),
Value::Str(pattern) => {
let mut s = Scanner::new(&pattern);
let mut prefix;
@ -258,7 +259,10 @@ impl Cast<Spanned<Value>> for Label {
}
Value::Content(v) => Ok(Self::Content(v)),
Value::Func(v) => Ok(Self::Func(v, value.span)),
_ => Err("expected pattern, content or function")?,
v => Err(format!(
"expected string, content or function, found {}",
v.type_name(),
)),
}
}
}

View File

@ -90,7 +90,9 @@ impl Show for RawNode {
seq.push(Content::Linebreak { justified: false });
}
for (style, piece) in highlighter.highlight(line, &SYNTAXES) {
for (style, piece) in
highlighter.highlight_line(line, &SYNTAXES).into_iter().flatten()
{
seq.push(styled(piece, foreground, style));
}
}
@ -177,6 +179,7 @@ pub static THEME: Lazy<Theme> = Lazy::new(|| Theme {
item("string", Some("#298e0d"), None),
item("punctuation.shortcut", Some("#1d6c76"), None),
item("constant.character.escape", Some("#1d6c76"), None),
item("invalid", Some("#ff0000"), None),
],
});
@ -185,7 +188,7 @@ fn item(scope: &str, color: Option<&str>, font_style: Option<FontStyle>) -> Them
ThemeItem {
scope: scope.parse().unwrap(),
style: StyleModifier {
foreground: color.map(|s| s.parse().unwrap()),
foreground: color.map(|s| s.parse::<RgbaColor>().unwrap().into()),
background: None,
font_style,
},
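Two things change in this file with the syntect bump: highlight becomes highlight_line, which returns a Result, and theme colors are parsed through RgbaColor before being converted into the highlighter's color type via the From impl shown earlier. The into_iter().flatten() call yields the highlighted pieces on success and simply nothing if highlighting fails. A small sketch of that Result-flattening idiom with simplified types (the real item type is (Style, &str)):

// Flattening a Result<Vec<_>, _> iterates the Ok contents and skips the Err case.
fn pieces(result: Result<Vec<(u8, &str)>, ()>) -> Vec<&str> {
    result.into_iter().flatten().map(|(_, piece)| piece).collect()
}

fn main() {
    assert_eq!(pieces(Ok(vec![(0, "let"), (1, " x")])), vec!["let", " x"]);
    assert!(pieces(Err(())).is_empty());
}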

View File

@ -221,14 +221,12 @@ impl<'a> ShapedText<'a> {
let x_advance = face.to_em(ttf.glyph_hor_advance(glyph_id)?);
let cluster = self.glyphs.last().map(|g| g.cluster).unwrap_or_default();
self.width += x_advance.at(self.size);
let baseline_shift = self.styles.get(TextNode::BASELINE);
self.glyphs.to_mut().push(ShapedGlyph {
face_id,
glyph_id: glyph_id.0,
x_advance,
x_offset: Em::zero(),
y_offset: Em::from_length(baseline_shift, self.size),
y_offset: Em::zero(),
cluster,
safe_to_break: true,
c: '-',
@ -491,7 +489,7 @@ fn shape_tofus(ctx: &mut ShapingContext, base: usize, text: &str, face_id: FaceI
glyph_id: 0,
x_advance,
x_offset: Em::zero(),
y_offset: Em::from_length(ctx.styles.get(TextNode::BASELINE), ctx.size),
y_offset: Em::zero(),
cluster: base + cluster,
safe_to_break: true,
c,

View File

@ -239,9 +239,10 @@ fn print_diagnostics(
for error in errors {
// The main diagnostic.
let diag = Diagnostic::error().with_message(error.message).with_labels(vec![
Label::primary(error.span.source(), sources.range(error.span)),
]);
let range = sources.range(error.span);
let diag = Diagnostic::error()
.with_message(error.message)
.with_labels(vec![Label::primary(error.span.source(), range)]);
term::emit(&mut w, &config, sources, &diag)?;

View File

@ -11,9 +11,8 @@ pub use tokens::*;
use std::collections::HashSet;
use crate::diag::ErrorPos;
use crate::syntax::ast::{Associativity, BinOp, UnOp};
use crate::syntax::{NodeKind, SyntaxNode};
use crate::syntax::{NodeKind, SpanPos, SyntaxNode};
use crate::util::EcoString;
/// Parse a source file.
@ -648,7 +647,7 @@ fn item(p: &mut Parser, keyed: bool) -> ParseResult<NodeKind> {
msg.push_str(", found ");
msg.push_str(kind.as_str());
}
let error = NodeKind::Error(ErrorPos::Full, msg);
let error = NodeKind::Error(SpanPos::Full, msg);
marker.end(p, error);
p.eat();
marker.perform(p, NodeKind::Named, expr).ok();

View File

@ -3,8 +3,7 @@ use std::mem;
use std::ops::Range;
use super::{TokenMode, Tokens};
use crate::diag::ErrorPos;
use crate::syntax::{InnerNode, NodeData, NodeKind, SyntaxNode};
use crate::syntax::{InnerNode, NodeData, NodeKind, SpanPos, SyntaxNode};
use crate::util::EcoString;
/// A convenient token-based parser.
@ -385,7 +384,7 @@ impl Parser<'_> {
pub fn unexpected(&mut self) {
if let Some(found) = self.peek() {
let msg = format_eco!("unexpected {}", found);
let error = NodeKind::Error(ErrorPos::Full, msg);
let error = NodeKind::Error(SpanPos::Full, msg);
self.perform(error, Self::eat);
}
}
@ -399,7 +398,7 @@ impl Parser<'_> {
/// Insert an error message that `what` was expected at the marker position.
pub fn expected_at(&mut self, marker: Marker, what: &str) {
let msg = format_eco!("expected {}", what);
let error = NodeKind::Error(ErrorPos::Full, msg);
let error = NodeKind::Error(SpanPos::Full, msg);
self.children.insert(marker.0, NodeData::new(error, 0).into());
}
@ -409,7 +408,7 @@ impl Parser<'_> {
match self.peek() {
Some(found) => {
let msg = format_eco!("expected {}, found {}", thing, found);
let error = NodeKind::Error(ErrorPos::Full, msg);
let error = NodeKind::Error(SpanPos::Full, msg);
self.perform(error, Self::eat);
}
None => self.expected(thing),
@ -481,7 +480,7 @@ impl Marker {
msg.push_str(", found ");
msg.push_str(child.kind().as_str());
}
let error = NodeKind::Error(ErrorPos::Full, msg);
let error = NodeKind::Error(SpanPos::Full, msg);
let inner = mem::take(child);
*child = InnerNode::with_child(error, inner).into();
}

View File

@ -4,10 +4,9 @@ use unicode_xid::UnicodeXID;
use unscanny::Scanner;
use super::resolve::{resolve_hex, resolve_raw, resolve_string};
use crate::diag::ErrorPos;
use crate::geom::{AngleUnit, LengthUnit};
use crate::syntax::ast::{MathNode, RawNode, Unit};
use crate::syntax::NodeKind;
use crate::syntax::{NodeKind, SpanPos};
use crate::util::EcoString;
/// An iterator over the tokens of a string of source code.
@ -287,14 +286,14 @@ impl<'s> Tokens<'s> {
NodeKind::Escape(c)
} else {
NodeKind::Error(
ErrorPos::Full,
SpanPos::Full,
"invalid unicode escape sequence".into(),
)
}
} else {
self.terminated = false;
NodeKind::Error(
ErrorPos::End,
SpanPos::End,
"expected closing brace".into(),
)
}
@ -394,7 +393,7 @@ impl<'s> Tokens<'s> {
self.terminated = false;
NodeKind::Error(
ErrorPos::End,
SpanPos::End,
if found == 0 {
format_eco!("expected {} {}", remaining, noun)
} else {
@ -442,7 +441,7 @@ impl<'s> Tokens<'s> {
} else {
self.terminated = false;
NodeKind::Error(
ErrorPos::End,
SpanPos::End,
if !display || (!escaped && dollar) {
"expected closing dollar sign".into()
} else {
@ -531,7 +530,7 @@ impl<'s> Tokens<'s> {
NodeKind::Str(string)
} else {
self.terminated = false;
NodeKind::Error(ErrorPos::End, "expected quote".into())
NodeKind::Error(SpanPos::End, "expected quote".into())
}
}
@ -677,12 +676,12 @@ mod tests {
use super::*;
use crate::parse::tests::check;
use ErrorPos::*;
use NodeKind::*;
use Option::None;
use SpanPos::*;
use TokenMode::{Code, Markup};
fn Error(pos: ErrorPos, message: &str) -> NodeKind {
fn Error(pos: SpanPos, message: &str) -> NodeKind {
NodeKind::Error(pos, message.into())
}

View File

@ -13,7 +13,7 @@ pub use highlight::*;
pub use span::*;
use self::ast::{MathNode, RawNode, TypedNode, Unit};
use crate::diag::{Error, ErrorPos};
use crate::diag::Error;
use crate::source::SourceId;
use crate::util::EcoString;
@ -82,7 +82,7 @@ impl SyntaxNode {
match self.kind() {
&NodeKind::Error(pos, ref message) => {
vec![Error { pos, ..Error::new(self.span(), message) }]
vec![Error::new(self.span().with_pos(pos), message)]
}
_ => self
.children()
@ -150,9 +150,7 @@ impl SyntaxNode {
pub fn range(&self, span: Span, offset: usize) -> Option<Range<usize>> {
match self {
Self::Inner(inner) => inner.range(span, offset),
Self::Leaf(leaf) => {
(span == leaf.span).then(|| offset .. offset + self.len())
}
Self::Leaf(leaf) => leaf.range(span, offset),
}
}
@ -324,8 +322,8 @@ impl InnerNode {
/// If the span points into this node, convert it to a byte range.
pub fn range(&self, span: Span, mut offset: usize) -> Option<Range<usize>> {
// Check whether we found it.
if self.data.span == span {
return Some(offset .. offset + self.len());
if let Some(range) = self.data.range(span, offset) {
return Some(range);
}
// The parent of a subtree has a smaller span number than all of its
@ -536,6 +534,18 @@ impl NodeData {
Err(Unnumberable)
}
}
/// If the span points into this node, convert it to a byte range.
pub fn range(&self, span: Span, offset: usize) -> Option<Range<usize>> {
(span.with_pos(SpanPos::Full) == self.span).then(|| {
let end = offset + self.len();
match span.pos() {
SpanPos::Full => offset .. end,
SpanPos::Start => offset .. offset,
SpanPos::End => end .. end,
}
})
}
}
impl From<NodeData> for SyntaxNode {
@ -787,7 +797,7 @@ pub enum NodeKind {
/// The comment can contain nested block comments.
BlockComment,
/// Tokens that appear in the wrong place.
Error(ErrorPos, EcoString),
Error(SpanPos, EcoString),
/// Unknown character sequences.
Unknown(EcoString),
}
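The new NodeData::range encodes how a span's position maps to bytes: a Full span covers the node's whole range, while Start and End collapse to empty ranges at the node's boundaries, which is what lets errors such as "expected closing brace" point just past the offending token. A standalone sketch of that mapping (the enum here is a stand-in, not the real SpanPos):

use std::ops::Range;

#[derive(Clone, Copy)]
enum SpanPos { Full, Start, End }

// Mirrors the match in NodeData::range: offset is the node's start, len its length.
fn range(pos: SpanPos, offset: usize, len: usize) -> Range<usize> {
    let end = offset + len;
    match pos {
        SpanPos::Full => offset..end,
        SpanPos::Start => offset..offset,
        SpanPos::End => end..end,
    }
}

fn main() {
    // For a node spanning bytes 4..9:
    assert_eq!(range(SpanPos::Full, 4, 5), 4..9);
    assert_eq!(range(SpanPos::Start, 4, 5), 4..4);
    assert_eq!(range(SpanPos::End, 4, 5), 9..9); // an "expected closing ..." error points here
}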

View File

@ -62,20 +62,25 @@ impl<T: Debug> Debug for Spanned<T> {
pub struct Span(NonZeroU64);
impl Span {
// Data layout:
// | 2 bits span pos | 16 bits source id | 46 bits number |
// Number of bits for, and the minimum and maximum, numbers assignable to spans.
const BITS: usize = 48;
const BITS: usize = 46;
const DETACHED: u64 = 1;
const MIN: u64 = 2;
const MAX: u64 = (1 << Self::BITS) - 1;
/// The full range of numbers available to spans.
pub const FULL: Range<u64> = Self::MIN .. Self::MAX + 1;
pub const FULL: Range<u64> = 2 .. (1 << Self::BITS);
/// Create a new span from a source id and a unique number.
///
/// Panics if the `number` is not contained in `FULL`.
pub const fn new(id: SourceId, number: u64) -> Self {
assert!(number >= Self::MIN && number <= Self::MAX);
assert!(
Self::FULL.start <= number && number < Self::FULL.end,
"span number outside valid range"
);
let bits = ((id.into_raw() as u64) << Self::BITS) | number;
Self(to_non_zero(bits))
}
@ -85,6 +90,12 @@ impl Span {
Self(to_non_zero(Self::DETACHED))
}
/// Return a new span with updated position.
pub const fn with_pos(self, pos: SpanPos) -> Self {
let bits = (self.0.get() & ((1 << 62) - 1)) | ((pos as u64) << 62);
Self(to_non_zero(bits))
}
/// The id of the source file the span points into.
pub const fn source(self) -> SourceId {
SourceId::from_raw((self.0.get() >> Self::BITS) as u16)
@ -94,16 +105,37 @@ impl Span {
pub const fn number(self) -> u64 {
self.0.get() & ((1 << Self::BITS) - 1)
}
/// Where in the node the span points to.
pub const fn pos(self) -> SpanPos {
match self.0.get() >> 62 {
0 => SpanPos::Full,
1 => SpanPos::Start,
2 => SpanPos::End,
_ => panic!("span pos encoding is invalid"),
}
}
}
/// Convert to a non zero u64.
const fn to_non_zero(v: u64) -> NonZeroU64 {
match NonZeroU64::new(v) {
Some(v) => v,
None => unreachable!(),
None => panic!("span encoding is zero"),
}
}
/// Where in a node a span points.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum SpanPos {
/// Over the full width of the node.
Full = 0,
/// At the start of the node.
Start = 1,
/// At the end of the node.
End = 2,
}
/// Result of numbering a node within an interval.
pub type NumberingResult = Result<(), Unnumberable>;
@ -118,3 +150,17 @@ impl Display for Unnumberable {
}
impl std::error::Error for Unnumberable {}
#[cfg(test)]
mod tests {
use super::{SourceId, Span, SpanPos};
#[test]
fn test_span_encoding() {
let id = SourceId::from_raw(5);
let span = Span::new(id, 10).with_pos(SpanPos::End);
assert_eq!(span.source(), id);
assert_eq!(span.number(), 10);
assert_eq!(span.pos(), SpanPos::End);
}
}
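The added test exercises the encoding through the public API; the raw 64-bit layout itself (2 bits position, 16 bits source id, 46 bits number) can also be sketched independently of the Span type (constant and function names here are illustrative):

// Packs position, source id and span number into one u64 using the
// | 2 bits pos | 16 bits source id | 46 bits number | layout documented above.
const BITS: u32 = 46;

fn pack(pos: u64, id: u64, number: u64) -> u64 {
    (pos << 62) | (id << BITS) | number
}

fn main() {
    let bits = pack(2, 5, 10); // pos = End, source id = 5, number = 10
    assert_eq!(bits >> 62, 2);                // position bits
    assert_eq!((bits >> BITS) & 0xffff, 5);   // source id (the u16 cast drops the pos bits)
    assert_eq!(bits & ((1 << BITS) - 1), 10); // span number
}

Because span numbers start at 2 and the detached marker is 1, the packed value is never zero, which is what allows the NonZeroU64 representation.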

View File

@ -316,10 +316,7 @@ fn test_part(
let mut errors: Vec<_> = errors
.into_iter()
.filter(|error| error.span.source() == id)
.map(|error| {
let range = error.pos.apply(ctx.sources.range(error.span));
(range, error.message)
})
.map(|error| (ctx.sources.range(error.span), error.message))
.collect();
errors.sort_by_key(|error| error.0.start);