Many fixes

This commit is contained in:
Laurenz 2022-06-13 23:16:40 +02:00
parent 891af17260
commit c81e2a5f56
74 changed files with 593 additions and 675 deletions

View File

@ -116,7 +116,7 @@ fn process_const(
) -> Result<(Property, syn::ItemMod)> {
let property = parse_property(item)?;
// The display name, e.g. `TextNode::STRONG`.
// The display name, e.g. `TextNode::BOLD`.
let name = format!("{}::{}", self_name, &item.ident);
// The type of the property's value is what the user of our macro wrote
@ -134,7 +134,7 @@ fn process_const(
value_ty.clone()
};
// ... but the real type of the const becomes this..
// ... but the real type of the const becomes this ...
let key = quote! { Key<#value_ty, #self_args> };
let phantom_args = self_args.iter().filter(|arg| match arg {
syn::GenericArgument::Type(syn::Type::Path(path)) => {
@ -148,9 +148,11 @@ fn process_const(
let default = &item.expr;
// Ensure that the type is either `Copy` or that the property is referenced
// or that the property isn't copy but can't be referenced because it needs
// folding.
// Ensure that the type is
// - either `Copy`, or
// - that the property is referenced, or
// - that the property isn't copy but can't be referenced because it needs
// folding.
let get;
let mut copy = None;

View File

@ -137,7 +137,7 @@ impl Array {
}
/// Return a new array with only those elements for which the function
/// return true.
/// returns true.
pub fn filter(&self, vm: &mut Machine, f: Spanned<Func>) -> TypResult<Self> {
let mut kept = vec![];
for item in self.iter() {
@ -154,7 +154,7 @@ impl Array {
/// Return a new array with all items from this and nested arrays.
pub fn flatten(&self) -> Self {
let mut flat = vec![];
let mut flat = Vec::with_capacity(self.0.len());
for item in self.iter() {
if let Value::Array(nested) = item {
flat.extend(nested.flatten().into_iter());

View File

@ -43,8 +43,8 @@ impl<'a> CapturesVisitor<'a> {
match node.cast() {
// Every identifier is a potential variable that we need to capture.
// Identifiers that shouldn't count as captures because they
// actually bind a new name are handled further below (individually
// through the expressions that contain them).
// actually bind a new name are handled below (individually through
// the expressions that contain them).
Some(Expr::Ident(ident)) => self.capture(ident),
// Code and content blocks create a scope.
@ -179,7 +179,7 @@ mod tests {
test("#import x, y from z", &["z"]);
test("#import x, y, z from x + y", &["x", "y"]);
// Scoping.
// Blocks.
test("{ let x = 1; { let y = 2; y }; x + y }", &["y"]);
test("[#let x = 1]#x", &["x"]);
}

View File

@ -46,7 +46,7 @@ impl Dict {
}
/// Borrow the value the given `key` maps to.
pub fn get(&self, key: &EcoString) -> StrResult<&Value> {
pub fn get(&self, key: &str) -> StrResult<&Value> {
self.0.get(key).ok_or_else(|| missing_key(key))
}
@ -59,7 +59,7 @@ impl Dict {
}
/// Whether the dictionary contains a specific key.
pub fn contains(&self, key: &EcoString) -> bool {
pub fn contains(&self, key: &str) -> bool {
self.0.contains_key(key)
}
@ -69,7 +69,7 @@ impl Dict {
}
/// Remove a mapping by `key`.
pub fn remove(&mut self, key: &EcoString) -> StrResult<()> {
pub fn remove(&mut self, key: &str) -> StrResult<()> {
match Arc::make_mut(&mut self.0).remove(key) {
Some(_) => Ok(()),
None => Err(missing_key(key)),
@ -87,12 +87,12 @@ impl Dict {
/// Return the keys of the dictionary as an array.
pub fn keys(&self) -> Array {
self.iter().map(|(key, _)| Value::Str(key.clone())).collect()
self.0.keys().cloned().map(Value::Str).collect()
}
/// Return the values of the dictionary as an array.
pub fn values(&self) -> Array {
self.iter().map(|(_, value)| value.clone()).collect()
self.0.values().cloned().collect()
}
/// Transform each pair in the array with a function.
@ -114,8 +114,8 @@ impl Dict {
/// The missing key access error message.
#[cold]
fn missing_key(key: &EcoString) -> String {
format!("dictionary does not contain key {:?}", key)
fn missing_key(key: &str) -> String {
format!("dictionary does not contain key {:?}", EcoString::from(key))
}
impl Debug for Dict {

View File

@ -105,7 +105,7 @@ impl Func {
self.call(&mut vm, args)
}
/// Execute the function's set rule.
/// Execute the function's set rule and return the resulting style map.
pub fn set(&self, mut args: Args) -> TypResult<StyleMap> {
let styles = match self.0.as_ref() {
Repr::Native(Native { set: Some(set), .. }) => set(&mut args)?,
@ -139,7 +139,7 @@ impl PartialEq for Func {
}
}
/// A native rust function.
/// A function defined by a native rust function or node.
struct Native {
/// The name of the function.
pub name: &'static str,
@ -171,17 +171,17 @@ pub trait Node: 'static {
/// node's set rule.
fn construct(vm: &mut Machine, args: &mut Args) -> TypResult<Content>;
/// Parse the arguments into style properties for this node.
/// Parse relevant arguments into style properties for this node.
///
/// When `constructor` is true, [`construct`](Self::construct) will run
/// after this invocation of `set`.
/// after this invocation of `set` with the remaining arguments.
fn set(args: &mut Args, constructor: bool) -> TypResult<StyleMap>;
}
/// A user-defined closure.
#[derive(Hash)]
pub struct Closure {
/// The location where the closure was defined.
/// The source file where the closure was defined.
pub location: Option<SourceId>,
/// The name of the closure.
pub name: Option<EcoString>,
@ -199,8 +199,8 @@ pub struct Closure {
impl Closure {
/// Call the function in the context with the arguments.
pub fn call(&self, vm: &mut Machine, args: &mut Args) -> TypResult<Value> {
// Don't leak the scopes from the call site. Instead, we use the
// scope of captured variables we collected earlier.
// Don't leak the scopes from the call site. Instead, we use the scope
// of captured variables we collected earlier.
let mut scopes = Scopes::new(None);
scopes.top = self.captured.clone();

View File

@ -11,7 +11,7 @@ use crate::Context;
pub struct Machine<'a> {
/// The core context.
pub ctx: &'a mut Context,
/// The route of source ids at which the machine is located.
/// The route of source ids the machine took to reach its current location.
pub route: Vec<SourceId>,
/// The dependencies of the current evaluation process.
pub deps: Vec<(SourceId, usize)>,

View File

@ -72,7 +72,7 @@ pub fn call(
Value::Dyn(dynamic) => match method {
"matches" => {
if let Some(regex) = dynamic.downcast::<Regex>() {
Value::Bool(regex.matches(&args.expect::<EcoString>("text")?))
Value::Bool(regex.is_match(&args.expect::<EcoString>("text")?))
} else {
missing()?
}
@ -125,7 +125,7 @@ pub fn call_mut(
},
Value::Dict(dict) => match method {
"remove" => dict.remove(&args.expect("key")?).at(span)?,
"remove" => dict.remove(&args.expect::<EcoString>("key")?).at(span)?,
_ => missing()?,
},

View File

@ -30,7 +30,7 @@ pub fn join(lhs: Value, rhs: Value) -> StrResult<Value> {
})
}
/// Apply the plus operator to a value.
/// Apply the unary plus operator to a value.
pub fn pos(value: Value) -> StrResult<Value> {
Ok(match value {
Int(v) => Int(v),
@ -281,7 +281,7 @@ pub fn eq(lhs: Value, rhs: Value) -> StrResult<Value> {
Ok(Bool(equal(&lhs, &rhs)))
}
/// Compute whether two values are equal.
/// Compute whether two values are unequal.
pub fn neq(lhs: Value, rhs: Value) -> StrResult<Value> {
Ok(Bool(!equal(&lhs, &rhs)))
}

View File

@ -92,7 +92,7 @@ pub struct RawStroke<T = RawLength> {
}
impl RawStroke<Length> {
/// Unpack the stroke, filling missing fields with `default`.
/// Unpack the stroke, filling missing fields from the `default`.
pub fn unwrap_or(self, default: Stroke) -> Stroke {
Stroke {
paint: self.paint.unwrap_or(default.paint),

View File

@ -122,7 +122,7 @@ impl Debug for Scope {
}
}
/// A slot where a variable is stored.
/// A slot where a value is stored.
#[derive(Clone, Hash)]
struct Slot {
/// The stored value.
@ -141,17 +141,17 @@ enum Kind {
}
impl Slot {
/// Create a new constant slot.
/// Create a new slot.
fn new(value: Value, kind: Kind) -> Self {
Self { value, kind }
}
/// Read the variable.
/// Read the value.
fn read(&self) -> &Value {
&self.value
}
/// Try to write to the variable.
/// Try to write to the value.
fn write(&mut self) -> StrResult<&mut Value> {
match self.kind {
Kind::Normal => Ok(&mut self.value),

View File

@ -45,15 +45,10 @@ impl StrExt for EcoString {
pub struct Regex(regex::Regex);
impl Regex {
/// Create a new regex.
/// Create a new regular expression.
pub fn new(re: &str) -> StrResult<Self> {
regex::Regex::new(re).map(Self).map_err(|err| err.to_string())
}
/// Whether the regex matches the given `text`.
pub fn matches(&self, text: &str) -> bool {
self.0.is_match(text)
}
}
impl Deref for Regex {

View File

@ -28,7 +28,7 @@ pub enum Value {
Int(i64),
/// A floating-point number: `1.2`, `10e-4`.
Float(f64),
/// A length: `12pt`, `3cm`.
/// A length: `12pt`, `3cm`, `1.5em`.
Length(RawLength),
/// An angle: `1.5rad`, `90deg`.
Angle(Angle),
@ -532,10 +532,9 @@ impl<T: Cast> Cast for Option<T> {
/// A value that can be automatically determined.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum Smart<T> {
/// The value should be determined smartly based on the
/// circumstances.
/// The value should be determined smartly based on the circumstances.
Auto,
/// A forced, specific value.
/// A specific value.
Custom(T),
}
@ -629,23 +628,23 @@ where
}
let sides = Sides {
left: dict.get(&"left".into()).or_else(|_| dict.get(&"x".into())),
top: dict.get(&"top".into()).or_else(|_| dict.get(&"y".into())),
right: dict.get(&"right".into()).or_else(|_| dict.get(&"x".into())),
bottom: dict.get(&"bottom".into()).or_else(|_| dict.get(&"y".into())),
}
.map(|side| {
side.or_else(|_| dict.get(&"rest".into()))
.and_then(|v| T::cast(v.clone()))
.unwrap_or_default()
});
left: dict.get("left").or(dict.get("x")),
top: dict.get("top").or(dict.get("y")),
right: dict.get("right").or(dict.get("x")),
bottom: dict.get("bottom").or(dict.get("y")),
};
Ok(sides)
Ok(sides.map(|side| {
side.or(dict.get("rest"))
.cloned()
.and_then(T::cast)
.unwrap_or_default()
}))
}
v => T::cast(v).map(Sides::splat).map_err(|msg| {
with_alternative(
msg,
"dictionary with any of `left`, `top`, `right`, `bottom`,\
"dictionary with any of `left`, `top`, `right`, `bottom`, \
`x`, `y`, or `rest` as keys",
)
}),

View File

@ -14,7 +14,7 @@ use pdf_writer::writers::ColorSpace;
use pdf_writer::{Content, Filter, Finish, Name, PdfWriter, Rect, Ref, Str, TextStr};
use ttf_parser::{name_id, GlyphId, Tag};
use crate::font::{find_name, FaceId, FontStore};
use crate::font::{FaceId, FontStore};
use crate::frame::{Destination, Element, Frame, Group, Role, Text};
use crate::geom::{
self, Color, Dir, Em, Geometry, Length, Numeric, Paint, Point, Ratio, Shape, Size,
@ -88,7 +88,6 @@ impl<'a> PdfExporter<'a> {
self.write_fonts();
self.write_images();
// The root page tree.
for page in std::mem::take(&mut self.pages).into_iter() {
self.write_page(page);
}
@ -123,7 +122,8 @@ impl<'a> PdfExporter<'a> {
let metrics = face.metrics();
let ttf = face.ttf();
let postscript_name = find_name(ttf, name_id::POST_SCRIPT_NAME)
let postscript_name = face
.find_name(name_id::POST_SCRIPT_NAME)
.unwrap_or_else(|| "unknown".to_string());
let base_font = format_eco!("ABCDEF+{}", postscript_name);
@ -370,9 +370,8 @@ impl<'a> PdfExporter<'a> {
.uri(Str(uri.as_str().as_bytes()));
}
Destination::Internal(loc) => {
if (1 ..= self.page_heights.len()).contains(&loc.page) {
let index = loc.page - 1;
let height = self.page_heights[index];
let index = loc.page.get() - 1;
if let Some(&height) = self.page_heights.get(index) {
link.action()
.action_type(ActionType::GoTo)
.destination_direct()
@ -457,8 +456,10 @@ impl<'a> PdfExporter<'a> {
Direction::L2R
};
// Write the document information, catalog and wrap it up!
// Write the document information.
self.writer.document_info(self.alloc.bump()).creator(TextStr("Typst"));
// Write the document catalog.
let mut catalog = self.writer.catalog(self.alloc.bump());
catalog.pages(self.page_tree_ref);
catalog.viewer_preferences().direction(dir);
@ -556,46 +557,6 @@ struct State {
stroke_space: Option<Name<'static>>,
}
/// A heading that can later be linked in the outline panel.
#[derive(Debug, Clone)]
struct Heading {
content: EcoString,
level: usize,
position: Point,
page: Ref,
}
#[derive(Debug, Clone)]
struct HeadingNode {
heading: Heading,
children: Vec<HeadingNode>,
}
impl HeadingNode {
fn leaf(heading: Heading) -> Self {
HeadingNode { heading, children: Vec::new() }
}
fn len(&self) -> usize {
1 + self.children.iter().map(Self::len).sum::<usize>()
}
fn insert(&mut self, other: Heading, level: usize) -> bool {
if level >= other.level {
return false;
}
if let Some(child) = self.children.last_mut() {
if child.insert(other.clone(), level + 1) {
return true;
}
}
self.children.push(Self::leaf(other));
true
}
}
impl<'a, 'b> PageExporter<'a, 'b> {
fn new(exporter: &'a mut PdfExporter<'b>, page_ref: Ref) -> Self {
Self {
@ -940,6 +901,47 @@ impl<'a, 'b> PageExporter<'a, 'b> {
}
}
/// A heading that can later be linked in the outline panel.
#[derive(Debug, Clone)]
struct Heading {
    /// The text content of the heading.
    content: EcoString,
    /// The nesting level of the heading.
    level: usize,
    /// The position of the heading on its page.
    position: Point,
    /// The indirect reference of the page the heading appears on
    /// (presumably a `pdf_writer::Ref` — the PDF page object).
    page: Ref,
}

/// A node in the outline tree.
#[derive(Debug, Clone)]
struct HeadingNode {
    /// The heading stored at this node.
    heading: Heading,
    /// The subheadings nested below this one.
    children: Vec<HeadingNode>,
}

impl HeadingNode {
    /// Create a node with no children.
    fn leaf(heading: Heading) -> Self {
        HeadingNode { heading, children: Vec::new() }
    }

    /// The number of nodes in this subtree, including this node itself.
    fn len(&self) -> usize {
        1 + self.children.iter().map(Self::len).sum::<usize>()
    }

    /// Try to insert `other` somewhere below this node.
    ///
    /// `level` is the depth of this node in the tree. Returns `false` when
    /// `other.level` is at or above `level`, meaning the heading does not
    /// belong in this subtree. Otherwise, the heading is first offered to
    /// the most recently added child (one level deeper); if that child
    /// rejects it, it becomes a new direct child of this node. Returns
    /// `true` once inserted.
    fn insert(&mut self, other: Heading, level: usize) -> bool {
        if level >= other.level {
            return false;
        }
        if let Some(child) = self.children.last_mut() {
            // NOTE(review): `other` is cloned because the recursive call may
            // reject it, in which case we still need it for the push below.
            if child.insert(other.clone(), level + 1) {
                return true;
            }
        }
        self.children.push(Self::leaf(other));
        true
    }
}
/// Encode an image with a suitable filter and return the data, filter and
/// whether the image has color.
///
@ -953,7 +955,7 @@ fn encode_image(img: &RasterImage) -> ImageResult<(Vec<u8>, Filter, bool)> {
(data.into_inner(), Filter::DctDecode, false)
}
// 8-bit Rgb JPEG (Cmyk JPEGs get converted to Rgb earlier).
// 8-bit RGB JPEG (CMYK JPEGs get converted to RGB earlier).
(ImageFormat::Jpeg, DynamicImage::ImageRgb8(_)) => {
let mut data = Cursor::new(vec![]);
img.buf.write_to(&mut data, img.format)?;

View File

@ -20,8 +20,7 @@ use crate::Context;
/// returns the resulting `tiny-skia` pixel buffer.
///
/// In addition to the frame, you need to pass in the context used during
/// compilation so that fonts and images can be rendered and rendering artifacts
/// can be cached.
/// compilation so that fonts and images can be rendered.
pub fn render(ctx: &Context, frame: &Frame, pixel_per_pt: f32) -> sk::Pixmap {
let size = frame.size();
let pxw = (pixel_per_pt * size.x.to_f32()).round().max(1.0) as u32;
@ -298,7 +297,7 @@ fn render_outline_glyph(
Some(())
}
/// Renders a geometrical shape into the canvas.
/// Render a geometrical shape into the canvas.
fn render_shape(
canvas: &mut sk::Pixmap,
ts: sk::Transform,
@ -341,7 +340,7 @@ fn render_shape(
Some(())
}
/// Renders a raster or SVG image into the canvas.
/// Render a raster or SVG image into the canvas.
fn render_image(
canvas: &mut sk::Pixmap,
ts: sk::Transform,

View File

@ -14,7 +14,6 @@ use unicode_segmentation::UnicodeSegmentation;
use crate::geom::Em;
use crate::loading::{FileHash, Loader};
use crate::util::decode_mac_roman;
/// A unique identifier for a loaded font face.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
@ -139,8 +138,8 @@ impl FontStore {
/// To do that we compute a key for all variants and select the one with the
/// minimal key. This key prioritizes:
/// - If `like` is some other face:
/// - Are both faces (not) monospaced.
/// - Do both faces (not) have serifs.
/// - Are both faces (not) monospaced?
/// - Do both faces (not) have serifs?
/// - How many words do the families share in their prefix? E.g. "Noto
/// Sans" and "Noto Sans Arabic" share two words, whereas "IBM Plex
/// Arabic" shares none with "Noto Sans", so prefer "Noto Sans Arabic"
@ -165,7 +164,6 @@ impl FontStore {
let mut best = None;
let mut best_key = None;
// Find the best matching variant of this font.
for id in ids {
let current = &infos[id.0 as usize];
@ -237,23 +235,22 @@ fn shared_prefix_words(left: &str, right: &str) -> usize {
}
impl_track_empty!(FontStore);
impl_track_empty!(&'_ mut FontStore);
impl_track_hash!(FaceId);
impl_track_hash!(GlyphId);
/// A font face.
pub struct Face {
/// The raw face data, possibly shared with other faces from the same
/// collection. Must stay alive put, because `ttf` points into it using
/// unsafe code.
/// collection. The vector's allocation must not move, because `ttf` points
/// into it using unsafe code.
buffer: Arc<Vec<u8>>,
/// The face's index in the collection (zero if not a collection).
index: u32,
/// The underlying ttf-parser/rustybuzz face.
ttf: rustybuzz::Face<'static>,
/// The faces metrics.
/// The face's metrics.
metrics: FaceMetrics,
/// The parsed ReX math font.
/// The parsed ReX math header.
math: OnceCell<Option<MathHeader>>,
}
@ -298,7 +295,7 @@ impl Face {
&self.ttf
}
/// The number of units per em.
/// The number of font units per one em.
pub fn units_per_em(&self) -> f64 {
self.metrics.units_per_em
}
@ -308,16 +305,6 @@ impl Face {
&self.metrics
}
/// Access the math header, if any.
pub fn math(&self) -> Option<&MathHeader> {
self.math
.get_or_init(|| {
let data = self.ttf().table_data(Tag::from_bytes(b"MATH"))?;
MathHeader::parse(data).ok()
})
.as_ref()
}
/// Convert from font units to an em length.
pub fn to_em(&self, units: impl Into<f64>) -> Em {
Em::from_units(units, self.units_per_em())
@ -329,6 +316,21 @@ impl Face {
.glyph_hor_advance(GlyphId(glyph))
.map(|units| self.to_em(units))
}
/// Access the math header, if any.
///
/// The header is parsed lazily on first access from the face's raw `MATH`
/// table and cached in a `OnceCell`. Returns `None` if the face has no
/// `MATH` table or if parsing it fails.
pub fn math(&self) -> Option<&MathHeader> {
    self.math
        .get_or_init(|| {
            // `?` yields `None` for the cell when the table is absent;
            // a parse failure is likewise cached as `None`.
            let data = self.ttf().table_data(Tag::from_bytes(b"MATH"))?;
            MathHeader::parse(data).ok()
        })
        .as_ref()
}
/// Lookup a name by id.
///
/// Searches the underlying face's `name` table for an entry with the given
/// `name_id` (e.g. a `ttf_parser::name_id` constant) and decodes it.
pub fn find_name(&self, name_id: u16) -> Option<String> {
    find_name_ttf(&self.ttf, name_id)
}
}
/// Metrics for a font face.
@ -396,7 +398,7 @@ impl FaceMetrics {
}
}
/// Look up a vertical metric at the given font size.
/// Look up a vertical metric.
pub fn vertical(&self, metric: VerticalFontMetric) -> Em {
match metric {
VerticalFontMetric::Ascender => self.ascender,
@ -491,15 +493,15 @@ impl FaceInfo {
// sometimes doesn't for the Display variants and that mixes things
// up.
let family = {
let mut family = find_name(ttf, name_id::FAMILY)?;
let mut family = find_name_ttf(ttf, name_id::FAMILY)?;
if family.starts_with("Noto") {
family = find_name(ttf, name_id::FULL_NAME)?;
family = find_name_ttf(ttf, name_id::FULL_NAME)?;
}
trim_styles(&family).to_string()
};
let variant = {
let mut full = find_name(ttf, name_id::FULL_NAME).unwrap_or_default();
let mut full = find_name_ttf(ttf, name_id::FULL_NAME).unwrap_or_default();
full.make_ascii_lowercase();
// Some fonts miss the relevant bits for italic or oblique, so
@ -554,7 +556,7 @@ impl FaceInfo {
}
/// Try to find and decode the name with the given id.
pub fn find_name(ttf: &ttf_parser::Face, name_id: u16) -> Option<String> {
fn find_name_ttf(ttf: &ttf_parser::Face, name_id: u16) -> Option<String> {
ttf.names().into_iter().find_map(|entry| {
if entry.name_id == name_id {
if let Some(string) = entry.to_string() {
@ -570,6 +572,31 @@ pub fn find_name(ttf: &ttf_parser::Face, name_id: u16) -> Option<String> {
})
}
/// Decode mac roman encoded bytes into a string.
///
/// Bytes `0x00 ..= 0x7F` are identical to ASCII; bytes `0x80 ..= 0xFF` are
/// mapped through the standard Mac OS Roman table (per the Unicode
/// Consortium's ROMAN.TXT mapping). Every byte decodes to exactly one
/// `char`, so this never fails.
fn decode_mac_roman(coded: &[u8]) -> String {
    // Unicode equivalents of the Mac Roman bytes 0x80 ..= 0xFF, laid out in
    // sixteen-entry rows matching the encoding table.
    #[rustfmt::skip]
    const TABLE: [char; 128] = [
        'Ä', 'Å', 'Ç', 'É', 'Ñ', 'Ö', 'Ü', 'á', 'à', 'â', 'ä', 'ã', 'å', 'ç', 'é', 'è',
        'ê', 'ë', 'í', 'ì', 'î', 'ï', 'ñ', 'ó', 'ò', 'ô', 'ö', 'õ', 'ú', 'ù', 'û', 'ü',
        '†', '°', '¢', '£', '§', '•', '¶', 'ß', '®', '©', '™', '´', '¨', '≠', 'Æ', 'Ø',
        '∞', '±', '≤', '≥', '¥', 'µ', '∂', '∑', '∏', 'π', '∫', 'ª', 'º', 'Ω', 'æ', 'ø',
        '¿', '¡', '¬', '√', 'ƒ', '≈', '∆', '«', '»', '…', '\u{a0}', 'À', 'Ã', 'Õ', 'Œ', 'œ',
        '–', '—', '“', '”', '‘', '’', '÷', '◊', 'ÿ', 'Ÿ', '⁄', '€', '‹', '›', 'ﬁ', 'ﬂ',
        '‡', '·', '‚', '„', '‰', 'Â', 'Ê', 'Á', 'Ë', 'È', 'Í', 'Î', 'Ï', 'Ì', 'Ó', 'Ô',
        '\u{f8ff}', 'Ò', 'Ú', 'Û', 'Ù', 'ı', 'ˆ', '˜', '¯', '˘', '˙', '˚', '¸', '˝', '˛', 'ˇ',
    ];

    // Map a single Mac Roman byte to its Unicode character.
    fn char_from_mac_roman(code: u8) -> char {
        if code < 128 {
            // The ASCII range maps to itself.
            code as char
        } else {
            // High bytes are looked up in the table above.
            TABLE[(code - 128) as usize]
        }
    }

    coded.iter().copied().map(char_from_mac_roman).collect()
}
/// Trim style naming from a family name.
fn trim_styles(mut family: &str) -> &str {
// Separators between names, modifiers and styles.
@ -944,7 +971,7 @@ mod tests {
test(&[0, 1], &[0, 2]);
test(&[0, 1, 3], &[0, 2, 1, 1]);
test(
// [2, 3, 4, 9, 10, 11, 15, 18, 19]
// {2, 3, 4, 9, 10, 11, 15, 18, 19}
&[18, 19, 2, 4, 9, 11, 15, 3, 3, 10],
&[2, 3, 4, 3, 3, 1, 2, 2],
)

View File

@ -27,7 +27,7 @@ pub struct Frame {
elements: Arc<Vec<(Point, Element)>>,
}
/// Accessors and setters.
/// Constructor, accessors and setters.
impl Frame {
/// Create a new, empty frame.
///
@ -120,10 +120,10 @@ impl Frame {
Arc::make_mut(&mut self.elements).push((pos, element));
}
/// Add a frame.
/// Add a frame at a position in the foreground.
///
/// Automatically decides whether to inline the frame or to include it as a
/// group based on the number of elements in the frame.
/// group based on the number of elements in and the role of the frame.
pub fn push_frame(&mut self, pos: Point, frame: Frame) {
if self.should_inline(&frame) {
self.inline(self.layer(), pos, frame);
@ -146,6 +146,9 @@ impl Frame {
}
/// Add multiple elements at a position in the background.
///
/// The first element in the iterator will be the one that is most in the
/// background.
pub fn prepend_multiple<I>(&mut self, elements: I)
where
I: IntoIterator<Item = (Point, Element)>,
@ -163,20 +166,20 @@ impl Frame {
}
/// Whether the given frame should be inlined.
pub fn should_inline(&self, frame: &Frame) -> bool {
fn should_inline(&self, frame: &Frame) -> bool {
(self.elements.is_empty() || frame.elements.len() <= 5)
&& frame.role().map_or(true, |role| role.is_weak())
}
/// Inline a frame at the given layer.
pub fn inline(&mut self, layer: usize, pos: Point, frame: Frame) {
fn inline(&mut self, layer: usize, pos: Point, frame: Frame) {
// Try to just reuse the elements.
if pos.is_zero() && self.elements.is_empty() {
self.elements = frame.elements;
return;
}
// Try to copy the elements without adjusting the position.
// Try to transfer the elements without adjusting the position.
// Also try to reuse the elements if the Arc isn't shared.
let range = layer .. layer;
if pos.is_zero() {
@ -192,7 +195,7 @@ impl Frame {
return;
}
// We must adjust the element positioned.
// We must adjust the element positions.
// But still try to reuse the elements if the Arc isn't shared.
let sink = Arc::make_mut(&mut self.elements);
match Arc::try_unwrap(frame.elements) {
@ -210,7 +213,11 @@ impl Frame {
impl Frame {
/// Remove all elements from the frame.
pub fn clear(&mut self) {
self.elements = Arc::new(vec![]);
if Arc::strong_count(&self.elements) == 1 {
Arc::make_mut(&mut self.elements).clear();
} else {
self.elements = Arc::new(vec![]);
}
}
/// Resize the frame to a new size, distributing new space according to the
@ -407,7 +414,7 @@ pub enum Destination {
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Location {
/// The page, starting at 1.
pub page: usize,
pub page: NonZeroUsize,
/// The exact coordinates on the page (from the top left, as usual).
pub pos: Point,
}
@ -416,7 +423,7 @@ impl Location {
/// Encode into a user-facing dictionary.
pub fn encode(&self) -> Dict {
dict! {
"page" => Value::Int(self.page as i64),
"page" => Value::Int(self.page.get() as i64),
"x" => Value::Length(self.pos.x.into()),
"y" => Value::Length(self.pos.y.into()),
}
@ -428,27 +435,28 @@ impl Location {
pub enum Role {
/// A paragraph.
Paragraph,
/// A heading with some level and whether it should be part of the outline.
/// A heading of the given level and whether it should be part of the
/// outline.
Heading { level: NonZeroUsize, outlined: bool },
/// A generic block-level subdivision.
GenericBlock,
/// A generic inline subdivision.
GenericInline,
/// A list. The boolean indicates whether it is ordered.
/// A list and whether it is ordered.
List { ordered: bool },
/// A list item. Must have a list parent.
ListItem,
/// The label of a list item.
/// The label of a list item. Must have a list item parent.
ListLabel,
/// The body of a list item.
/// The body of a list item. Must have a list item parent.
ListItemBody,
/// A mathematical formula.
Formula,
/// A table.
Table,
/// A table row.
/// A table row. Must have a table parent.
TableRow,
/// A table cell.
/// A table cell. Must have a table row parent.
TableCell,
/// A code fragment.
Code,
@ -466,6 +474,8 @@ impl Role {
/// Whether the role describes a generic element and is not very
/// descriptive.
pub fn is_weak(self) -> bool {
// In Typst, all text is in a paragraph, so paragraph isn't very
// descriptive.
match self {
Self::Paragraph | Self::GenericBlock | Self::GenericInline => true,
_ => false,

View File

@ -35,7 +35,7 @@ impl Angle {
(self.0).0
}
/// Get the value of this length in unit.
/// Get the value of this angle in a unit.
pub fn to_unit(self, unit: AngleUnit) -> f64 {
self.to_raw() / unit.raw_scale()
}

View File

@ -22,7 +22,7 @@ impl Em {
Self(Scalar(em))
}
/// Create font units at the given units per em.
/// Create an em length from font units at the given units per em.
pub fn from_units(units: impl Into<f64>, units_per_em: f64) -> Self {
Self(Scalar(units.into() / units_per_em))
}

View File

@ -25,7 +25,7 @@ impl Fraction {
(self.0).0
}
/// The absolute value of the this fraction.
/// The absolute value of this fraction.
pub fn abs(self) -> Self {
Self::new(self.get().abs())
}

View File

@ -31,7 +31,7 @@ impl<T> Gen<T> {
Gen { cross: f(self.cross), main: f(self.main) }
}
/// Convert to the specific representation, given the current block axis.
/// Convert to the specific representation, given the current main axis.
pub fn to_spec(self, main: SpecAxis) -> Spec<T> {
match main {
SpecAxis::Horizontal => Spec::new(self.main, self.cross),
@ -82,9 +82,9 @@ impl<T: Debug> Debug for Gen<T> {
/// Two generic axes of a container.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum GenAxis {
/// The minor axis.
/// The minor / inline axis.
Cross,
/// The major axis.
/// The major / block axis.
Main,
}

View File

@ -10,7 +10,7 @@ impl Length {
Self(Scalar(0.0))
}
/// The inifinite length.
/// The infinite length.
pub const fn inf() -> Self {
Self(Scalar(f64::INFINITY))
}
@ -50,7 +50,7 @@ impl Length {
(self.0).0
}
/// Get the value of this length in unit.
/// Get the value of this length in a unit.
pub fn to_unit(self, unit: LengthUnit) -> f64 {
self.to_raw() / unit.raw_scale()
}
@ -75,7 +75,7 @@ impl Length {
self.to_unit(LengthUnit::In)
}
/// The absolute value of the this length.
/// The absolute value of this length.
pub fn abs(self) -> Self {
Self::raw(self.to_raw().abs())
}
@ -100,7 +100,8 @@ impl Length {
*self = (*self).max(other);
}
/// Whether the other length fits into this one (i.e. is smaller).
/// Whether the other length fits into this one (i.e. is smaller). Allows
/// for a bit of slack.
pub fn fits(self, other: Self) -> bool {
self.0 + 1e-6 >= other.0
}

View File

@ -79,7 +79,7 @@ impl Debug for Color {
}
}
/// An 8-bit Luma color.
/// An 8-bit grayscale color.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct LumaColor(pub u8);

View File

@ -15,7 +15,7 @@ impl Point {
Self { x: Length::zero(), y: Length::zero() }
}
/// Create a new point from x and y coordinate.
/// Create a new point from x and y coordinates.
pub const fn new(x: Length, y: Length) -> Self {
Self { x, y }
}

View File

@ -4,13 +4,15 @@ use std::mem;
/// A rectangle with rounded corners.
#[derive(Debug, Copy, Clone, PartialEq)]
pub struct Rect {
size: Size,
radius: Sides<Length>,
pub struct RoundedRect {
/// The size of the rectangle.
pub size: Size,
/// The radius at each side.
pub radius: Sides<Length>,
}
impl Rect {
/// Create a new rectangle.
impl RoundedRect {
/// Create a new rounded rectangle.
pub fn new(size: Size, radius: Sides<Length>) -> Self {
Self { size, radius }
}
@ -55,7 +57,6 @@ impl Rect {
} else {
let mut paths = self.stroke_segments(Sides::splat(None));
assert_eq!(paths.len(), 1);
Geometry::Path(paths.pop().unwrap().0)
}
}
@ -103,7 +104,7 @@ impl Rect {
}
/// Draws one side of the rounded rectangle. Will always draw the left arc. The
/// right arc will be drawn halfway iff there is no connection.
/// right arc will be drawn halfway if and only if there is no connection.
fn draw_side(
path: &mut Path,
side: Side,
@ -114,7 +115,6 @@ fn draw_side(
) {
let angle_left = Angle::deg(if connection.prev { 90.0 } else { 45.0 });
let angle_right = Angle::deg(if connection.next { 90.0 } else { 45.0 });
let length = size.get(side.axis());
// The arcs for a border of the rectangle along the x-axis, starting at (0,0).
@ -166,9 +166,9 @@ fn draw_side(
}
}
/// A state machine that indicates which sides of the border strokes in a 2D
/// polygon are connected to their neighboring sides.
#[derive(Debug, Default, Copy, Clone, PartialEq, Eq)]
/// Indicates which sides of the border strokes in a 2D polygon are connected to
/// their neighboring sides.
#[derive(Debug, Default, Copy, Clone, Eq, PartialEq)]
struct Connection {
prev: bool,
next: bool,

View File

@ -32,7 +32,7 @@ impl<T> Sides<T> {
}
}
/// Maps the individual fields with `f`.
/// Map the individual fields with `f`.
pub fn map<F, U>(self, mut f: F) -> Sides<U>
where
F: FnMut(T) -> U,
@ -58,12 +58,12 @@ impl<T> Sides<T> {
}
}
/// Returns an iterator over the sides.
/// An iterator over the sides.
pub fn iter(&self) -> impl Iterator<Item = &T> {
[&self.left, &self.top, &self.right, &self.bottom].into_iter()
}
/// Returns whether all sides are equal.
/// Whether all sides are equal.
pub fn is_uniform(&self) -> bool
where
T: PartialEq,
@ -72,10 +72,7 @@ impl<T> Sides<T> {
}
}
impl<T> Sides<T>
where
T: Add,
{
impl<T: Add> Sides<T> {
/// Sums up `left` and `right` into `x`, and `top` and `bottom` into `y`.
pub fn sum_by_axis(self) -> Spec<T::Output> {
Spec::new(self.left + self.right, self.top + self.bottom)

View File

@ -26,7 +26,7 @@ impl<T> Spec<T> {
Self { x: v.clone(), y: v }
}
/// Maps the individual fields with `f`.
/// Map the individual fields with `f`.
pub fn map<F, U>(self, mut f: F) -> Spec<U>
where
F: FnMut(T) -> U,

View File

@ -48,8 +48,8 @@ impl Transform {
}
/// Whether this is the identity transformation.
pub fn is_identity(&self) -> bool {
*self == Self::identity()
pub fn is_identity(self) -> bool {
self == Self::identity()
}
/// Pre-concatenate another transformation.

View File

@ -133,7 +133,7 @@ pub struct RasterImage {
}
impl RasterImage {
/// Parse an image from raw data in a supported format (PNG or JPEG).
/// Parse an image from raw data in a supported format (PNG, JPEG or GIF).
///
/// The image format is determined automatically.
pub fn parse(data: &[u8]) -> io::Result<Self> {

View File

@ -3,17 +3,17 @@
//! # Steps
//! - **Parsing:** The parsing step first transforms a plain string into an
//! [iterator of tokens][tokens]. This token stream is [parsed] into a [syntax
//! tree]. The tree itself is untyped, but a typed layer over it is provided
//! in the [AST] module.
//! tree]. The tree itself is untyped, but the [AST] module provides a typed
//! layer over it.
//! - **Evaluation:** The next step is to [evaluate] the markup. This produces a
//! [module], consisting of a scope of values that were exported by the code
//! and [content], a hierarchical, styled representation with the contents of
//! the module. The nodes of the content tree are well structured and
//! order-independent and thus much better suited for layouting than the raw
//! markup.
//! - **Layouting:** Next, the tree is [layouted] into a portable version of the
//! typeset document. The output of this is a collection of [`Frame`]s (one
//! per page), ready for exporting.
//! and [content], a hierarchical, styled representation of the text,
//! structure, layouts, etc. of the module. The nodes of the content tree are
//! well structured and order-independent and thus much better suited for
//! layouting than the raw markup.
//! - **Layouting:** Next, the content is [layouted] into a portable version of
//! the typeset document. The output of this is a collection of [`Frame`]s
//! (one per page), ready for exporting.
//! - **Exporting:** The finished layout can be exported into a supported
//! format. Currently, the only supported output format is [PDF].
//!

View File

@ -19,12 +19,9 @@ impl Layout for HideNode {
styles: StyleChain,
) -> TypResult<Vec<Frame>> {
let mut frames = self.0.layout(ctx, regions, styles)?;
// Clear the frames.
for frame in &mut frames {
frame.clear();
}
Ok(frames)
}
}

View File

@ -165,7 +165,7 @@ impl<const S: ShapeKind> Layout for ShapeNode<S> {
frame.prepend(pos, Element::Shape(shape));
} else {
frame.prepend_multiple(
Rect::new(size, radius)
RoundedRect::new(size, radius)
.shapes(fill, stroke)
.into_iter()
.map(|x| (pos, Element::Shape(x))),

View File

@ -107,6 +107,8 @@ impl<const T: TransformKind> Layout for TransformNode<T> {
}
/// Kinds of transformations.
///
/// The move transformation is handled separately.
pub type TransformKind = usize;
/// A rotational transformation.

View File

@ -58,10 +58,10 @@ impl Layout for ColumnsNode {
// Layout the children.
let mut frames = self.child.layout(ctx, &pod, styles)?.into_iter();
let mut finished = vec![];
let dir = styles.get(TextNode::DIR);
let total_regions = (frames.len() as f32 / columns as f32).ceil() as usize;
let mut finished = vec![];
// Stitch together the columns for each region.
for region in regions.iter().take(total_regions) {

View File

@ -67,7 +67,7 @@ pub enum TrackSizing {
castable! {
Vec<TrackSizing>,
Expected: "integer, auto, relative length, fraction, or array of the latter three)",
Expected: "integer, auto, relative length, fraction, or array of the latter three",
Value::Auto => vec![TrackSizing::Auto],
Value::Length(v) => vec![TrackSizing::Relative(v.into())],
Value::Ratio(v) => vec![TrackSizing::Relative(v.into())],

View File

@ -65,7 +65,7 @@ fn shrink(size: Size, padding: Sides<Relative<Length>>) -> Size {
/// (Vertical axis is analogous.)
///
/// Let w be the grown target width,
/// s be given width,
/// s be the given width,
/// l be the left padding,
/// r be the right padding,
/// p = l + r.

View File

@ -18,7 +18,7 @@ impl PageNode {
/// Whether the page is flipped into landscape orientation.
pub const FLIPPED: bool = false;
/// The page margin.
/// The page's margins.
#[property(fold)]
pub const MARGINS: Sides<Option<Smart<Relative<RawLength>>>> =
Sides::splat(Smart::Auto);

View File

@ -39,9 +39,8 @@ impl Layout for PlaceNode {
// If expansion is off, zero all sizes so that we don't take up any
// space in our parent. Otherwise, respect the expand settings.
let frame = &mut frames[0];
let target = regions.expand.select(regions.first, Size::zero());
frame.resize(target, Align::LEFT_TOP);
frames[0].resize(target, Align::LEFT_TOP);
Ok(frames)
}

View File

@ -176,7 +176,8 @@ impl<'a> StackLayouter<'a> {
self.finish_region();
}
// Align nodes' block-axis alignment is respected by the stack node.
// Block-axis alignment of the `AlignNode` is respected
// by the stack node.
let align = node
.downcast::<AlignNode>()
.and_then(|node| node.aligns.get(self.axis))

View File

@ -34,7 +34,7 @@ impl Layout for RexNode {
.ok_or("failed to find math font")
.at(span)?;
// Prepare the font.
// Prepare the font context.
let face = ctx.fonts.get(face_id);
let ctx = face
.math()

View File

@ -19,5 +19,5 @@ pub use crate::model::{
StyleChain, StyleMap, StyleVec,
};
pub use crate::syntax::{Span, Spanned};
pub use crate::util::{EcoString, OptionExt};
pub use crate::util::EcoString;
pub use crate::Context;

View File

@ -20,8 +20,8 @@ pub type OverlineNode = DecoNode<OVERLINE>;
#[node(showable)]
impl<const L: DecoLine> DecoNode<L> {
/// How to stroke the line. The text color and thickness read from the font
/// tables if `auto`.
/// How to stroke the line. The text color and thickness are read from the
/// font tables if `auto`.
#[property(shorthand, resolve, fold)]
pub const STROKE: Smart<RawStroke> = Smart::Auto;
/// Position of the line relative to the baseline, read from the font tables

View File

@ -1,7 +1,7 @@
use super::TextNode;
use crate::library::prelude::*;
/// Link text and other elements to an URL.
/// Link text and other elements to a destination.
#[derive(Debug, Hash)]
pub struct LinkNode {
/// The destination the link points to.
@ -15,7 +15,7 @@ impl LinkNode {
/// The fill color of text in the link. Just the surrounding text color
/// if `auto`.
pub const FILL: Smart<Paint> = Smart::Auto;
/// Whether to underline link.
/// Whether to underline the link.
pub const UNDERLINE: Smart<bool> = Smart::Auto;
fn construct(_: &mut Machine, args: &mut Args) -> TypResult<Content> {
@ -35,10 +35,10 @@ castable! {
Expected: "string or dictionary with `page`, `x`, and `y` keys",
Value::Str(string) => Self::Url(string),
Value::Dict(dict) => {
let page: i64 = dict.get(&"page".into())?.clone().cast()?;
let x: RawLength = dict.get(&"x".into())?.clone().cast()?;
let y: RawLength = dict.get(&"y".into())?.clone().cast()?;
Self::Internal(Location { page: page as usize, pos: Point::new(x.length, y.length) })
let page = dict.get("page")?.clone().cast()?;
let x: RawLength = dict.get("x")?.clone().cast()?;
let y: RawLength = dict.get("y")?.clone().cast()?;
Self::Internal(Location { page, pos: Point::new(x.length, y.length) })
},
}

View File

@ -59,13 +59,13 @@ impl TextNode {
/// The amount of space that should be added between characters.
#[property(resolve)]
pub const TRACKING: RawLength = RawLength::zero();
/// The width of spaces relative to the default space width.
/// The width of spaces relative to the font's space width.
#[property(resolve)]
pub const SPACING: Relative<RawLength> = Relative::one();
/// The offset of the baseline.
#[property(resolve)]
pub const BASELINE: RawLength = RawLength::zero();
/// Whether glyphs can hang over into the margin.
/// Whether certain glyphs can hang over into the margin.
pub const OVERHANG: bool = true;
/// The top end of the text bounding box.
pub const TOP_EDGE: TextEdge = TextEdge::Metric(VerticalFontMetric::CapHeight);
@ -114,7 +114,7 @@ impl TextNode {
/// Whether the font weight should be increased by 300.
#[property(skip, fold)]
pub const BOLD: Toggle = false;
/// Whether the the font style should be inverted.
/// Whether the font style should be inverted.
#[property(skip, fold)]
pub const ITALIC: Toggle = false;
/// A case transformation that should be applied to the text.
@ -123,7 +123,7 @@ impl TextNode {
/// Whether small capital glyphs should be used. ("smcp")
#[property(skip)]
pub const SMALLCAPS: bool = false;
/// An URL the text should link to.
/// A destination the text should be linked to.
#[property(skip, referenced)]
pub const LINK: Option<Destination> = None;
/// Decorative lines.
@ -168,7 +168,7 @@ impl TextNode {
}
}
/// A font family like "Arial".
/// A lowercased font family like "arial".
#[derive(Clone, Eq, PartialEq, Hash)]
pub struct FontFamily(EcoString);
@ -338,7 +338,7 @@ impl Resolve for Smart<Hyphenate> {
pub struct StylisticSet(u8);
impl StylisticSet {
/// Creates a new set, clamping to 1-20.
/// Create a new set, clamping to 1-20.
pub fn new(index: u8) -> Self {
Self(index.clamp(1, 20))
}
@ -363,7 +363,7 @@ castable! {
pub enum NumberType {
/// Numbers that fit well with capital text. ("lnum")
Lining,
/// Numbers that fit well into flow of upper- and lowercase text. ("onum")
/// Numbers that fit well into a flow of upper- and lowercase text. ("onum")
OldStyle,
}
@ -396,28 +396,6 @@ castable! {
},
}
/// How to position numbers.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum NumberPosition {
/// Numbers are positioned on the same baseline as text.
Normal,
/// Numbers are smaller and placed at the bottom. ("subs")
Subscript,
/// Numbers are smaller and placed at the top. ("sups")
Superscript,
}
castable! {
NumberPosition,
Expected: "string",
Value::Str(string) => match string.as_str() {
"normal" => Self::Normal,
"subscript" => Self::Subscript,
"superscript" => Self::Superscript,
_ => Err(r#"expected "normal", "subscript" or "superscript""#)?,
},
}
castable! {
Vec<(Tag, u32)>,
Expected: "array of strings or dictionary mapping tags to integers",
@ -445,12 +423,12 @@ impl Fold for Vec<(Tag, u32)> {
}
}
/// Convert text to lowercase.
/// Convert a string or content to lowercase.
pub fn lower(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
case(Case::Lower, args)
}
/// Convert text to uppercase.
/// Convert a string or content to uppercase.
pub fn upper(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
case(Case::Upper, args)
}
@ -475,7 +453,7 @@ pub enum Case {
}
impl Case {
/// Apply the case to a string of text.
/// Apply the case to a string.
pub fn apply(self, text: &str) -> String {
match self {
Self::Upper => text.to_uppercase(),

View File

@ -297,7 +297,7 @@ impl Segment<'_> {
/// A prepared item in a paragraph layout.
#[derive(Debug)]
enum Item<'a> {
/// A shaped text run with consistent direction.
/// A shaped text run with consistent style and direction.
Text(ShapedText<'a>),
/// Absolute spacing between other items.
Absolute(Length),
@ -305,7 +305,7 @@ enum Item<'a> {
Fractional(Fraction),
/// A layouted child node.
Frame(Frame),
/// A repeating node.
/// A repeating node that fills the remaining space.
Repeat(&'a RepeatNode, StyleChain<'a>),
/// A pin identified by index.
Pin(usize),
@ -330,7 +330,7 @@ impl<'a> Item<'a> {
}
}
/// The natural width of the item.
/// The natural layouted width of the item.
fn width(&self) -> Length {
match self {
Self::Text(shaped) => shaped.width,
@ -366,7 +366,7 @@ struct Line<'a> {
last: Option<Item<'a>>,
/// The width of the line.
width: Length,
/// Whether the line is allowed to be justified.
/// Whether the line should be justified.
justify: bool,
/// Whether the line ends with a hyphen or dash, either naturally or through
/// hyphenation.
@ -403,7 +403,7 @@ impl<'a> Line<'a> {
self.items().skip(start).take(end - start)
}
// How many justifiable glyphs the line contains.
/// How many justifiable glyphs the line contains.
fn justifiables(&self) -> usize {
let mut count = 0;
for shaped in self.items().filter_map(Item::text) {
@ -528,7 +528,7 @@ fn prepare<'a>(
let mut cursor = 0;
let mut items = vec![];
// Layout the children and collect them into items.
// Shape / layout the children and collect them into items.
for (segment, styles) in segments {
let end = cursor + segment.len();
match segment {
@ -654,7 +654,7 @@ fn linebreak<'a>(
}
/// Perform line breaking in simple first-fit style. This means that we build
/// lines a greedily, always taking the longest possible line. This may lead to
/// lines greedily, always taking the longest possible line. This may lead to
/// very unbalanced lines, but is fast and simple.
fn linebreak_simple<'a>(
p: &'a Preparation<'a>,
@ -670,8 +670,8 @@ fn linebreak_simple<'a>(
let mut attempt = line(p, fonts, start .. end, mandatory, hyphen);
// If the line doesn't fit anymore, we push the last fitting attempt
// into the stack and rebuild the line from its end. The resulting
// line cannot be broken up further.
// into the stack and rebuild the line from the attempt's end. The
// resulting line cannot be broken up further.
if !width.fits(attempt.width) {
if let Some((last_attempt, last_end)) = last.take() {
lines.push(last_attempt);
@ -771,17 +771,18 @@ fn linebreak_optimized<'a>(
ratio = ratio.min(10.0);
// Determine the cost of the line.
let mut cost = if ratio < if attempt.justify { MIN_RATIO } else { 0.0 } {
let min_ratio = if attempt.justify { MIN_RATIO } else { 0.0 };
let mut cost = if ratio < min_ratio {
// The line is overfull. This is the case if
// - justification is on, but we'd need to shrink to much
// - justification is on, but we'd need to shrink too much
// - justification is off and the line just doesn't fit
// Since any longer line will also be overfull, we can deactivate
// this breakpoint.
active = i + 1;
MAX_COST
} else if eof {
// This is the final line and its not overfull since then
// we would have taken the above branch.
// This is the final line and it's not overfull since then we
// would have taken the above branch.
0.0
} else if mandatory {
// This is a mandatory break and the line is not overfull, so it

View File

@ -1,6 +1,6 @@
use crate::library::prelude::*;
/// Fill space by repeating something horizontally.
/// A node that should be repeated to fill up a line.
#[derive(Debug, Hash)]
pub struct RepeatNode(pub LayoutNode);

View File

@ -43,7 +43,9 @@ pub struct ShapedGlyph {
pub x_offset: Em,
/// The vertical offset of the glyph.
pub y_offset: Em,
/// A value that is the same for all glyphs belong to one cluster.
/// The byte index in the source text where this glyph's cluster starts. A
/// cluster is a sequence of one or multiple glyphs that cannot be
/// separated and must always be treated as a union.
pub cluster: usize,
/// Whether splitting the shaping result before this glyph would yield the
/// same results as shaping the parts to both sides of `text_index`
@ -67,9 +69,9 @@ impl ShapedGlyph {
/// A side you can go toward.
enum Side {
/// Go toward the west.
/// To the left-hand side.
Left,
/// Go toward the east.
/// To the right-hand side.
Right,
}
@ -141,7 +143,7 @@ impl<'a> ShapedText<'a> {
frame
}
/// Measure the top and bottom extent of a this text.
/// Measure the top and bottom extent of this text.
fn measure(&self, fonts: &mut FontStore) -> (Length, Length) {
let mut top = Length::zero();
let mut bottom = Length::zero();
@ -498,7 +500,7 @@ fn shape_tofus(ctx: &mut ShapingContext, base: usize, text: &str, face_id: FaceI
}
}
/// Apply tracking and spacing to a slice of shaped glyphs.
/// Apply tracking and spacing to the shaped glyphs.
fn track_and_space(ctx: &mut ShapingContext) {
let tracking = Em::from_length(ctx.styles.get(TextNode::TRACKING), ctx.size);
let spacing = ctx
@ -522,7 +524,7 @@ fn track_and_space(ctx: &mut ShapingContext) {
}
}
/// Resolve the font variant with `STRONG` and `EMPH` factored in.
/// Resolve the font variant with `BOLD` and `ITALIC` factored in.
pub fn variant(styles: StyleChain) -> FontVariant {
let mut variant = FontVariant::new(
styles.get(TextNode::STYLE),

View File

@ -3,11 +3,12 @@ use crate::font::FontStore;
use crate::library::prelude::*;
use crate::util::EcoString;
/// Sub or superscript text. The text is rendered smaller and its baseline is raised.
/// Sub or superscript text.
///
/// To provide the best typography possible, we first try to transform the
/// text to superscript codepoints. If that fails, we fall back to rendering
/// shrunk normal letters in a raised way.
/// The text is rendered smaller and its baseline is raised. To provide the best
/// typography possible, we first try to transform the text to superscript
/// codepoints. If that fails, we fall back to rendering shrunk normal letters
/// in a raised way.
#[derive(Debug, Hash)]
pub struct ShiftNode<const S: ScriptKind>(pub Content);
@ -19,7 +20,8 @@ pub type SubNode = ShiftNode<SUBSCRIPT>;
#[node]
impl<const S: ScriptKind> ShiftNode<S> {
/// Whether to prefer the dedicated sub- and superscript characters of the font.
/// Whether to prefer the dedicated sub- and superscript characters of the
/// font.
pub const TYPOGRAPHIC: bool = true;
/// The baseline shift for synthetic sub- and superscripts.
pub const BASELINE: RawLength =
@ -60,9 +62,8 @@ impl<const S: ScriptKind> Show for ShiftNode<S> {
}
}
/// Find and transform the text contained in `content` iff it only consists of
/// `Text`, `Space`, and `Empty` leaf nodes. The text is transformed to the
/// given script kind.
/// Find and transform the text contained in `content` to the given script kind
/// if and only if it only consists of `Text`, `Space`, and `Empty` leaf nodes.
fn search_text(content: &Content, mode: ScriptKind) -> Option<EcoString> {
match content {
Content::Text(_) => {

View File

@ -2,7 +2,7 @@ use std::cmp::Ordering;
use crate::library::prelude::*;
/// Convert a value to a integer.
/// Convert a value to an integer.
pub fn int(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
let Spanned { v, span } = args.expect("value")?;
Ok(Value::Int(match v {

View File

@ -6,7 +6,7 @@ pub fn repr(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
Ok(args.expect::<Value>("value")?.repr().into())
}
/// Cconvert a value to a string.
/// Convert a value to a string.
pub fn str(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
let Spanned { v, span } = args.expect("value")?;
Ok(Value::Str(match v {
@ -31,20 +31,20 @@ pub fn regex(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
/// Converts an integer into one or multiple letters.
pub fn letter(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
convert(Numbering::Letter, args)
numbered(Numbering::Letter, args)
}
/// Converts an integer into a roman numeral.
pub fn roman(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
convert(Numbering::Roman, args)
numbered(Numbering::Roman, args)
}
/// Convert a number into a symbol.
pub fn symbol(_: &mut Machine, args: &mut Args) -> TypResult<Value> {
convert(Numbering::Symbol, args)
numbered(Numbering::Symbol, args)
}
fn convert(numbering: Numbering, args: &mut Args) -> TypResult<Value> {
fn numbered(numbering: Numbering, args: &mut Args) -> TypResult<Value> {
let n = args.expect::<usize>("non-negative integer")?;
Ok(Value::Str(numbering.apply(n)))
}

View File

@ -80,7 +80,7 @@ mod tests {
let path = Path::new("PTSans.ttf");
let loader = MemLoader::new().with(path, &data[..]);
// Test that the found was found.
// Test that the face was found.
let info = &loader.faces[0];
assert_eq!(info.path, path);
assert_eq!(info.index, 0);

View File

@ -52,7 +52,7 @@ SUBCOMMANDS:
--fonts List all discovered system fonts
";
/// Highlight a .typ file into a HTML file.
/// Highlight a .typ file into an HTML file.
struct HighlightCommand {
input: PathBuf,
output: PathBuf,
@ -72,7 +72,7 @@ OPTIONS:
-h, --help Print this help
";
/// List discovered fonts.
/// List discovered system fonts.
struct FontsCommand {
variants: bool,
}
@ -142,7 +142,7 @@ fn parse_args() -> StrResult<Command> {
}
/// Parse two freestanding path arguments, with the output path being optional.
/// If it is omitted, it is determined from the input path's filename with the
/// If it is omitted, it is determined from the input path's file stem plus the
/// given extension.
fn parse_input_output(args: &mut Arguments, ext: &str) -> StrResult<(PathBuf, PathBuf)> {
let input: PathBuf = args.free_from_str().map_err(|_| "missing input file")?;
@ -229,7 +229,7 @@ fn typeset(command: TypesetCommand) -> StrResult<()> {
Ok(())
}
/// Print diagnostics messages to the terminal.
/// Print diagnostic messages to the terminal.
fn print_diagnostics(
sources: &SourceStore,
errors: Vec<Error>,

View File

@ -14,7 +14,7 @@ thread_local! {
/// A map from hashes to cache entries.
type Cache = HashMap<u64, CacheEntry>;
/// Access the cache.
/// Access the cache mutably.
fn with<F, R>(f: F) -> R
where
F: FnOnce(&mut Cache) -> R,
@ -24,7 +24,8 @@ where
/// An entry in the cache.
struct CacheEntry {
/// The memoized function's result plus constraints on the input.
/// The memoized function's result plus constraints on the input in the form
/// `(O, I::Constraint)`.
data: Box<dyn Any>,
/// How many evictions have passed since the entry has been last used.
age: usize,
@ -32,9 +33,9 @@ struct CacheEntry {
/// Execute a memoized function call.
///
/// This hashes all inputs to the function and then either returns a cached
/// version from the thread-local cache or executes the function and saves a
/// copy of the results in the cache.
/// This [tracks](Track) all inputs to the function and then either returns a
/// cached version from the thread-local cache or executes the function and
/// saves a copy of the results in the cache.
///
/// Note that `f` must be a pure function.
pub fn memoized<I, O>(input: I, f: fn(input: I) -> (O, I::Constraint)) -> O
@ -48,7 +49,7 @@ where
/// Execute a function and then call another function with a reference to the
/// result.
///
/// This hashes all inputs to the function and then either
/// This [tracks](Track) all inputs to the function and then either
/// - calls `g` with a cached version from the thread-local cache,
/// - or executes `f`, calls `g` with the fresh version and saves the result in
/// the cache.

View File

@ -154,7 +154,7 @@ impl Content {
Self::Show(node.pack(), None)
}
/// Create a new sequence nodes from multiples nodes.
/// Create a new sequence node from multiple nodes.
pub fn sequence(seq: Vec<Self>) -> Self {
match seq.as_slice() {
[] => Self::Empty,
@ -204,7 +204,7 @@ impl Content {
Self::Styled(Arc::new((self, styles)))
}
/// Assign a role to this content by adding a style map.
/// Assign a semantic role to this content.
pub fn role(self, role: Role) -> Self {
self.styled_with_entry(StyleEntry::Role(role))
}

View File

@ -19,7 +19,7 @@ use crate::Context;
/// A node that can be layouted into a sequence of regions.
///
/// Layouting return one frame per used region.
/// Layouting returns one frame per used region.
pub trait Layout: 'static {
/// Layout this node into the given regions, producing frames.
fn layout(
@ -377,7 +377,7 @@ impl Layout for SizedNode {
struct FillNode {
/// How to fill the frames resulting from the `child`.
fill: Paint,
/// The node to fill.
/// The node whose frames should be filled.
child: LayoutNode,
}
@ -402,7 +402,7 @@ impl Layout for FillNode {
struct StrokeNode {
/// How to stroke the frames resulting from the `child`.
stroke: Stroke,
/// The node to stroke.
/// The node whose frames should be stroked.
child: LayoutNode,
}

View File

@ -1,6 +1,7 @@
use std::cell::Cell;
use std::fmt::{self, Debug, Formatter};
use std::hash::{Hash, Hasher};
use std::num::NonZeroUsize;
use std::sync::Arc;
use super::Content;
@ -55,7 +56,7 @@ impl LocateNode {
Self(Arc::new(Repr::Entry(EntryNode { group, recipe, value })))
}
/// Create a new node with access to a group's members.
/// Create a new node with access to all of a group's members.
pub fn all(group: Group, recipe: Spanned<Func>) -> Self {
Self(Arc::new(Repr::All(AllNode { group, recipe })))
}
@ -278,7 +279,7 @@ impl PinBoard {
locate_in_frame(
&mut self.list,
&mut flow,
1 + i,
NonZeroUsize::new(1 + i).unwrap(),
frame,
Transform::identity(),
);
@ -295,7 +296,7 @@ impl PinBoard {
fn locate_in_frame(
pins: &mut [Pin],
flow: &mut usize,
page: usize,
page: NonZeroUsize,
frame: &Frame,
ts: Transform,
) {
@ -384,7 +385,10 @@ impl Pin {
impl Default for Pin {
fn default() -> Self {
Self {
loc: Location { page: 0, pos: Point::zero() },
loc: Location {
page: NonZeroUsize::new(1).unwrap(),
pos: Point::zero(),
},
flow: 0,
group: None,
value: None,

View File

@ -18,7 +18,7 @@ pub struct Property {
pub key: KeyId,
/// The id of the node the property belongs to.
pub node: NodeId,
/// Whether the property should only affects the first node down the
/// Whether the property should only affect the first node down the
/// hierarchy. Used by constructors.
pub scoped: bool,
/// The property's value.
@ -143,10 +143,10 @@ pub trait Key<'a>: Copy + 'static {
/// The name of the property, used for debug printing.
const NAME: &'static str;
/// The ids of the key and of the node the key belongs to.
/// The id of the node the key belongs to.
fn node() -> NodeId;
/// Compute an output value from a sequence of values belong to this key,
/// Compute an output value from a sequence of values belonging to this key,
/// folding if necessary.
fn get(
chain: StyleChain<'a>,

View File

@ -216,7 +216,7 @@ impl StyleEntry {
}
}
/// The highest-level kind of of structure the entry interrupts.
/// The highest-level kind of structure the entry interrupts.
pub fn interruption(&self) -> Option<Interruption> {
match self {
Self::Property(property) => property.interruption(),
@ -328,7 +328,7 @@ impl<'a> StyleChain<'a> {
Ok(realized)
}
/// Retrieve the current role
/// Retrieve the current role.
pub fn role(self) -> Option<Role> {
let mut depth = 0;
@ -522,6 +522,15 @@ impl<T> StyleVec<T> {
}
}
/// Iterate over references to the contained items and associated style maps.
pub fn iter(&self) -> impl Iterator<Item = (&T, &StyleMap)> + '_ {
self.items().zip(
self.maps
.iter()
.flat_map(|(map, count)| iter::repeat(map).take(*count)),
)
}
/// Iterate over the contained items.
pub fn items(&self) -> std::slice::Iter<'_, T> {
self.items.iter()
@ -535,15 +544,6 @@ impl<T> StyleVec<T> {
pub fn styles(&self) -> impl Iterator<Item = &StyleMap> {
self.maps.iter().map(|(map, _)| map)
}
/// Iterate over references to the contained items and associated style maps.
pub fn iter(&self) -> impl Iterator<Item = (&T, &StyleMap)> + '_ {
self.items().zip(
self.maps
.iter()
.flat_map(|(map, count)| iter::repeat(map).take(*count)),
)
}
}
impl<T> Default for StyleVec<T> {

View File

@ -54,7 +54,7 @@ impl Reparser<'_> {
outermost: bool,
safe_to_replace: bool,
) -> Option<Range<usize>> {
let is_markup = matches!(node.kind(), NodeKind::Markup(_));
let is_markup = matches!(node.kind(), NodeKind::Markup { .. });
let original_count = node.children().len();
let original_offset = offset;
@ -96,9 +96,8 @@ impl Reparser<'_> {
} else {
// Update compulsory state of `ahead_nontrivia`.
if let Some(ahead_nontrivia) = ahead.as_mut() {
match child.kind() {
NodeKind::Space(n) if n > &0 => ahead_nontrivia.newline(),
_ => {}
if let NodeKind::Space { newlines: (1 ..) } = child.kind() {
ahead_nontrivia.newline();
}
}
@ -156,7 +155,6 @@ impl Reparser<'_> {
// Do not allow replacement of elements inside of constructs whose
// opening and closing brackets look the same.
let safe_inside = node.kind().is_bounded();
let child = &mut node.children_mut()[pos.idx];
let prev_len = child.len();
let prev_descendants = child.descendants();
@ -200,8 +198,8 @@ impl Reparser<'_> {
// Make sure this is a markup node and that we may replace. If so, save
// the current indent.
let indent = match node.kind() {
NodeKind::Markup(n) if safe_to_replace => *n,
let min_indent = match node.kind() {
NodeKind::Markup { min_indent } if safe_to_replace => *min_indent,
_ => return None,
};
@ -220,7 +218,7 @@ impl Reparser<'_> {
self.replace(
node,
ReparseMode::MarkupElements(at_start, indent),
ReparseMode::MarkupElements { at_start, min_indent },
start.idx .. end.idx + 1,
superseded_span,
outermost,
@ -261,15 +259,17 @@ impl Reparser<'_> {
&self.src[newborn_span.start ..],
newborn_span.len(),
),
ReparseMode::MarkupElements(at_start, indent) => reparse_markup_elements(
&prefix,
&self.src[newborn_span.start ..],
newborn_span.len(),
differential,
&node.children().as_slice()[superseded_start ..],
at_start,
indent,
),
ReparseMode::MarkupElements { at_start, min_indent } => {
reparse_markup_elements(
&prefix,
&self.src[newborn_span.start ..],
newborn_span.len(),
differential,
&node.children().as_slice()[superseded_start ..],
at_start,
min_indent,
)
}
}?;
// Do not accept unclosed nodes if the old node wasn't at the right edge
@ -294,12 +294,12 @@ struct NodePos {
offset: usize,
}
/// Encodes the state machine of the search for the node which is pending for
/// Encodes the state machine of the search for the nodes that are pending for
/// replacement.
#[derive(Clone, Copy, Debug, PartialEq)]
enum SearchState {
/// Neither an end nor a start have been found as of now.
/// The last non-whitespace child is continually saved.
/// The latest non-trivia child is continually saved.
NoneFound,
/// The search has concluded by finding a node that fully contains the
/// modifications.
@ -332,15 +332,18 @@ impl SearchState {
}
}
/// An ahead element with an index and whether it is `at_start`.
/// An ahead node with an index and whether it is `at_start`.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Ahead {
/// The position of the node.
pos: NodePos,
/// The `at_start` before this node.
at_start: bool,
/// The kind of ahead node.
kind: AheadKind,
}
/// The kind of ahead element.
/// The kind of ahead node.
#[derive(Clone, Copy, Debug, PartialEq)]
enum AheadKind {
/// A normal non-trivia child has been found.
@ -382,9 +385,9 @@ enum ReparseMode {
Code,
/// Reparse a content block, including its square brackets.
Content,
/// Reparse elements of the markup. The variant carries whether the node is
/// `at_start` and the minimum indent of the containing markup node.
MarkupElements(bool, usize),
/// Reparse elements of the markup. Also specifies the initial `at_start`
/// state for the reparse and the minimum indent of the reparsed nodes.
MarkupElements { at_start: bool, min_indent: usize },
}
#[cfg(test)]

View File

@ -77,7 +77,7 @@ fn reparse_content_block(
Some((vec![first], terminated, 1))
}
/// Reparse some markup elements without the topmost node.
/// Reparse a sequence of markup elements without the topmost node.
///
/// Returns `Some` if all of the input was consumed.
fn reparse_markup_elements(
@ -87,7 +87,7 @@ fn reparse_markup_elements(
differential: isize,
reference: &[SyntaxNode],
mut at_start: bool,
column: usize,
min_indent: usize,
) -> Option<(Vec<SyntaxNode>, bool, usize)> {
let mut p = Parser::with_prefix(prefix, src, TokenMode::Markup);
@ -98,8 +98,8 @@ fn reparse_markup_elements(
let mut stopped = false;
'outer: while !p.eof() {
if let Some(NodeKind::Space(1 ..)) = p.peek() {
if p.column(p.current_end()) < column {
if let Some(NodeKind::Space { newlines: (1 ..) }) = p.peek() {
if p.column(p.current_end()) < min_indent {
return None;
}
}
@ -155,7 +155,7 @@ fn reparse_markup_elements(
/// If `at_start` is true, things like headings that may only appear at the
/// beginning of a line or content block are initially allowed.
fn markup(p: &mut Parser, mut at_start: bool) {
p.perform(NodeKind::Markup(0), |p| {
p.perform(NodeKind::Markup { min_indent: 0 }, |p| {
while !p.eof() {
markup_node(p, &mut at_start);
}
@ -168,18 +168,18 @@ fn markup_line(p: &mut Parser) {
}
/// Parse markup that stays right of the given `column`.
fn markup_indented(p: &mut Parser, column: usize) {
fn markup_indented(p: &mut Parser, min_indent: usize) {
p.eat_while(|t| match t {
NodeKind::Space(n) => *n == 0,
NodeKind::Space { newlines } => *newlines == 0,
NodeKind::LineComment | NodeKind::BlockComment => true,
_ => false,
});
let mut at_start = false;
p.perform(NodeKind::Markup(column), |p| {
p.perform(NodeKind::Markup { min_indent }, |p| {
while !p.eof() {
if let Some(NodeKind::Space(1 ..)) = p.peek() {
if p.column(p.current_end()) < column {
if let Some(NodeKind::Space { newlines: (1 ..) }) = p.peek() {
if p.column(p.current_end()) < min_indent {
break;
}
}
@ -198,7 +198,7 @@ fn markup_node(p: &mut Parser, at_start: &mut bool) {
match token {
// Whitespace.
NodeKind::Space(newlines) => {
NodeKind::Space { newlines } => {
*at_start |= *newlines > 0;
p.eat();
return;
@ -284,7 +284,7 @@ fn heading(p: &mut Parser, at_start: bool) {
while p.eat_if(NodeKind::Eq) {}
if at_start && p.peek().map_or(true, |kind| kind.is_space()) {
p.eat_while(|kind| kind == &NodeKind::Space(0));
p.eat_while(|kind| *kind == NodeKind::Space { newlines: 0 });
markup_line(p);
marker.end(p, NodeKind::Heading);
} else {
@ -299,9 +299,9 @@ fn list_node(p: &mut Parser, at_start: bool) {
let text: EcoString = p.peek_src().into();
p.assert(NodeKind::Minus);
let column = p.column(p.prev_end());
if at_start && p.eat_if(NodeKind::Space(0)) && !p.eof() {
markup_indented(p, column);
let min_indent = p.column(p.prev_end());
if at_start && p.eat_if(NodeKind::Space { newlines: 0 }) && !p.eof() {
markup_indented(p, min_indent);
marker.end(p, NodeKind::List);
} else {
marker.convert(p, NodeKind::Text(text));
@ -314,16 +314,16 @@ fn enum_node(p: &mut Parser, at_start: bool) {
let text: EcoString = p.peek_src().into();
p.eat();
let column = p.column(p.prev_end());
if at_start && p.eat_if(NodeKind::Space(0)) && !p.eof() {
markup_indented(p, column);
let min_indent = p.column(p.prev_end());
if at_start && p.eat_if(NodeKind::Space { newlines: 0 }) && !p.eof() {
markup_indented(p, min_indent);
marker.end(p, NodeKind::Enum);
} else {
marker.convert(p, NodeKind::Text(text));
}
}
/// Parse an expression within markup mode.
/// Parse an expression within a markup mode.
fn markup_expr(p: &mut Parser) {
// Does the expression need termination or can content follow directly?
let stmt = matches!(
@ -556,10 +556,10 @@ fn parenthesized(p: &mut Parser, atomic: bool) -> ParseResult {
enum CollectionKind {
/// The collection is only one item and has no comma.
Group,
/// The collection starts with a positional and has more items or a trailing
/// comma.
/// The collection starts with a positional item and has multiple items or a
/// trailing comma.
Positional,
/// The collection starts with a named item.
/// The collection starts with a colon or named item.
Named,
}
@ -672,7 +672,7 @@ fn array(p: &mut Parser, marker: Marker) {
}
/// Convert a collection into a dictionary, producing errors for anything other
/// than named pairs.
/// than named and keyed pairs.
fn dict(p: &mut Parser, marker: Marker) {
let mut used = HashSet::new();
marker.filter_children(p, |x| match x.kind() {
@ -731,11 +731,11 @@ fn code(p: &mut Parser) {
p.end_group();
// Forcefully skip over newlines since the group's contents can't.
p.eat_while(|t| matches!(t, NodeKind::Space(_)));
p.eat_while(NodeKind::is_space);
}
}
// Parse a content block: `[...]`.
/// Parse a content block: `[...]`.
fn content_block(p: &mut Parser) {
p.perform(NodeKind::ContentBlock, |p| {
p.start_group(Group::Bracket);
@ -857,7 +857,7 @@ fn wrap_expr(p: &mut Parser) -> ParseResult {
})
}
/// Parse an if expresion.
/// Parse an if-else expression.
fn if_expr(p: &mut Parser) -> ParseResult {
p.perform(NodeKind::IfExpr, |p| {
p.assert(NodeKind::If);
@ -886,7 +886,7 @@ fn while_expr(p: &mut Parser) -> ParseResult {
})
}
/// Parse a for expression.
/// Parse a for-in expression.
fn for_expr(p: &mut Parser) -> ParseResult {
p.perform(NodeKind::ForExpr, |p| {
p.assert(NodeKind::For);

View File

@ -24,7 +24,7 @@ pub struct Parser<'s> {
children: Vec<SyntaxNode>,
/// Whether the last group was not correctly terminated.
unterminated_group: bool,
/// Whether a group terminator was found, that did not close a group.
/// Whether a group terminator was found that did not close a group.
stray_terminator: bool,
}
@ -58,9 +58,10 @@ impl<'s> Parser<'s> {
self.children
}
/// End the parsing process and return the parsed children and whether the
/// last token was terminated if all groups were terminated correctly or
/// `None` otherwise.
/// End the parsing process and return
/// - the parsed children and whether the last token was terminated, if all
/// groups were terminated correctly, or
/// - `None` otherwise.
pub fn consume(self) -> Option<(Vec<SyntaxNode>, bool)> {
self.terminated().then(|| (self.children, self.tokens.terminated()))
}
@ -131,7 +132,7 @@ impl<'s> Parser<'s> {
self.repeek();
}
/// Eat if the current token it is the given one.
/// Consume the current token if it is the given one.
pub fn eat_if(&mut self, kind: NodeKind) -> bool {
let at = self.at(kind);
if at {
@ -150,7 +151,8 @@ impl<'s> Parser<'s> {
}
}
/// Eat if the current token is the given one and produce an error if not.
/// Consume the current token if it is the given one and produce an error if
/// not.
pub fn expect(&mut self, kind: NodeKind) -> ParseResult {
let at = self.peek() == Some(&kind);
if at {
@ -162,7 +164,7 @@ impl<'s> Parser<'s> {
}
}
/// Eat, debug-asserting that the token is the given one.
/// Consume the current token, debug-asserting that it is the given one.
#[track_caller]
pub fn assert(&mut self, kind: NodeKind) {
debug_assert_eq!(self.peek(), Some(&kind));
@ -179,8 +181,8 @@ impl<'s> Parser<'s> {
if self.eof { None } else { self.current.as_ref() }
}
/// Peek at the current token, if it follows immediately after the last one
/// without any trivia in between.
/// Peek at the current token, but only if it follows immediately after the
/// last one without any trivia in between.
pub fn peek_direct(&self) -> Option<&NodeKind> {
if self.prev_end() == self.current_start() {
self.peek()
@ -267,9 +269,9 @@ impl<'s> Parser<'s> {
Group::Imports => None,
} {
if self.current.as_ref() == Some(&end) {
// If another group closes after a group with the missing terminator,
// its scope of influence ends here and no longer taints the rest of the
// reparse.
// If another group closes after a group with the missing
// terminator, its scope of influence ends here and no longer
// taints the rest of the reparse.
self.unterminated_group = false;
// Bump the delimiter and return. No need to rescan in this
@ -330,7 +332,7 @@ impl<'s> Parser<'s> {
Some(NodeKind::Underscore) => self.inside(Group::Emph),
Some(NodeKind::Semicolon) => self.inside(Group::Expr),
Some(NodeKind::From) => self.inside(Group::Imports),
Some(NodeKind::Space(n)) => self.space_ends_group(*n),
Some(NodeKind::Space { newlines }) => self.space_ends_group(*newlines),
Some(_) => false,
None => true,
};
@ -339,7 +341,7 @@ impl<'s> Parser<'s> {
/// Returns whether the given type can be skipped over.
fn is_trivia(&self, token: &NodeKind) -> bool {
match token {
NodeKind::Space(n) => !self.space_ends_group(*n),
NodeKind::Space { newlines } => !self.space_ends_group(*newlines),
NodeKind::LineComment => true,
NodeKind::BlockComment => true,
_ => false,
@ -491,8 +493,8 @@ impl Marker {
/// A logical group of tokens, e.g. `[...]`.
#[derive(Debug)]
struct GroupEntry {
/// The kind of group this is. This decides which tokens will end the group.
/// For example, a [`Group::Paren`] will be ended by
/// The kind of group this is. This decides which token(s) will end the
/// group. For example, a [`Group::Paren`] will be ended by
/// [`Token::RightParen`].
pub kind: Group,
/// The mode the parser was in _before_ the group started (to which we go

View File

@ -47,7 +47,7 @@ pub fn resolve_hex(sequence: &str) -> Option<char> {
u32::from_str_radix(sequence, 16).ok().and_then(std::char::from_u32)
}
/// Resolve the language tag and trims the raw text.
/// Resolve the language tag and trim the raw text.
pub fn resolve_raw(column: usize, backticks: usize, text: &str) -> RawNode {
if backticks > 1 {
let (tag, inner) = split_at_lang_tag(text);
@ -77,7 +77,7 @@ fn split_at_lang_tag(raw: &str) -> (&str, &str) {
/// Trim raw text and split it into lines.
///
/// Returns whether at least one newline was contained in `raw`.
/// Also returns whether at least one newline was contained in `raw`.
fn trim_and_split_raw(column: usize, mut raw: &str) -> (String, bool) {
// Trims one space at the start.
raw = raw.strip_prefix(' ').unwrap_or(raw);

View File

@ -110,7 +110,9 @@ impl<'s> Iterator for Tokens<'s> {
']' => NodeKind::RightBracket,
// Whitespace.
' ' if self.s.done() || !self.s.at(char::is_whitespace) => NodeKind::Space(0),
' ' if self.s.done() || !self.s.at(char::is_whitespace) => {
NodeKind::Space { newlines: 0 }
}
c if c.is_whitespace() => self.whitespace(),
// Comments with special case for URLs.
@ -260,7 +262,7 @@ impl<'s> Tokens<'s> {
}
}
NodeKind::Space(newlines)
NodeKind::Space { newlines }
}
fn backslash(&mut self) -> NodeKind {
@ -681,8 +683,8 @@ mod tests {
use SpanPos::*;
use TokenMode::{Code, Markup};
fn Error(pos: SpanPos, message: &str) -> NodeKind {
NodeKind::Error(pos, message.into())
fn Space(newlines: usize) -> NodeKind {
NodeKind::Space { newlines }
}
fn Raw(text: &str, lang: Option<&str>, block: bool) -> NodeKind {
@ -709,6 +711,10 @@ mod tests {
NodeKind::Ident(ident.into())
}
fn Error(pos: SpanPos, message: &str) -> NodeKind {
NodeKind::Error(pos, message.into())
}
fn Invalid(invalid: &str) -> NodeKind {
NodeKind::Unknown(invalid.into())
}

View File

@ -107,7 +107,7 @@ impl SourceStore {
return id;
}
// No existing file yet.
// No existing file yet, so we allocate a new id.
let id = SourceId(self.sources.len() as u16);
self.sources.push(SourceFile::new(id, path, src));
@ -166,8 +166,9 @@ pub struct SourceFile {
impl SourceFile {
/// Create a new source file.
pub fn new(id: SourceId, path: &Path, src: String) -> Self {
let mut lines = vec![Line { byte_idx: 0, utf16_idx: 0 }];
lines.extend(Line::iter(0, 0, &src));
let lines = std::iter::once(Line { byte_idx: 0, utf16_idx: 0 })
.chain(lines(0, 0, &src))
.collect();
let mut root = parse(&src);
root.numberize(id, Span::FULL).unwrap();
@ -242,7 +243,7 @@ impl SourceFile {
pub fn replace(&mut self, src: String) {
self.src = src;
self.lines = vec![Line { byte_idx: 0, utf16_idx: 0 }];
self.lines.extend(Line::iter(0, 0, &self.src));
self.lines.extend(lines(0, 0, &self.src));
self.root = parse(&self.src);
self.root.numberize(self.id(), Span::FULL).unwrap();
self.rev = self.rev.wrapping_add(1);
@ -271,22 +272,19 @@ impl SourceFile {
}
// Recalculate the line starts after the edit.
self.lines.extend(Line::iter(
start_byte,
start_utf16,
&self.src[start_byte ..],
));
self.lines
.extend(lines(start_byte, start_utf16, &self.src[start_byte ..]));
// Incrementally reparse the replaced range.
reparse(&mut self.root, &self.src, replace, with.len())
}
/// Get the length of the file in bytes.
/// Get the length of the file in UTF-8 encoded bytes.
pub fn len_bytes(&self) -> usize {
self.src.len()
}
/// Get the length of the file in UTF16 code units.
/// Get the length of the file in UTF-16 code units.
pub fn len_utf16(&self) -> usize {
let last = self.lines.last().unwrap();
last.utf16_idx + self.src[last.byte_idx ..].len_utf16()
@ -396,56 +394,48 @@ struct Line {
utf16_idx: usize,
}
impl Line {
/// Iterate over the lines in the string.
fn iter(
byte_offset: usize,
utf16_offset: usize,
string: &str,
) -> impl Iterator<Item = Line> + '_ {
let mut s = Scanner::new(string);
let mut utf16_idx = utf16_offset;
/// Iterate over the lines in the string.
fn lines(
byte_offset: usize,
utf16_offset: usize,
string: &str,
) -> impl Iterator<Item = Line> + '_ {
let mut s = Scanner::new(string);
let mut utf16_idx = utf16_offset;
std::iter::from_fn(move || {
s.eat_until(|c: char| {
utf16_idx += c.len_utf16();
is_newline(c)
});
std::iter::from_fn(move || {
s.eat_until(|c: char| {
utf16_idx += c.len_utf16();
is_newline(c)
});
if s.done() {
return None;
}
if s.done() {
return None;
}
if s.eat() == Some('\r') && s.eat_if('\n') {
utf16_idx += 1;
}
if s.eat() == Some('\r') && s.eat_if('\n') {
utf16_idx += 1;
}
Some(Line {
byte_idx: byte_offset + s.cursor(),
utf16_idx,
})
Some(Line {
byte_idx: byte_offset + s.cursor(),
utf16_idx,
})
}
}
impl AsRef<str> for SourceFile {
fn as_ref(&self) -> &str {
&self.src
}
})
}
#[cfg(feature = "codespan-reporting")]
impl<'a> Files<'a> for SourceStore {
type FileId = SourceId;
type Name = std::path::Display<'a>;
type Source = &'a SourceFile;
type Source = &'a str;
fn name(&'a self, id: SourceId) -> Result<Self::Name, files::Error> {
Ok(self.get(id).path().display())
}
fn source(&'a self, id: SourceId) -> Result<Self::Source, files::Error> {
Ok(self.get(id))
Ok(self.get(id).src())
}
fn line_index(&'a self, id: SourceId, given: usize) -> Result<usize, files::Error> {
@ -571,6 +561,7 @@ mod tests {
let result = SourceFile::detached(after);
source.edit(range, with);
assert_eq!(source.src, result.src);
assert_eq!(source.root, result.root);
assert_eq!(source.lines, result.lines);
}

View File

@ -54,15 +54,15 @@ macro_rules! node {
node! {
/// The syntactical root capable of representing a full parsed document.
Markup: NodeKind::Markup(_)
Markup: NodeKind::Markup { .. }
}
impl Markup {
/// The markup nodes.
pub fn nodes(&self) -> impl Iterator<Item = MarkupNode> + '_ {
self.0.children().filter_map(|node| match node.kind() {
NodeKind::Space(2 ..) => Some(MarkupNode::Parbreak),
NodeKind::Space(_) => Some(MarkupNode::Space),
NodeKind::Space { newlines: (2 ..) } => Some(MarkupNode::Parbreak),
NodeKind::Space { .. } => Some(MarkupNode::Space),
&NodeKind::Linebreak { justified } => {
Some(MarkupNode::Linebreak { justified })
}
@ -159,7 +159,7 @@ pub struct RawNode {
pub block: bool,
}
/// A math formula: `$a^2 + b^2 = c^2$`.
/// A math formula: `$x$`, `$[x^2]$`.
#[derive(Debug, Clone, PartialEq, Hash)]
pub struct MathNode {
/// The formula between the dollars / brackets.
@ -514,7 +514,7 @@ impl DictExpr {
pub enum DictItem {
/// A named pair: `thickness: 3pt`.
Named(Named),
/// A keyed pair: `"spaced key": true`.
/// A keyed pair: `"spacy key": true`.
Keyed(Keyed),
/// A spreaded value: `..things`.
Spread(Expr),
@ -557,12 +557,12 @@ impl Named {
}
node! {
/// A pair of a string key and an expression: `"spaced key": true`.
/// A pair of a string key and an expression: `"spacy key": true`.
Keyed
}
impl Keyed {
/// The key: `"spaced key"`.
/// The key: `"spacy key"`.
pub fn key(&self) -> EcoString {
self.0
.children()
@ -593,7 +593,7 @@ impl UnaryExpr {
.expect("unary expression is missing operator")
}
/// The expression to operator on: `x`.
/// The expression to operate on: `x`.
pub fn expr(&self) -> Expr {
self.0.cast_last_child().expect("unary expression is missing child")
}
@ -1010,9 +1010,10 @@ impl LetExpr {
/// The expression the binding is initialized with.
pub fn init(&self) -> Option<Expr> {
if self.0.cast_first_child::<Ident>().is_some() {
// This is a normal binding like `let x = 1`.
self.0.children().filter_map(SyntaxNode::cast).nth(1)
} else {
// This is a let .. with expression.
// This is a closure binding like `let f(x) = 1`.
self.0.cast_first_child()
}
}
@ -1187,7 +1188,7 @@ impl ImportExpr {
.expect("import is missing items")
}
/// The location of the importable file.
/// The path to the file that should be imported.
pub fn path(&self) -> Expr {
self.0.cast_last_child().expect("import is missing path")
}
@ -1208,7 +1209,7 @@ node! {
}
impl IncludeExpr {
/// The location of the file to be included.
/// The path to the file that should be included.
pub fn path(&self) -> Expr {
self.0.cast_last_child().expect("include is missing path")
}
@ -1225,7 +1226,7 @@ node! {
}
node! {
/// A return expression: `return x + 1`.
/// A return expression: `return`, `return x + 1`.
ReturnExpr
}

View File

@ -60,7 +60,7 @@ where
highlight_themed_impl(text, 0, &root, vec![], &highlighter, &mut f);
}
/// Recursive implementation for returning syntect styles.
/// Recursive implementation for highlighting with a syntect theme.
fn highlight_themed_impl<F>(
text: &str,
mut offset: usize,
@ -273,7 +273,7 @@ impl Category {
NodeKind::None => Some(Category::None),
NodeKind::Auto => Some(Category::Auto),
NodeKind::Ident(_) => match parent.kind() {
NodeKind::Markup(_) => Some(Category::Interpolated),
NodeKind::Markup { .. } => Some(Category::Interpolated),
NodeKind::FuncCall => Some(Category::Function),
NodeKind::MethodCall if i > 0 => Some(Category::Function),
NodeKind::ClosureExpr if i == 0 => Some(Category::Function),
@ -298,8 +298,8 @@ impl Category {
NodeKind::Error(_, _) => Some(Category::Invalid),
NodeKind::Unknown(_) => Some(Category::Invalid),
NodeKind::Underscore => None,
NodeKind::Markup(_) => None,
NodeKind::Space(_) => None,
NodeKind::Markup { .. } => None,
NodeKind::Space { .. } => None,
NodeKind::Text(_) => None,
NodeKind::Quote { .. } => None,
NodeKind::List => None,

View File

@ -27,7 +27,7 @@ pub enum SyntaxNode {
}
impl SyntaxNode {
/// Returns the metadata of the node.
/// The metadata of the node.
pub fn data(&self) -> &NodeData {
match self {
Self::Inner(inner) => &inner.data,
@ -58,14 +58,6 @@ impl SyntaxNode {
self.data().span()
}
/// The node's children.
pub fn children(&self) -> std::slice::Iter<'_, SyntaxNode> {
match self {
Self::Inner(inner) => inner.children(),
Self::Leaf(_) => [].iter(),
}
}
/// Whether the node or its children contain an error.
pub fn erroneous(&self) -> bool {
match self {
@ -92,6 +84,14 @@ impl SyntaxNode {
}
}
/// The node's children.
pub fn children(&self) -> std::slice::Iter<'_, SyntaxNode> {
match self {
Self::Inner(inner) => inner.children(),
Self::Leaf(_) => [].iter(),
}
}
/// Convert the node to a typed AST node.
pub fn cast<T>(&self) -> Option<T>
where
@ -100,12 +100,12 @@ impl SyntaxNode {
T::from_untyped(self)
}
/// Get the first child that can cast to some AST type.
/// Get the first child that can cast to the AST type `T`.
pub fn cast_first_child<T: TypedNode>(&self) -> Option<T> {
self.children().find_map(Self::cast)
}
/// Get the last child that can cast to some AST type.
/// Get the last child that can cast to the AST type `T`.
pub fn cast_last_child<T: TypedNode>(&self) -> Option<T> {
self.children().rev().find_map(Self::cast)
}
@ -358,7 +358,7 @@ impl InnerNode {
&mut self.children
}
/// Replaces a range of children with some replacement.
/// Replaces a range of children with a replacement.
///
/// May have mutated the children if it returns `Err(_)`.
pub(crate) fn replace_children(
@ -440,8 +440,7 @@ impl InnerNode {
}
}
/// Update the this node given after changes were made to one of its
/// children.
/// Update this node after changes were made to one of its children.
pub(crate) fn update_parent(
&mut self,
prev_len: usize,
@ -572,57 +571,61 @@ impl PartialEq for NodeData {
/// the parser.
#[derive(Debug, Clone, PartialEq)]
pub enum NodeKind {
/// A left curly brace: `{`.
/// A left curly brace, starting a code block: `{`.
LeftBrace,
/// A right curly brace: `}`.
/// A right curly brace, terminating a code block: `}`.
RightBrace,
/// A left square bracket: `[`.
/// A left square bracket, starting a content block: `[`.
LeftBracket,
/// A right square bracket: `]`.
/// A right square bracket, terminating a content block: `]`.
RightBracket,
/// A left round parenthesis: `(`.
/// A left round parenthesis, starting a grouped expression, collection,
/// argument or parameter list: `(`.
LeftParen,
/// A right round parenthesis: `)`.
/// A right round parenthesis, terminating a grouped expression, collection,
/// argument or parameter list: `)`.
RightParen,
/// An asterisk: `*`.
/// The strong text toggle, multiplication operator, and wildcard import
/// symbol: `*`.
Star,
/// An underscore: `_`.
/// Toggles emphasized text: `_`.
Underscore,
/// A comma: `,`.
/// A comma separator in a sequence: `,`.
Comma,
/// A semicolon: `;`.
/// A semicolon terminating an expression: `;`.
Semicolon,
/// A colon: `:`.
/// A colon between name / key and value in a dictionary, argument or
/// parameter list: `:`.
Colon,
/// A plus: `+`.
/// The unary plus and addition operator: `+`.
Plus,
/// A hyphen: `-`.
/// The unary negation and subtraction operator: `-`.
Minus,
/// A slash: `/`.
/// The division operator: `/`.
Slash,
/// A dot: `.`.
/// A field access and method call operator: `.`.
Dot,
/// A single equals sign: `=`.
/// The assignment operator: `=`.
Eq,
/// Two equals signs: `==`.
/// The equality operator: `==`.
EqEq,
/// An exclamation mark followed by an equals sign: `!=`.
/// The inequality operator: `!=`.
ExclEq,
/// A less-than sign: `<`.
/// The less-than operator: `<`.
Lt,
/// A less-than sign followed by an equals sign: `<=`.
/// The less-than or equal operator: `<=`.
LtEq,
/// A greater-than sign: `>`.
/// The greater-than operator: `>`.
Gt,
/// A greater-than sign followed by an equals sign: `>=`.
/// The greater-than or equal operator: `>=`.
GtEq,
/// A plus followed by an equals sign: `+=`.
/// The add-assign operator: `+=`.
PlusEq,
/// A hyphen followed by an equals sign: `-=`.
/// The subtract-assign operator: `-=`.
HyphEq,
/// An asterisk followed by an equals sign: `*=`.
/// The multiply-assign operator: `*=`.
StarEq,
/// A slash followed by an equals sign: `/=`.
/// The divide-assign operator: `/=`.
SlashEq,
/// The `not` operator.
Not,
@ -630,9 +633,9 @@ pub enum NodeKind {
And,
/// The `or` operator.
Or,
/// Two dots: `..`.
/// The spread operator: `..`.
Dots,
/// An equals sign followed by a greater-than sign: `=>`.
/// An arrow between a closure's parameters and body: `=>`.
Arrow,
/// The none literal: `none`.
None,
@ -670,15 +673,20 @@ pub enum NodeKind {
From,
/// The `as` keyword.
As,
/// Markup of which all lines must start in some column.
/// Markup of which all lines must have a minimal indentation.
///
/// Notably, the number does not determine in which column the markup
/// started, but to the right of which column all markup elements must be,
/// so it is zero except for headings and lists.
Markup(usize),
/// One or more whitespace characters.
Space(usize),
/// A consecutive non-markup string.
Markup { min_indent: usize },
/// One or more whitespace characters. Single spaces are collapsed into text
/// nodes if they would otherwise be surrounded by text nodes.
///
/// Also stores how many newlines are contained.
Space { newlines: usize },
/// Consecutive text without markup. While basic text with just single
/// spaces is collapsed into a single node, certain symbols that could
/// possibly be markup force text into multiple nodes.
Text(EcoString),
/// A forced line break: `\` or `\+` if justified.
Linebreak { justified: bool },
@ -701,10 +709,9 @@ pub enum NodeKind {
Strong,
/// Emphasized content: `_Emphasized_`.
Emph,
/// An arbitrary number of backticks followed by inner contents, terminated
/// with the same number of backticks: `` `...` ``.
/// A raw block with optional syntax highlighting: `` `...` ``.
Raw(Arc<RawNode>),
/// Dollar signs surrounding inner contents.
/// A math formula: `$x$`, `$[x^2]$`.
Math(Arc<MathNode>),
/// A section heading: `= Introduction`.
Heading,
@ -740,7 +747,7 @@ pub enum NodeKind {
DictExpr,
/// A named pair: `thickness: 3pt`.
Named,
/// A keyed pair: `"spaced key": true`.
/// A keyed pair: `"spacy key": true`.
Keyed,
/// A unary operation: `-x`.
UnaryExpr,
@ -803,24 +810,14 @@ pub enum NodeKind {
}
impl NodeKind {
/// Whether this is some kind of brace.
pub fn is_brace(&self) -> bool {
matches!(self, Self::LeftBrace | Self::RightBrace)
}
/// Whether this is some kind of bracket.
pub fn is_bracket(&self) -> bool {
matches!(self, Self::LeftBracket | Self::RightBracket)
}
/// Whether this is some kind of parenthesis.
/// Whether this is a kind of parenthesis.
pub fn is_paren(&self) -> bool {
matches!(self, Self::LeftParen | Self::RightParen)
}
/// Whether this is a space.
pub fn is_space(&self) -> bool {
matches!(self, Self::Space(_))
matches!(self, Self::Space { .. })
}
/// Whether this is trivia.
@ -828,31 +825,23 @@ impl NodeKind {
self.is_space() || matches!(self, Self::LineComment | Self::BlockComment)
}
/// Whether this is some kind of error.
/// Whether this is a kind of error.
pub fn is_error(&self) -> bool {
matches!(self, NodeKind::Error(_, _) | NodeKind::Unknown(_))
}
/// Whether this node is `at_start` given the previous value of the property.
/// Whether `at_start` would still be true after this node given the
/// previous value of the property.
pub fn is_at_start(&self, prev: bool) -> bool {
match self {
Self::Space(1 ..) => true,
Self::Space(_) | Self::LineComment | Self::BlockComment => prev,
Self::Space { newlines: (1 ..) } => true,
Self::Space { .. } | Self::LineComment | Self::BlockComment => prev,
_ => false,
}
}
/// Whether this node has to appear at the start of a line.
pub fn only_at_start(&self) -> bool {
match self {
Self::Heading | Self::Enum | Self::List => true,
Self::Text(t) => t == "-" || t.ends_with('.'),
_ => false,
}
}
/// Whether this is a node that is clearly delimited by a character and may
/// appear in markup.
/// Whether changes _inside_ this node are safely encapsulated, so that only
/// this node must be reparsed.
pub fn is_bounded(&self) -> bool {
match self {
Self::CodeBlock
@ -865,7 +854,7 @@ impl NodeKind {
| Self::Ellipsis
| Self::Quote { .. }
| Self::BlockComment
| Self::Space(_)
| Self::Space { .. }
| Self::Escape(_) => true,
Self::Text(t) => t != "-" && !t.ends_with('.'),
_ => false,
@ -924,9 +913,9 @@ impl NodeKind {
Self::Import => "keyword `import`",
Self::Include => "keyword `include`",
Self::From => "keyword `from`",
Self::Markup(_) => "markup",
Self::Space(2 ..) => "paragraph break",
Self::Space(_) => "space",
Self::Markup { .. } => "markup",
Self::Space { newlines: (2 ..) } => "paragraph break",
Self::Space { .. } => "space",
Self::Linebreak { justified: false } => "linebreak",
Self::Linebreak { justified: true } => "justified linebreak",
Self::Text(_) => "text",
@ -1052,8 +1041,8 @@ impl Hash for NodeKind {
Self::Import => {}
Self::Include => {}
Self::From => {}
Self::Markup(c) => c.hash(state),
Self::Space(n) => n.hash(state),
Self::Markup { min_indent } => min_indent.hash(state),
Self::Space { newlines } => newlines.hash(state),
Self::Linebreak { justified } => justified.hash(state),
Self::Text(s) => s.hash(state),
Self::NonBreakingSpace => {}

View File

@ -4,19 +4,19 @@ use std::ops::Range;
use crate::syntax::SourceId;
/// A value with the span it corresponds to in the source code.
/// A value with a span locating it in the source code.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct Spanned<T> {
/// The spanned value.
pub v: T,
/// The location in source code of the value.
/// The value's location in source code.
pub span: Span,
}
impl<T> Spanned<T> {
/// Create a new instance from a value and its span.
pub fn new(v: T, span: impl Into<Span>) -> Self {
Self { v, span: span.into() }
pub fn new(v: T, span: Span) -> Self {
Self { v, span }
}
/// Convert from `&Spanned<T>` to `Spanned<&T>`
@ -24,7 +24,7 @@ impl<T> Spanned<T> {
Spanned { v: &self.v, span: self.span }
}
/// Map the value using a function keeping the span.
/// Map the value using a function.
pub fn map<F, U>(self, f: F) -> Spanned<U>
where
F: FnOnce(T) -> U,
@ -52,11 +52,11 @@ impl<T: Debug> Debug for Spanned<T> {
/// sibling and smaller than any id in the subtrees of any right sibling.
///
/// The internal ids of spans stay mostly stable, even for nodes behind an
/// insertion. This is not true for simple ranges as they shift. Spans can be
/// used as inputs to memoized functions without hurting cache performance when
/// text is inserted somewhere in the document other than the end.
/// insertion. This is not true for simple ranges as they would shift. Spans can
/// be used as inputs to memoized functions without hurting cache performance
/// when text is inserted somewhere in the document other than the end.
///
/// This type takes 8 bytes and is null-optimized (i.e. `Option<Span>` also
/// This type takes up 8 bytes and is null-optimized (i.e. `Option<Span>` also
/// takes 8 bytes).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub struct Span(NonZeroU64);
@ -90,7 +90,7 @@ impl Span {
Self(to_non_zero(Self::DETACHED))
}
/// Return a new span with updated position.
/// Return this span, but with updated position.
pub const fn with_pos(self, pos: SpanPos) -> Self {
let bits = (self.0.get() & ((1 << 62) - 1)) | ((pos as u64) << 62);
Self(to_non_zero(bits))

View File

@ -22,8 +22,8 @@ macro_rules! format_eco {
pub struct EcoString(Repr);
/// The internal representation. Either:
/// - inline when below a certain number of bytes,
/// - or reference-counted on the heap with COW semantics.
/// - inline when below a certain number of bytes, or
/// - reference-counted on the heap with clone-on-write semantics.
#[derive(Clone)]
enum Repr {
Small { buf: [u8; LIMIT], len: u8 },

View File

@ -1,25 +0,0 @@
/// Decode mac roman encoded bytes into a string.
pub fn decode_mac_roman(coded: &[u8]) -> String {
coded.iter().copied().map(char_from_mac_roman).collect()
}
/// Convert a mac roman coded character to a unicode char.
fn char_from_mac_roman(code: u8) -> char {
#[rustfmt::skip]
const TABLE: [char; 128] = [
'Ä', 'Å', 'Ç', 'É', 'Ñ', 'Ö', 'Ü', 'á', 'à', 'â', 'ä', 'ã', 'å', 'ç', 'é', 'è',
'ê', 'ë', 'í', 'ì', 'î', 'ï', 'ñ', 'ó', 'ò', 'ô', 'ö', 'õ', 'ú', 'ù', 'û', 'ü',
'†', '°', '¢', '£', '§', '•', '¶', 'ß', '®', '©', '™', '´', '¨', '≠', 'Æ', 'Ø',
'∞', '±', '≤', '≥', '¥', 'µ', '∂', '∑', '∏', 'π', '∫', 'ª', 'º', 'Ω', 'æ', 'ø',
'¿', '¡', '¬', '√', 'ƒ', '≈', '∆', '«', '»', '…', '\u{a0}', 'À', 'Ã', 'Õ', 'Œ', 'œ',
'', '—', '“', '”', '', '', '÷', '◊', 'ÿ', 'Ÿ', '', '€', '', '', 'fi', 'fl',
'‡', '·', '', '„', '‰', 'Â', 'Ê', 'Á', 'Ë', 'È', 'Í', 'Î', 'Ï', 'Ì', 'Ó', 'Ô',
'\u{f8ff}', 'Ò', 'Ú', 'Û', 'Ù', 'ı', 'ˆ', '˜', '¯', '˘', '˙', '˚', '¸', '˝', '˛', 'ˇ',
];
if code < 128 {
code as char
} else {
TABLE[(code - 128) as usize]
}
}

View File

@ -1,18 +1,14 @@
//! Utilities.
#[macro_use]
mod eco_string;
mod mac_roman;
mod prehashed;
mod eco;
mod hash;
pub use eco_string::EcoString;
pub use mac_roman::decode_mac_roman;
pub use prehashed::Prehashed;
pub use eco::EcoString;
pub use hash::Prehashed;
use std::any::TypeId;
use std::cmp::Ordering;
use std::fmt::{self, Debug, Formatter};
use std::ops::Range;
use std::path::{Component, Path, PathBuf};
use std::sync::Arc;
@ -35,35 +31,6 @@ where
Wrapper(f)
}
/// An alternative type id that prints as something readable in debug mode.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct ReadableTypeId {
id: TypeId,
#[cfg(debug_assertions)]
name: &'static str,
}
impl ReadableTypeId {
/// The type id of the given type.
pub fn of<T: 'static>() -> Self {
Self {
id: TypeId::of::<T>(),
#[cfg(debug_assertions)]
name: std::any::type_name::<T>(),
}
}
}
impl Debug for ReadableTypeId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
#[cfg(debug_assertions)]
f.pad(self.name)?;
#[cfg(not(debug_assertions))]
f.pad("ReadableTypeId")?;
Ok(())
}
}
/// Extra methods for [`str`].
pub trait StrExt {
/// The number of code units this string would use if it was encoded in
@ -77,41 +44,6 @@ impl StrExt for str {
}
}
/// Extra methods for [`Option<T>`].
pub trait OptionExt<T> {
/// Sets `other` as the value if `self` is `None` or if it contains a value
/// larger than `other`.
fn set_min(&mut self, other: T)
where
T: Ord;
/// Sets `other` as the value if `self` is `None` or if it contains a value
/// smaller than `other`.
fn set_max(&mut self, other: T)
where
T: Ord;
}
impl<T> OptionExt<T> for Option<T> {
fn set_min(&mut self, other: T)
where
T: Ord,
{
if self.as_ref().map_or(true, |x| other < *x) {
*self = Some(other);
}
}
fn set_max(&mut self, other: T)
where
T: Ord,
{
if self.as_ref().map_or(true, |x| other > *x) {
*self = Some(other);
}
}
}
/// Extra methods for [`Arc`].
pub trait ArcExt<T> {
/// Takes the inner value if there is exactly one strong reference and
@ -131,7 +63,7 @@ where
}
}
/// Extra methods for `[T]`.
/// Extra methods for [`[T]`](slice).
pub trait SliceExt<T> {
/// Split a slice into consecutive runs with the same key and yield for
/// each such run the key and the slice of elements with that key.
@ -170,34 +102,6 @@ where
}
}
/// Extra methods for [`Range<usize>`].
pub trait RangeExt {
/// Locate a position relative to a range.
///
/// This can be used for binary searching the range that contains the
/// position as follows:
/// ```
/// # use typst::util::RangeExt;
/// assert_eq!(
/// [1..2, 2..7, 7..10].binary_search_by(|r| r.locate(5)),
/// Ok(1),
/// );
/// ```
fn locate(&self, pos: usize) -> Ordering;
}
impl RangeExt for Range<usize> {
fn locate(&self, pos: usize) -> Ordering {
if pos < self.start {
Ordering::Greater
} else if pos < self.end {
Ordering::Equal
} else {
Ordering::Less
}
}
}
/// Extra methods for [`Path`].
pub trait PathExt {
/// Lexically normalize a path.
@ -222,3 +126,32 @@ impl PathExt for Path {
out
}
}
/// An alternative type id that prints as something readable in debug mode.
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct ReadableTypeId {
id: TypeId,
#[cfg(debug_assertions)]
name: &'static str,
}
impl ReadableTypeId {
/// The type id of the given type.
pub fn of<T: 'static>() -> Self {
Self {
id: TypeId::of::<T>(),
#[cfg(debug_assertions)]
name: std::any::type_name::<T>(),
}
}
}
impl Debug for ReadableTypeId {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
#[cfg(debug_assertions)]
f.pad(self.name)?;
#[cfg(not(debug_assertions))]
f.pad("ReadableTypeId")?;
Ok(())
}
}

Binary file not shown.

Before

Width:  |  Height:  |  Size: 4.7 KiB

After

Width:  |  Height:  |  Size: 4.5 KiB

View File

@ -8,11 +8,11 @@
{(:)}
// Two pairs and string key.
#let dict = (normal: 1, "spaced key": 2)
#let dict = (normal: 1, "spacy key": 2)
#dict
#test(dict.normal, 1)
#test(dict("spaced key"), 2)
#test(dict("spacy key"), 2)
---
// Test lvalue and rvalue access.

View File

@ -32,8 +32,8 @@ fn main() {
let args = Args::new(env::args().skip(1));
let mut filtered = Vec::new();
// Since differents tests can affect each other through the layout cache, a
// deterministic order is very important for reproducibility.
// Since different tests can affect each other through the memoization
// cache, a deterministic order is important for reproducibility.
for entry in WalkDir::new(".").sort_by_file_name() {
let entry = entry.unwrap();
if entry.depth() <= 1 {
@ -599,8 +599,8 @@ fn render_links(
}
}
/// This is a Linear-feedback shift register using XOR as its shifting
/// function. It can be used as PRNG.
/// A linear-feedback shift register using XOR as its shifting function.
/// Can be used as a PRNG.
struct LinearShift(u64);
impl LinearShift {