Extract syntax module into typst-syntax crate

Laurenz 2023-07-18 20:11:31 +02:00
parent 7dc605307c
commit f5953887c9
29 changed files with 539 additions and 476 deletions

Cargo.lock (generated)
View File

@ -2408,6 +2408,7 @@ dependencies = [
"tracing",
"ttf-parser",
"typst-macros",
"typst-syntax",
"unicode-general-category",
"unicode-ident",
"unicode-math-class",
@ -2516,6 +2517,21 @@ dependencies = [
"syn 2.0.16",
]
[[package]]
name = "typst-syntax"
version = "0.6.0"
dependencies = [
"comemo",
"ecow",
"once_cell",
"serde",
"tracing",
"unicode-ident",
"unicode-math-class",
"unicode-segmentation",
"unscanny",
]
[[package]]
name = "typst-tests"
version = "0.6.0"

View File

@ -7,9 +7,8 @@ use termcolor::{ColorChoice, StandardStream};
use typst::diag::{bail, SourceError, StrResult};
use typst::doc::Document;
use typst::eval::eco_format;
use typst::file::FileId;
use typst::geom::Color;
use typst::syntax::Source;
use typst::syntax::{FileId, Source};
use typst::World;
use crate::args::{CompileCommand, DiagnosticFormat};
@ -168,7 +167,7 @@ fn print_diagnostics(
.map(|e| (eco_format!("hint: {e}")).into())
.collect(),
)
.with_labels(vec![Label::primary(error.span.id(), error.span.range(world))]);
.with_labels(vec![Label::primary(error.span.id(), world.range(error.span))]);
term::emit(&mut w, &config, world, &diag)?;
@ -176,7 +175,7 @@ fn print_diagnostics(
for point in error.trace {
let message = point.v.to_string();
let help = Diagnostic::help().with_message(message).with_labels(vec![
Label::primary(point.span.id(), point.span.range(world)),
Label::primary(point.span.id(), world.range(point.span)),
]);
term::emit(&mut w, &config, world, &help)?;

View File

@ -5,7 +5,7 @@ use std::path::{Path, PathBuf};
use codespan_reporting::term::{self, termcolor};
use termcolor::WriteColor;
use typst::diag::{PackageError, PackageResult};
use typst::file::PackageSpec;
use typst::syntax::PackageSpec;
use super::color_stream;

View File

@ -10,9 +10,8 @@ use same_file::Handle;
use siphasher::sip128::{Hasher128, SipHasher13};
use typst::diag::{FileError, FileResult, StrResult};
use typst::eval::{eco_format, Datetime, Library};
use typst::file::FileId;
use typst::font::{Font, FontBook};
use typst::syntax::Source;
use typst::syntax::{FileId, Source};
use typst::util::{Bytes, PathExt};
use typst::World;

View File

@ -5,10 +5,9 @@ use pulldown_cmark as md;
use typed_arena::Arena;
use typst::diag::FileResult;
use typst::eval::Datetime;
use typst::file::FileId;
use typst::font::{Font, FontBook};
use typst::geom::{Point, Size};
use typst::syntax::Source;
use typst::syntax::{FileId, Source};
use typst::util::Bytes;
use typst::World;
use yaml_front_matter::YamlFrontMatter;

View File

@ -19,8 +19,6 @@ pub use typst::eval::{
Func, IntoValue, Never, NoneValue, Scope, Str, Symbol, Type, Value, Vm,
};
#[doc(no_inline)]
pub use typst::file::FileId;
#[doc(no_inline)]
pub use typst::geom::*;
#[doc(no_inline)]
pub use typst::model::{
@ -30,7 +28,7 @@ pub use typst::model::{
Unlabellable, Vt,
};
#[doc(no_inline)]
pub use typst::syntax::{Span, Spanned};
pub use typst::syntax::{FileId, Span, Spanned};
#[doc(no_inline)]
pub use typst::util::NonZeroExt;
#[doc(no_inline)]

View File

@ -0,0 +1,27 @@
[package]
name = "typst-syntax"
description = "Parser and syntax tree for Typst."
categories = ["compilers", "science"]
keywords = ["typst"]
version.workspace = true
rust-version.workspace = true
authors.workspace = true
edition.workspace = true
homepage.workspace = true
repository.workspace = true
license.workspace = true
[lib]
doctest = false
bench = false
[dependencies]
comemo = "0.3"
ecow = "0.1.1"
once_cell = "1"
serde = { version = "1", features = ["derive"] }
tracing = "0.1.37"
unicode-ident = "1.0"
unicode-math-class = "0.1"
unicode-segmentation = "1"
unscanny = "0.1"

View File

@ -11,8 +11,6 @@ use unscanny::Scanner;
use super::{
is_id_continue, is_id_start, is_newline, split_newlines, Span, SyntaxKind, SyntaxNode,
};
use crate::geom::{AbsUnit, AngleUnit};
use crate::util::NonZeroExt;
/// A typed AST node.
pub trait AstNode: Sized {
@ -680,7 +678,7 @@ impl Heading {
.children()
.find(|node| node.kind() == SyntaxKind::HeadingMarker)
.and_then(|node| node.len().try_into().ok())
.unwrap_or(NonZeroUsize::ONE)
.unwrap_or(NonZeroUsize::new(1).unwrap())
}
}
@ -1012,12 +1010,12 @@ impl Numeric {
let split = text.len() - count;
let value = text[..split].parse().unwrap_or_default();
let unit = match &text[split..] {
"pt" => Unit::Length(AbsUnit::Pt),
"mm" => Unit::Length(AbsUnit::Mm),
"cm" => Unit::Length(AbsUnit::Cm),
"in" => Unit::Length(AbsUnit::In),
"deg" => Unit::Angle(AngleUnit::Deg),
"rad" => Unit::Angle(AngleUnit::Rad),
"pt" => Unit::Pt,
"mm" => Unit::Mm,
"cm" => Unit::Cm,
"in" => Unit::In,
"deg" => Unit::Deg,
"rad" => Unit::Rad,
"em" => Unit::Em,
"fr" => Unit::Fr,
"%" => Unit::Percent,
@ -1031,10 +1029,18 @@ impl Numeric {
/// Unit of a numeric value.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash)]
pub enum Unit {
/// An absolute length unit.
Length(AbsUnit),
/// An angular unit.
Angle(AngleUnit),
/// Points.
Pt,
/// Millimeters.
Mm,
/// Centimeters.
Cm,
/// Inches.
In,
/// Radians.
Rad,
/// Degrees.
Deg,
/// Font-relative: `1em` is the same as the font size.
Em,
/// Fractions: `fr`.

View File

@ -2,7 +2,7 @@
use std::collections::HashMap;
use std::fmt::{self, Debug, Display, Formatter};
use std::path::{Path, PathBuf};
use std::path::{Component, Path, PathBuf};
use std::str::FromStr;
use std::sync::RwLock;
@ -10,9 +10,7 @@ use ecow::{eco_format, EcoString};
use once_cell::sync::Lazy;
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use crate::diag::{bail, FileError, StrResult};
use crate::syntax::is_ident;
use crate::util::PathExt;
use super::is_ident;
/// The global package-path interner.
static INTERNER: Lazy<RwLock<Interner>> =
@ -27,7 +25,7 @@ struct Interner {
/// An interned pair of a package specification and a path.
type Pair = &'static (Option<PackageSpec>, PathBuf);
/// Identifies a file.
/// Identifies a file in a project or package.
///
/// This type is globally interned and thus cheap to copy, compare, and hash.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
@ -48,7 +46,7 @@ impl FileId {
);
// Try to find an existing entry that we can reuse.
let pair = (package, path.normalize());
let pair = (package, normalize_path(path));
if let Some(&id) = INTERNER.read().unwrap().to_id.get(&pair) {
return id;
}
@ -99,9 +97,9 @@ impl FileId {
}
/// Resolve a file location relative to this file.
pub fn join(self, path: &str) -> StrResult<Self> {
pub fn join(self, path: &str) -> Result<Self, EcoString> {
if self.is_detached() {
bail!("cannot access file system from here");
Err("cannot access file system from here")?;
}
let package = self.package().cloned();
@ -145,6 +143,29 @@ impl Debug for FileId {
}
}
/// Lexically normalize a path.
fn normalize_path(path: &Path) -> PathBuf {
let mut out = PathBuf::new();
for component in path.components() {
match component {
Component::CurDir => {}
Component::ParentDir => match out.components().next_back() {
Some(Component::Normal(_)) => {
out.pop();
}
_ => out.push(component),
},
Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
out.push(component)
}
}
}
if out.as_os_str().is_empty() {
out.push(Component::CurDir);
}
out
}
/// Identifies a package.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct PackageSpec {
@ -153,7 +174,7 @@ pub struct PackageSpec {
/// The name of the package within its namespace.
pub name: EcoString,
/// The package's version.
pub version: Version,
pub version: PackageVersion,
}
impl FromStr for PackageSpec {
@ -162,30 +183,30 @@ impl FromStr for PackageSpec {
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut s = unscanny::Scanner::new(s);
if !s.eat_if('@') {
bail!("package specification must start with '@'");
Err("package specification must start with '@'")?;
}
let namespace = s.eat_until('/');
if namespace.is_empty() {
bail!("package specification is missing namespace");
Err("package specification is missing namespace")?;
} else if !is_ident(namespace) {
bail!("`{namespace}` is not a valid package namespace");
Err(eco_format!("`{namespace}` is not a valid package namespace"))?;
}
s.eat_if('/');
let name = s.eat_until(':');
if name.is_empty() {
bail!("package specification is missing name");
Err("package specification is missing name")?;
} else if !is_ident(name) {
bail!("`{name}` is not a valid package name");
Err(eco_format!("`{name}` is not a valid package name"))?;
}
s.eat_if(':');
let version = s.after();
if version.is_empty() {
bail!("package specification is missing version");
Err("package specification is missing version")?;
}
Ok(Self {
@ -204,7 +225,7 @@ impl Display for PackageSpec {
/// A package's version.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Version {
pub struct PackageVersion {
/// The package's major version.
pub major: u32,
/// The package's minor version.
@ -213,15 +234,16 @@ pub struct Version {
pub patch: u32,
}
impl FromStr for Version {
impl FromStr for PackageVersion {
type Err = EcoString;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut parts = s.split('.');
let mut next = |kind| {
let Some(part) = parts.next().filter(|s| !s.is_empty()) else {
bail!("version number is missing {kind} version");
};
let part = parts
.next()
.filter(|s| !s.is_empty())
.ok_or_else(|| eco_format!("version number is missing {kind} version"))?;
part.parse::<u32>()
.map_err(|_| eco_format!("`{part}` is not a valid {kind} version"))
};
@ -230,74 +252,28 @@ impl FromStr for Version {
let minor = next("minor")?;
let patch = next("patch")?;
if let Some(rest) = parts.next() {
bail!("version number has unexpected fourth component: `{rest}`");
Err(eco_format!("version number has unexpected fourth component: `{rest}`"))?;
}
Ok(Self { major, minor, patch })
}
}
impl Display for Version {
impl Display for PackageVersion {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}.{}.{}", self.major, self.minor, self.patch)
}
}
impl Serialize for Version {
impl Serialize for PackageVersion {
fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
s.collect_str(self)
}
}
impl<'de> Deserialize<'de> for Version {
impl<'de> Deserialize<'de> for PackageVersion {
fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
let string = EcoString::deserialize(d)?;
string.parse().map_err(serde::de::Error::custom)
}
}
/// A parsed package manifest.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct PackageManifest {
/// Details about the package itself.
pub package: PackageInfo,
}
impl PackageManifest {
/// Parse the manifest from raw bytes.
pub fn parse(bytes: &[u8]) -> StrResult<Self> {
let string = std::str::from_utf8(bytes).map_err(FileError::from)?;
toml::from_str(string).map_err(|err| {
eco_format!("package manifest is malformed: {}", err.message())
})
}
/// Ensure that this manifest is indeed for the specified package.
pub fn validate(&self, spec: &PackageSpec) -> StrResult<()> {
if self.package.name != spec.name {
bail!("package manifest contains mismatched name `{}`", self.package.name);
}
if self.package.version != spec.version {
bail!(
"package manifest contains mismatched version {}",
self.package.version
);
}
Ok(())
}
}
/// The `package` key in the manifest.
///
/// More fields are specified, but they are not relevant to the compiler.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
pub struct PackageInfo {
/// The name of the package within its namespace.
pub name: EcoString,
/// The package's version.
pub version: Version,
/// The path of the entrypoint into the package.
pub entrypoint: EcoString,
}

View File

@ -649,7 +649,7 @@ fn keyword(ident: &str) -> Option<SyntaxKind> {
})
}
/// Whether this character denotes a newline.
/// Whether a character is interpreted as a newline by Typst.
#[inline]
pub fn is_newline(character: char) -> bool {
matches!(
@ -716,13 +716,13 @@ pub fn is_ident(string: &str) -> bool {
/// Whether a character can start an identifier.
#[inline]
pub(crate) fn is_id_start(c: char) -> bool {
pub fn is_id_start(c: char) -> bool {
is_xid_start(c) || c == '_'
}
/// Whether a character can continue an identifier.
#[inline]
pub(crate) fn is_id_continue(c: char) -> bool {
pub fn is_id_continue(c: char) -> bool {
is_xid_continue(c) || c == '_' || c == '-'
}

View File

@ -0,0 +1,23 @@
//! Parser and syntax tree for Typst.
pub mod ast;
mod file;
mod kind;
mod lexer;
mod node;
mod parser;
mod reparser;
mod source;
mod span;
pub use self::file::{FileId, PackageSpec, PackageVersion};
pub use self::kind::SyntaxKind;
pub use self::lexer::{is_id_continue, is_id_start, is_ident, is_newline};
pub use self::node::{LinkedChildren, LinkedNode, SyntaxError, SyntaxNode};
pub use self::parser::{parse, parse_code, parse_math};
pub use self::source::Source;
pub use self::span::{Span, Spanned};
use self::lexer::{split_newlines, LexMode, Lexer};
use self::parser::{reparse_block, reparse_markup};

View File

@ -6,9 +6,7 @@ use std::sync::Arc;
use ecow::EcoString;
use super::ast::AstNode;
use super::{Span, SyntaxKind};
use crate::diag::SourceError;
use crate::file::FileId;
use super::{FileId, Span, SyntaxKind};
/// A node in the untyped syntax tree.
#[derive(Clone, Eq, PartialEq, Hash)]
@ -60,7 +58,7 @@ impl SyntaxNode {
match &self.0 {
Repr::Leaf(leaf) => leaf.len(),
Repr::Inner(inner) => inner.len,
Repr::Error(error) => error.len(),
Repr::Error(node) => node.len(),
}
}
@ -69,19 +67,19 @@ impl SyntaxNode {
match &self.0 {
Repr::Leaf(leaf) => leaf.span,
Repr::Inner(inner) => inner.span,
Repr::Error(error) => error.span,
Repr::Error(node) => node.error.span,
}
}
/// The text of the node if it is a leaf node.
/// The text of the node if it is a leaf or error node.
///
/// Returns the empty string if this is an inner node.
pub fn text(&self) -> &EcoString {
static EMPTY: EcoString = EcoString::new();
match &self.0 {
Repr::Leaf(leaf) => &leaf.text,
Repr::Error(error) => &error.text,
Repr::Inner(_) => &EMPTY,
Repr::Error(node) => &node.text,
}
}
@ -91,10 +89,10 @@ impl SyntaxNode {
pub fn into_text(self) -> EcoString {
match self.0 {
Repr::Leaf(leaf) => leaf.text,
Repr::Error(error) => error.text.clone(),
Repr::Inner(node) => {
node.children.iter().cloned().map(Self::into_text).collect()
Repr::Inner(inner) => {
inner.children.iter().cloned().map(Self::into_text).collect()
}
Repr::Error(node) => node.text.clone(),
}
}
@ -130,27 +128,19 @@ impl SyntaxNode {
pub fn erroneous(&self) -> bool {
match &self.0 {
Repr::Leaf(_) => false,
Repr::Inner(node) => node.erroneous,
Repr::Inner(inner) => inner.erroneous,
Repr::Error(_) => true,
}
}
/// Adds a user-presentable hint if this is an error node.
pub fn hint(&mut self, hint: impl Into<EcoString>) {
if let Repr::Error(error) = &mut self.0 {
Arc::make_mut(error).hint(hint);
}
}
/// The error messages for this node and its descendants.
pub fn errors(&self) -> Vec<SourceError> {
pub fn errors(&self) -> Vec<SyntaxError> {
if !self.erroneous() {
return vec![];
}
if let Repr::Error(error) = &self.0 {
vec![SourceError::new(error.span, error.message.clone())
.with_hints(error.hints.to_owned())]
if let Repr::Error(node) = &self.0 {
vec![node.error.clone()]
} else {
self.children()
.filter(|node| node.erroneous())
@ -159,12 +149,19 @@ impl SyntaxNode {
}
}
/// Add a user-presentable hint if this is an error node.
pub fn hint(&mut self, hint: impl Into<EcoString>) {
if let Repr::Error(node) = &mut self.0 {
Arc::make_mut(node).hint(hint);
}
}
/// Set a synthetic span for the node and all its descendants.
pub fn synthesize(&mut self, span: Span) {
match &mut self.0 {
Repr::Leaf(leaf) => leaf.span = span,
Repr::Inner(inner) => Arc::make_mut(inner).synthesize(span),
Repr::Error(error) => Arc::make_mut(error).span = span,
Repr::Error(node) => Arc::make_mut(node).error.span = span,
}
}
}
@ -209,7 +206,7 @@ impl SyntaxNode {
match &mut self.0 {
Repr::Leaf(leaf) => leaf.span = mid,
Repr::Inner(inner) => Arc::make_mut(inner).numberize(id, None, within)?,
Repr::Error(error) => Arc::make_mut(error).span = mid,
Repr::Error(node) => Arc::make_mut(node).error.span = mid,
}
Ok(())
@ -271,9 +268,9 @@ impl SyntaxNode {
/// The upper bound of assigned numbers in this subtree.
pub(super) fn upper(&self) -> u64 {
match &self.0 {
Repr::Inner(inner) => inner.upper,
Repr::Leaf(leaf) => leaf.span.number() + 1,
Repr::Error(error) => error.span.number() + 1,
Repr::Inner(inner) => inner.upper,
Repr::Error(node) => node.error.span.number() + 1,
}
}
}
@ -281,8 +278,8 @@ impl SyntaxNode {
impl Debug for SyntaxNode {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
match &self.0 {
Repr::Inner(node) => node.fmt(f),
Repr::Leaf(node) => node.fmt(f),
Repr::Leaf(leaf) => leaf.fmt(f),
Repr::Inner(inner) => inner.fmt(f),
Repr::Error(node) => node.fmt(f),
}
}
@ -541,25 +538,22 @@ impl Debug for InnerNode {
/// An error node in the untyped syntax tree.
#[derive(Clone, Eq, PartialEq, Hash)]
struct ErrorNode {
/// The error message.
message: EcoString,
/// The source text of the node.
text: EcoString,
/// The node's span.
span: Span,
/// Additonal hints to the user, indicating how this error could be avoided
/// or worked around.
hints: Vec<EcoString>,
/// The syntax error.
error: SyntaxError,
}
impl ErrorNode {
/// Create new error node.
fn new(message: impl Into<EcoString>, text: impl Into<EcoString>) -> Self {
Self {
message: message.into(),
text: text.into(),
span: Span::detached(),
hints: vec![],
error: SyntaxError {
span: Span::detached(),
message: message.into(),
hints: vec![],
},
}
}
@ -570,16 +564,28 @@ impl ErrorNode {
/// Add a user-presentable hint to this error node.
fn hint(&mut self, hint: impl Into<EcoString>) {
self.hints.push(hint.into());
self.error.hints.push(hint.into());
}
}
impl Debug for ErrorNode {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "Error: {:?} ({})", self.text, self.message)
write!(f, "Error: {:?} ({})", self.text, self.error.message)
}
}
/// A syntactical error.
#[derive(Debug, Clone, Eq, PartialEq, Hash)]
pub struct SyntaxError {
/// The node's span.
pub span: Span,
/// The error message.
pub message: EcoString,
/// Additional hints to the user, indicating how this error could be avoided
/// or worked around.
pub hints: Vec<EcoString>,
}
/// A syntax node in a context.
///
/// Knows its exact offset in the file and provides access to its
@ -870,7 +876,7 @@ impl std::error::Error for Unnumberable {}
#[cfg(test)]
mod tests {
use super::*;
use crate::syntax::Source;
use crate::Source;
#[test]
fn test_linked_node() {

View File

@ -7,15 +7,15 @@ use unicode_math_class::MathClass;
use super::{ast, is_newline, LexMode, Lexer, SyntaxKind, SyntaxNode};
/// Parse a source file.
#[tracing::instrument(skip_all)]
pub fn parse(text: &str) -> SyntaxNode {
let mut p = Parser::new(text, 0, LexMode::Markup);
markup(&mut p, true, 0, |_| false);
p.finish().into_iter().next().unwrap()
}
/// Parse code directly.
///
/// This is only used for syntax highlighting.
/// Parse top-level code.
#[tracing::instrument(skip_all)]
pub fn parse_code(text: &str) -> SyntaxNode {
let mut p = Parser::new(text, 0, LexMode::Code);
let m = p.marker();
@ -25,6 +25,14 @@ pub fn parse_code(text: &str) -> SyntaxNode {
p.finish().into_iter().next().unwrap()
}
/// Parse top-level math.
#[tracing::instrument(skip_all)]
pub fn parse_math(text: &str) -> SyntaxNode {
let mut p = Parser::new(text, 0, LexMode::Math);
math(&mut p, |_| false);
p.finish().into_iter().next().unwrap()
}
fn markup(
p: &mut Parser,
mut at_start: bool,

View File

@ -7,12 +7,8 @@ use std::sync::Arc;
use comemo::Prehashed;
use super::ast::Markup;
use super::reparser::reparse;
use super::{is_newline, parse, LinkedNode, Span, SyntaxNode};
use crate::diag::SourceResult;
use crate::file::FileId;
use crate::util::StrExt;
use super::{is_newline, parse, FileId, LinkedNode, Span, SyntaxNode};
/// A source file.
///
@ -68,16 +64,6 @@ impl Source {
&self.0.root
}
/// The root node of the file's typed abstract syntax tree.
pub fn ast(&self) -> SourceResult<Markup> {
let errors = self.root().errors();
if errors.is_empty() {
Ok(self.root().cast().expect("root node must be markup"))
} else {
Err(Box::new(errors))
}
}
/// The id of the source file.
pub fn id(&self) -> FileId {
self.0.id
@ -148,7 +134,7 @@ impl Source {
/// Get the length of the file in UTF-16 code units.
pub fn len_utf16(&self) -> usize {
let last = self.0.lines.last().unwrap();
last.utf16_idx + self.0.text[last.byte_idx..].len_utf16()
last.utf16_idx + len_utf16(&self.0.text[last.byte_idx..])
}
/// Get the length of the file in lines.
@ -163,12 +149,22 @@ impl Source {
LinkedNode::new(self.root()).find(span)
}
/// Get the byte range for the given span in this file.
///
/// Panics if the span does not point into this source file.
#[track_caller]
pub fn range(&self, span: Span) -> Range<usize> {
self.find(span)
.expect("span does not point into this source file")
.range()
}
/// Return the index of the UTF-16 code unit at the byte index.
pub fn byte_to_utf16(&self, byte_idx: usize) -> Option<usize> {
let line_idx = self.byte_to_line(byte_idx)?;
let line = self.0.lines.get(line_idx)?;
let head = self.0.text.get(line.byte_idx..byte_idx)?;
Some(line.utf16_idx + head.len_utf16())
Some(line.utf16_idx + len_utf16(head))
}
/// Return the index of the line that contains the given byte index.
@ -306,6 +302,12 @@ fn lines_from(
})
}
/// The number of code units this string would use if it were encoded in
/// UTF-16. This runs in linear time.
fn len_utf16(string: &str) -> usize {
string.chars().map(char::len_utf16).sum()
}
#[cfg(test)]
mod tests {
use super::*;

View File

@ -2,15 +2,13 @@ use std::fmt::{self, Debug, Formatter};
use std::num::NonZeroU64;
use std::ops::Range;
use super::Source;
use crate::file::FileId;
use crate::World;
use super::FileId;
/// A unique identifier for a syntax node.
///
/// This is used throughout the compiler to track which source section an error
/// or element stems from. Can be [mapped back](Self::range) to a byte range for
/// user facing display.
/// or element stems from. Can be [mapped back](super::Source::range) to a byte
/// range for user facing display.
///
/// During editing, the span values stay mostly stable, even for nodes behind an
/// insertion. This is not true for simple ranges as they would shift. Spans can
@ -79,24 +77,6 @@ impl Span {
pub const fn is_detached(self) -> bool {
self.id().is_detached()
}
/// Get the byte range for this span.
#[track_caller]
pub fn range(self, world: &dyn World) -> Range<usize> {
let source = world
.source(self.id())
.expect("span does not point into any source file");
self.range_in(&source)
}
/// Get the byte range for this span in the given source file.
#[track_caller]
pub fn range_in(self, source: &Source) -> Range<usize> {
source
.find(self)
.expect("span does not point into this source file")
.range()
}
}
/// A value with a span locating it in the source code.

View File

@ -17,6 +17,7 @@ bench = false
[dependencies]
typst-macros = { path = "../typst-macros" }
typst-syntax = { path = "../typst-syntax" }
bitflags = { version = "2", features = ["serde"] }
bytemuck = "1"
comemo = "0.3"

View File

@ -8,8 +8,7 @@ use std::string::FromUtf8Error;
use comemo::Tracked;
use crate::file::PackageSpec;
use crate::syntax::{Span, Spanned};
use crate::syntax::{PackageSpec, Span, Spanned, SyntaxError};
use crate::World;
/// Early-return with a [`StrResult`] or [`SourceResult`].
@ -103,6 +102,17 @@ impl SourceError {
}
}
impl From<SyntaxError> for SourceError {
fn from(error: SyntaxError) -> Self {
Self {
span: error.span,
message: error.message,
trace: vec![],
hints: error.hints,
}
}
}
/// A part of an error's [trace](SourceError::trace).
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum Tracepoint {
@ -151,11 +161,11 @@ impl<T> Trace<T> for SourceResult<T> {
return errors;
}
let trace_range = span.range(&*world);
let trace_range = world.range(span);
for error in errors.iter_mut().filter(|e| !e.span.is_detached()) {
// Skip traces that surround the error.
if error.span.id() == span.id() {
let error_range = error.span.range(&*world);
let error_range = world.range(error.span);
if trace_range.start <= error_range.start
&& trace_range.end >= error_range.end
{

View File

@ -11,10 +11,9 @@ use super::{
Value, Vm,
};
use crate::diag::{bail, SourceResult, StrResult};
use crate::file::FileId;
use crate::model::{DelayedErrors, ElemFunc, Introspector, Locator, Vt};
use crate::syntax::ast::{self, AstNode, Expr, Ident};
use crate::syntax::{Span, SyntaxNode};
use crate::syntax::{FileId, Span, SyntaxNode};
use crate::World;
/// An evaluatable function.
@ -380,8 +379,9 @@ impl Closure {
}
ast::Pattern::Normal(_) => unreachable!(),
_ => {
pattern.define(
super::define_pattern(
&mut vm,
pattern,
args.expect::<Value>("pattern parameter")?,
)?;
}

View File

@ -61,19 +61,22 @@ use std::path::Path;
use comemo::{Track, Tracked, TrackedMut, Validate};
use ecow::{EcoString, EcoVec};
use serde::{Deserialize, Serialize};
use unicode_segmentation::UnicodeSegmentation;
use self::func::{CapturesVisitor, Closure};
use crate::diag::{
bail, error, At, SourceError, SourceResult, StrResult, Trace, Tracepoint,
bail, error, At, FileError, SourceError, SourceResult, StrResult, Trace, Tracepoint,
};
use crate::file::{FileId, PackageManifest, PackageSpec};
use crate::model::{
Content, DelayedErrors, Introspector, Label, Locator, Recipe, ShowableSelector,
Styles, Transform, Unlabellable, Vt,
};
use crate::syntax::ast::{self, AstNode};
use crate::syntax::{parse_code, Source, Span, Spanned, SyntaxKind, SyntaxNode};
use crate::syntax::{
parse_code, FileId, PackageSpec, PackageVersion, Source, Span, Spanned, SyntaxKind,
SyntaxNode,
};
use crate::World;
const MAX_ITERATIONS: usize = 10_000;
@ -114,13 +117,16 @@ pub fn eval(
let route = Route::insert(route, id);
let scopes = Scopes::new(Some(library));
let mut vm = Vm::new(vt, route.track(), id, scopes);
let root = match source.root().cast::<ast::Markup>() {
Some(markup) if vm.traced.is_some() => markup,
_ => source.ast()?,
};
let root = source.root();
let errors = root.errors();
if !errors.is_empty() && vm.traced.is_none() {
return Err(Box::new(errors.into_iter().map(Into::into).collect()));
}
// Evaluate the module.
let result = root.eval(&mut vm);
let markup = root.cast::<ast::Markup>().unwrap();
let result = markup.eval(&mut vm);
// Handle control flow.
if let Some(flow) = vm.flow {
@ -146,7 +152,7 @@ pub fn eval_string(
let errors = root.errors();
if !errors.is_empty() {
return Err(Box::new(errors));
return Err(Box::new(errors.into_iter().map(Into::into).collect()));
}
// Prepare VT.
@ -506,7 +512,11 @@ impl Eval for ast::Expr {
}
}
impl ast::Expr {
trait ExprExt {
fn eval_display(&self, vm: &mut Vm) -> SourceResult<Content>;
}
impl ExprExt for ast::Expr {
fn eval_display(&self, vm: &mut Vm) -> SourceResult<Content> {
Ok(self.eval(vm)?.display().spanned(self.span()))
}
@ -1013,73 +1023,71 @@ impl Eval for ast::Binary {
#[tracing::instrument(name = "Binary::eval", skip_all)]
fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
match self.op() {
ast::BinOp::Add => self.apply(vm, ops::add),
ast::BinOp::Sub => self.apply(vm, ops::sub),
ast::BinOp::Mul => self.apply(vm, ops::mul),
ast::BinOp::Div => self.apply(vm, ops::div),
ast::BinOp::And => self.apply(vm, ops::and),
ast::BinOp::Or => self.apply(vm, ops::or),
ast::BinOp::Eq => self.apply(vm, ops::eq),
ast::BinOp::Neq => self.apply(vm, ops::neq),
ast::BinOp::Lt => self.apply(vm, ops::lt),
ast::BinOp::Leq => self.apply(vm, ops::leq),
ast::BinOp::Gt => self.apply(vm, ops::gt),
ast::BinOp::Geq => self.apply(vm, ops::geq),
ast::BinOp::In => self.apply(vm, ops::in_),
ast::BinOp::NotIn => self.apply(vm, ops::not_in),
ast::BinOp::Assign => self.assign(vm, |_, b| Ok(b)),
ast::BinOp::AddAssign => self.assign(vm, ops::add),
ast::BinOp::SubAssign => self.assign(vm, ops::sub),
ast::BinOp::MulAssign => self.assign(vm, ops::mul),
ast::BinOp::DivAssign => self.assign(vm, ops::div),
ast::BinOp::Add => apply_binary_expr(self, vm, ops::add),
ast::BinOp::Sub => apply_binary_expr(self, vm, ops::sub),
ast::BinOp::Mul => apply_binary_expr(self, vm, ops::mul),
ast::BinOp::Div => apply_binary_expr(self, vm, ops::div),
ast::BinOp::And => apply_binary_expr(self, vm, ops::and),
ast::BinOp::Or => apply_binary_expr(self, vm, ops::or),
ast::BinOp::Eq => apply_binary_expr(self, vm, ops::eq),
ast::BinOp::Neq => apply_binary_expr(self, vm, ops::neq),
ast::BinOp::Lt => apply_binary_expr(self, vm, ops::lt),
ast::BinOp::Leq => apply_binary_expr(self, vm, ops::leq),
ast::BinOp::Gt => apply_binary_expr(self, vm, ops::gt),
ast::BinOp::Geq => apply_binary_expr(self, vm, ops::geq),
ast::BinOp::In => apply_binary_expr(self, vm, ops::in_),
ast::BinOp::NotIn => apply_binary_expr(self, vm, ops::not_in),
ast::BinOp::Assign => apply_assignment(self, vm, |_, b| Ok(b)),
ast::BinOp::AddAssign => apply_assignment(self, vm, ops::add),
ast::BinOp::SubAssign => apply_assignment(self, vm, ops::sub),
ast::BinOp::MulAssign => apply_assignment(self, vm, ops::mul),
ast::BinOp::DivAssign => apply_assignment(self, vm, ops::div),
}
}
}
impl ast::Binary {
/// Apply a basic binary operation.
fn apply(
&self,
vm: &mut Vm,
op: fn(Value, Value) -> StrResult<Value>,
) -> SourceResult<Value> {
let lhs = self.lhs().eval(vm)?;
/// Apply a basic binary operation.
fn apply_binary_expr(
binary: &ast::Binary,
vm: &mut Vm,
op: fn(Value, Value) -> StrResult<Value>,
) -> SourceResult<Value> {
let lhs = binary.lhs().eval(vm)?;
// Short-circuit boolean operations.
if (self.op() == ast::BinOp::And && lhs == Value::Bool(false))
|| (self.op() == ast::BinOp::Or && lhs == Value::Bool(true))
{
return Ok(lhs);
}
let rhs = self.rhs().eval(vm)?;
op(lhs, rhs).at(self.span())
// Short-circuit boolean operations.
if (binary.op() == ast::BinOp::And && lhs == Value::Bool(false))
|| (binary.op() == ast::BinOp::Or && lhs == Value::Bool(true))
{
return Ok(lhs);
}
/// Apply an assignment operation.
fn assign(
&self,
vm: &mut Vm,
op: fn(Value, Value) -> StrResult<Value>,
) -> SourceResult<Value> {
let rhs = self.rhs().eval(vm)?;
let lhs = self.lhs();
let rhs = binary.rhs().eval(vm)?;
op(lhs, rhs).at(binary.span())
}
// An assignment to a dictionary field is different from a normal access
// since it can create the field instead of just modifying it.
if self.op() == ast::BinOp::Assign {
if let ast::Expr::FieldAccess(access) = &lhs {
let dict = access.access_dict(vm)?;
dict.insert(access.field().take().into(), rhs);
return Ok(Value::None);
}
/// Apply an assignment operation.
fn apply_assignment(
binary: &ast::Binary,
vm: &mut Vm,
op: fn(Value, Value) -> StrResult<Value>,
) -> SourceResult<Value> {
let rhs = binary.rhs().eval(vm)?;
let lhs = binary.lhs();
// An assignment to a dictionary field is different from a normal access
// since it can create the field instead of just modifying it.
if binary.op() == ast::BinOp::Assign {
if let ast::Expr::FieldAccess(access) = &lhs {
let dict = access_dict(vm, access)?;
dict.insert(access.field().take().into(), rhs);
return Ok(Value::None);
}
let location = self.lhs().access(vm)?;
let lhs = std::mem::take(&mut *location);
*location = op(lhs, rhs).at(self.span())?;
Ok(Value::None)
}
let location = binary.lhs().access(vm)?;
let lhs = std::mem::take(&mut *location);
*location = op(lhs, rhs).at(binary.span())?;
Ok(Value::None)
}
impl Eval for ast::FieldAccess {
@ -1293,150 +1301,160 @@ impl Eval for ast::Closure {
}
}
impl ast::Pattern {
fn destruct_array<F>(
&self,
vm: &mut Vm,
value: Array,
f: F,
destruct: &ast::Destructuring,
) -> SourceResult<Value>
where
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
{
let mut i = 0;
let len = value.as_slice().len();
for p in destruct.bindings() {
match p {
ast::DestructuringKind::Normal(expr) => {
let Ok(v) = value.at(i as i64, None) else {
/// Destructure the value into the pattern by binding.
fn define_pattern(
vm: &mut Vm,
pattern: &ast::Pattern,
value: Value,
) -> SourceResult<Value> {
destructure(vm, pattern, value, |vm, expr, value| match expr {
ast::Expr::Ident(ident) => {
vm.define(ident, value);
Ok(Value::None)
}
_ => bail!(expr.span(), "nested patterns are currently not supported"),
})
}
/// Destructure the value into the pattern by assignment.
fn assign_pattern(
vm: &mut Vm,
pattern: &ast::Pattern,
value: Value,
) -> SourceResult<Value> {
destructure(vm, pattern, value, |vm, expr, value| {
let location = expr.access(vm)?;
*location = value;
Ok(Value::None)
})
}
/// Destructure the given value into the pattern and apply the function to each binding.
#[tracing::instrument(skip_all)]
fn destructure<T>(
vm: &mut Vm,
pattern: &ast::Pattern,
value: Value,
f: T,
) -> SourceResult<Value>
where
T: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
{
match pattern {
ast::Pattern::Normal(expr) => {
f(vm, expr.clone(), value)?;
Ok(Value::None)
}
ast::Pattern::Placeholder(_) => Ok(Value::None),
ast::Pattern::Destructuring(destruct) => match value {
Value::Array(value) => destructure_array(vm, pattern, value, f, destruct),
Value::Dict(value) => destructure_dict(vm, value, f, destruct),
_ => bail!(pattern.span(), "cannot destructure {}", value.type_name()),
},
}
}
fn destructure_array<F>(
vm: &mut Vm,
pattern: &ast::Pattern,
value: Array,
f: F,
destruct: &ast::Destructuring,
) -> SourceResult<Value>
where
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
{
let mut i = 0;
let len = value.as_slice().len();
for p in destruct.bindings() {
match p {
ast::DestructuringKind::Normal(expr) => {
let Ok(v) = value.at(i as i64, None) else {
bail!(expr.span(), "not enough elements to destructure");
};
f(vm, expr, v.clone())?;
i += 1;
}
ast::DestructuringKind::Sink(spread) => {
let sink_size = (1 + len).checked_sub(destruct.bindings().count());
let sink = sink_size.and_then(|s| value.as_slice().get(i..i + s));
if let (Some(sink_size), Some(sink)) = (sink_size, sink) {
if let Some(expr) = spread.expr() {
f(vm, expr, Value::Array(sink.into()))?;
}
i += sink_size;
} else {
bail!(self.span(), "not enough elements to destructure")
f(vm, expr, v.clone())?;
i += 1;
}
ast::DestructuringKind::Sink(spread) => {
let sink_size = (1 + len).checked_sub(destruct.bindings().count());
let sink = sink_size.and_then(|s| value.as_slice().get(i..i + s));
if let (Some(sink_size), Some(sink)) = (sink_size, sink) {
if let Some(expr) = spread.expr() {
f(vm, expr, Value::Array(sink.into()))?;
}
i += sink_size;
} else {
bail!(pattern.span(), "not enough elements to destructure")
}
ast::DestructuringKind::Named(named) => {
bail!(named.span(), "cannot destructure named elements from an array")
}
ast::DestructuringKind::Placeholder(underscore) => {
if i < len {
i += 1
} else {
bail!(underscore.span(), "not enough elements to destructure")
}
}
ast::DestructuringKind::Named(named) => {
bail!(named.span(), "cannot destructure named elements from an array")
}
ast::DestructuringKind::Placeholder(underscore) => {
if i < len {
i += 1
} else {
bail!(underscore.span(), "not enough elements to destructure")
}
}
}
if i < len {
bail!(self.span(), "too many elements to destructure");
}
Ok(Value::None)
}
if i < len {
bail!(pattern.span(), "too many elements to destructure");
}
fn destruct_dict<F>(
&self,
vm: &mut Vm,
dict: Dict,
f: F,
destruct: &ast::Destructuring,
) -> SourceResult<Value>
where
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
{
let mut sink = None;
let mut used = HashSet::new();
for p in destruct.bindings() {
match p {
ast::DestructuringKind::Normal(ast::Expr::Ident(ident)) => {
let v = dict
.at(&ident, None)
.map_err(|_| "destructuring key not found in dictionary")
.at(ident.span())?;
f(vm, ast::Expr::Ident(ident.clone()), v.clone())?;
used.insert(ident.take());
}
ast::DestructuringKind::Sink(spread) => sink = spread.expr(),
ast::DestructuringKind::Named(named) => {
let name = named.name();
let v = dict
.at(&name, None)
.map_err(|_| "destructuring key not found in dictionary")
.at(name.span())?;
f(vm, named.expr(), v.clone())?;
used.insert(name.take());
}
ast::DestructuringKind::Placeholder(_) => {}
ast::DestructuringKind::Normal(expr) => {
bail!(expr.span(), "expected key, found expression");
}
Ok(Value::None)
}
fn destructure_dict<F>(
vm: &mut Vm,
dict: Dict,
f: F,
destruct: &ast::Destructuring,
) -> SourceResult<Value>
where
F: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
{
let mut sink = None;
let mut used = HashSet::new();
for p in destruct.bindings() {
match p {
ast::DestructuringKind::Normal(ast::Expr::Ident(ident)) => {
let v = dict
.at(&ident, None)
.map_err(|_| "destructuring key not found in dictionary")
.at(ident.span())?;
f(vm, ast::Expr::Ident(ident.clone()), v.clone())?;
used.insert(ident.take());
}
ast::DestructuringKind::Sink(spread) => sink = spread.expr(),
ast::DestructuringKind::Named(named) => {
let name = named.name();
let v = dict
.at(&name, None)
.map_err(|_| "destructuring key not found in dictionary")
.at(name.span())?;
f(vm, named.expr(), v.clone())?;
used.insert(name.take());
}
ast::DestructuringKind::Placeholder(_) => {}
ast::DestructuringKind::Normal(expr) => {
bail!(expr.span(), "expected key, found expression");
}
}
}
if let Some(expr) = sink {
let mut sink = Dict::new();
for (key, value) in dict {
if !used.contains(key.as_str()) {
sink.insert(key, value);
}
if let Some(expr) = sink {
let mut sink = Dict::new();
for (key, value) in dict {
if !used.contains(key.as_str()) {
sink.insert(key, value);
}
f(vm, expr, Value::Dict(sink))?;
}
Ok(Value::None)
f(vm, expr, Value::Dict(sink))?;
}
/// Destruct the given value into the pattern and apply the function to each binding.
#[tracing::instrument(skip_all)]
fn apply<T>(&self, vm: &mut Vm, value: Value, f: T) -> SourceResult<Value>
where
T: Fn(&mut Vm, ast::Expr, Value) -> SourceResult<Value>,
{
match self {
ast::Pattern::Normal(expr) => {
f(vm, expr.clone(), value)?;
Ok(Value::None)
}
ast::Pattern::Placeholder(_) => Ok(Value::None),
ast::Pattern::Destructuring(destruct) => match value {
Value::Array(value) => self.destruct_array(vm, value, f, destruct),
Value::Dict(value) => self.destruct_dict(vm, value, f, destruct),
_ => bail!(self.span(), "cannot destructure {}", value.type_name()),
},
}
}
/// Destruct the value into the pattern by binding.
pub fn define(&self, vm: &mut Vm, value: Value) -> SourceResult<Value> {
self.apply(vm, value, |vm, expr, value| match expr {
ast::Expr::Ident(ident) => {
vm.define(ident, value);
Ok(Value::None)
}
_ => bail!(expr.span(), "nested patterns are currently not supported"),
})
}
/// Destruct the value into the pattern by assignment.
pub fn assign(&self, vm: &mut Vm, value: Value) -> SourceResult<Value> {
self.apply(vm, value, |vm, expr, value| {
let location = expr.access(vm)?;
*location = value;
Ok(Value::None)
})
}
Ok(Value::None)
}
impl Eval for ast::LetBinding {
@ -1450,7 +1468,7 @@ impl Eval for ast::LetBinding {
};
match self.kind() {
ast::LetBindingKind::Normal(pattern) => pattern.define(vm, value),
ast::LetBindingKind::Normal(pattern) => define_pattern(vm, &pattern, value),
ast::LetBindingKind::Closure(ident) => {
vm.define(ident, value);
Ok(Value::None)
@ -1464,7 +1482,7 @@ impl Eval for ast::DestructAssignment {
fn eval(&self, vm: &mut Vm) -> SourceResult<Self::Output> {
let value = self.value().eval(vm)?;
self.pattern().assign(vm, value)?;
assign_pattern(vm, &self.pattern(), value)?;
Ok(Value::None)
}
}
@ -1614,7 +1632,7 @@ impl Eval for ast::ForLoop {
#[allow(unused_parens)]
for value in $iter {
$pat.define(vm, value.into_value())?;
define_pattern(vm, &$pat, value.into_value())?;
let body = self.body();
let value = body.eval(vm)?;
@ -1812,6 +1830,52 @@ fn import_file(vm: &mut Vm, path: &str, span: Span) -> SourceResult<Module> {
.trace(world, point, span)
}
/// A parsed package manifest.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
struct PackageManifest {
/// Details about the package itself.
package: PackageInfo,
}
impl PackageManifest {
/// Parse the manifest from raw bytes.
fn parse(bytes: &[u8]) -> StrResult<Self> {
let string = std::str::from_utf8(bytes).map_err(FileError::from)?;
toml::from_str(string).map_err(|err| {
eco_format!("package manifest is malformed: {}", err.message())
})
}
/// Ensure that this manifest is indeed for the specified package.
fn validate(&self, spec: &PackageSpec) -> StrResult<()> {
if self.package.name != spec.name {
bail!("package manifest contains mismatched name `{}`", self.package.name);
}
if self.package.version != spec.version {
bail!(
"package manifest contains mismatched version {}",
self.package.version
);
}
Ok(())
}
}
/// The `package` key in the manifest.
///
/// More fields are specified, but they are not relevant to the compiler.
#[derive(Debug, Clone, Eq, PartialEq, Hash, Serialize, Deserialize)]
struct PackageInfo {
/// The name of the package within its namespace.
name: EcoString,
/// The package's version.
version: PackageVersion,
/// The path of the entrypoint into the package.
entrypoint: EcoString,
}
impl Eval for ast::LoopBreak {
type Output = Value;
@ -1889,20 +1953,21 @@ impl Access for ast::Parenthesized {
impl Access for ast::FieldAccess {
fn access<'a>(&self, vm: &'a mut Vm) -> SourceResult<&'a mut Value> {
self.access_dict(vm)?.at_mut(&self.field().take()).at(self.span())
access_dict(vm, self)?.at_mut(&self.field().take()).at(self.span())
}
}
impl ast::FieldAccess {
fn access_dict<'a>(&self, vm: &'a mut Vm) -> SourceResult<&'a mut Dict> {
match self.target().access(vm)? {
Value::Dict(dict) => Ok(dict),
value => bail!(
self.target().span(),
"expected dictionary, found {}",
value.type_name(),
),
}
fn access_dict<'a>(
vm: &'a mut Vm,
access: &ast::FieldAccess,
) -> SourceResult<&'a mut Dict> {
match access.target().access(vm)? {
Value::Dict(dict) => Ok(dict),
value => bail!(
access.target().span(),
"expected dictionary, found {}",
value.type_name(),
),
}
}

View File

@ -82,8 +82,12 @@ impl Value {
pub fn numeric(pair: (f64, ast::Unit)) -> Self {
let (v, unit) = pair;
match unit {
ast::Unit::Length(unit) => Abs::with_unit(v, unit).into_value(),
ast::Unit::Angle(unit) => Angle::with_unit(v, unit).into_value(),
ast::Unit::Pt => Abs::pt(v).into_value(),
ast::Unit::Mm => Abs::mm(v).into_value(),
ast::Unit::Cm => Abs::cm(v).into_value(),
ast::Unit::In => Abs::inches(v).into_value(),
ast::Unit::Rad => Angle::rad(v).into_value(),
ast::Unit::Deg => Angle::deg(v).into_value(),
ast::Unit::Em => Em::new(v).into_value(),
ast::Unit::Fr => Fr::new(v).into_value(),
ast::Unit::Percent => Ratio::new(v / 100.0).into_value(),

View File

@ -3,10 +3,9 @@ use std::num::NonZeroUsize;
use ecow::EcoString;
use crate::doc::{Destination, Frame, FrameItem, Meta, Position};
use crate::file::FileId;
use crate::geom::{Geometry, Point, Size};
use crate::model::Introspector;
use crate::syntax::{LinkedNode, Source, Span, SyntaxKind};
use crate::syntax::{FileId, LinkedNode, Source, Span, SyntaxKind};
use crate::World;
/// Where to [jump](jump_from_click) to.

View File

@ -40,18 +40,20 @@ extern crate self as typst;
#[macro_use]
pub mod util;
#[macro_use]
pub mod diag;
#[macro_use]
pub mod eval;
pub mod diag;
pub mod doc;
pub mod export;
pub mod file;
pub mod font;
pub mod geom;
pub mod ide;
pub mod image;
pub mod model;
pub mod syntax;
#[doc(inline)]
pub use typst_syntax as syntax;
use std::ops::Range;
use comemo::{Prehashed, Track, TrackedMut};
use ecow::EcoString;
@ -59,9 +61,8 @@ use ecow::EcoString;
use crate::diag::{FileResult, SourceResult};
use crate::doc::Document;
use crate::eval::{Datetime, Library, Route, Tracer};
use crate::file::{FileId, PackageSpec};
use crate::font::{Font, FontBook};
use crate::syntax::Source;
use crate::syntax::{FileId, PackageSpec, Source, Span};
use crate::util::Bytes;
/// Compile a source file into a fully layouted document.
@ -75,7 +76,6 @@ pub fn compile(world: &dyn World) -> SourceResult<Document> {
let mut tracer = tracer.track_mut();
// Evaluate the source file into a module.
tracing::info!("Starting evaluation");
let module = eval::eval(
world,
route.track(),
@ -144,4 +144,12 @@ pub trait World {
fn packages(&self) -> &[(PackageSpec, Option<EcoString>)] {
&[]
}
/// Get the byte range for a span.
#[track_caller]
fn range(&self, span: Span) -> Range<usize> {
self.source(span.id())
.expect("span does not point into any source file")
.range(span)
}
}
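
With this commit, mapping a span to a byte range moves off `Span` and onto `World::range` (with `Source::range` doing the per-file work). A minimal sketch of the new call shape, assuming the paths above; the helper name is illustrative:

use std::ops::Range;

fn primary_range(world: &dyn typst::World, span: typst::syntax::Span) -> Range<usize> {
    // Replaces the pre-commit `span.range(world)`; panics if the span does not
    // point into any loaded source file.
    world.range(span)
}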

View File

@ -1,23 +0,0 @@
//! Syntax definition, parsing, and highlighting.
pub mod ast;
mod kind;
mod lexer;
mod node;
mod parser;
mod reparser;
mod source;
mod span;
pub use self::kind::SyntaxKind;
pub use self::lexer::{is_ident, is_newline};
pub use self::node::{LinkedChildren, LinkedNode, SyntaxNode};
pub use self::parser::{parse, parse_code};
pub use self::source::Source;
pub use self::span::{Span, Spanned};
pub(crate) use self::lexer::{is_id_continue, is_id_start};
use self::lexer::{split_newlines, LexMode, Lexer};
use self::parser::{reparse_block, reparse_markup};

View File

@ -53,19 +53,6 @@ impl NonZeroExt for NonZeroUsize {
};
}
/// Extra methods for [`str`].
pub trait StrExt {
/// The number of code units this string would use if it was encoded in
/// UTF16. This runs in linear time.
fn len_utf16(&self) -> usize;
}
impl StrExt for str {
fn len_utf16(&self) -> usize {
self.chars().map(char::len_utf16).sum()
}
}
/// Extra methods for [`Arc`].
pub trait ArcExt<T> {
/// Takes the inner value if there is exactly one strong reference and
@ -123,9 +110,6 @@ where
/// Extra methods for [`Path`].
pub trait PathExt {
/// Lexically normalize a path.
fn normalize(&self) -> PathBuf;
/// Treat `self` as a virtual root relative to which the `path` is resolved.
///
/// Returns `None` if the path lexically escapes the root. The path
@ -134,28 +118,6 @@ pub trait PathExt {
}
impl PathExt for Path {
fn normalize(&self) -> PathBuf {
let mut out = PathBuf::new();
for component in self.components() {
match component {
Component::CurDir => {}
Component::ParentDir => match out.components().next_back() {
Some(Component::Normal(_)) => {
out.pop();
}
_ => out.push(component),
},
Component::Prefix(_) | Component::RootDir | Component::Normal(_) => {
out.push(component)
}
}
}
if out.as_os_str().is_empty() {
out.push(Component::CurDir);
}
out
}
fn join_rooted(&self, path: &Path) -> Option<PathBuf> {
let mut parts: Vec<_> = self.components().collect();
let root = parts.len();

View File

@ -41,7 +41,7 @@ them.
## Parsing
The syntax tree and parser are located in `crates/typst/src/syntax`. Parsing is
The syntax tree and parser are located in `crates/typst-syntax`. Parsing is
a pure function `&str -> SyntaxNode` without any further dependencies. The
result is a concrete syntax tree reflecting the whole file structure, including
whitespace and comments. Parsing cannot fail. If there are syntactic errors, the
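
A minimal sketch of the pure `&str -> SyntaxNode` entry point described above, assuming the new `typst-syntax` crate; the equality check relies on the tree being lossless, since it reflects the whole file including whitespace and comments:

use typst_syntax::parse;

fn main() {
    let text = "= A heading\nSome *strong* text.";
    let root = parse(text);
    // The concrete syntax tree covers every byte of the input, so flattening
    // it back to text reproduces the source exactly.
    assert_eq!(root.clone().into_text().as_str(), text);
    assert!(root.errors().is_empty());
}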

View File

@ -2,10 +2,9 @@ use comemo::{Prehashed, Track, Tracked};
use iai::{black_box, main, Iai};
use typst::diag::FileResult;
use typst::eval::{Datetime, Library};
use typst::file::FileId;
use typst::font::{Font, FontBook};
use typst::geom::Color;
use typst::syntax::Source;
use typst::syntax::{FileId, Source};
use typst::util::Bytes;
use typst::World;
use unscanny::Scanner;

View File

@ -17,7 +17,6 @@ use oxipng::{InFile, Options, OutFile};
use rayon::iter::{ParallelBridge, ParallelIterator};
use std::cell::OnceCell;
use tiny_skia as sk;
use typst::file::FileId;
use unscanny::Scanner;
use walkdir::WalkDir;
@ -26,7 +25,7 @@ use typst::doc::{Document, Frame, FrameItem, Meta};
use typst::eval::{eco_format, func, Datetime, Library, NoneValue, Value};
use typst::font::{Font, FontBook};
use typst::geom::{Abs, Color, RgbaColor, Smart};
use typst::syntax::{Source, Span, SyntaxNode};
use typst::syntax::{FileId, Source, Span, SyntaxNode};
use typst::util::{Bytes, PathExt};
use typst::World;
use typst_library::layout::{Margin, PageElem};
@ -541,7 +540,7 @@ fn test_part(
.inspect(|error| assert!(!error.span.is_detached()))
.filter(|error| error.span.id() == source.id())
.flat_map(|error| {
let range = error.span.range(world);
let range = world.range(error.span);
let output_error =
UserOutput::Error(range.clone(), error.message.replace('\\', "/"));
let hints = error