11134: internal: tighten up parser API r=matklad a=matklad

It's tempting to expose things like `Expr::parse`,
but they'll necessarily have somewhat ad-hoc semantics.

Instead, we narrow down the parser's interface strictly
to what's needed for MBE. For everything else (e.g., parsing
imports), the proper way is to enclose the input string in
some context, parse the whole thing as a file, and then verify
that the input was parsed as intended.
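To make that concrete, here is a minimal sketch of the pattern for paths (illustrative only: the helper name is made up, but the shape mirrors the new `syntax::hacks::parse_expr_from_str` and the SSR `fragments` module in the diff below):

```rust
use syntax::{ast, AstNode};

// Hypothetical helper, shown for illustration; not part of this commit's API.
fn parse_path_from_str(s: &str) -> Option<ast::Path> {
    let s = s.trim();
    // Enclose the input in a context where a path must appear...
    let file = ast::SourceFile::parse(&format!("use {};", s));
    // ...parse the whole thing as a file and pick out the node of interest...
    let path = file.syntax_node().descendants().find_map(ast::Path::cast)?;
    // ...then verify that the input was parsed as intended: the node must
    // cover exactly the original text.
    if path.syntax().text() != s {
        return None;
    }
    Some(path)
}
```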

bors r+
🤖

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] 2021-12-28 16:22:20 +00:00 committed by GitHub
commit 355a4bdb88
81 changed files with 342 additions and 662 deletions

@@ -11,7 +11,7 @@ use hir_def::{
 };
 use hir_expand::{hygiene::Hygiene, MacroDefId};
 use hir_ty::db::HirDatabase;
-use syntax::ast;
+use syntax::{ast, AstNode};
 
 use crate::{
     Adt, AssocItem, Const, ConstParam, Enum, Field, Function, GenericParam, Impl, LifetimeParam,
@@ -147,8 +147,18 @@ fn resolve_doc_path(
         // FIXME
        AttrDefId::MacroDefId(_) => return None,
     };
-    let path = ast::Path::parse(link).ok()?;
-    let modpath = ModPath::from_src(db.upcast(), path, &Hygiene::new_unhygienic())?;
+    let modpath = {
+        let ast_path = ast::SourceFile::parse(&format!("type T = {};", link))
+            .syntax_node()
+            .descendants()
+            .find_map(ast::Path::cast)?;
+        if ast_path.to_string() != link {
+            return None;
+        }
+        ModPath::from_src(db.upcast(), ast_path, &Hygiene::new_unhygienic())?
+    };
+
     let resolved = resolver.resolve_module_path_in_items(db.upcast(), &modpath);
     let resolved = if resolved == PerNs::none() {
         resolver.resolve_module_path_in_trait_assoc_items(db.upcast(), &modpath)?

@@ -714,8 +714,7 @@ impl Attr {
         hygiene: &Hygiene,
         id: AttrId,
     ) -> Option<Attr> {
-        let (parse, _) =
-            mbe::token_tree_to_syntax_node(tt, mbe::ParserEntryPoint::MetaItem).ok()?;
+        let (parse, _) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MetaItem).ok()?;
         let ast = ast::Meta::cast(parse.syntax_node())?;
         Self::from_src(db, ast, hygiene, id)

@@ -72,7 +72,7 @@ struct BasicAdtInfo {
 }
 
 fn parse_adt(tt: &tt::Subtree) -> Result<BasicAdtInfo, mbe::ExpandError> {
-    let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::ParserEntryPoint::Items)?; // FragmentKind::Items doesn't parse attrs?
+    let (parsed, token_map) = mbe::token_tree_to_syntax_node(tt, mbe::TopEntryPoint::MacroItems)?; // FragmentKind::Items doesn't parse attrs?
     let macro_items = ast::MacroItems::cast(parsed.syntax_node()).ok_or_else(|| {
         debug!("derive node didn't parse");
         mbe::ExpandError::UnexpectedToken

@@ -497,11 +497,11 @@ fn token_tree_to_syntax_node(
     expand_to: ExpandTo,
 ) -> Result<(Parse<SyntaxNode>, mbe::TokenMap), ExpandError> {
     let entry_point = match expand_to {
-        ExpandTo::Statements => mbe::ParserEntryPoint::Statements,
-        ExpandTo::Items => mbe::ParserEntryPoint::Items,
-        ExpandTo::Pattern => mbe::ParserEntryPoint::Pattern,
-        ExpandTo::Type => mbe::ParserEntryPoint::Type,
-        ExpandTo::Expr => mbe::ParserEntryPoint::Expr,
+        ExpandTo::Statements => mbe::TopEntryPoint::MacroStmts,
+        ExpandTo::Items => mbe::TopEntryPoint::MacroItems,
+        ExpandTo::Pattern => mbe::TopEntryPoint::Pattern,
+        ExpandTo::Type => mbe::TopEntryPoint::Type,
+        ExpandTo::Expr => mbe::TopEntryPoint::Expr,
     };
     mbe::token_tree_to_syntax_node(tt, entry_point)
 }

@@ -131,7 +131,7 @@ pub fn expand_eager_macro(
     let arg_file_id = arg_id;
 
     let parsed_args = diagnostic_sink
-        .result(mbe::token_tree_to_syntax_node(&parsed_args, mbe::ParserEntryPoint::Expr))?
+        .result(mbe::token_tree_to_syntax_node(&parsed_args, mbe::TopEntryPoint::Expr))?
         .0;
     let result = eager_macro_recur(
         db,

@@ -36,9 +36,8 @@ pub(crate) fn remove_dbg(acc: &mut Assists, ctx: &AssistContext) -> Option<()> {
     let input_expressions = input_expressions
         .into_iter()
         .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
-        .map(|mut tokens| ast::Expr::parse(&tokens.join("")))
-        .collect::<Result<Vec<ast::Expr>, _>>()
-        .ok()?;
+        .map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
+        .collect::<Option<Vec<ast::Expr>>>()?;
 
     let parent = macro_call.syntax().parent()?;
     let (range, text) = match &*input_expressions {

@@ -309,7 +309,7 @@ fn parse_comma_sep_expr(input: ast::TokenTree) -> Option<Vec<ast::Expr>> {
         input_expressions
             .into_iter()
             .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
-            .filter_map(|mut tokens| ast::Expr::parse(&tokens.join("")).ok())
+            .filter_map(|mut tokens| syntax::hacks::parse_expr_from_str(&tokens.join("")))
             .collect::<Vec<ast::Expr>>(),
     )
 }

@@ -212,15 +212,14 @@ fn validate_snippet(
 ) -> Option<(Box<[GreenNode]>, String, Option<Box<str>>)> {
     let mut imports = Vec::with_capacity(requires.len());
     for path in requires.iter() {
-        let path = ast::Path::parse(path).ok()?;
-        let valid_use_path = path.segments().all(|seg| {
-            matches!(seg.kind(), Some(ast::PathSegmentKind::Name(_)))
-                || seg.generic_arg_list().is_none()
-        });
-        if !valid_use_path {
+        let use_path = ast::SourceFile::parse(&format!("use {};", path))
+            .syntax_node()
+            .descendants()
+            .find_map(ast::Path::cast)?;
+        if use_path.syntax().text() != path.as_str() {
             return None;
         }
-        let green = path.syntax().green().into_owned();
+        let green = use_path.syntax().green().into_owned();
         imports.push(green);
     }
     let snippet = snippet.iter().join("\n");

@@ -67,7 +67,11 @@ pub fn get_path_at_cursor_in_tt(cursor: &ast::Ident) -> Option<ast::Path> {
         .filter_map(SyntaxElement::into_token)
         .take_while(|tok| tok != cursor);
 
-    ast::Path::parse(&path_tokens.chain(iter::once(cursor.clone())).join("")).ok()
+    syntax::hacks::parse_expr_from_str(&path_tokens.chain(iter::once(cursor.clone())).join(""))
+        .and_then(|expr| match expr {
+            ast::Expr::PathExpr(it) => it.path(),
+            _ => None,
+        })
 }
 
 /// Parses and resolves the path at the cursor position in the given attribute, if it is a derive.
@@ -323,7 +327,12 @@ pub fn parse_tt_as_comma_sep_paths(input: ast::TokenTree) -> Option<Vec<ast::Path>> {
     let paths = input_expressions
         .into_iter()
         .filter_map(|(is_sep, group)| (!is_sep).then(|| group))
-        .filter_map(|mut tokens| ast::Path::parse(&tokens.join("")).ok())
+        .filter_map(|mut tokens| {
+            syntax::hacks::parse_expr_from_str(&tokens.join("")).and_then(|expr| match expr {
+                ast::Expr::PathExpr(it) => it.path(),
+                _ => None,
+            })
+        })
         .collect();
     Some(paths)
 }

@@ -0,0 +1,58 @@
+//! When specifying SSR rule, you generally want to map one *kind* of thing to
+//! the same kind of thing: path to path, expression to expression, type to
+//! type.
+//!
+//! The problem is, while this *kind* is generally obvious to the human, the ide
+//! needs to determine it somehow. We do this in a stupid way -- by pasting SSR
+//! rule into different contexts and checking what works.
+
+use syntax::{ast, AstNode, SyntaxNode};
+
+pub(crate) fn ty(s: &str) -> Result<SyntaxNode, ()> {
+    fragment::<ast::Type>("type T = {};", s)
+}
+
+pub(crate) fn item(s: &str) -> Result<SyntaxNode, ()> {
+    fragment::<ast::Item>("{}", s)
+}
+
+pub(crate) fn pat(s: &str) -> Result<SyntaxNode, ()> {
+    fragment::<ast::Pat>("const _: () = {let {} = ();};", s)
+}
+
+pub(crate) fn expr(s: &str) -> Result<SyntaxNode, ()> {
+    fragment::<ast::Expr>("const _: () = {};", s)
+}
+
+pub(crate) fn stmt(s: &str) -> Result<SyntaxNode, ()> {
+    let template = "const _: () = { {}; };";
+    let input = template.replace("{}", s);
+    let parse = syntax::SourceFile::parse(&input);
+    if !parse.errors().is_empty() {
+        return Err(());
+    }
+    let mut node =
+        parse.tree().syntax().descendants().skip(2).find_map(ast::Stmt::cast).ok_or(())?;
+    if !s.ends_with(';') && node.to_string().ends_with(';') {
+        node = node.clone_for_update();
+        node.syntax().last_token().map(|it| it.detach());
+    }
+    if node.to_string() != s {
+        return Err(());
+    }
+    Ok(node.syntax().clone_subtree())
+}
+
+fn fragment<T: AstNode>(template: &str, s: &str) -> Result<SyntaxNode, ()> {
+    let s = s.trim();
+    let input = template.replace("{}", s);
+    let parse = syntax::SourceFile::parse(&input);
+    if !parse.errors().is_empty() {
+        return Err(());
+    }
+    let node = parse.tree().syntax().descendants().find_map(T::cast).ok_or(())?;
+    if node.syntax().text() != s {
+        return Err(());
+    }
+    Ok(node.syntax().clone_subtree())
+}

@@ -71,6 +71,7 @@ mod from_comment;
 mod matching;
 mod nester;
 mod parsing;
+mod fragments;
 mod replacing;
 mod resolving;
 mod search;

@@ -4,12 +4,12 @@
 //! placeholders, which start with `$`. For replacement templates, this is the final form. For
 //! search patterns, we go further and parse the pattern as each kind of thing that we can match.
 //! e.g. expressions, type references etc.
-use crate::errors::bail;
-use crate::{SsrError, SsrPattern, SsrRule};
 use rustc_hash::{FxHashMap, FxHashSet};
 use std::{fmt::Display, str::FromStr};
-use syntax::{ast, AstNode, SmolStr, SyntaxKind, SyntaxNode, T};
+use syntax::{SmolStr, SyntaxKind, SyntaxNode, T};
+
+use crate::errors::bail;
+use crate::{fragments, SsrError, SsrPattern, SsrRule};
 
 #[derive(Debug)]
 pub(crate) struct ParsedRule {
@@ -73,17 +73,16 @@ impl ParsedRule {
             rules: Vec::new(),
         };
 
-        let raw_template_stmt = raw_template.map(ast::Stmt::parse);
-        if let raw_template_expr @ Some(Ok(_)) = raw_template.map(ast::Expr::parse) {
-            builder.try_add(ast::Expr::parse(&raw_pattern), raw_template_expr);
+        let raw_template_stmt = raw_template.map(fragments::stmt);
+        if let raw_template_expr @ Some(Ok(_)) = raw_template.map(fragments::expr) {
+            builder.try_add(fragments::expr(&raw_pattern), raw_template_expr);
         } else {
-            builder.try_add(ast::Expr::parse(&raw_pattern), raw_template_stmt.clone());
+            builder.try_add(fragments::expr(&raw_pattern), raw_template_stmt.clone());
         }
-        builder.try_add(ast::Type::parse(&raw_pattern), raw_template.map(ast::Type::parse));
-        builder.try_add(ast::Item::parse(&raw_pattern), raw_template.map(ast::Item::parse));
-        builder.try_add(ast::Path::parse(&raw_pattern), raw_template.map(ast::Path::parse));
-        builder.try_add(ast::Pat::parse(&raw_pattern), raw_template.map(ast::Pat::parse));
-        builder.try_add(ast::Stmt::parse(&raw_pattern), raw_template_stmt);
+        builder.try_add(fragments::ty(&raw_pattern), raw_template.map(fragments::ty));
+        builder.try_add(fragments::item(&raw_pattern), raw_template.map(fragments::item));
+        builder.try_add(fragments::pat(&raw_pattern), raw_template.map(fragments::pat));
+        builder.try_add(fragments::stmt(&raw_pattern), raw_template_stmt);
         builder.build()
     }
 }
@@ -94,20 +93,20 @@
 }
 
 impl RuleBuilder {
-    fn try_add<T: AstNode, T2: AstNode>(
+    fn try_add(
         &mut self,
-        pattern: Result<T, ()>,
-        template: Option<Result<T2, ()>>,
+        pattern: Result<SyntaxNode, ()>,
+        template: Option<Result<SyntaxNode, ()>>,
     ) {
         match (pattern, template) {
             (Ok(pattern), Some(Ok(template))) => self.rules.push(ParsedRule {
                 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
-                pattern: pattern.syntax().clone(),
-                template: Some(template.syntax().clone()),
+                pattern,
+                template: Some(template),
             }),
             (Ok(pattern), None) => self.rules.push(ParsedRule {
                 placeholders_by_stand_in: self.placeholders_by_stand_in.clone(),
-                pattern: pattern.syntax().clone(),
+                pattern,
                 template: None,
             }),
             _ => {}

@@ -1,5 +1,6 @@
 //! Code for applying replacement templates for matches that have previously been found.
 
+use crate::fragments;
 use crate::{resolving::ResolvedRule, Match, SsrMatches};
 use itertools::Itertools;
 use rustc_hash::{FxHashMap, FxHashSet};
@@ -225,12 +226,13 @@ fn token_is_method_call_receiver(token: &SyntaxToken) -> bool {
 
 fn parse_as_kind(code: &str, kind: SyntaxKind) -> Option<SyntaxNode> {
     if ast::Expr::can_cast(kind) {
-        if let Ok(expr) = ast::Expr::parse(code) {
-            return Some(expr.syntax().clone());
+        if let Ok(expr) = fragments::expr(code) {
+            return Some(expr);
         }
-    } else if ast::Item::can_cast(kind) {
-        if let Ok(item) = ast::Item::parse(code) {
-            return Some(item.syntax().clone());
+    }
+    if ast::Item::can_cast(kind) {
+        if let Ok(item) = fragments::item(code) {
+            return Some(item);
         }
     }
     None

@@ -331,6 +331,15 @@ fn ssr_struct_lit() {
     )
 }
 
+#[test]
+fn ssr_struct_def() {
+    assert_ssr_transform(
+        "struct Foo { $f: $t } ==>> struct Foo($t);",
+        r#"struct Foo { field: i32 }"#,
+        expect![[r#"struct Foo(i32);"#]],
+    )
+}
+
 #[test]
 fn ignores_whitespace() {
     assert_matches("1+2", "fn f() -> i32 {1 + 2}", &["1 + 2"]);
@@ -792,6 +801,19 @@ fn replace_type() {
             "struct Result<T, E> {} struct Option<T> {} fn f1() -> Option<Vec<Error>> {foo()}"
         ]],
     );
+    assert_ssr_transform(
+        "dyn Trait<$a> ==>> DynTrait<$a>",
+        r#"
+        trait Trait<T> {}
+        struct DynTrait<T> {}
+        fn f1() -> dyn Trait<Vec<Error>> {foo()}
+        "#,
+        expect![[r#"
+        trait Trait<T> {}
+        struct DynTrait<T> {}
+        fn f1() -> DynTrait<Vec<Error>> {foo()}
+        "#]],
+    );
 }
 
 #[test]

@@ -61,18 +61,16 @@
 use std::rc::Rc;
 
+use smallvec::{smallvec, SmallVec};
+use syntax::SmolStr;
+
 use crate::{
-    expander::{Binding, Bindings, Fragment},
+    expander::{Binding, Bindings, ExpandResult, Fragment},
     parser::{Op, RepeatKind, Separator},
     tt_iter::TtIter,
     ExpandError, MetaTemplate,
 };
 
-use super::ExpandResult;
-use parser::ParserEntryPoint::*;
-use smallvec::{smallvec, SmallVec};
-use syntax::SmolStr;
-
 impl Bindings {
     fn push_optional(&mut self, name: &SmolStr) {
         // FIXME: Do we have a better way to represent an empty token ?
@@ -691,14 +689,21 @@ fn match_leaf(lhs: &tt::Leaf, src: &mut TtIter) -> Result<(), ExpandError> {
 
 fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> {
     let fragment = match kind {
-        "path" => Path,
-        "expr" => Expr,
-        "ty" => Type,
-        "pat" | "pat_param" => Pattern, // FIXME: edition2021
-        "stmt" => Statement,
-        "block" => Block,
-        "meta" => MetaItem,
-        "item" => Item,
+        "path" => parser::PrefixEntryPoint::Path,
+        "ty" => parser::PrefixEntryPoint::Ty,
+        // FIXME: These two should actually behave differently depending on the edition.
+        //
+        // https://doc.rust-lang.org/edition-guide/rust-2021/or-patterns-macro-rules.html
+        "pat" | "pat_param" => parser::PrefixEntryPoint::Pat,
+        "stmt" => parser::PrefixEntryPoint::Stmt,
+        "block" => parser::PrefixEntryPoint::Block,
+        "meta" => parser::PrefixEntryPoint::MetaItem,
+        "item" => parser::PrefixEntryPoint::Item,
+        "expr" => {
+            return input
+                .expect_fragment(parser::PrefixEntryPoint::Expr)
+                .map(|tt| tt.map(Fragment::Expr))
+        }
        _ => {
             let tt_result = match kind {
                 "ident" => input
@@ -726,17 +731,13 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragment>> {
                     .map_err(|()| err!())
                 }
                 // `vis` is optional
-                "vis" => match input.eat_vis() {
-                    Some(vis) => Ok(Some(vis)),
-                    None => Ok(None),
-                },
+                "vis" => Ok(input.expect_fragment(parser::PrefixEntryPoint::Vis).value),
                 _ => Err(ExpandError::UnexpectedToken),
             };
             return tt_result.map(|it| it.map(Fragment::Tokens)).into();
         }
     };
-    let result = input.expect_fragment(fragment);
-    result.map(|tt| if kind == "expr" { tt.map(Fragment::Expr) } else { tt.map(Fragment::Tokens) })
+    input.expect_fragment(fragment).map(|it| it.map(Fragment::Tokens))
 }
 
 fn collect_vars(buf: &mut Vec<SmolStr>, pattern: &MetaTemplate) {
@@ -898,17 +899,6 @@ impl<'a> TtIter<'a> {
             .into())
     }
 
-    fn eat_vis(&mut self) -> Option<tt::TokenTree> {
-        let mut fork = self.clone();
-        match fork.expect_fragment(Visibility) {
-            ExpandResult { value: tt, err: None } => {
-                *self = fork;
-                tt
-            }
-            ExpandResult { value: _, err: Some(_) } => None,
-        }
-    }
-
     fn eat_char(&mut self, c: char) -> Option<tt::TokenTree> {
         let mut fork = self.clone();
         match fork.expect_char(c) {

@@ -24,7 +24,7 @@ use crate::{
 };
 
 // FIXME: we probably should re-think `token_tree_to_syntax_node` interfaces
-pub use ::parser::ParserEntryPoint;
+pub use ::parser::TopEntryPoint;
 pub use tt::{Delimiter, DelimiterKind, Punct};
 
 #[derive(Debug, PartialEq, Eq, Clone)]

@@ -9,9 +9,7 @@ use syntax::{
 };
 use tt::buffer::{Cursor, TokenBuffer};
 
-use crate::{
-    to_parser_input::to_parser_input, tt_iter::TtIter, ExpandError, ParserEntryPoint, TokenMap,
-};
+use crate::{to_parser_input::to_parser_input, tt_iter::TtIter, ExpandError, TokenMap};
 
 /// Convert the syntax node to a `TokenTree` (what macro
 /// will consume).
@@ -46,7 +44,7 @@
 pub fn token_tree_to_syntax_node(
     tt: &tt::Subtree,
-    entry_point: ParserEntryPoint,
+    entry_point: parser::TopEntryPoint,
 ) -> Result<(Parse<SyntaxNode>, TokenMap), ExpandError> {
     let buffer = match tt {
         tt::Subtree { delimiter: None, token_trees } => {
@@ -55,7 +53,7 @@ pub fn token_tree_to_syntax_node(
         _ => TokenBuffer::from_subtree(tt),
     };
     let parser_input = to_parser_input(&buffer);
-    let parser_output = parser::parse(&parser_input, entry_point);
+    let parser_output = entry_point.parse(&parser_input);
     let mut tree_sink = TtTreeSink::new(buffer.begin());
     for event in parser_output.iter() {
         match event {
@@ -106,7 +104,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
     let mut res = Vec::new();
 
     while iter.peek_n(0).is_some() {
-        let expanded = iter.expect_fragment(ParserEntryPoint::Expr);
+        let expanded = iter.expect_fragment(parser::PrefixEntryPoint::Expr);
 
         res.push(match expanded.value {
             None => break,

@@ -1,7 +1,7 @@
 //! A "Parser" structure for token trees. We use this when parsing a declarative
 //! macro definition into a list of patterns and templates.
 
-use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult, ParserEntryPoint};
+use crate::{to_parser_input::to_parser_input, ExpandError, ExpandResult};
 
 use syntax::SyntaxKind;
 use tt::buffer::TokenBuffer;
@@ -91,11 +91,11 @@
     pub(crate) fn expect_fragment(
         &mut self,
-        entry_point: ParserEntryPoint,
+        entry_point: parser::PrefixEntryPoint,
     ) -> ExpandResult<Option<tt::TokenTree>> {
         let buffer = TokenBuffer::from_tokens(self.inner.as_slice());
         let parser_input = to_parser_input(&buffer);
-        let tree_traversal = parser::parse(&parser_input, entry_point);
+        let tree_traversal = entry_point.parse(&parser_input);
 
         let mut cursor = buffer.begin();
         let mut error = false;

@@ -44,72 +44,76 @@ use crate::{
     TokenSet, T,
 };
 
-pub(crate) mod entry_points {
+pub(crate) mod entry {
     use super::*;
 
-    pub(crate) fn source_file(p: &mut Parser) {
-        let m = p.start();
-        p.eat(SHEBANG);
-        items::mod_contents(p, false);
-        m.complete(p, SOURCE_FILE);
-    }
-
-    pub(crate) use expressions::block_expr;
-
-    pub(crate) use paths::type_path as path;
-
-    pub(crate) use patterns::pattern_single as pattern;
-
-    pub(crate) use types::type_;
-
-    pub(crate) fn expr(p: &mut Parser) {
-        let _ = expressions::expr(p);
-    }
-
-    pub(crate) fn stmt(p: &mut Parser) {
-        expressions::stmt(p, expressions::StmtWithSemi::No, true);
-    }
-
-    pub(crate) fn stmt_optional_semi(p: &mut Parser) {
-        expressions::stmt(p, expressions::StmtWithSemi::Optional, false);
-    }
-
-    pub(crate) fn visibility(p: &mut Parser) {
-        let _ = opt_visibility(p, false);
-    }
-
-    // Parse a meta item , which excluded [], e.g : #[ MetaItem ]
-    pub(crate) fn meta_item(p: &mut Parser) {
-        attributes::meta(p);
-    }
-
-    pub(crate) fn item(p: &mut Parser) {
-        items::item_or_macro(p, true);
-    }
-
-    pub(crate) fn macro_items(p: &mut Parser) {
-        let m = p.start();
-        items::mod_contents(p, false);
-        m.complete(p, MACRO_ITEMS);
-    }
-
-    pub(crate) fn macro_stmts(p: &mut Parser) {
-        let m = p.start();
-
-        while !p.at(EOF) {
-            if p.at(T![;]) {
-                p.bump(T![;]);
-                continue;
-            }
-
-            expressions::stmt(p, expressions::StmtWithSemi::Optional, true);
-        }
-
-        m.complete(p, MACRO_STMTS);
-    }
-
-    pub(crate) fn attr(p: &mut Parser) {
-        attributes::outer_attrs(p);
-    }
+    pub(crate) mod prefix {
+        use super::*;
+
+        pub(crate) fn vis(p: &mut Parser) {
+            let _ = opt_visibility(p, false);
+        }
+
+        pub(crate) fn block(p: &mut Parser) {
+            expressions::block_expr(p);
+        }
+
+        pub(crate) fn stmt(p: &mut Parser) {
+            expressions::stmt(p, expressions::StmtWithSemi::No, true);
+        }
+
+        pub(crate) fn pat(p: &mut Parser) {
+            patterns::pattern_single(p);
+        }
+
+        pub(crate) fn ty(p: &mut Parser) {
+            types::type_(p);
+        }
+
+        pub(crate) fn expr(p: &mut Parser) {
+            let _ = expressions::expr(p);
+        }
+
+        pub(crate) fn path(p: &mut Parser) {
+            let _ = paths::type_path(p);
+        }
+
+        pub(crate) fn item(p: &mut Parser) {
+            items::item_or_macro(p, true);
+        }
+
+        // Parse a meta item , which excluded [], e.g : #[ MetaItem ]
+        pub(crate) fn meta_item(p: &mut Parser) {
+            attributes::meta(p);
+        }
+    }
+
+    pub(crate) mod top {
+        use super::*;
+
+        pub(crate) fn source_file(p: &mut Parser) {
+            let m = p.start();
+            p.eat(SHEBANG);
+            items::mod_contents(p, false);
+            m.complete(p, SOURCE_FILE);
+        }
+
+        pub(crate) fn macro_stmts(p: &mut Parser) {
+            let m = p.start();
+
+            while !p.at(EOF) {
+                if p.at(T![;]) {
+                    p.bump(T![;]);
+                    continue;
+                }
+
+                expressions::stmt(p, expressions::StmtWithSemi::Optional, true);
+            }
+
+            m.complete(p, MACRO_STMTS);
+        }
+
+        pub(crate) fn macro_items(p: &mut Parser) {
+            let m = p.start();
+            items::mod_contents(p, false);
+            m.complete(p, MACRO_ITEMS);
+        }
+    }
 }

@@ -41,63 +41,95 @@ pub use crate::{
     syntax_kind::SyntaxKind,
 };
 
-/// rust-analyzer parser allows you to choose one of the possible entry points.
-///
-/// The primary consumer of this API are declarative macros, `$x:expr` matchers
-/// are implemented by calling into the parser with non-standard entry point.
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
-pub enum ParserEntryPoint {
-    SourceFile,
-    Path,
-    Expr,
-    Statement,
-    StatementOptionalSemi,
-    Type,
-    Pattern,
-    Item,
-    Block,
-    Visibility,
-    MetaItem,
-    Items,
-    Statements,
-    Attr,
-}
-
-/// Parse given tokens into the given sink as a rust file.
-pub fn parse_source_file(inp: &Input) -> Output {
-    parse(inp, ParserEntryPoint::SourceFile)
-}
-
-/// Parses the given [`Input`] into [`Output`] assuming that the top-level
-/// syntactic construct is the given [`ParserEntryPoint`].
-///
-/// Both input and output here are fairly abstract. The overall flow is that the
-/// caller has some "real" tokens, converts them to [`Input`], parses them to
-/// [`Output`], and then converts that into a "real" tree. The "real" tree is
-/// made of "real" tokens, so this all hinges on rather tight coordination of
-/// indices between the four stages.
-pub fn parse(inp: &Input, entry_point: ParserEntryPoint) -> Output {
-    let entry_point: fn(&'_ mut parser::Parser) = match entry_point {
-        ParserEntryPoint::SourceFile => grammar::entry_points::source_file,
-        ParserEntryPoint::Path => grammar::entry_points::path,
-        ParserEntryPoint::Expr => grammar::entry_points::expr,
-        ParserEntryPoint::Type => grammar::entry_points::type_,
-        ParserEntryPoint::Pattern => grammar::entry_points::pattern,
-        ParserEntryPoint::Item => grammar::entry_points::item,
-        ParserEntryPoint::Block => grammar::entry_points::block_expr,
-        ParserEntryPoint::Visibility => grammar::entry_points::visibility,
-        ParserEntryPoint::MetaItem => grammar::entry_points::meta_item,
-        ParserEntryPoint::Statement => grammar::entry_points::stmt,
-        ParserEntryPoint::StatementOptionalSemi => grammar::entry_points::stmt_optional_semi,
-        ParserEntryPoint::Items => grammar::entry_points::macro_items,
-        ParserEntryPoint::Statements => grammar::entry_points::macro_stmts,
-        ParserEntryPoint::Attr => grammar::entry_points::attr,
-    };
-
-    let mut p = parser::Parser::new(inp);
-    entry_point(&mut p);
-    let events = p.finish();
-    event::process(events)
-}
+/// Parse a prefix of the input as a given syntactic construct.
+///
+/// This is used by macro-by-example parser to implement things like `$i:item`
+/// and the naming of variants follows the naming of macro fragments.
+///
+/// Note that this is generally non-optional -- the result is intentionally not
+/// `Option<Output>`. The way MBE work, by the time we *try* to parse `$e:expr`
+/// we already commit to expression. In other words, this API by design can't be
+/// used to implement "rollback and try another alternative" logic.
+#[derive(Debug)]
+pub enum PrefixEntryPoint {
+    Vis,
+    Block,
+    Stmt,
+    Pat,
+    Ty,
+    Expr,
+    Path,
+    Item,
+    MetaItem,
+}
+
+impl PrefixEntryPoint {
+    pub fn parse(&self, input: &Input) -> Output {
+        let entry_point: fn(&'_ mut parser::Parser) = match self {
+            PrefixEntryPoint::Vis => grammar::entry::prefix::vis,
+            PrefixEntryPoint::Block => grammar::entry::prefix::block,
+            PrefixEntryPoint::Stmt => grammar::entry::prefix::stmt,
+            PrefixEntryPoint::Pat => grammar::entry::prefix::pat,
+            PrefixEntryPoint::Ty => grammar::entry::prefix::ty,
+            PrefixEntryPoint::Expr => grammar::entry::prefix::expr,
+            PrefixEntryPoint::Path => grammar::entry::prefix::path,
+            PrefixEntryPoint::Item => grammar::entry::prefix::item,
+            PrefixEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
+        };
+        let mut p = parser::Parser::new(input);
+        entry_point(&mut p);
+        let events = p.finish();
+        event::process(events)
+    }
+}
+
+/// Parse the whole of the input as a given syntactic construct.
+///
+/// This covers two main use-cases:
+///
+/// * Parsing a Rust file.
+/// * Parsing a result of macro expansion.
+///
+/// That is, for something like
+///
+/// ```
+/// quick_check! {
+///     fn prop() {}
+/// }
+/// ```
+///
+/// the input to the macro will be parsed with [`PrefixEntryPoint::Item`], and
+/// the result will be [`TopEntryPoint::MacroItems`].
+///
+/// This *should* (but currently doesn't) guarantee that all input is consumed.
+#[derive(Debug)]
+pub enum TopEntryPoint {
+    SourceFile,
+    MacroStmts,
+    MacroItems,
+    Pattern,
+    Type,
+    Expr,
+    MetaItem,
+}
+
+impl TopEntryPoint {
+    pub fn parse(&self, input: &Input) -> Output {
+        let entry_point: fn(&'_ mut parser::Parser) = match self {
+            TopEntryPoint::SourceFile => grammar::entry::top::source_file,
+            TopEntryPoint::MacroStmts => grammar::entry::top::macro_stmts,
+            TopEntryPoint::MacroItems => grammar::entry::top::macro_items,
+            // FIXME
+            TopEntryPoint::Pattern => grammar::entry::prefix::pat,
+            TopEntryPoint::Type => grammar::entry::prefix::ty,
+            TopEntryPoint::Expr => grammar::entry::prefix::expr,
+            TopEntryPoint::MetaItem => grammar::entry::prefix::meta_item,
+        };
+        let mut p = parser::Parser::new(input);
+        entry_point(&mut p);
+        let events = p.finish();
+        event::process(events)
+    }
+}
 
 /// A parsing function for a specific braced-block.

@@ -52,14 +52,10 @@ impl<'a> LexedStr<'a> {
     pub fn intersperse_trivia(
         &self,
         output: &crate::Output,
-        synthetic_root: bool,
         sink: &mut dyn FnMut(StrStep),
     ) -> bool {
         let mut builder = Builder { lexed: self, pos: 0, state: State::PendingEnter, sink };
-        if synthetic_root {
-            builder.enter(SyntaxKind::SOURCE_FILE);
-        }
-
         for event in output.iter() {
             match event {
                 Step::Token { kind, n_input_tokens: n_raw_tokens } => {
@@ -73,9 +69,6 @@ impl<'a> LexedStr<'a> {
                 }
             }
         }
-        if synthetic_root {
-            builder.exit();
-        }
 
         match mem::replace(&mut builder.state, State::Normal) {
             State::PendingExit => {

@@ -80,12 +80,12 @@ fn parse_inline_err() {
 fn parse(text: &str) -> (String, bool) {
     let lexed = LexedStr::new(text);
     let input = lexed.to_input();
-    let output = crate::parse_source_file(&input);
+    let output = crate::TopEntryPoint::SourceFile.parse(&input);
 
     let mut buf = String::new();
     let mut errors = Vec::new();
     let mut indent = String::new();
-    lexed.intersperse_trivia(&output, false, &mut |step| match step {
+    lexed.intersperse_trivia(&output, &mut |step| match step {
         crate::StrStep::Token { kind, text } => {
             write!(buf, "{}", indent).unwrap();
             write!(buf, "{:?} {:?}\n", kind, text).unwrap();

@@ -0,0 +1,15 @@
+//! Things which exist to solve practial issues, but which shouldn't exist.
+//!
+//! Please avoid adding new usages of the functions in this module
+
+use crate::{ast, AstNode};
+
+pub fn parse_expr_from_str(s: &str) -> Option<ast::Expr> {
+    let s = s.trim();
+    let file = ast::SourceFile::parse(&format!("const _: () = {};", s));
+    let expr = file.syntax_node().descendants().find_map(ast::Expr::cast)?;
+    if expr.syntax().text() != s {
+        return None;
+    }
+    Some(expr)
+}

@@ -40,6 +40,7 @@ pub mod ast;
 pub mod fuzz;
 pub mod utils;
 pub mod ted;
+pub mod hacks;
 
 use std::{marker::PhantomData, sync::Arc};
@@ -167,61 +168,6 @@ impl SourceFile {
     }
 }
 
-// FIXME: `parse` functions shouldn't hang directly from AST nodes, and they
-// shouldn't return `Result`.
-//
-// We need a dedicated module for parser entry points, and they should always
-// return `Parse`.
-impl ast::Path {
-    /// Returns `text`, parsed as a path, but only if it has no errors.
-    pub fn parse(text: &str) -> Result<Self, ()> {
-        parsing::parse_text_as(text, parser::ParserEntryPoint::Path)
-    }
-}
-
-impl ast::Pat {
-    /// Returns `text`, parsed as a pattern, but only if it has no errors.
-    pub fn parse(text: &str) -> Result<Self, ()> {
-        parsing::parse_text_as(text, parser::ParserEntryPoint::Pattern)
-    }
-}
-
-impl ast::Expr {
-    /// Returns `text`, parsed as an expression, but only if it has no errors.
-    pub fn parse(text: &str) -> Result<Self, ()> {
-        parsing::parse_text_as(text, parser::ParserEntryPoint::Expr)
-    }
-}
-
-impl ast::Item {
-    /// Returns `text`, parsed as an item, but only if it has no errors.
-    pub fn parse(text: &str) -> Result<Self, ()> {
-        parsing::parse_text_as(text, parser::ParserEntryPoint::Item)
-    }
-}
-
-impl ast::Type {
-    /// Returns `text`, parsed as an type reference, but only if it has no errors.
-    pub fn parse(text: &str) -> Result<Self, ()> {
-        parsing::parse_text_as(text, parser::ParserEntryPoint::Type)
-    }
-}
-
-impl ast::Attr {
-    /// Returns `text`, parsed as an attribute, but only if it has no errors.
-    pub fn parse(text: &str) -> Result<Self, ()> {
-        parsing::parse_text_as(text, parser::ParserEntryPoint::Attr)
-    }
-}
-
-impl ast::Stmt {
-    /// Returns `text`, parsed as statement, but only if it has no errors.
-    pub fn parse(text: &str) -> Result<Self, ()> {
-        parsing::parse_text_as(text, parser::ParserEntryPoint::StatementOptionalSemi)
-    }
-}
-
 /// Matches a `SyntaxNode` against an `ast` type.
 ///
 /// # Example:

@@ -5,46 +5,25 @@ mod reparsing;
 
 use rowan::TextRange;
 
-use crate::{syntax_node::GreenNode, AstNode, SyntaxError, SyntaxNode, SyntaxTreeBuilder};
+use crate::{syntax_node::GreenNode, SyntaxError, SyntaxTreeBuilder};
 
 pub(crate) use crate::parsing::reparsing::incremental_reparse;
 
 pub(crate) fn parse_text(text: &str) -> (GreenNode, Vec<SyntaxError>) {
     let lexed = parser::LexedStr::new(text);
     let parser_input = lexed.to_input();
-    let parser_output = parser::parse_source_file(&parser_input);
-    let (node, errors, _eof) = build_tree(lexed, parser_output, false);
+    let parser_output = parser::TopEntryPoint::SourceFile.parse(&parser_input);
+    let (node, errors, _eof) = build_tree(lexed, parser_output);
     (node, errors)
 }
 
-/// Returns `text` parsed as a `T` provided there are no parse errors.
-pub(crate) fn parse_text_as<T: AstNode>(
-    text: &str,
-    entry_point: parser::ParserEntryPoint,
-) -> Result<T, ()> {
-    let lexed = parser::LexedStr::new(text);
-    if lexed.errors().next().is_some() {
-        return Err(());
-    }
-    let parser_input = lexed.to_input();
-    let parser_output = parser::parse(&parser_input, entry_point);
-    let (node, errors, eof) = build_tree(lexed, parser_output, true);
-    if !errors.is_empty() || !eof {
-        return Err(());
-    }
-    SyntaxNode::new_root(node).first_child().and_then(T::cast).ok_or(())
-}
-
 pub(crate) fn build_tree(
     lexed: parser::LexedStr<'_>,
     parser_output: parser::Output,
-    synthetic_root: bool,
 ) -> (GreenNode, Vec<SyntaxError>, bool) {
     let mut builder = SyntaxTreeBuilder::default();
 
-    let is_eof = lexed.intersperse_trivia(&parser_output, synthetic_root, &mut |step| match step {
+    let is_eof = lexed.intersperse_trivia(&parser_output, &mut |step| match step {
         parser::StrStep::Token { kind, text } => builder.token(kind, text),
         parser::StrStep::Enter { kind } => builder.start_node(kind),
         parser::StrStep::Exit => builder.finish_node(),

@@ -96,7 +96,7 @@ fn reparse_block(
 
     let tree_traversal = reparser.parse(&parser_input);
 
-    let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal, false);
+    let (green, new_parser_errors, _eof) = build_tree(lexed, tree_traversal);
 
     Some((node.replace_with(green), new_parser_errors, node.text_range()))
 }

@@ -59,60 +59,6 @@ fn validation_tests() {
     });
 }
 
-#[test]
-fn expr_parser_tests() {
-    fragment_parser_dir_test(
-        &["parser/fragments/expr/ok"],
-        &["parser/fragments/expr/err"],
-        crate::ast::Expr::parse,
-    );
-}
-
-#[test]
-fn path_parser_tests() {
-    fragment_parser_dir_test(
-        &["parser/fragments/path/ok"],
-        &["parser/fragments/path/err"],
-        crate::ast::Path::parse,
-    );
-}
-
-#[test]
-fn pattern_parser_tests() {
-    fragment_parser_dir_test(
-        &["parser/fragments/pattern/ok"],
-        &["parser/fragments/pattern/err"],
-        crate::ast::Pat::parse,
-    );
-}
-
-#[test]
-fn item_parser_tests() {
-    fragment_parser_dir_test(
-        &["parser/fragments/item/ok"],
-        &["parser/fragments/item/err"],
-        crate::ast::Item::parse,
-    );
-}
-
-#[test]
-fn type_parser_tests() {
-    fragment_parser_dir_test(
-        &["parser/fragments/type/ok"],
-        &["parser/fragments/type/err"],
-        crate::ast::Type::parse,
-    );
-}
-
-#[test]
-fn stmt_parser_tests() {
-    fragment_parser_dir_test(
-        &["parser/fragments/stmt/ok"],
-        &["parser/fragments/stmt/err"],
-        crate::ast::Stmt::parse,
-    );
-}
-
 #[test]
 fn parser_fuzz_tests() {
     for (_, text) in collect_rust_files(&test_data_dir(), &["parser/fuzz-failures"]) {
@@ -172,24 +118,6 @@ fn assert_errors_are_present(errors: &[SyntaxError], path: &Path) {
     assert!(!errors.is_empty(), "There should be errors in the file {:?}", path.display());
 }
 
-fn fragment_parser_dir_test<T, F>(ok_paths: &[&str], err_paths: &[&str], f: F)
-where
-    T: crate::AstNode,
-    F: Fn(&str) -> Result<T, ()>,
-{
-    dir_tests(&test_data_dir(), ok_paths, "rast", |text, path| match f(text) {
-        Ok(node) => format!("{:#?}", crate::ast::AstNode::syntax(&node)),
-        Err(_) => panic!("Failed to parse '{:?}'", path),
-    });
-    dir_tests(&test_data_dir(), err_paths, "rast", |text, path| {
-        if f(text).is_ok() {
-            panic!("'{:?}' successfully parsed when it should have errored", path);
-        } else {
-            "ERROR\n".to_owned()
-        }
-    });
-}
-
 /// Calls callback `f` with input code and file paths for each `.rs` file in `test_data_dir`
 /// subdirectories defined by `paths`.
 ///

@@ -1,8 +0,0 @@
-BIN_EXPR@0..5
-  LITERAL@0..1
-    INT_NUMBER@0..1 "1"
-  WHITESPACE@1..2 " "
-  PLUS@2..3 "+"
-  WHITESPACE@3..4 " "
-  LITERAL@4..5
-    INT_NUMBER@4..5 "2"

@@ -1,13 +0,0 @@
-FN@0..11
-  FN_KW@0..2 "fn"
-  WHITESPACE@2..3 " "
-  NAME@3..6
-    IDENT@3..6 "foo"
-  PARAM_LIST@6..8
-    L_PAREN@6..7 "("
-    R_PAREN@7..8 ")"
-  WHITESPACE@8..9 " "
-  BLOCK_EXPR@9..11
-    STMT_LIST@9..11
-      L_CURLY@9..10 "{"
-      R_CURLY@10..11 "}"

@@ -1 +0,0 @@
-fn foo() {}

@@ -1,4 +0,0 @@
-PATH@0..3
-  PATH_SEGMENT@0..3
-    NAME_REF@0..3
-      IDENT@0..3 "foo"

@@ -1,14 +0,0 @@
-PATH@0..13
-  PATH@0..8
-    PATH@0..3
-      PATH_SEGMENT@0..3
-        NAME_REF@0..3
-          IDENT@0..3 "foo"
-    COLON2@3..5 "::"
-    PATH_SEGMENT@5..8
-      NAME_REF@5..8
-        IDENT@5..8 "bar"
-  COLON2@8..10 "::"
-  PATH_SEGMENT@10..13
-    NAME_REF@10..13
-      IDENT@10..13 "baz"

@@ -1,10 +0,0 @@
-TUPLE_STRUCT_PAT@0..7
-  PATH@0..4
-    PATH_SEGMENT@0..4
-      NAME_REF@0..4
-        IDENT@0..4 "Some"
-  L_PAREN@4..5 "("
-  IDENT_PAT@5..6
-    NAME@5..6
-      IDENT@5..6 "x"
-  R_PAREN@6..7 ")"

@@ -1,9 +0,0 @@
-EXPR_STMT@0..5
-  BIN_EXPR@0..5
-    LITERAL@0..1
-      INT_NUMBER@0..1 "1"
-    WHITESPACE@1..2 " "
-    PLUS@2..3 "+"
-    WHITESPACE@3..4 " "
-    LITERAL@4..5
-      INT_NUMBER@4..5 "1"

@@ -1,70 +0,0 @@
-EXPR_STMT@0..55
-  BLOCK_EXPR@0..55
-    STMT_LIST@0..55
-      L_CURLY@0..1 "{"
-      WHITESPACE@1..6 "\n    "
-      LET_STMT@6..20
-        LET_KW@6..9 "let"
-        WHITESPACE@9..10 " "
-        IDENT_PAT@10..11
-          NAME@10..11
-            IDENT@10..11 "x"
-        WHITESPACE@11..12 " "
-        EQ@12..13 "="
-        WHITESPACE@13..14 " "
-        CALL_EXPR@14..19
-          PATH_EXPR@14..17
-            PATH@14..17
-              PATH_SEGMENT@14..17
-                NAME_REF@14..17
-                  IDENT@14..17 "foo"
-          ARG_LIST@17..19
-            L_PAREN@17..18 "("
-            R_PAREN@18..19 ")"
-        SEMICOLON@19..20 ";"
-      WHITESPACE@20..25 "\n    "
-      LET_STMT@25..39
-        LET_KW@25..28 "let"
-        WHITESPACE@28..29 " "
-        IDENT_PAT@29..30
-          NAME@29..30
-            IDENT@29..30 "y"
-        WHITESPACE@30..31 " "
-        EQ@31..32 "="
-        WHITESPACE@32..33 " "
-        CALL_EXPR@33..38
-          PATH_EXPR@33..36
-            PATH@33..36
-              PATH_SEGMENT@33..36
-                NAME_REF@33..36
-                  IDENT@33..36 "bar"
-          ARG_LIST@36..38
-            L_PAREN@36..37 "("
-            R_PAREN@37..38 ")"
-        SEMICOLON@38..39 ";"
-      WHITESPACE@39..44 "\n    "
-      CALL_EXPR@44..53
-        PATH_EXPR@44..46
-          PATH@44..46
-            PATH_SEGMENT@44..46
-              NAME_REF@44..46
-                IDENT@44..46 "Ok"
-        ARG_LIST@46..53
-          L_PAREN@46..47 "("
-          BIN_EXPR@47..52
-            PATH_EXPR@47..48
-              PATH@47..48
-                PATH_SEGMENT@47..48
-                  NAME_REF@47..48
-                    IDENT@47..48 "x"
-            WHITESPACE@48..49 " "
-            PLUS@49..50 "+"
-            WHITESPACE@50..51 " "
-            PATH_EXPR@51..52
-              PATH@51..52
-                PATH_SEGMENT@51..52
-                  NAME_REF@51..52
-                    IDENT@51..52 "y"
-          R_PAREN@52..53 ")"
-      WHITESPACE@53..54 "\n"
-      R_CURLY@54..55 "}"

@@ -1,5 +0,0 @@
-{
-    let x = foo();
-    let y = bar();
-    Ok(x + y)
-}

@@ -1,11 +0,0 @@
-EXPR_STMT@0..6
-  CALL_EXPR@0..5
-    PATH_EXPR@0..3
-      PATH@0..3
-        PATH_SEGMENT@0..3
-          NAME_REF@0..3
-            IDENT@0..3 "foo"
-    ARG_LIST@3..5
-      L_PAREN@3..4 "("
-      R_PAREN@4..5 ")"
-  SEMICOLON@5..6 ";"

@@ -1,12 +0,0 @@
-LET_STMT@0..11
-  LET_KW@0..3 "let"
-  WHITESPACE@3..4 " "
-  IDENT_PAT@4..5
-    NAME@4..5
-      IDENT@4..5 "x"
-  WHITESPACE@5..6 " "
-  EQ@6..7 "="
-  WHITESPACE@7..8 " "
-  LITERAL@8..10
-    INT_NUMBER@8..10 "10"
-  SEMICOLON@10..11 ";"

@@ -1,21 +0,0 @@
-EXPR_STMT@0..18
-  MACRO_CALL@0..17
-    PATH@0..2
-      PATH_SEGMENT@0..2
-        NAME_REF@0..2
-          IDENT@0..2 "m1"
-    BANG@2..3 "!"
-    TOKEN_TREE@3..17
-      L_CURLY@3..4 "{"
-      WHITESPACE@4..5 " "
-      LET_KW@5..8 "let"
-      WHITESPACE@8..9 " "
-      IDENT@9..10 "a"
-      WHITESPACE@10..11 " "
-      EQ@11..12 "="
-      WHITESPACE@12..13 " "
-      INT_NUMBER@13..14 "0"
-      SEMICOLON@14..15 ";"
-      WHITESPACE@15..16 " "
-      R_CURLY@16..17 "}"
-  SEMICOLON@17..18 ";"

@@ -1,21 +0,0 @@
-EXPR_STMT@0..18
-  MACRO_CALL@0..17
-    PATH@0..2
-      PATH_SEGMENT@0..2
-        NAME_REF@0..2
-          IDENT@0..2 "m1"
-    BANG@2..3 "!"
-    TOKEN_TREE@3..17
-      L_CURLY@3..4 "{"
-      WHITESPACE@4..5 " "
-      LET_KW@5..8 "let"
-      WHITESPACE@8..9 " "
-      IDENT@9..10 "a"
-      WHITESPACE@10..11 " "
-      EQ@11..12 "="
-      WHITESPACE@12..13 " "
-      INT_NUMBER@13..14 "0"
-      SEMICOLON@14..15 ";"
-      WHITESPACE@15..16 " "
-      R_CURLY@16..17 "}"
-  SEMICOLON@17..18 ";"

@@ -1,22 +0,0 @@
-STRUCT@0..28
-  STRUCT_KW@0..6 "struct"
-  WHITESPACE@6..7 " "
-  NAME@7..10
-    IDENT@7..10 "Foo"
-  WHITESPACE@10..11 " "
-  RECORD_FIELD_LIST@11..28
-    L_CURLY@11..12 "{"
-    WHITESPACE@12..17 "\n    "
-    RECORD_FIELD@17..25
-      NAME@17..20
-        IDENT@17..20 "bar"
-      COLON@20..21 ":"
-      WHITESPACE@21..22 " "
-      PATH_TYPE@22..25
-        PATH@22..25
-          PATH_SEGMENT@22..25
-            NAME_REF@22..25
-              IDENT@22..25 "u32"
-    COMMA@25..26 ","
-    WHITESPACE@26..27 "\n"
-    R_CURLY@27..28 "}"

@@ -1,3 +0,0 @@
-struct Foo {
-    bar: u32,
-}

@@ -1,10 +0,0 @@
-EXPR_STMT@0..5
-  CALL_EXPR@0..5
-    PATH_EXPR@0..3
-      PATH@0..3
-        PATH_SEGMENT@0..3
-          NAME_REF@0..3
-            IDENT@0..3 "foo"
-    ARG_LIST@3..5
-      L_PAREN@3..4 "("
-      R_PAREN@4..5 ")"

@@ -1,11 +0,0 @@
-LET_STMT@0..10
-  LET_KW@0..3 "let"
-  WHITESPACE@3..4 " "
-  IDENT_PAT@4..5
-    NAME@4..5
-      IDENT@4..5 "x"
-  WHITESPACE@5..6 " "
-  EQ@6..7 "="
-  WHITESPACE@7..8 " "
-  LITERAL@8..10
-    INT_NUMBER@8..10 "10"

@@ -1,22 +0,0 @@
-PATH_TYPE@0..16
-  PATH@0..16
-    PATH_SEGMENT@0..16
-      NAME_REF@0..6
-        IDENT@0..6 "Result"
-      GENERIC_ARG_LIST@6..16
-        L_ANGLE@6..7 "<"
-        TYPE_ARG@7..10
-          PATH_TYPE@7..10
-            PATH@7..10
-              PATH_SEGMENT@7..10
-                NAME_REF@7..10
-                  IDENT@7..10 "Foo"
-        COMMA@10..11 ","
-        WHITESPACE@11..12 " "
-        TYPE_ARG@12..15
-          PATH_TYPE@12..15
-            PATH@12..15
-              PATH_SEGMENT@12..15
-                NAME_REF@12..15
-                  IDENT@12..15 "Bar"
-        R_ANGLE@15..16 ">"

@@ -1 +0,0 @@
-Result<Foo, Bar>