Mirror of https://github.com/rust-lang/rust-analyzer.git
Merge #773
773: Crash fixes r=matklad a=flodiebold
This fixes a bunch of crashes found while running type inference on the whole rustc repo 😅
- avoid infinite recursion with ref bind patterns
- avoid another infinite recursion
- handle literal patterns, add a new LITERAL_PAT syntax node for this
- fix an `expect` that fails on some invalid code
Co-authored-by: Florian Diebold <flodiebold@gmail.com>
Commit: a8a4f8012e
@@ -831,18 +831,18 @@ impl ExprCollector {
                 p.field_pat_list().expect("every struct should have a field list");
             let mut fields: Vec<_> = field_pat_list
                 .bind_pats()
-                .map(|bind_pat| {
+                .filter_map(|bind_pat| {
                     let ast_pat = ast::Pat::cast(bind_pat.syntax()).expect("bind pat is a pat");
                     let pat = self.collect_pat(ast_pat);
-                    let name = bind_pat.name().expect("bind pat has a name").as_name();
-                    FieldPat { name, pat }
+                    let name = bind_pat.name()?.as_name();
+                    Some(FieldPat { name, pat })
                 })
                 .collect();
-            let iter = field_pat_list.field_pats().map(|f| {
-                let ast_pat = f.pat().expect("field pat always contains a pattern");
+            let iter = field_pat_list.field_pats().filter_map(|f| {
+                let ast_pat = f.pat()?;
                 let pat = self.collect_pat(ast_pat);
-                let name = f.name().expect("field pats always have a name").as_name();
-                FieldPat { name, pat }
+                let name = f.name()?.as_name();
+                Some(FieldPat { name, pat })
             });
             fields.extend(iter);
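The `expect` calls removed above are exactly the ones that can fire on incomplete source code: a struct pattern that is still being typed may have a field with no name or no sub-pattern yet. A minimal, self-contained sketch of the same `filter_map` + `?` pattern (the `RawField`/`FieldPat` types here are stand-ins for illustration, not the actual rust-analyzer AST):

    // Stand-in types, not the real rust-analyzer AST.
    struct RawField {
        name: Option<String>,
    }

    struct FieldPat {
        name: String,
    }

    // `filter_map` plus `?` silently skips incomplete nodes where an
    // `expect` would have aborted analysis of the whole file.
    fn collect_fields(raw: &[RawField]) -> Vec<FieldPat> {
        raw.iter()
            .filter_map(|f| {
                let name = f.name.clone()?; // missing on broken code
                Some(FieldPat { name })
            })
            .collect()
    }

    fn main() {
        let raw = [RawField { name: Some("x".into()) }, RawField { name: None }];
        assert_eq!(collect_fields(&raw).len(), 1);
    }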
@@ -850,6 +850,7 @@ impl ExprCollector {
             }

             // TODO: implement
+            ast::PatKind::LiteralPat(_) => Pat::Missing,
             ast::PatKind::SlicePat(_) | ast::PatKind::RangePat(_) => Pat::Missing,
         };
         let syntax_ptr = SyntaxNodePtr::new(pat.syntax());
@@ -3,4 +3,5 @@ test_utils::marks!(
     item_map_enum_importing
     type_var_cycles_resolve_completely
     type_var_cycles_resolve_as_possible
+    type_var_resolves_to_int_var
 );
@@ -879,11 +879,22 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
         ty
     }

-    fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs) -> bool {
-        substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify(t1, t2))
+    fn unify_substs(&mut self, substs1: &Substs, substs2: &Substs, depth: usize) -> bool {
+        substs1.0.iter().zip(substs2.0.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth))
     }

     fn unify(&mut self, ty1: &Ty, ty2: &Ty) -> bool {
+        self.unify_inner(ty1, ty2, 0)
+    }
+
+    fn unify_inner(&mut self, ty1: &Ty, ty2: &Ty, depth: usize) -> bool {
+        if depth > 1000 {
+            // prevent stackoverflows
+            panic!("infinite recursion in unification");
+        }
         if ty1 == ty2 {
             return true;
         }
         // try to resolve type vars first
         let ty1 = self.resolve_ty_shallow(ty1);
         let ty2 = self.resolve_ty_shallow(ty2);
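The shape of the guard, reduced to a self-contained toy (a hypothetical two-variant `Ty`, not the real `InferenceContext`): the public entry point starts at depth 0, every structural recursion adds 1, and a generous bound turns a would-be stack overflow into an explicit panic that points at the real bug.

    enum Ty {
        Int,
        Ref(Box<Ty>),
    }

    fn unify(t1: &Ty, t2: &Ty) -> bool {
        unify_inner(t1, t2, 0)
    }

    fn unify_inner(t1: &Ty, t2: &Ty, depth: usize) -> bool {
        if depth > 1000 {
            // Fail loudly instead of overflowing the stack on cyclic types.
            panic!("infinite recursion in unification");
        }
        match (t1, t2) {
            (Ty::Int, Ty::Int) => true,
            // Every structural step threads `depth + 1` through.
            (Ty::Ref(a), Ty::Ref(b)) => unify_inner(a, b, depth + 1),
            _ => false,
        }
    }

    fn main() {
        assert!(unify(&Ty::Ref(Box::new(Ty::Int)), &Ty::Ref(Box::new(Ty::Int))));
    }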
@@ -904,13 +915,15 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
             (
                 Ty::Adt { def_id: def_id1, substs: substs1, .. },
                 Ty::Adt { def_id: def_id2, substs: substs2, .. },
-            ) if def_id1 == def_id2 => self.unify_substs(substs1, substs2),
-            (Ty::Slice(t1), Ty::Slice(t2)) => self.unify(t1, t2),
-            (Ty::RawPtr(t1, m1), Ty::RawPtr(t2, m2)) if m1 == m2 => self.unify(t1, t2),
-            (Ty::Ref(t1, m1), Ty::Ref(t2, m2)) if m1 == m2 => self.unify(t1, t2),
+            ) if def_id1 == def_id2 => self.unify_substs(substs1, substs2, depth + 1),
+            (Ty::Slice(t1), Ty::Slice(t2)) => self.unify_inner(t1, t2, depth + 1),
+            (Ty::RawPtr(t1, m1), Ty::RawPtr(t2, m2)) if m1 == m2 => {
+                self.unify_inner(t1, t2, depth + 1)
+            }
+            (Ty::Ref(t1, m1), Ty::Ref(t2, m2)) if m1 == m2 => self.unify_inner(t1, t2, depth + 1),
             (Ty::FnPtr(sig1), Ty::FnPtr(sig2)) if sig1 == sig2 => true,
             (Ty::Tuple(ts1), Ty::Tuple(ts2)) if ts1.len() == ts2.len() => {
-                ts1.iter().zip(ts2.iter()).all(|(t1, t2)| self.unify(t1, t2))
+                ts1.iter().zip(ts2.iter()).all(|(t1, t2)| self.unify_inner(t1, t2, depth + 1))
             }
             (Ty::Infer(InferTy::TypeVar(tv1)), Ty::Infer(InferTy::TypeVar(tv2)))
             | (Ty::Infer(InferTy::IntVar(tv1)), Ty::Infer(InferTy::IntVar(tv2)))
@@ -989,19 +1002,30 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
     /// If `ty` is a type variable with known type, returns that type;
     /// otherwise, return ty.
     fn resolve_ty_shallow<'b>(&mut self, ty: &'b Ty) -> Cow<'b, Ty> {
-        match ty {
-            Ty::Infer(tv) => {
-                let inner = tv.to_inner();
-                match self.var_unification_table.probe_value(inner).known() {
-                    Some(known_ty) => {
-                        // The known_ty can't be a type var itself
-                        Cow::Owned(known_ty.clone())
-                    }
-                    _ => Cow::Borrowed(ty),
-                }
-            }
-            _ => Cow::Borrowed(ty),
-        }
+        let mut ty = Cow::Borrowed(ty);
+        // The type variable could resolve to a int/float variable. Hence try
+        // resolving up to three times; each type of variable shouldn't occur
+        // more than once
+        for i in 0..3 {
+            if i > 0 {
+                tested_by!(type_var_resolves_to_int_var);
+            }
+            match &*ty {
+                Ty::Infer(tv) => {
+                    let inner = tv.to_inner();
+                    match self.var_unification_table.probe_value(inner).known() {
+                        Some(known_ty) => {
+                            // The known_ty can't be a type var itself
+                            ty = Cow::Owned(known_ty.clone());
+                        }
+                        _ => return ty,
+                    }
+                }
+                _ => return ty,
+            }
+        }
+        log::error!("Inference variable still not resolved: {:?}", ty);
+        ty
     }

     /// Resolves the type completely; type variables without known type are
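The idea behind the bounded loop, as a self-contained sketch (a toy `HashMap` stands in for the real unification table, and one `Var` variant stands in for both type and int/float variables): a general type variable may point at an int/float variable, which may in turn point at a concrete type, so following a small fixed number of links covers every legal chain and anything longer is logged instead of looping forever.

    use std::collections::HashMap;

    #[derive(Clone, Debug, PartialEq)]
    enum Ty {
        Var(u32), // toy stand-in for type vars and int/float vars alike
        Int,
    }

    fn resolve_shallow(table: &HashMap<u32, Ty>, ty: &Ty) -> Ty {
        let mut ty = ty.clone();
        // Each kind of variable should occur at most once in a chain,
        // so a small fixed bound is enough.
        for _ in 0..3 {
            let v = match ty {
                Ty::Var(v) => v,
                _ => return ty,
            };
            match table.get(&v) {
                Some(known) => ty = known.clone(),
                None => return ty,
            }
        }
        eprintln!("variable still not resolved: {:?}", ty);
        ty
    }

    fn main() {
        let mut table = HashMap::new();
        table.insert(0, Ty::Var(1)); // a type var pointing at an "int var"
        table.insert(1, Ty::Int);
        assert_eq!(resolve_shallow(&table, &Ty::Var(0)), Ty::Int);
    }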
@@ -1185,17 +1209,21 @@ impl<'a, D: HirDatabase> InferenceContext<'a, D> {
                 self.infer_path_expr(&resolver, &path).unwrap_or(Ty::Unknown)
             }
             Pat::Bind { mode, name: _name, subpat } => {
-                let subty = if let Some(subpat) = subpat {
+                let inner_ty = if let Some(subpat) = subpat {
                     self.infer_pat(*subpat, expected)
                 } else {
                     expected.clone()
                 };
+                let inner_ty = self.insert_type_vars_shallow(inner_ty);

-                match mode {
-                    BindingAnnotation::Ref => Ty::Ref(subty.into(), Mutability::Shared),
-                    BindingAnnotation::RefMut => Ty::Ref(subty.into(), Mutability::Mut),
-                    BindingAnnotation::Mutable | BindingAnnotation::Unannotated => subty,
-                }
+                let bound_ty = match mode {
+                    BindingAnnotation::Ref => Ty::Ref(inner_ty.clone().into(), Mutability::Shared),
+                    BindingAnnotation::RefMut => Ty::Ref(inner_ty.clone().into(), Mutability::Mut),
+                    BindingAnnotation::Mutable | BindingAnnotation::Unannotated => inner_ty.clone(),
+                };
+                let bound_ty = self.resolve_ty_as_possible(&mut vec![], bound_ty);
+                self.write_pat_ty(pat, bound_ty);
+                return inner_ty;
             }
             _ => Ty::Unknown,
         };
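The distinction the rewritten arm keeps track of is visible in ordinary Rust: with a `ref`/`ref mut` binding mode the binding's type is a reference around the type the pattern actually matches, which is why the new code records the wrapped `bound_ty` for the binding but hands `inner_ty` back to the caller. A plain-Rust illustration of the two types involved (nothing rust-analyzer-specific here):

    fn main() {
        let mut value: i32 = 1;
        match value {
            // The pattern matches an `i32` (the "inner" type), but the
            // binding `n` gets `&mut i32` (the "bound" type).
            ref mut n => *n += 1,
        }
        assert_eq!(value, 2);
    }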
crates/ra_hir/src/ty/snapshots/tests__infer_std_crash_1.snap (new file)
@@ -0,0 +1,13 @@
+---
+created: "2019-02-09T16:56:24.803326529Z"
+creator: insta@0.6.1
+source: crates/ra_hir/src/ty/tests.rs
+expression: "&result"
+---
+[54; 139) '{ ... } }': ()
+[60; 137) 'match ... }': ()
+[66; 83) 'someth...nknown': Maybe<[unknown]>
+[94; 124) 'Maybe:...thing)': Maybe<[unknown]>
+[106; 123) 'ref mu...ething': &mut [unknown]
+[128; 130) '()': ()
crates/ra_hir/src/ty/snapshots/tests__infer_std_crash_2.snap (new file)
@@ -0,0 +1,14 @@
+---
+created: "2019-02-09T17:03:11.974225590Z"
+creator: insta@0.6.1
+source: crates/ra_hir/src/ty/tests.rs
+expression: "&result"
+---
+[23; 53) '{ ...n']; }': ()
+[29; 50) '&[0, b...b'\n']': &[u8]
+[30; 50) '[0, b'...b'\n']': [u8]
+[31; 32) '0': u8
+[34; 39) 'b'\n'': u8
+[41; 42) '1': u8
+[44; 49) 'b'\n'': u8
crates/ra_hir/src/ty/snapshots/tests__infer_std_crash_3.snap (new file)
@@ -0,0 +1,13 @@
+---
+created: "2019-02-09T18:02:37.377591660Z"
+creator: insta@0.6.1
+source: crates/ra_hir/src/ty/tests.rs
+expression: "&result"
+---
+[18; 102) '{ ... } }': ()
+[24; 100) 'match ... }': ()
+[42; 88) 'SizeSk...tail }': [unknown]
+[76; 80) 'true': [unknown]
+[82; 86) 'tail': [unknown]
+[92; 94) '{}': ()
crates/ra_hir/src/ty/snapshots/tests__infer_std_crash_4.snap (new file)
@@ -0,0 +1,16 @@
+---
+created: "2019-02-09T19:55:39.712470520Z"
+creator: insta@0.6.1
+source: crates/ra_hir/src/ty/tests.rs
+expression: "&result"
+---
+[25; 110) '{ ... } }': ()
+[31; 108) 'match ... }': ()
+[37; 42) '*self': [unknown]
+[38; 42) 'self': [unknown]
+[53; 95) 'Borrow...), ..}': [unknown]
+[74; 77) 'box': [unknown]
+[78; 87) 'Primitive': [unknown]
+[88; 89) 'p': [unknown]
+[99; 101) '{}': ()
crates/ra_hir/src/ty/snapshots/tests__infer_std_crash_5.snap (new file)
@@ -0,0 +1,30 @@
+---
+created: "2019-02-09T20:28:37.294693728Z"
+creator: insta@0.6.1
+source: crates/ra_hir/src/ty/tests.rs
+expression: "&result"
+---
+[27; 323) '{ ... } }': ()
+[33; 321) 'for co... }': ()
+[37; 44) 'content': &[unknown]
+[48; 61) 'doesnt_matter': [unknown]
+[62; 321) '{ ... }': ()
+[76; 80) 'name': &&[unknown]
+[83; 167) 'if doe... }': &&[unknown]
+[86; 99) 'doesnt_matter': bool
+[100; 129) '{ ... }': &&[unknown]
+[114; 119) 'first': &&[unknown]
+[135; 167) '{ ... }': &&[unknown]
+[149; 157) '&content': &&[unknown]
+[150; 157) 'content': &[unknown]
+[182; 189) 'content': &&[unknown]
+[192; 314) 'if ICE... }': &&[unknown]
+[195; 232) 'ICE_RE..._VALUE': [unknown]
+[195; 248) 'ICE_RE...&name)': bool
+[242; 247) '&name': &&&[unknown]
+[243; 247) 'name': &&[unknown]
+[249; 277) '{ ... }': &&[unknown]
+[263; 267) 'name': &&[unknown]
+[283; 314) '{ ... }': &[unknown]
+[297; 304) 'content': &[unknown]
@@ -630,6 +630,95 @@ fn test() {
     );
 }

+#[test]
+fn infer_std_crash_1() {
+    // caused stack overflow, taken from std
+    check_inference(
+        "infer_std_crash_1",
+        r#"
+enum Maybe<T> {
+    Real(T),
+    Fake,
+}
+
+fn write() {
+    match something_unknown {
+        Maybe::Real(ref mut something) => (),
+    }
+}
+"#,
+    );
+}
+
+#[test]
+fn infer_std_crash_2() {
+    covers!(type_var_resolves_to_int_var);
+    // caused "equating two type variables, ...", taken from std
+    check_inference(
+        "infer_std_crash_2",
+        r#"
+fn test_line_buffer() {
+    &[0, b'\n', 1, b'\n'];
+}
+"#,
+    );
+}
+
+#[test]
+fn infer_std_crash_3() {
+    // taken from rustc
+    check_inference(
+        "infer_std_crash_3",
+        r#"
+pub fn compute() {
+    match _ {
+        SizeSkeleton::Pointer { non_zero: true, tail } => {}
+    }
+}
+"#,
+    );
+}
+
+#[test]
+fn infer_std_crash_4() {
+    // taken from rustc
+    check_inference(
+        "infer_std_crash_4",
+        r#"
+pub fn primitive_type() {
+    match *self {
+        BorrowedRef { type_: box Primitive(p), ..} => {},
+    }
+}
+"#,
+    );
+}
+
+#[test]
+fn infer_std_crash_5() {
+    // taken from rustc
+    check_inference(
+        "infer_std_crash_5",
+        r#"
+fn extra_compiler_flags() {
+    for content in doesnt_matter {
+        let name = if doesnt_matter {
+            first
+        } else {
+            &content
+        };
+
+        let content = if ICE_REPORT_COMPILER_FLAGS_STRIP_VALUE.contains(&name) {
+            name
+        } else {
+            content
+        };
+    }
+}
+"#,
+    );
+}
+
 fn infer(content: &str) -> String {
     let (db, _, file_id) = MockDatabase::with_single_file(content);
     let source_file = db.parse(file_id);
@@ -1821,6 +1821,38 @@ impl LiteralExpr {

 impl LiteralExpr {}

+// LiteralPat
+#[derive(Debug, PartialEq, Eq, Hash)]
+#[repr(transparent)]
+pub struct LiteralPat {
+    pub(crate) syntax: SyntaxNode,
+}
+unsafe impl TransparentNewType for LiteralPat {
+    type Repr = rowan::SyntaxNode<RaTypes>;
+}
+
+impl AstNode for LiteralPat {
+    fn cast(syntax: &SyntaxNode) -> Option<&Self> {
+        match syntax.kind() {
+            LITERAL_PAT => Some(LiteralPat::from_repr(syntax.into_repr())),
+            _ => None,
+        }
+    }
+    fn syntax(&self) -> &SyntaxNode { &self.syntax }
+}
+
+impl ToOwned for LiteralPat {
+    type Owned = TreeArc<LiteralPat>;
+    fn to_owned(&self) -> TreeArc<LiteralPat> { TreeArc::cast(self.syntax.to_owned()) }
+}
+
+
+impl LiteralPat {
+    pub fn literal(&self) -> Option<&Literal> {
+        super::child_opt(self)
+    }
+}
+
 // LoopExpr
 #[derive(Debug, PartialEq, Eq, Hash)]
 #[repr(transparent)]
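The generated code above follows the same shape as the neighbouring nodes in this file: a typed, zero-cost wrapper that is only handed out after a kind check. A fully self-contained toy version of that design (hand-rolled `SyntaxKind`/`SyntaxNode` stand-ins rather than the real rowan-based types):

    enum SyntaxKind {
        LiteralPat,
        BindPat,
    }

    struct SyntaxNode {
        kind: SyntaxKind,
    }

    // Typed view over an untyped node; it can only be obtained through the
    // kind-checked `cast`, mirroring the generated `AstNode` impl.
    struct LiteralPat<'a> {
        syntax: &'a SyntaxNode,
    }

    impl<'a> LiteralPat<'a> {
        fn cast(syntax: &'a SyntaxNode) -> Option<Self> {
            match syntax.kind {
                SyntaxKind::LiteralPat => Some(LiteralPat { syntax }),
                _ => None,
            }
        }

        fn syntax(&self) -> &SyntaxNode {
            self.syntax
        }
    }

    fn main() {
        let lit = SyntaxNode { kind: SyntaxKind::LiteralPat };
        let bind = SyntaxNode { kind: SyntaxKind::BindPat };
        assert!(std::ptr::eq(LiteralPat::cast(&lit).unwrap().syntax(), &lit));
        assert!(LiteralPat::cast(&bind).is_none());
    }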
@@ -2594,6 +2626,7 @@ pub enum PatKind<'a> {
     TuplePat(&'a TuplePat),
     SlicePat(&'a SlicePat),
     RangePat(&'a RangePat),
+    LiteralPat(&'a LiteralPat),
 }

 impl AstNode for Pat {
@@ -2607,7 +2640,8 @@ impl AstNode for Pat {
             | TUPLE_STRUCT_PAT
             | TUPLE_PAT
             | SLICE_PAT
-            | RANGE_PAT => Some(Pat::from_repr(syntax.into_repr())),
+            | RANGE_PAT
+            | LITERAL_PAT => Some(Pat::from_repr(syntax.into_repr())),
             _ => None,
         }
     }
@@ -2631,6 +2665,7 @@ impl Pat {
             TUPLE_PAT => PatKind::TuplePat(TuplePat::cast(&self.syntax).unwrap()),
             SLICE_PAT => PatKind::SlicePat(SlicePat::cast(&self.syntax).unwrap()),
             RANGE_PAT => PatKind::RangePat(RangePat::cast(&self.syntax).unwrap()),
+            LITERAL_PAT => PatKind::LiteralPat(LiteralPat::cast(&self.syntax).unwrap()),
             _ => unreachable!(),
         }
     }
@@ -161,6 +161,7 @@ Grammar(
         "TUPLE_PAT",
         "SLICE_PAT",
         "RANGE_PAT",
+        "LITERAL_PAT",

         // atoms
         "TUPLE_EXPR",
@@ -524,6 +525,7 @@ Grammar(
     "TuplePat": ( collections: [["args", "Pat"]] ),
     "SlicePat": (),
     "RangePat": (),
+    "LiteralPat": (options: ["Literal"]),

     "Pat": (
         enum: [
@@ -536,6 +538,7 @@ Grammar(
             "TuplePat",
             "SlicePat",
             "RangePat",
+            "LiteralPat",
         ],
     ),
@@ -43,21 +43,8 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
         return Some(path_pat(p));
     }

-    // test literal_pattern
-    // fn main() {
-    //     match () {
-    //         -1 => (),
-    //         92 => (),
-    //         'c' => (),
-    //         "hello" => (),
-    //     }
-    // }
-    if p.at(MINUS) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER) {
-        p.bump();
-    }
-
-    if let Some(m) = expressions::literal(p) {
-        return Some(m);
+    if is_literal_pat_start(p) {
+        return Some(literal_pat(p));
     }

     let m = match la0 {
@@ -73,6 +60,30 @@ fn atom_pat(p: &mut Parser, recovery_set: TokenSet) -> Option<CompletedMarker> {
     Some(m)
 }

+fn is_literal_pat_start(p: &mut Parser) -> bool {
+    p.at(MINUS) && (p.nth(1) == INT_NUMBER || p.nth(1) == FLOAT_NUMBER)
+        || p.at_ts(expressions::LITERAL_FIRST)
+}
+
+// test literal_pattern
+// fn main() {
+//     match () {
+//         -1 => (),
+//         92 => (),
+//         'c' => (),
+//         "hello" => (),
+//     }
+// }
+fn literal_pat(p: &mut Parser) -> CompletedMarker {
+    assert!(is_literal_pat_start(p));
+    let m = p.start();
+    if p.at(MINUS) {
+        p.bump();
+    }
+    expressions::literal(p);
+    m.complete(p, LITERAL_PAT)
+}
+
 // test path_part
 // fn foo() {
 //     let foo::Bar = ();
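Reduced to its decision logic, the `is_literal_pat_start` helper above says: a leading `-` only starts a literal pattern when a numeric literal follows, and otherwise any token that can start a literal does. A toy, self-contained version over a plain token slice (a hypothetical `Token` enum, simplified to integers only; the real check also accepts float literals and uses the parser's token-set machinery):

    enum Token {
        Minus,
        Int,
        Char,
        Str,
        Ident,
    }

    fn is_literal_pat_start(tokens: &[Token]) -> bool {
        match tokens {
            // A leading `-` only counts when a numeric literal follows.
            [Token::Minus, Token::Int, ..] => true,
            // Otherwise any literal token starts a literal pattern.
            [Token::Int | Token::Char | Token::Str, ..] => true,
            _ => false,
        }
    }

    fn main() {
        assert!(is_literal_pat_start(&[Token::Minus, Token::Int]));
        assert!(!is_literal_pat_start(&[Token::Minus, Token::Ident]));
        assert!(is_literal_pat_start(&[Token::Char]));
        assert!(is_literal_pat_start(&[Token::Str]));
    }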
@@ -157,6 +157,7 @@ pub enum SyntaxKind {
     TUPLE_PAT,
     SLICE_PAT,
     RANGE_PAT,
+    LITERAL_PAT,
     TUPLE_EXPR,
     ARRAY_EXPR,
     PAREN_EXPR,
@@ -493,6 +494,7 @@ impl SyntaxKind {
             TUPLE_PAT => &SyntaxInfo { name: "TUPLE_PAT" },
             SLICE_PAT => &SyntaxInfo { name: "SLICE_PAT" },
             RANGE_PAT => &SyntaxInfo { name: "RANGE_PAT" },
+            LITERAL_PAT => &SyntaxInfo { name: "LITERAL_PAT" },
             TUPLE_EXPR => &SyntaxInfo { name: "TUPLE_EXPR" },
             ARRAY_EXPR => &SyntaxInfo { name: "ARRAY_EXPR" },
             PAREN_EXPR => &SyntaxInfo { name: "PAREN_EXPR" },
@@ -22,9 +22,10 @@ SOURCE_FILE@[0; 113)
       L_CURLY@[25; 26)
       WHITESPACE@[26; 35)
       MATCH_ARM@[35; 43)
-        MINUS@[35; 36)
-        LITERAL@[36; 37)
-          INT_NUMBER@[36; 37) "1"
+        LITERAL_PAT@[35; 37)
+          MINUS@[35; 36)
+          LITERAL@[36; 37)
+            INT_NUMBER@[36; 37) "1"
         WHITESPACE@[37; 38)
         FAT_ARROW@[38; 40)
         WHITESPACE@[40; 41)
@@ -34,8 +35,9 @@ SOURCE_FILE@[0; 113)
      COMMA@[43; 44)
      WHITESPACE@[44; 53)
      MATCH_ARM@[53; 61)
-        LITERAL@[53; 55)
-          INT_NUMBER@[53; 55) "92"
+        LITERAL_PAT@[53; 55)
+          LITERAL@[53; 55)
+            INT_NUMBER@[53; 55) "92"
        WHITESPACE@[55; 56)
        FAT_ARROW@[56; 58)
        WHITESPACE@[58; 59)
@@ -45,8 +47,9 @@ SOURCE_FILE@[0; 113)
      COMMA@[61; 62)
      WHITESPACE@[62; 71)
      MATCH_ARM@[71; 80)
-        LITERAL@[71; 74)
-          CHAR@[71; 74)
+        LITERAL_PAT@[71; 74)
+          LITERAL@[71; 74)
+            CHAR@[71; 74)
        WHITESPACE@[74; 75)
        FAT_ARROW@[75; 77)
        WHITESPACE@[77; 78)
@@ -56,8 +59,9 @@ SOURCE_FILE@[0; 113)
      COMMA@[80; 81)
      WHITESPACE@[81; 90)
      MATCH_ARM@[90; 103)
-        LITERAL@[90; 97)
-          STRING@[90; 97)
+        LITERAL_PAT@[90; 97)
+          LITERAL@[90; 97)
+            STRING@[90; 97)
        WHITESPACE@[97; 98)
        FAT_ARROW@[98; 100)
        WHITESPACE@[100; 101)
@@ -22,13 +22,15 @@ SOURCE_FILE@[0; 112)
      WHITESPACE@[26; 35)
      MATCH_ARM@[35; 50)
        RANGE_PAT@[35; 44)
-          LITERAL@[35; 36)
-            INT_NUMBER@[35; 36) "0"
+          LITERAL_PAT@[35; 36)
+            LITERAL@[35; 36)
+              INT_NUMBER@[35; 36) "0"
          WHITESPACE@[36; 37)
          DOTDOTDOT@[37; 40)
          WHITESPACE@[40; 41)
-          LITERAL@[41; 44)
-            INT_NUMBER@[41; 44) "100"
+          LITERAL_PAT@[41; 44)
+            LITERAL@[41; 44)
+              INT_NUMBER@[41; 44) "100"
        WHITESPACE@[44; 45)
        FAT_ARROW@[45; 47)
        WHITESPACE@[47; 48)
@@ -39,13 +41,15 @@ SOURCE_FILE@[0; 112)
      WHITESPACE@[51; 60)
      MATCH_ARM@[60; 77)
        RANGE_PAT@[60; 71)
-          LITERAL@[60; 63)
-            INT_NUMBER@[60; 63) "101"
+          LITERAL_PAT@[60; 63)
+            LITERAL@[60; 63)
+              INT_NUMBER@[60; 63) "101"
          WHITESPACE@[63; 64)
          DOTDOTEQ@[64; 67)
          WHITESPACE@[67; 68)
-          LITERAL@[68; 71)
-            INT_NUMBER@[68; 71) "200"
+          LITERAL_PAT@[68; 71)
+            LITERAL@[68; 71)
+              INT_NUMBER@[68; 71) "200"
        WHITESPACE@[71; 72)
        FAT_ARROW@[72; 74)
        WHITESPACE@[74; 75)
@@ -56,13 +60,15 @@ SOURCE_FILE@[0; 112)
      WHITESPACE@[78; 87)
      MATCH_ARM@[87; 102)
        RANGE_PAT@[87; 97)
-          LITERAL@[87; 90)
-            INT_NUMBER@[87; 90) "200"
+          LITERAL_PAT@[87; 90)
+            LITERAL@[87; 90)
+              INT_NUMBER@[87; 90) "200"
          WHITESPACE@[90; 91)
          DOTDOT@[91; 93)
          WHITESPACE@[93; 94)
-          LITERAL@[94; 97)
-            INT_NUMBER@[94; 97) "301"
+          LITERAL_PAT@[94; 97)
+            LITERAL@[94; 97)
+              INT_NUMBER@[94; 97) "301"
        FAT_ARROW@[97; 99)
        WHITESPACE@[99; 100)
        TUPLE_EXPR@[100; 102)
@@ -456,8 +456,9 @@ SOURCE_FILE@[0; 3813)
        L_CURLY@[930; 931)
        WHITESPACE@[931; 952)
        MATCH_ARM@[952; 1147)
-          LITERAL@[952; 953)
-            INT_NUMBER@[952; 953) "1"
+          LITERAL_PAT@[952; 953)
+            LITERAL@[952; 953)
+              INT_NUMBER@[952; 953) "1"
          WHITESPACE@[953; 954)
          FAT_ARROW@[954; 956)
          WHITESPACE@[956; 957)
@@ -1080,8 +1081,9 @@ SOURCE_FILE@[0; 3813)
        L_CURLY@[1853; 1854)
        WHITESPACE@[1854; 1855)
        MATCH_ARM@[1855; 1863)
-          LITERAL@[1855; 1856)
-            INT_NUMBER@[1855; 1856) "1"
+          LITERAL_PAT@[1855; 1856)
+            LITERAL@[1855; 1856)
+              INT_NUMBER@[1855; 1856) "1"
          WHITESPACE@[1856; 1857)
          FAT_ARROW@[1857; 1859)
          WHITESPACE@[1859; 1860)