derive: don't use format_args! for code formatting

* No need to manually escape raw identifiers
* Allow a few warnings
René Kijewski 2025-08-06 06:25:59 +02:00 committed by René Kijewski
parent 7dbbe397f9
commit 693f86d1c4
13 changed files with 660 additions and 611 deletions
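For context, here is a minimal sketch (illustrative only, not part of the diff) of the shift this commit makes: instead of assembling generated Rust code as strings via format_args! and escaping raw identifiers by hand, the generator now builds spanned token streams, with the new field_new helper taking care of keyword escaping.

fn generated_field_access_sketch(name: &str, span: proc_macro2::Span) -> proc_macro2::TokenStream {
    // Old style (string based), roughly:
    //     buf.write(format_args!("self.{}", normalize_identifier(name)), span);
    // New style (token based): no manual `r#` escaping is needed.
    let id = field_new(name, span); // e.g. "type" becomes `r#type`
    quote::quote_spanned!(span => self.#id)
}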


@ -0,0 +1 @@
../../askama/src/ascii_str.rs


@ -12,19 +12,18 @@ use std::str;
use std::sync::Arc;
use parser::node::{Call, Macro, Whitespace};
use parser::{
CharLit, Expr, FloatKind, IntKind, MAX_RUST_KEYWORD_LEN, Num, RUST_KEYWORDS, StrLit, WithSpan,
};
use parser::{CharLit, Expr, FloatKind, IntKind, Num, StrLit, WithSpan};
use proc_macro2::TokenStream;
use quote::ToTokens;
use rustc_hash::FxBuildHasher;
use syn::Token;
use crate::ascii_str::{AsciiChar, AsciiStr};
use crate::generator::helpers::{clean_path, diff_paths};
use crate::heritage::{Context, Heritage};
use crate::html::write_escaped_str;
use crate::input::{Source, TemplateInput};
use crate::integration::{Buffer, impl_everything, write_header};
use crate::{CompileError, FileInfo};
use crate::{CompileError, FileInfo, field_new};
pub(crate) fn template_to_string(
buf: &mut Buffer,
@ -170,16 +169,12 @@ impl<'a, 'h> Generator<'a, 'h> {
) -> Result<usize, CompileError> {
let ctx = &self.contexts[&self.input.path];
let span = proc_macro2::Span::call_site();
let span = self.input.ast.ident.span();
let target = match tmpl_kind {
TmplKind::Struct => spanned!(span=> askama::Template),
TmplKind::Variant => spanned!(span=> askama::helpers::EnumVariantTemplate),
TmplKind::Block(trait_name) => {
let trait_name = proc_macro2::Ident::new(trait_name, span);
quote::quote!(#trait_name)
}
TmplKind::Block(trait_name) => field_new(trait_name, span),
};
write_header(self.input.ast, buf, target, span);
let mut full_paths = TokenStream::new();
if let Some(full_config_path) = &self.input.config.full_config_path {
@ -227,15 +222,17 @@ impl<'a, 'h> Generator<'a, 'h> {
);
}
buf.write_tokens(spanned!(span=>
{
write_header(self.input.ast, buf, target, span);
let var_writer = crate::var_writer();
let var_values = crate::var_values();
quote_into!(buf, span, { {
fn render_into_with_values<AskamaW>(
&self,
__askama_writer: &mut AskamaW,
__askama_values: &dyn askama::Values
#var_writer: &mut AskamaW,
#var_values: &dyn askama::Values,
) -> askama::Result<()>
where
AskamaW: askama::helpers::core::fmt::Write + ?askama::helpers::core::marker::Sized
AskamaW: askama::helpers::core::fmt::Write + ?askama::helpers::core::marker::Sized,
{
#[allow(unused_imports)]
use askama::{
@ -249,8 +246,7 @@ impl<'a, 'h> Generator<'a, 'h> {
askama::Result::Ok(())
}
#size_hint_s
}
));
} });
#[cfg(feature = "blocks")]
for block in self.input.blocks {
@ -688,7 +684,6 @@ impl<'a> MapChain<'a> {
}
fn resolve(&self, name: &str) -> Option<String> {
let name = normalize_identifier(name);
self.get(&Cow::Borrowed(name)).map(|meta| match &meta.refs {
Some(expr) => expr.clone(),
None => name.to_string(),
@ -696,7 +691,6 @@ impl<'a> MapChain<'a> {
}
fn resolve_or_self(&self, name: &str) -> String {
let name = normalize_identifier(name);
self.resolve(name).unwrap_or_else(|| format!("self.{name}"))
}
@ -759,8 +753,6 @@ fn is_associated_item_self(mut expr: &Expr<'_>) -> bool {
}
}
const FILTER_SOURCE: &str = "__askama_filter_block";
#[derive(Clone, Copy, Debug)]
enum DisplayWrap {
Wrapped,
@ -796,36 +788,35 @@ enum Writable<'a> {
Expr(&'a WithSpan<'a, Box<Expr<'a>>>),
}
/// Identifiers to be replaced with raw identifiers, so as to avoid
/// collisions between template syntax and Rust's syntax. In particular
/// [Rust keywords](https://doc.rust-lang.org/reference/keywords.html)
/// should be replaced, since they're not reserved words in Askama
/// syntax but have a high probability of causing problems in the
/// generated code.
///
/// This list excludes the Rust keywords *self*, *Self*, and *super*
/// because they are not allowed to be raw identifiers, and *loop*
/// because it's used like a keyword in the template
/// language.
fn normalize_identifier(ident: &str) -> &str {
// This table works for as long as the replacement string is the original string
// prepended with "r#". The strings get right-padded to the same length with b'_'.
// While the code does not need it, please keep the list sorted when adding new
// keywords.
if ident.len() > MAX_RUST_KEYWORD_LEN {
return ident;
}
let kws = RUST_KEYWORDS[ident.len()];
let mut padded_ident = [0; MAX_RUST_KEYWORD_LEN];
padded_ident[..ident.len()].copy_from_slice(ident.as_bytes());
// Since the individual buckets are quite short, a linear search is faster than a binary search.
for probe in kws {
if padded_ident == *AsciiChar::slice_as_bytes(probe[2..].try_into().unwrap()) {
return AsciiStr::from_slice(&probe[..ident.len() + 2]);
macro_rules! make_token_match {
($op:ident @ $span:ident => $($tt:tt)+) => {
match $op {
$(stringify!($tt) => Token![$tt]($span).into_token_stream(),)+
_ => unreachable!(),
}
}
ident
};
}
#[inline]
#[track_caller]
fn logic_op(op: &str, span: proc_macro2::Span) -> TokenStream {
make_token_match!(op @ span => && || ^)
}
#[inline]
#[track_caller]
fn unary_op(op: &str, span: proc_macro2::Span) -> TokenStream {
make_token_match!(op @ span => - ! * &)
}
#[inline]
#[track_caller]
fn range_op(op: &str, span: proc_macro2::Span) -> TokenStream {
make_token_match!(op @ span => .. ..=)
}
#[inline]
#[track_caller]
fn binary_op(op: &str, span: proc_macro2::Span) -> TokenStream {
make_token_match!(op @ span => * / % + - << >> & ^ | == != < > <= >= && || .. ..=)
}
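A short usage sketch for the operator helpers above (the call site is assumed, not part of the diff):

fn op_tokens_sketch() {
    let span = proc_macro2::Span::call_site();
    let _plus = binary_op("+", span); // the `+` token with the given span
    let _not = unary_op("!", span);   // the `!` token
    let _to = range_op("..=", span);  // the `..=` token
    // An operator string outside the listed set hits the `unreachable!()` arm;
    // `#[track_caller]` makes the resulting panic point at the caller.
}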


@ -3,19 +3,20 @@ use std::str::FromStr;
use parser::node::CondTest;
use parser::{
AssociatedItem, CharLit, CharPrefix, Expr, PathComponent, Span, StrLit, Target, TyGenerics,
WithSpan,
AssociatedItem, CharLit, CharPrefix, Expr, PathComponent, Span, StrLit, StrPrefix, Target,
TyGenerics, WithSpan,
};
use proc_macro2::TokenStream;
use quote::quote;
use quote::{quote, quote_spanned};
use syn::Token;
use super::{
DisplayWrap, FILTER_SOURCE, Generator, LocalMeta, Writable, compile_time_escape, is_copyable,
normalize_identifier,
DisplayWrap, Generator, LocalMeta, Writable, binary_op, compile_time_escape, is_copyable,
logic_op, range_op, unary_op,
};
use crate::CompileError;
use crate::heritage::Context;
use crate::integration::Buffer;
use crate::{CompileError, field_new};
impl<'a> Generator<'a, '_> {
pub(crate) fn visit_expr_root(
@ -35,42 +36,26 @@ impl<'a> Generator<'a, '_> {
iter: &WithSpan<'a, Box<Expr<'a>>>,
) -> Result<DisplayWrap, CompileError> {
let expr_code = self.visit_expr_root(ctx, iter)?;
match &***iter {
Expr::Range(..) => buf.write(expr_code, ctx.span_for_node(iter.span())),
Expr::Array(..) => buf.write(
format_args!("{expr_code}.iter()"),
ctx.span_for_node(iter.span()),
),
let span = ctx.span_for_node(iter.span());
buf.write_tokens(match &***iter {
Expr::Range(..) => expr_code,
Expr::Array(..) => quote_spanned!(span => #expr_code.iter()),
// If `iter` is a call then we assume it's something that returns
// an iterator. If not then the user can explicitly add the needed
// call without issues.
Expr::Call { .. } | Expr::Index(..) => {
buf.write(
format_args!("({expr_code}).into_iter()"),
ctx.span_for_node(iter.span()),
);
}
Expr::Call { .. } | Expr::Index(..) => quote_spanned!(span => (#expr_code).into_iter()),
// If accessing `self` then it most likely needs to be
// borrowed, to prevent an attempt to move it.
// FIXME: Remove this `to_string()` call, it's terrible performance-wise.
_ if expr_code.to_string().trim_start().starts_with("self.") => {
buf.write(
format_args!("(&{expr_code}).into_iter()"),
ctx.span_for_node(iter.span()),
);
quote_spanned!(span => (&#expr_code).into_iter())
}
// If accessing a field then it most likely needs to be
// borrowed, to prevent an attempt to move it.
Expr::AssociatedItem(..) => buf.write(
format_args!("(&{expr_code}).into_iter()"),
ctx.span_for_node(iter.span()),
),
Expr::AssociatedItem(..) => quote_spanned!(span => (&#expr_code).into_iter()),
// Otherwise, we borrow `iter` assuming that it implements `IntoIterator`.
_ => buf.write(
format_args!("({expr_code}).into_iter()"),
ctx.span_for_node(iter.span()),
),
}
_ => quote_spanned!(span => (#expr_code).into_iter()),
});
Ok(DisplayWrap::Unwrapped)
}
@ -138,11 +123,11 @@ impl<'a> Generator<'a, '_> {
match ***expr {
Expr::BinOp(ref v) if matches!(v.op, "&&" | "||") => {
let ret = self.visit_expr(ctx, buf, &v.lhs)?;
buf.write(format_args!(" {} ", &v.op), ctx.span_for_node(expr.span()));
buf.write_tokens(logic_op(v.op, ctx.span_for_node(expr.span())));
return Ok(ret);
}
Expr::Unary(op, ref inner) => {
buf.write(op, ctx.span_for_node(expr.span()));
buf.write_tokens(unary_op(op, ctx.span_for_node(expr.span())));
return self.visit_expr_first(ctx, buf, inner);
}
_ => {}
@ -180,12 +165,12 @@ impl<'a> Generator<'a, '_> {
self.visit_expr(ctx, buf, expr)?;
}
Expr::Unary("!", expr) => {
buf.write('!', ctx.span_for_node(expr.span()));
buf.write_token(Token![!], ctx.span_for_node(expr.span()));
self.visit_condition(ctx, buf, expr)?;
}
Expr::BinOp(v) if matches!(v.op, "&&" | "||") => {
self.visit_condition(ctx, buf, &v.lhs)?;
buf.write(format_args!(" {} ", v.op), ctx.span_for_node(expr.span()));
buf.write_tokens(logic_op(v.op, ctx.span_for_node(expr.span())));
self.visit_condition(ctx, buf, &v.rhs)?;
}
Expr::Group(expr) => {
@ -237,10 +222,12 @@ impl<'a> Generator<'a, '_> {
self.visit_expr(ctx, &mut tmp, expr)?;
let tmp = tmp.into_token_stream();
let span = ctx.span_for_node(expr.span());
let target = proc_macro2::Ident::new(target, span);
buf.write_tokens(spanned!(
span=> askama::helpers::get_primitive_value(&(#tmp)) as askama::helpers::core::primitive::#target
));
let target = field_new(target, span);
quote_into!(
buf,
span,
{ askama::helpers::get_primitive_value(&(#tmp)) as askama::helpers::core::primitive::#target }
);
Ok(DisplayWrap::Unwrapped)
}
@ -255,7 +242,6 @@ impl<'a> Generator<'a, '_> {
[expr] => self.visit_expr(ctx, buf, expr),
exprs => {
let (l, r) = exprs.split_at(exprs.len().div_ceil(2));
// FIXME: Is this valid?
let span = ctx.span_for_node(l[0].span());
let mut buf_l = Buffer::new();
let mut buf_r = Buffer::new();
@ -281,7 +267,7 @@ impl<'a> Generator<'a, '_> {
let display_wrap = self.visit_expr_first(ctx, &mut expr_buf, &cond.expr)?;
let expr_buf = expr_buf.into_token_stream();
let span = ctx.span_for_node(cond.span());
buf.write(" let ", span);
buf.write_token(Token![let], span);
if let Some(ref target) = cond.target {
self.visit_target(ctx, buf, true, true, target);
}
@ -317,15 +303,12 @@ impl<'a> Generator<'a, '_> {
let [path @ .., name] = path else {
unreachable!("path cannot be empty");
};
let name = match normalize_identifier(name) {
"loop" => quote::format_ident!("r#loop"),
name => quote::format_ident!("{}", name),
};
let span = ctx.span_for_node(node);
let name = field_new(name, span);
if !path.is_empty() {
self.visit_macro_path(buf, path, span);
buf.write("::", span);
buf.write_token(Token![::], span);
}
let args = TokenStream::from_str(args).unwrap();
buf.write_tokens(spanned!(span=> #name !(#args)));
@ -365,8 +348,9 @@ impl<'a> Generator<'a, '_> {
let args = self.visit_arg(ctx, key, span)?;
let ty_generics = ty_generics.into_token_stream();
let var_values = crate::var_values();
buf.write_tokens(spanned!(span=> askama::helpers::get_value::<#ty_generics>(
&__askama_values, &(#args)
&#var_values, &(#args)
)));
Ok(DisplayWrap::Unwrapped)
}
@ -380,7 +364,7 @@ impl<'a> Generator<'a, '_> {
for (i, arg) in args.iter().enumerate() {
let span = ctx.span_for_node(arg.span());
if i > 0 {
buf.write(',', span);
buf.write_token(Token![,], span);
}
buf.write(self.visit_arg(ctx, arg, span)?, ctx.template_span);
}
@ -444,15 +428,16 @@ impl<'a> Generator<'a, '_> {
let tmp = tmp.into_token_stream();
buf.write_tokens(spanned!(span=> askama::filters::Safe(#tmp)));
} else {
buf.write("askama::helpers::Empty", span);
quote_into!(buf, span, { askama::helpers::Empty });
}
} else {
let arg = self.visit_arg(ctx, arg, span)?;
let escaper = TokenStream::from_str(self.input.escaper).unwrap();
buf.write_tokens(spanned!(span=> (
&&askama::filters::AutoEscaper::new(#arg, #escaper)
).askama_auto_escape()?
));
quote_into!(
buf,
span,
{ (&&askama::filters::AutoEscaper::new(#arg, #escaper)).askama_auto_escape()? }
);
}
Ok(())
}
@ -466,28 +451,29 @@ impl<'a> Generator<'a, '_> {
) -> Result<DisplayWrap, CompileError> {
let span = ctx.span_for_node(obj.span());
if let Expr::Var("loop") = ***obj {
buf.write(
match associated_item.name {
"index0" => "__askama_item.index0",
"index" => "(__askama_item.index0 + 1)",
"first" => "(__askama_item.index0 == 0)",
"last" => "__askama_item.last",
name => {
return Err(ctx.generate_error(
format!("unknown loop variable `{}`", name.escape_debug()),
obj.span(),
));
}
},
span,
);
let var_item = crate::var_item();
buf.write_tokens(match associated_item.name {
"index0" => quote_spanned!(span => #var_item.index0),
"index" => quote_spanned!(span => (#var_item.index0 + 1)),
"first" => quote_spanned!(span => (#var_item.index0 == 0)),
"last" => quote_spanned!(span => #var_item.last),
name => {
return Err(ctx.generate_error(
format!("unknown loop variable `{}`", name.escape_debug()),
obj.span(),
));
}
});
return Ok(DisplayWrap::Unwrapped);
}
let mut expr = Buffer::new();
self.visit_expr(ctx, &mut expr, obj)?;
let expr = expr.into_token_stream();
let identifier = TokenStream::from_str(normalize_identifier(associated_item.name)).unwrap();
let identifier = field_new(
associated_item.name,
ctx.span_for_node(Span::from(associated_item.name)),
);
let mut call_generics = Buffer::new();
self.visit_call_generics(ctx, &mut call_generics, &associated_item.generics);
let call_generics = call_generics.into_token_stream();
@ -503,7 +489,7 @@ impl<'a> Generator<'a, '_> {
generics: &[WithSpan<'a, TyGenerics<'a>>],
) {
if let Some(first) = generics.first() {
buf.write("::", ctx.span_for_node(first.span()));
buf.write_token(Token![::], ctx.span_for_node(first.span()));
self.visit_ty_generics(ctx, buf, generics);
}
}
@ -521,7 +507,7 @@ impl<'a> Generator<'a, '_> {
for generic in generics {
let span = ctx.span_for_node(generic.span());
self.visit_ty_generic(ctx, &mut tmp, generic, span);
tmp.write(',', span);
tmp.write_token(Token![,], span);
}
let tmp = tmp.into_token_stream();
// FIXME: use a better span
@ -535,12 +521,14 @@ impl<'a> Generator<'a, '_> {
generic: &WithSpan<'a, TyGenerics<'a>>,
span: proc_macro2::Span,
) {
let TyGenerics { refs, path, args } = &**generic;
let mut refs_s = String::new();
for _ in 0..*refs {
refs_s.push('&');
let TyGenerics {
refs,
ref path,
ref args,
} = **generic;
for _ in 0..refs {
buf.write_token(Token![&], span);
}
buf.write(refs_s, span);
self.visit_macro_path(buf, path, span);
self.visit_ty_generics(ctx, buf, args);
}
@ -552,7 +540,7 @@ impl<'a> Generator<'a, '_> {
obj: &WithSpan<'a, Box<Expr<'a>>>,
key: &WithSpan<'a, Box<Expr<'a>>>,
) -> Result<DisplayWrap, CompileError> {
buf.write('&', ctx.span_for_node(obj.span()));
buf.write_token(Token![&], ctx.span_for_node(obj.span()));
self.visit_expr(ctx, buf, obj)?;
let key_span = ctx.span_for_node(key.span());
@ -605,14 +593,17 @@ impl<'a> Generator<'a, '_> {
let expr_buf = expr_buf.into_token_stream();
let arg_span = ctx.span_for_node(arg.span());
let var_cycle = crate::var_cycle();
let var_item = crate::var_item();
let var_len = crate::var_len();
buf.write_tokens(
spanned!(arg_span=> ({
let _cycle = &(#expr_buf);
let __askama_len = _cycle.len();
if __askama_len == 0 {
let #var_cycle = &(#expr_buf);
let #var_len = #var_cycle.len();
if #var_len == 0 {
return askama::helpers::core::result::Result::Err(askama::Error::Fmt);
}
_cycle[__askama_item.index0 % __askama_len]
#var_cycle[#var_item.index0 % #var_len]
})),
);
}
@ -632,26 +623,31 @@ impl<'a> Generator<'a, '_> {
}
}
}
// We special-case "askama::get_value".
Expr::Path(path) if matches!(path.as_slice(), [part1, part2] if part1.generics.is_empty() && part1.name == "askama" && part2.name == "get_value") =>
{
self.visit_value(
ctx,
buf,
args,
// Generics of the `get_value` call.
&path[1].generics,
left.span(),
"`get_value` function",
)?;
}
sub_left => {
// We special-case "askama::get_value".
if let Expr::Path(path) = sub_left
&& let [part1, part2] = path.as_slice()
&& part1.generics.is_empty()
&& part1.name == "askama"
&& part2.name == "get_value"
{
return self.visit_value(
ctx,
buf,
args,
&part2.generics,
left.span(),
"`get_value` function",
);
}
let span = ctx.span_for_node(left.span());
match *sub_left {
Expr::Var(name) => match self.locals.resolve(name) {
Some(resolved) => buf.write(resolved, span),
Some(resolved) => write_resolved(buf, &resolved, span),
None => {
buf.write(format_args!("self.{}", normalize_identifier(name)), span)
let id = field_new(name, span);
quote_into!(buf, span, { self.#id });
}
},
_ => {
@ -675,7 +671,7 @@ impl<'a> Generator<'a, '_> {
inner: &WithSpan<'a, Box<Expr<'a>>>,
span: Span<'_>,
) -> Result<DisplayWrap, CompileError> {
buf.write(op, ctx.span_for_node(span));
buf.write_tokens(unary_op(op, ctx.span_for_node(span)));
self.visit_expr(ctx, buf, inner)?;
Ok(DisplayWrap::Unwrapped)
}
@ -692,7 +688,7 @@ impl<'a> Generator<'a, '_> {
if let Some(left) = left {
self.visit_expr(ctx, buf, left)?;
}
buf.write(op, ctx.span_for_node(span));
buf.write_tokens(range_op(op, ctx.span_for_node(span)));
if let Some(right) = right {
self.visit_expr(ctx, buf, right)?;
}
@ -709,7 +705,7 @@ impl<'a> Generator<'a, '_> {
span: Span<'_>,
) -> Result<DisplayWrap, CompileError> {
self.visit_expr(ctx, buf, left)?;
buf.write(format_args!(" {op} "), ctx.span_for_node(span));
buf.write_tokens(binary_op(op, ctx.span_for_node(span)));
self.visit_expr(ctx, buf, right)?;
Ok(DisplayWrap::Unwrapped)
}
@ -742,7 +738,7 @@ impl<'a> Generator<'a, '_> {
let mut tmp = Buffer::new();
for expr in exprs {
self.visit_expr(ctx, &mut tmp, expr)?;
tmp.write(',', span);
tmp.write_token(Token![,], span);
}
let tmp = tmp.into_token_stream();
buf.write_tokens(spanned!(span=> (#tmp)));
@ -771,7 +767,7 @@ impl<'a> Generator<'a, '_> {
let mut tmp = Buffer::new();
for el in elements {
self.visit_expr(ctx, &mut tmp, el)?;
tmp.write(',', span);
tmp.write_token(Token![,], span);
}
let tmp = tmp.into_token_stream();
buf.write_tokens(spanned!(span=> [#tmp]));
@ -786,7 +782,7 @@ impl<'a> Generator<'a, '_> {
) {
for (i, part) in path.iter().copied().enumerate() {
if i > 0 {
buf.write("::", span);
buf.write_token(Token![::], span);
} else if let Some(enum_ast) = self.input.enum_ast
&& part == "Self"
{
@ -796,7 +792,7 @@ impl<'a> Generator<'a, '_> {
buf.write_tokens(spanned!(span=> #this #generics));
continue;
}
buf.write(part, span);
buf.write_field(part, span);
}
}
@ -809,7 +805,7 @@ impl<'a> Generator<'a, '_> {
for (i, part) in path.iter().enumerate() {
let span = ctx.span_for_node(part.span());
if i > 0 {
buf.write("::", span);
buf.write_token(Token![::], span);
} else if let Some(enum_ast) = self.input.enum_ast
&& part.name == "Self"
{
@ -819,9 +815,11 @@ impl<'a> Generator<'a, '_> {
buf.write_tokens(spanned!(span=> #this #generics));
continue;
}
buf.write(part.name, span);
if !part.name.is_empty() {
buf.write_field(part.name, span);
}
if !part.generics.is_empty() {
buf.write("::", span);
buf.write_token(Token![::], span);
self.visit_ty_generics(ctx, buf, &part.generics);
}
}
@ -837,11 +835,10 @@ impl<'a> Generator<'a, '_> {
) -> DisplayWrap {
let span = ctx.span_for_node(node);
if s == "self" {
buf.write(s, span);
return DisplayWrap::Unwrapped;
quote_into!(buf, span, { self });
} else {
write_resolved(buf, &self.locals.resolve_or_self(s), span);
}
buf.write(normalize_identifier(&self.locals.resolve_or_self(s)), span);
DisplayWrap::Unwrapped
}
@ -853,10 +850,9 @@ impl<'a> Generator<'a, '_> {
) -> DisplayWrap {
// We can assume that the body of the `{% filter %}` was already escaped.
// And if it's not, then this was done intentionally.
buf.write(
format_args!("askama::filters::Safe(&{FILTER_SOURCE})"),
ctx.span_for_node(node),
);
let span = ctx.span_for_node(node);
let id = crate::var_filter_source();
quote_into!(buf, span, { askama::filters::Safe(&#id) });
DisplayWrap::Wrapped
}
@ -869,9 +865,9 @@ impl<'a> Generator<'a, '_> {
) -> DisplayWrap {
let span = ctx.span_for_node(node);
if s {
buf.write("true", span);
quote_into!(buf, span, { true });
} else {
buf.write("false", span);
quote_into!(buf, span, { false });
}
DisplayWrap::Unwrapped
}
@ -879,35 +875,36 @@ impl<'a> Generator<'a, '_> {
pub(super) fn visit_str_lit(
&mut self,
buf: &mut Buffer,
StrLit {
&StrLit {
content, prefix, ..
}: &StrLit<'_>,
span: proc_macro2::Span,
) -> DisplayWrap {
if let Some(prefix) = prefix {
buf.write(format!("{}\"{content}\"", prefix.to_char()), span);
} else {
buf.write(format!("\"{content}\""), span);
}
let repr = match prefix {
Some(StrPrefix::Binary) => format!(r#"b"{content}""#),
Some(StrPrefix::CLike) => format!(r#"c"{content}""#),
None => format!(r#""{content}""#),
};
buf.write_literal(&repr, span);
DisplayWrap::Unwrapped
}
fn visit_char_lit(
&mut self,
buf: &mut Buffer,
c: &CharLit<'_>,
&CharLit { prefix, content }: &CharLit<'_>,
span: proc_macro2::Span,
) -> DisplayWrap {
if c.prefix == Some(CharPrefix::Binary) {
buf.write(format_args!("b'{}'", c.content), span);
} else {
buf.write(format_args!("'{}'", c.content), span);
}
let repr = match prefix {
Some(CharPrefix::Binary) => format!(r#"b'{content}'"#),
None => format!(r#"'{content}'"#),
};
buf.write_literal(&repr, span);
DisplayWrap::Unwrapped
}
fn visit_num_lit(&mut self, buf: &mut Buffer, s: &str, span: proc_macro2::Span) -> DisplayWrap {
buf.write(s, span);
buf.write_literal(s, span);
DisplayWrap::Unwrapped
}
@ -921,32 +918,32 @@ impl<'a> Generator<'a, '_> {
target: &Target<'a>,
) {
match target {
Target::Placeholder(_) => buf.write('_', ctx.template_span),
Target::Placeholder(s) => quote_into!(buf, ctx.span_for_node(s.span()), { _ }),
Target::Rest(s) => {
let span = ctx.span_for_node(s.span());
if let Some(var_name) = &**s {
let id = field_new(var_name, span);
self.locals
.insert(Cow::Borrowed(var_name), LocalMeta::var_def());
buf.write(*var_name, ctx.template_span);
buf.write(" @ ", ctx.template_span);
quote_into!(buf, span, { #id @ });
}
buf.write("..", ctx.template_span);
buf.write_token(Token![..], span);
}
Target::Name(name) => {
let name = normalize_identifier(name);
match initialized {
true => self
.locals
.insert(Cow::Borrowed(name), LocalMeta::var_def()),
false => self.locals.insert_with_default(Cow::Borrowed(name)),
}
buf.write(name, ctx.template_span);
buf.write_field(name, ctx.template_span);
}
Target::OrChain(targets) => match targets.first() {
None => buf.write('_', ctx.template_span),
None => quote_into!(buf, ctx.template_span, { _ }),
Some(first_target) => {
self.visit_target(ctx, buf, initialized, first_level, first_target);
for target in &targets[1..] {
buf.write('|', ctx.template_span);
buf.write_token(Token![|], ctx.template_span);
self.visit_target(ctx, buf, initialized, first_level, target);
}
}
@ -956,7 +953,7 @@ impl<'a> Generator<'a, '_> {
let mut targets_buf = Buffer::new();
for target in targets {
self.visit_target(ctx, &mut targets_buf, initialized, false, target);
targets_buf.write(',', ctx.template_span);
targets_buf.write_token(Token![,], ctx.template_span);
}
let targets_buf = targets_buf.into_token_stream();
buf.write(
@ -971,7 +968,7 @@ impl<'a> Generator<'a, '_> {
let mut targets_buf = Buffer::new();
for target in targets {
self.visit_target(ctx, &mut targets_buf, initialized, false, target);
targets_buf.write(',', ctx.template_span);
targets_buf.write_token(Token![,], ctx.template_span);
}
let targets_buf = targets_buf.into_token_stream();
buf.write(
@ -986,14 +983,14 @@ impl<'a> Generator<'a, '_> {
let mut targets_buf = Buffer::new();
for (name, target) in targets {
if let Target::Rest(_) = target {
targets_buf.write("..", ctx.template_span);
targets_buf.write_token(Token![..], ctx.template_span);
continue;
}
targets_buf.write(normalize_identifier(name), ctx.template_span);
targets_buf.write(": ", ctx.template_span);
targets_buf.write_field(name, ctx.template_span);
targets_buf.write_token(Token![:], ctx.template_span);
self.visit_target(ctx, &mut targets_buf, initialized, false, target);
targets_buf.write(',', ctx.template_span);
targets_buf.write_token(Token![,], ctx.template_span);
}
let targets_buf = targets_buf.into_token_stream();
buf.write(
@ -1007,35 +1004,49 @@ impl<'a> Generator<'a, '_> {
}
Target::Path(path) => {
self.visit_path(ctx, buf, path);
buf.write("{}", ctx.template_span);
quote_into!(buf, ctx.template_span, { {} });
}
Target::StrLit(s) => {
let span = ctx.span_for_node(Span::from(s.content));
if first_level {
buf.write('&', ctx.template_span);
buf.write_token(Token![&], span);
}
// FIXME: `Span` should not be `ctx.template_span`.
self.visit_str_lit(buf, s, ctx.template_span);
self.visit_str_lit(buf, s, span);
}
Target::NumLit(s, _) => {
&Target::NumLit(repr, _) => {
let span = ctx.span_for_node(Span::from(repr));
if first_level {
buf.write('&', ctx.template_span);
buf.write_token(Token![&], span);
}
// FIXME: `Span` should not be `ctx.template_span`.
self.visit_num_lit(buf, s, ctx.template_span);
self.visit_num_lit(buf, repr, span);
}
Target::CharLit(s) => {
let span = ctx.span_for_node(Span::from(s.content));
if first_level {
buf.write('&', ctx.template_span);
buf.write_token(Token![&], span);
}
// FIXME: `Span` should not be `ctx.template_span`.
self.visit_char_lit(buf, s, ctx.template_span);
self.visit_char_lit(buf, s, span);
}
Target::BoolLit(s) => {
&Target::BoolLit(s) => {
let span = ctx.span_for_node(Span::from(s));
if first_level {
buf.write('&', ctx.template_span);
buf.write_token(Token![&], span);
}
match s {
"true" => quote_into!(buf, span, { true }),
"false" => quote_into!(buf, span, { false }),
_ => unreachable!(),
}
buf.write(*s, ctx.template_span);
}
}
}
}
fn write_resolved(buf: &mut Buffer, resolved: &str, span: proc_macro2::Span) {
for (idx, name) in resolved.split('.').enumerate() {
if idx > 0 {
buf.write_token(Token![.], span);
}
buf.write_field(name, span);
}
}
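A hedged illustration of what write_resolved emits (the resolved string is made up):

fn write_resolved_sketch() {
    let mut buf = Buffer::new();
    // A resolved local such as "self.inner.type" is split on '.'; each segment
    // goes through `write_field`, so keyword segments are raw-escaped.
    write_resolved(&mut buf, "self.inner.type", proc_macro2::Span::call_site());
    // `buf` now holds the tokens `self . inner . r#type`.
}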


@ -9,11 +9,12 @@ use parser::{
WithSpan,
};
use proc_macro2::TokenStream;
use syn::Token;
use super::{DisplayWrap, Generator, TargetIsize, TargetUsize};
use crate::heritage::Context;
use crate::integration::Buffer;
use crate::{CompileError, MsgValidEscapers, fmt_left, fmt_right};
use crate::{CompileError, MsgValidEscapers, field_new, fmt_left, fmt_right};
impl<'a> Generator<'a, '_> {
pub(super) fn visit_filter(
@ -85,9 +86,10 @@ impl<'a> Generator<'a, '_> {
let mut tmp = Buffer::new();
tmp.write_tokens(self.visit_arg(ctx, &args[0], ctx.span_for_node(args[0].span()))?);
tmp.write(",__askama_values", span);
let var_values = crate::var_values();
quote_into!(&mut tmp, span, { ,#var_values });
if args.len() > 1 {
tmp.write(',', span);
tmp.write_token(Token![,], span);
self.visit_args(ctx, &mut tmp, &args[1..])?;
}
let tmp = tmp.into_token_stream();
@ -155,7 +157,8 @@ impl<'a> Generator<'a, '_> {
let span = ctx.span_for_node(node);
let arg = no_arguments(ctx, name, args)?;
buf.write(format_args!("askama::filters::{name}"), span);
let name = field_new(name, span);
quote_into!(buf, span, { askama::filters::#name });
self.visit_call_generics(ctx, buf, generics);
let arg = self.visit_arg(ctx, arg, span)?;
buf.write_tokens(spanned!(span=> (#arg)?));
@ -222,10 +225,11 @@ impl<'a> Generator<'a, '_> {
let span = ctx.span_for_node(node);
let arg = self.visit_arg(ctx, arg, span)?;
let var_values = crate::var_values();
buf.write_tokens(spanned!(span=> match askama::filters::wordcount(&(#arg)) {
expr0 => {
(&&&askama::filters::Writable(&expr0)).
askama_write(&mut askama::helpers::Empty, __askama_values)?;
askama_write(&mut askama::helpers::Empty, #var_values)?;
expr0.into_count()
}
}));
@ -346,7 +350,7 @@ impl<'a> Generator<'a, '_> {
self.visit_auto_escaped_arg(ctx, &mut pl_buf, pl)?;
let sg = sg_buf.into_token_stream();
let pl = pl_buf.into_token_stream();
buf.write_tokens(spanned!(span=> askama::filters::pluralize(#arg,#sg,#pl)?));
quote_into!(buf, span, { askama::filters::pluralize(#arg, #sg, #pl)? });
}
Ok(DisplayWrap::Wrapped)
}
@ -412,7 +416,7 @@ impl<'a> Generator<'a, '_> {
node: Span<'_>,
) -> Result<DisplayWrap, CompileError> {
let arg = no_arguments(ctx, "ref", args)?;
buf.write('&', ctx.span_for_node(node));
buf.write_token(Token![&], ctx.span_for_node(node));
self.visit_expr(ctx, buf, arg)?;
Ok(DisplayWrap::Unwrapped)
}
@ -425,7 +429,7 @@ impl<'a> Generator<'a, '_> {
node: Span<'_>,
) -> Result<DisplayWrap, CompileError> {
let arg = no_arguments(ctx, "deref", args)?;
buf.write('*', ctx.span_for_node(node));
buf.write_token(Token![*], ctx.span_for_node(node));
self.visit_expr(ctx, buf, arg)?;
Ok(DisplayWrap::Unwrapped)
}
@ -611,7 +615,7 @@ impl<'a> Generator<'a, '_> {
let mut filter = Buffer::new();
self.visit_str_lit(&mut filter, fmt, span);
if !tail.is_empty() {
filter.write(',', span);
filter.write_token(Token![,], ctx.span_for_node(node));
self.visit_args(ctx, &mut filter, tail)?;
}
let filter = filter.into_token_stream();


@ -6,13 +6,14 @@ use std::mem;
use parser::node::{Call, Macro, Ws};
use parser::{Expr, Span, WithSpan};
use quote::quote_spanned;
use rustc_hash::FxBuildHasher;
use crate::CompileError;
use crate::generator::node::AstLevel;
use crate::generator::{Generator, LocalMeta, is_copyable, normalize_identifier};
use crate::generator::{Generator, LocalMeta, is_copyable};
use crate::heritage::Context;
use crate::integration::Buffer;
use crate::{CompileError, field_new};
/// Helper to generate the code for macro invocations
pub(crate) struct MacroInvocation<'a, 'b> {
@ -83,14 +84,7 @@ impl<'a, 'b> MacroInvocation<'a, 'b> {
this.flush_ws(self.macro_def.ws2);
size_hint += this.write_buf_writable(self.callsite_ctx, &mut content)?;
let content = content.into_token_stream();
buf.write(
quote::quote!(
{
#content
}
),
self.callsite_ctx.template_span,
);
quote_into!(buf, self.callsite_ctx.template_span, {{ #content }});
this.prepare_ws(self.callsite_ws);
this.seen_callers.pop();
@ -205,22 +199,16 @@ impl<'a, 'b> MacroInvocation<'a, 'b> {
// parameters being used multiple times.
_ => {
value.clear();
let (before, after) = if !is_copyable(expr) {
("&(", ")")
} else {
("", "")
};
value.write_tokens(generator.visit_expr_root(self.callsite_ctx, expr)?);
// We need to normalize the arg to write it, thus we need to add it to
// locals in the normalized manner
let normalized_arg = normalize_identifier(arg);
buf.write(
format_args!("let {normalized_arg} = {before}{value}{after};"),
self.callsite_ctx.template_span,
);
generator
.locals
.insert_with_default(Cow::Borrowed(normalized_arg));
let span = self.callsite_ctx.template_span;
let id = field_new(arg, span);
buf.write_tokens(if !is_copyable(expr) {
quote_spanned! { span => let #id = &(#value); }
} else {
quote_spanned! { span => let #id = #value; }
});
generator.locals.insert_with_default(Cow::Borrowed(arg));
}
}
}


@ -11,15 +11,14 @@ use parser::node::{
};
use parser::{Expr, Node, Span, Target, WithSpan};
use proc_macro2::TokenStream;
use quote::quote_spanned;
use syn::{Ident, Token};
use super::{
DisplayWrap, FILTER_SOURCE, Generator, LocalMeta, MapChain, compile_time_escape, is_copyable,
normalize_identifier,
};
use crate::generator::{LocalCallerMeta, Writable, helpers};
use super::{DisplayWrap, Generator, LocalMeta, MapChain, compile_time_escape, is_copyable};
use crate::generator::{LocalCallerMeta, Writable, helpers, logic_op};
use crate::heritage::{Context, Heritage};
use crate::integration::{Buffer, string_escape};
use crate::{CompileError, FileInfo, fmt_left, fmt_right};
use crate::{CompileError, FileInfo, field_new, fmt_left, fmt_right};
impl<'a> Generator<'a, '_> {
pub(super) fn impl_template_inner(
@ -162,12 +161,16 @@ impl<'a> Generator<'a, '_> {
Node::Break(ref ws) => {
self.handle_ws(**ws);
self.write_buf_writable(ctx, buf)?;
buf.write("break;", ctx.template_span);
quote_into!(buf, ctx.template_span, {
break;
});
}
Node::Continue(ref ws) => {
self.handle_ws(**ws);
self.write_buf_writable(ctx, buf)?;
buf.write("continue;", ctx.template_span);
quote_into!(buf, ctx.template_span, {
continue;
});
}
}
}
@ -343,22 +346,22 @@ impl<'a> Generator<'a, '_> {
if pos == 0 {
if cond_info.generate_condition {
buf.write("if", span);
buf.write_token(Token![if], ctx.template_span);
} else {
has_cond = false;
}
// Otherwise it means it will be the only condition generated,
// so nothing needs to be added here.
} else if cond_info.generate_condition {
buf.write("else if", span);
quote_into!(buf, ctx.template_span, { else if });
} else {
buf.write("else", span);
buf.write_token(Token![else], ctx.template_span);
has_else = true;
}
if let Some(target) = target {
let mut expr_buf = Buffer::new();
buf.write("let ", span);
buf.write_token(Token![let], ctx.template_span);
// If this is a chain condition, then we need to declare the variable after the
// left expression has been handled but before the right expression is handled
// but this one should have access to the let-bound variable.
@ -373,7 +376,8 @@ impl<'a> Generator<'a, '_> {
&v.lhs,
display_wrap,
)?;
buf.write(format_args!("= &{expr_buf} {} ", v.op), span);
let op = logic_op(v.op, span);
quote_into!(buf, span, { = &#expr_buf #op });
this.visit_condition(ctx, buf, &v.rhs)?;
}
_ => {
@ -381,7 +385,7 @@ impl<'a> Generator<'a, '_> {
this.visit_expr_first(ctx, &mut expr_buf, expr)?;
this.visit_target(ctx, buf, true, true, target);
this.visit_expr_not_first(ctx, &mut expr_buf, expr, display_wrap)?;
buf.write(format_args!("= &{expr_buf}"), span);
quote_into!(buf, ctx.template_span, { = &#expr_buf });
}
}
} else if cond_info.generate_condition {
@ -389,7 +393,7 @@ impl<'a> Generator<'a, '_> {
}
} else if pos != 0 {
// FIXME: Should have a span.
buf.write("else", ctx.template_span);
buf.write_token(Token![else], ctx.template_span);
has_else = true;
} else {
has_cond = false;
@ -420,14 +424,7 @@ impl<'a> Generator<'a, '_> {
if has_cond {
let block_buf = block_buf.into_token_stream();
// FIXME Should have a span.
buf.write(
quote::quote!(
{
#block_buf
}
),
ctx.template_span,
);
quote_into!(buf, ctx.template_span, { { #block_buf } });
} else {
buf.write_buf(block_buf);
}
@ -475,7 +472,7 @@ impl<'a> Generator<'a, '_> {
let mut targets_buf = Buffer::new();
for (index, target) in arm.target.iter().enumerate() {
if index != 0 {
targets_buf.write('|', span);
targets_buf.write_token(Token![|], span);
}
this.visit_target(ctx, &mut targets_buf, true, true, target);
}
@ -492,15 +489,13 @@ impl<'a> Generator<'a, '_> {
}
let targets_buf = targets_buf.into_token_stream();
let arm_buf = arm_buf.into_token_stream();
arms.write_tokens(spanned!(span=> #targets_buf => { #arm_buf }));
quote_into!(&mut arms, span, { #targets_buf => { #arm_buf } });
Ok(0)
})?;
}
let arms = arms.into_token_stream();
buf.write_tokens(spanned!(span=> match & #expr_code {
#arms
}));
quote_into!(buf, span, { match &#expr_code { #arms } });
Ok(flushed + median(&mut arm_sizes))
}
@ -516,15 +511,19 @@ impl<'a> Generator<'a, '_> {
self.push_locals(|this| {
let has_else_nodes = !loop_block.else_nodes.is_empty();
let var_did_loop = crate::var_did_loop();
let var_item = crate::var_item();
let var_iter = crate::var_iter();
let flushed = this.write_buf_writable(ctx, buf)?;
let mut loop_buf = Buffer::new();
if has_else_nodes {
loop_buf.write("let mut __askama_did_loop = false;", span);
quote_into!(&mut loop_buf, span, { let mut #var_did_loop = false; });
}
loop_buf.write("let __askama_iter =", span);
quote_into!(&mut loop_buf, span, { let #var_iter = });
this.visit_loop_iter(ctx, &mut loop_buf, &loop_block.iter)?;
loop_buf.write(';', span);
loop_buf.write_token(Token![;], span);
if let Some(cond) = &loop_block.cond {
this.push_locals(|this| {
let mut target_buf = Buffer::new();
@ -533,11 +532,17 @@ impl<'a> Generator<'a, '_> {
let mut expr_buf = Buffer::new();
this.visit_expr(ctx, &mut expr_buf, cond)?;
let expr_buf = expr_buf.into_token_stream();
loop_buf.write_tokens(spanned!(span=>
let __askama_iter = __askama_iter.filter(|#target_buf| -> askama::helpers::core::primitive::bool {
#expr_buf
});
));
quote_into!(
&mut loop_buf,
span,
{
let #var_iter = #var_iter.filter(
|#target_buf| -> askama::helpers::core::primitive::bool {
#expr_buf
}
);
}
);
Ok(0)
})?;
}
@ -549,14 +554,15 @@ impl<'a> Generator<'a, '_> {
let mut loop_body_buf = Buffer::new();
if has_else_nodes {
loop_body_buf.write("__askama_did_loop = true;", span);
quote_into!(&mut loop_body_buf, span, { #var_did_loop = true; });
}
let mut size_hint1 = this.handle(ctx, &loop_block.body, &mut loop_body_buf, AstLevel::Nested)?;
let mut size_hint1 =
this.handle(ctx, &loop_block.body, &mut loop_body_buf, AstLevel::Nested)?;
this.handle_ws(loop_block.ws2);
size_hint1 += this.write_buf_writable(ctx, &mut loop_body_buf)?;
let loop_body_buf = loop_body_buf.into_token_stream();
loop_buf.write_tokens(spanned!(span=>
for (#target_buf, __askama_item) in askama::helpers::TemplateLoop::new(__askama_iter) {
for (#target_buf, #var_item) in askama::helpers::TemplateLoop::new(#var_iter) {
#loop_body_buf
}
));
@ -574,7 +580,7 @@ impl<'a> Generator<'a, '_> {
Ok(size_hint)
})?;
let cond_buf = cond_buf.into_token_stream();
loop_buf.write_tokens(spanned!(span=> if !__askama_did_loop {
loop_buf.write_tokens(spanned!(span=> if !#var_did_loop {
#cond_buf
}));
} else {
@ -647,6 +653,8 @@ impl<'a> Generator<'a, '_> {
buf: &mut Buffer,
filter: &'a WithSpan<'a, FilterBlock<'_>>,
) -> Result<usize, CompileError> {
let var_filter_source = crate::var_filter_source();
self.write_buf_writable(ctx, buf)?;
self.flush_ws(filter.ws1);
self.is_in_filter_block += 1;
@ -664,10 +672,10 @@ impl<'a> Generator<'a, '_> {
Ok(size_hint)
})?;
let filter_def_buf = filter_def_buf.into_token_stream();
let filter_source = quote::format_ident!("{FILTER_SOURCE}");
let var_writer = crate::var_writer();
let filter_def_buf = spanned!(span=>
let #filter_source = askama::helpers::FmtCell::new(
|__askama_writer: &mut askama::helpers::core::fmt::Formatter<'_>| -> askama::Result<()> {
let #var_filter_source = askama::helpers::FmtCell::new(
|#var_writer: &mut askama::helpers::core::fmt::Formatter<'_>| -> askama::Result<()> {
#filter_def_buf
askama::Result::Ok(())
}
@ -695,14 +703,12 @@ impl<'a> Generator<'a, '_> {
)
}
};
buf.write_tokens(spanned!(span=>
{
#filter_def_buf
if askama::helpers::core::write!(__askama_writer, "{}", #filter_buf).is_err() {
return #filter_source.take_err();
}
quote_into!(buf, span, { {
#filter_def_buf
if askama::helpers::core::write!(#var_writer, "{}", #filter_buf).is_err() {
return #var_filter_source.take_err();
}
));
} });
self.is_in_filter_block -= 1;
self.prepare_ws(filter.ws2);
@ -774,7 +780,6 @@ impl<'a> Generator<'a, '_> {
) -> Result<bool, CompileError> {
match var {
Target::Name(name) => {
let name = normalize_identifier(name);
match self.locals.get(name) {
// declares a new variable
None => Ok(false),
@ -830,12 +835,12 @@ impl<'a> Generator<'a, '_> {
let Some(val) = &l.val else {
self.write_buf_writable(ctx, buf)?;
buf.write("let ", span);
buf.write_token(Token![let], span);
if l.is_mutable {
buf.write("mut ", span);
buf.write_token(Token![mut], span);
}
self.visit_target(ctx, buf, false, true, &l.var);
buf.write(';', span);
buf.write_token(Token![;], span);
return Ok(());
};
@ -862,24 +867,23 @@ impl<'a> Generator<'a, '_> {
|| !matches!(l.var, Target::Name(_))
|| matches!(&l.var, Target::Name(name) if self.locals.get(name).is_none())
{
buf.write("let ", span);
buf.write_token(Token![let], span);
if l.is_mutable {
buf.write("mut ", span);
buf.write_token(Token![mut], span);
}
}
self.visit_target(ctx, buf, true, true, &l.var);
// If it's not taking ownership of a local variable and is not copyable, then we need to
// add a reference.
let (before, after) = if !matches!(***val, Expr::Try(..))
let borrow = !matches!(***val, Expr::Try(..))
&& !matches!(***val, Expr::Var(name) if self.locals.get(name).is_some())
&& !is_copyable(val)
{
("&(", ")")
&& !is_copyable(val);
buf.write_tokens(if borrow {
quote_spanned! { span => = &(#expr_buf); }
} else {
("", "")
};
buf.write(format_args!(" = {before}{expr_buf}{after};"), span);
quote_spanned! { span => = #expr_buf; }
});
Ok(())
}
@ -1157,27 +1161,19 @@ impl<'a> Generator<'a, '_> {
// multiple times, e.g. in the case of macro
// parameters being used multiple times.
_ => {
let (before, after) = if !is_copyable(expr) {
("&(", ")")
} else {
("", "")
};
value.write(
this.visit_expr_root(&call_ctx, expr)?,
span_span,
);
let value = value.to_string();
// We need to normalize the arg to write it, thus we need to add it to
// locals in the normalized manner
let normalized_arg = normalize_identifier(arg);
variable_buf.write(
format_args!(
"let {normalized_arg} = {before}{value}{after};"
),
span_span,
);
this.locals
.insert_with_default(Cow::Borrowed(normalized_arg));
let id = field_new(arg, span_span);
variable_buf.write_tokens(if !is_copyable(expr) {
quote_spanned! { span_span => let #id = &(#value); }
} else {
quote_spanned! { span_span => let #id = #value; }
});
this.locals.insert_with_default(Cow::Borrowed(arg));
}
}
}
@ -1197,10 +1193,7 @@ impl<'a> Generator<'a, '_> {
size_hint += this.write_buf_writable(&call_ctx, &mut value)?;
let value = value.into_token_stream();
let variable_buf = variable_buf.into_token_stream();
buf.write_tokens(spanned!(span_span=> {
#variable_buf
#value
}));
quote_into!(buf, span_span, { #variable_buf #value });
Ok(size_hint)
})?;
return Ok(ControlFlow::Break(size_hint));
@ -1326,39 +1319,43 @@ impl<'a> Generator<'a, '_> {
match expr_cache.entry(expr.to_string()) {
Entry::Occupied(e) => *e.get(),
Entry::Vacant(e) => {
matched_expr_buf.write(format_args!("&({}),", e.key()), span);
targets.write(format_args!("expr{idx},"), span);
let id = Ident::new(&format!("expr{idx}"), span);
quote_into!(&mut matched_expr_buf, span, { &(#expr), });
quote_into!(&mut targets, span, { #id, });
e.insert(idx);
idx
}
}
} else {
matched_expr_buf.write_tokens(spanned!(span=> &(#expr),));
targets.write(format_args!("expr{idx}, "), span);
quote_into!(&mut matched_expr_buf, span, { &(#expr), });
let id = Ident::new(&format!("expr{idx}"), span);
quote_into!(&mut targets, span, { #id, });
idx
};
lines.write(
format_args!(
"(&&&askama::filters::Writable(expr{idx})).\
askama_write(__askama_writer, __askama_values)?;",
),
let id = Ident::new(&format!("expr{idx}"), span);
let var_writer = crate::var_writer();
let var_values = crate::var_values();
quote_into!(
&mut lines,
span,
{
(&&&askama::filters::Writable(#id)).
askama_write(#var_writer, #var_values)?;
},
);
}
}
}
let matched_expr_buf = matched_expr_buf.into_token_stream();
let targets = targets.into_token_stream();
let lines = lines.into_token_stream();
buf.write(
quote::quote!(
quote_into!(
buf,
ctx.template_span,
{
match (#matched_expr_buf) {
(#targets) => {
#lines
}
}
),
ctx.template_span,
}
);
if !trailing_simple_lines.is_empty() {


@ -1,18 +1,18 @@
use std::fmt::{Arguments, Display};
use std::str::FromStr;
use std::fmt::Display;
use std::mem::take;
use parser::{PathComponent, WithSpan};
use proc_macro2::{TokenStream, TokenTree};
use quote::{ToTokens, quote};
use proc_macro2::{Literal, TokenStream, TokenTree};
use quote::{ToTokens, quote_spanned};
use syn::spanned::Spanned;
use syn::{
Data, DeriveInput, Fields, GenericParam, Generics, Ident, Lifetime, LifetimeParam, Token, Type,
Variant, parse_quote,
Data, DeriveInput, Fields, GenericParam, Generics, Ident, Lifetime, LifetimeParam, LitStr,
Token, Type, Variant, parse_quote,
};
use crate::generator::TmplKind;
use crate::input::{PartialTemplateArgs, TemplateArgs};
use crate::{CompileError, Context, Print, build_template_item};
use crate::{CompileError, Context, Print, build_template_item, field_new};
/// Implement every integration for the given item
pub(crate) fn impl_everything(ast: &DeriveInput, buf: &mut Buffer) {
@ -30,31 +30,31 @@ pub(crate) fn write_header(
let (impl_generics, orig_ty_generics, where_clause) = ast.generics.split_for_impl();
let ident = &ast.ident;
buf.write_tokens(spanned!(span=>
quote_into!(buf, span, {
#[automatically_derived]
impl #impl_generics #target for #ident #orig_ty_generics #where_clause
));
});
}
/// Implement `Display` for the given item.
// FIXME: Add span
fn impl_display(ast: &DeriveInput, buf: &mut Buffer) {
let ident = &ast.ident;
let span = ast.span();
buf.write(
TokenStream::from_str(&format!(
"\
/// Implement the [`format!()`][askama::helpers::std::format] trait for [`{ident}`]\n\
///\n\
/// Please be aware of the rendering performance notice in the \
[`Template`][askama::Template] trait.\n\
",
))
.unwrap(),
let span = ident.span();
let msg =
format!(" Implement the [`format!()`][askama::helpers::std::format] trait for [`{ident}`]");
quote_into!(buf, span, {
#[doc = #msg]
///
/// Please be aware of the rendering performance notice in the [`Template`][askama::Template] trait.
});
write_header(
ast,
buf,
quote_spanned!(span => askama::helpers::core::fmt::Display),
span,
);
write_header(ast, buf, quote!(askama::helpers::core::fmt::Display), span);
buf.write(
quote!({
quote_into!(buf, span, {
{
#[inline]
fn fmt(
&self,
@ -63,18 +63,16 @@ fn impl_display(ast: &DeriveInput, buf: &mut Buffer) {
askama::Template::render_into(self, f)
.map_err(|_| askama::helpers::core::fmt::Error)
}
}),
span,
);
}
});
}
/// Implement `FastWritable` for the given item.
// FIXME: Add span
fn impl_fast_writable(ast: &DeriveInput, buf: &mut Buffer) {
let span = ast.span();
write_header(ast, buf, quote!(askama::FastWritable), span);
buf.write(
quote!({
write_header(ast, buf, quote_spanned!(span => askama::FastWritable), span);
quote_into!(buf, span, {
{
#[inline]
fn write_into<AskamaW>(
&self,
@ -86,9 +84,8 @@ fn impl_fast_writable(ast: &DeriveInput, buf: &mut Buffer) {
{
askama::Template::render_into_with_values(self, dest, values)
}
}),
span,
);
}
});
}
#[derive(Debug)]
@ -105,6 +102,23 @@ impl Display for Buffer {
}
}
impl ToTokens for Buffer {
#[inline]
fn to_tokens(&self, tokens: &mut TokenStream) {
self.buf.to_tokens(tokens);
}
#[inline]
fn to_token_stream(&self) -> TokenStream {
self.buf.clone()
}
#[inline]
fn into_token_stream(self) -> TokenStream {
self.buf
}
}
impl Buffer {
pub(crate) fn new() -> Self {
Self {
@ -115,30 +129,22 @@ impl Buffer {
}
fn handle_str_lit(&mut self) {
let str_literals = std::mem::take(&mut self.string_literals);
match str_literals.as_slice() {
[] => {}
[(literal, span)] => {
let span = *span;
let literal =
proc_macro2::TokenStream::from_str(&format!("\"{literal}\"")).unwrap();
self.buf
.extend(spanned!(span=> __askama_writer.write_str(#literal)?;));
}
[(literal, span), rest @ ..] => {
let (literal, span) = rest.iter().fold(
(literal.clone(), *span),
|(mut acc_lit, acc_span), (literal, span)| {
acc_lit.push_str(literal);
(acc_lit, acc_span.join(*span).unwrap_or(acc_span))
},
);
let literal =
proc_macro2::TokenStream::from_str(&format!("\"{literal}\"")).unwrap();
self.buf
.extend(spanned!(span=> __askama_writer.write_str(#literal)?;));
}
}
let Some((literal, span)) = take(&mut self.string_literals).into_iter().reduce(
|(mut acc_lit, acc_span), (literal, span)| {
acc_lit.push_str(&literal);
(acc_lit, acc_span.join(span).unwrap_or(acc_span))
},
) else {
return;
};
let mut literal: Literal = format!(r#""{literal}""#).parse().unwrap();
literal.set_span(span);
let askama_writer = crate::var_writer();
self.buf.extend(quote_spanned! {
span =>
#askama_writer.write_str(#literal)?;
});
}
pub(crate) fn write_str_lit(&mut self, literal: String, span: proc_macro2::Span) {
@ -178,6 +184,40 @@ impl Buffer {
self.buf.extend(src);
}
pub(crate) fn write_token<F, T>(&mut self, token: F, span: proc_macro2::Span)
where
F: Fn(proc_macro2::Span) -> T,
T: syn::token::Token + ToTokens,
{
if self.discard {
return;
}
self.handle_str_lit();
token(span).to_tokens(&mut self.buf);
}
pub(crate) fn write_literal(&mut self, repr: &str, span: proc_macro2::Span) {
if self.discard {
return;
}
self.handle_str_lit();
self._write_literal_repr(repr, span);
}
fn _write_literal_repr(&mut self, repr: &str, span: proc_macro2::Span) {
let mut literal: Literal = repr.parse().unwrap();
literal.set_span(span);
literal.to_tokens(&mut self.buf);
}
pub(crate) fn write_field(&mut self, name: &str, span: proc_macro2::Span) {
if self.discard {
return;
}
self.handle_str_lit();
self.buf.extend(field_new(name, span));
}
pub(crate) fn write_separated_path(
&mut self,
ctx: &Context<'_>,
@ -191,10 +231,11 @@ impl Buffer {
for (idx, item) in path.iter().enumerate() {
let span = ctx.span_for_node(item.span());
if idx > 0 {
self.buf.extend(spanned!(span=> ::));
Token![::](span).to_tokens(&mut self.buf);
}
if !item.name.is_empty() {
Ident::new(item.name, span).to_tokens(&mut self.buf);
}
let name = quote::format_ident!("{}", item.name);
self.buf.extend(spanned!(span=> #name));
}
}
@ -203,9 +244,7 @@ impl Buffer {
return;
}
self.handle_str_lit();
let mut buf = String::with_capacity(s.len());
string_escape(&mut buf, s);
self.buf.extend(spanned!(span=> #buf));
LitStr::new(s, span).to_tokens(&mut self.buf);
}
pub(crate) fn clear(&mut self) {
@ -234,39 +273,6 @@ pub(crate) trait BufferFmt {
fn append_to(self, buf: &mut TokenStream, span: proc_macro2::Span);
}
macro_rules! impl_bufferfmt {
($($ty_name:ty),+) => {
$(
impl BufferFmt for $ty_name {
fn append_to(self, buf: &mut TokenStream, span: proc_macro2::Span) {
let Ok(stream) = TokenStream::from_str(&self) else {
panic!("Invalid token stream input:\n----------\n{self}\n----------");
};
buf.extend(spanned!(span=> #stream));
}
}
)+
}
}
impl_bufferfmt!(&str, String);
impl BufferFmt for char {
fn append_to(self, buf: &mut TokenStream, span: proc_macro2::Span) {
self.to_string().append_to(buf, span);
}
}
impl BufferFmt for Arguments<'_> {
fn append_to(self, buf: &mut TokenStream, span: proc_macro2::Span) {
if let Some(s) = self.as_str() {
s.append_to(buf, span);
} else {
self.to_string().append_to(buf, span);
}
}
}
impl BufferFmt for TokenStream {
fn append_to(self, buf: &mut TokenStream, _span: proc_macro2::Span) {
buf.extend(self);
@ -307,6 +313,7 @@ pub(crate) fn build_template_enum(
let Data::Enum(enum_data) = &enum_ast.data else {
unreachable!();
};
let span = enum_ast.span();
impl_everything(enum_ast, buf);
@ -334,7 +341,7 @@ pub(crate) fn build_template_enum(
};
let var_ast = type_for_enum_variant(enum_ast, &generics, var);
buf.write(quote!(#var_ast), enum_ast.span());
quote_into!(buf, span, { #var_ast });
// not inherited: template, meta_docs, block, print
if let Some(enum_args) = &mut enum_args {
@ -377,29 +384,35 @@ pub(crate) fn build_template_enum(
)?;
biggest_size_hint = biggest_size_hint.max(size_hint);
render_into_arms.extend(quote! {
ref __askama_arg => {
let var_arg = crate::var_arg();
let var_writer = crate::var_writer();
let var_values = crate::var_values();
render_into_arms.extend(quote_spanned! {
span =>
ref #var_arg => {
<_ as askama::helpers::EnumVariantTemplate>::render_into_with_values(
__askama_arg,
__askama_writer,
__askama_values,
#var_arg,
#var_writer,
#var_values,
)
}
});
size_hint_arms.extend(quote! {
size_hint_arms.extend(quote_spanned! {
span =>
_ => {
#size_hint
}
});
}
write_header(enum_ast, buf, quote!(askama::Template), enum_ast.span());
let mut methods = TokenStream::new();
methods.extend(quote!(
let var_writer = crate::var_writer();
let var_values = crate::var_values();
methods.extend(quote_spanned!(span =>
fn render_into_with_values<AskamaW>(
&self,
__askama_writer: &mut AskamaW,
__askama_values: &dyn askama::Values,
#var_writer: &mut AskamaW,
#var_values: &dyn askama::Values,
) -> askama::Result<()>
where
AskamaW: askama::helpers::core::fmt::Write + ?askama::helpers::core::marker::Sized
@ -411,28 +424,34 @@ pub(crate) fn build_template_enum(
));
#[cfg(feature = "alloc")]
methods.extend(quote!(
fn render_with_values(
&self,
__askama_values: &dyn askama::Values,
) -> askama::Result<askama::helpers::alloc::string::String> {
let size_hint = match self {
#size_hint_arms
};
let mut buf = askama::helpers::alloc::string::String::new();
let _ = buf.try_reserve(size_hint);
self.render_into_with_values(&mut buf, __askama_values)?;
askama::Result::Ok(buf)
}));
methods.extend(quote_spanned!(
span =>
fn render_with_values(
&self,
#var_values: &dyn askama::Values,
) -> askama::Result<askama::helpers::alloc::string::String> {
let size_hint = match self {
#size_hint_arms
};
let mut buf = askama::helpers::alloc::string::String::new();
let _ = buf.try_reserve(size_hint);
self.render_into_with_values(&mut buf, #var_values)?;
askama::Result::Ok(buf)
}
));
buf.write(
quote!(
write_header(
enum_ast,
buf,
quote_spanned!(span => askama::Template),
span,
);
quote_into!(buf, span, {
{
#methods
const SIZE_HINT: askama::helpers::core::primitive::usize = #biggest_size_hint;
}),
enum_ast.span(),
);
}
});
if print_code {
eprintln!("{buf}");
}
@ -545,8 +564,8 @@ fn variant_as_arm(
for (idx, field) in fields.named.iter().enumerate() {
let arg = Ident::new(&format!("__askama_arg_{idx}"), field.span());
let id = field.ident.as_ref().unwrap();
src.extend(quote!(#id: ref #arg,));
this.extend(quote!(#id: #arg,));
src.extend(quote_spanned!(span => #id: ref #arg,));
this.extend(quote_spanned!(span => #id: #arg,));
}
let phantom = match &ast_data.fields {
@ -560,7 +579,9 @@ fn variant_as_arm(
.unwrap(),
Fields::Unnamed(_) | Fields::Unit => unreachable!(),
};
this.extend(quote!(#phantom: askama::helpers::core::marker::PhantomData {},));
this.extend(quote_spanned!(
span => #phantom: askama::helpers::core::marker::PhantomData {},
));
}
Fields::Unnamed(fields) => {
@ -568,27 +589,33 @@ fn variant_as_arm(
let span = field.ident.span();
let arg = Ident::new(&format!("__askama_arg_{idx}"), span);
let idx = syn::LitInt::new(&format!("{idx}"), span);
src.extend(quote!(#idx: ref #arg,));
this.extend(quote!(#idx: #arg,));
src.extend(quote_spanned!(span => #idx: ref #arg,));
this.extend(quote_spanned!(span => #idx: #arg,));
}
let idx = syn::LitInt::new(&format!("{}", fields.unnamed.len()), span);
this.extend(quote!(#idx: askama::helpers::core::marker::PhantomData {},));
this.extend(
quote_spanned!(span => #idx: askama::helpers::core::marker::PhantomData {},),
);
}
Fields::Unit => {
this.extend(quote!(0: askama::helpers::core::marker::PhantomData {},));
this.extend(quote_spanned!(span => 0: askama::helpers::core::marker::PhantomData {},));
}
};
render_into_arms.extend(quote! {
let var_writer = crate::var_writer();
let var_values = crate::var_values();
render_into_arms.extend(quote_spanned! {
span =>
Self :: #ident { #src } => {
<_ as askama::helpers::EnumVariantTemplate>::render_into_with_values(
& #var_id #ty_generics { #this },
__askama_writer,
__askama_values,
#var_writer,
#var_values,
)
}
});
size_hint_arms.extend(quote! {
size_hint_arms.extend(quote_spanned! {
span =>
Self :: #ident { .. } => {
#size_hint
}


@ -7,6 +7,7 @@ extern crate proc_macro;
#[macro_use]
mod macros;
mod ascii_str;
mod config;
mod generator;
mod heritage;
@ -31,10 +32,11 @@ use std::hash::{BuildHasher, Hash};
use std::path::Path;
use std::sync::Mutex;
use parser::{Parsed, ascii_str, strip_common};
use proc_macro2::{Span, TokenStream};
use quote::quote;
use parser::{Parsed, is_rust_keyword, strip_common};
use proc_macro2::{Literal, Span, TokenStream};
use quote::{ToTokens, quote, quote_spanned};
use rustc_hash::FxBuildHasher;
use syn::Ident;
use syn::spanned::Spanned;
use crate::config::{Config, read_config_file};
@ -272,8 +274,20 @@ pub fn derive_template(input: TokenStream, import_askama: fn() -> TokenStream) -
Some(crate_name) => quote!(use #crate_name as askama;),
None => import_askama(),
};
quote! {
#[allow(dead_code, non_camel_case_types, non_snake_case)]
quote_spanned! {
ast.ident.span() =>
#[allow(
// We use `Struct { 0: arg0, 1: arg1 }` in enum specialization.
clippy::init_numbered_fields, non_shorthand_field_patterns,
// The generated code is not indented at all.
clippy::suspicious_else_formatting,
// We don't care if the user does not use `Template`, `FastWritable`, etc.
dead_code,
// We intentionally add extraneous underscores in type names.
non_camel_case_types,
// We intentionally add extraneous underscores in variable names.
non_snake_case,
)]
const _: () = {
#import_askama
#ts
@ -561,6 +575,54 @@ impl fmt::Display for MsgValidEscapers<'_> {
}
}
fn field_new(name: &str, span: proc_macro2::Span) -> TokenStream {
if name.starts_with(|c: char| c.is_ascii_digit()) {
let mut literal: Literal = name.parse().unwrap();
literal.set_span(span);
literal.into_token_stream()
} else if is_rust_keyword(name) && !matches!(name, "self" | "Self" | "crate" | "super") {
Ident::new_raw(name, span).into_token_stream()
} else {
Ident::new(name, span).into_token_stream()
}
}
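A few example inputs for field_new (sketch only; results shown as comments):

fn field_new_sketch() {
    let span = proc_macro2::Span::call_site();
    let _plain = field_new("name", span); // -> `name`
    let _raw = field_new("type", span);   // -> `r#type`: Rust keywords are raw-escaped automatically
    let _index = field_new("0", span);    // -> the literal `0`, used for tuple-struct fields
    let _selfy = field_new("self", span); // -> `self`: never raw-escaped, `r#self` is not allowed
}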
fn var_writer() -> Ident {
syn::Ident::new("__askama_writer", proc_macro2::Span::mixed_site())
}
fn var_filter_source() -> Ident {
syn::Ident::new("__askama_filter_block", proc_macro2::Span::mixed_site())
}
fn var_values() -> Ident {
syn::Ident::new("__askama_values", proc_macro2::Span::mixed_site())
}
fn var_arg() -> Ident {
syn::Ident::new("__askama_arg", proc_macro2::Span::mixed_site())
}
fn var_item() -> Ident {
syn::Ident::new("__askama_item", proc_macro2::Span::mixed_site())
}
fn var_len() -> Ident {
syn::Ident::new("__askama_len", proc_macro2::Span::mixed_site())
}
fn var_iter() -> Ident {
syn::Ident::new("__askama_iter", proc_macro2::Span::mixed_site())
}
fn var_cycle() -> Ident {
syn::Ident::new("__askama_cycle", proc_macro2::Span::mixed_site())
}
fn var_did_loop() -> Ident {
syn::Ident::new("__askama_did_loop", proc_macro2::Span::mixed_site())
}
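// All of these helpers use `Span::mixed_site()`, so the generated locals get
// `macro_rules!`-style hygiene and do not collide with identifiers written by the user,
// even if a user binding happens to be called `__askama_writer`. A rough usage sketch:
//
//     let var_writer = crate::var_writer();
//     quote_into!(buf, span, { #var_writer.write_str("...")?; });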
#[derive(Debug)]
struct OnceMap<K, V>([Mutex<HashMap<K, V, FxBuildHasher>>; 8]);

View File

@ -7,3 +7,11 @@ macro_rules! spanned {
// }
}
}
macro_rules! quote_into {
($buffer:expr, $span:expr, { $($x:tt)+ } $(,)?) => {{
let buffer: &mut $crate::integration::Buffer = $buffer;
let span: ::proc_macro2::Span = $span;
buffer.write_tokens(::quote::quote_spanned!(span => $($x)+));
}};
}
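// Rough equivalence (illustrative, not part of the change itself):
//
//     quote_into!(buf, span, { askama::Result::Ok(()) });
//
// is shorthand for
//
//     buf.write_tokens(::quote::quote_spanned!(span => askama::Result::Ok(())));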

View File

@ -13,11 +13,11 @@ use crate::integration::Buffer;
use crate::{AnyTemplateArgs, derive_template};
#[track_caller]
fn build_template(ast: &syn::DeriveInput) -> Result<String, crate::CompileError> {
fn build_template(ast: &syn::DeriveInput) -> Result<TokenStream, crate::CompileError> {
let mut buf = Buffer::new();
let args = AnyTemplateArgs::new(ast)?;
crate::build_template(&mut buf, ast, args)?;
Ok(buf.to_string())
Ok(buf.into_token_stream())
}
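// Returning a `TokenStream` instead of a `String` lets the tests hand the generated code
// straight to `syn`; an illustrative sketch of both uses:
//
//     let ast: syn::DeriveInput = syn::parse_str(source).unwrap();
//     let file: syn::File = syn::parse2(build_template(&ast).unwrap()).unwrap(); // structural comparison
//     let text = build_template(&ast).unwrap().to_string();                      // `contains` checks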
fn import_askama() -> TokenStream {
@ -41,10 +41,11 @@ fn compare_ex(
size_hint: usize,
prefix: &str,
) {
let generated = jinja_to_rust(jinja, fields, prefix).unwrap();
let generated = jinja_to_rust(jinja, fields, prefix);
let expected: TokenStream = expected.parse().unwrap();
let expected: syn::File = syn::parse_quote! {
#[automatically_derived]
impl askama::Template for Foo {
fn render_into_with_values<AskamaW>(
&self,
@ -68,6 +69,7 @@ fn compare_ex(
/// Implement the [`format!()`][askama::helpers::std::format] trait for [`Foo`]
///
/// Please be aware of the rendering performance notice in the [`Template`][askama::Template] trait.
#[automatically_derived]
impl askama::helpers::core::fmt::Display for Foo {
#[inline]
fn fmt(&self, f: &mut askama::helpers::core::fmt::Formatter<'_>) -> askama::helpers::core::fmt::Result {
@ -75,6 +77,7 @@ fn compare_ex(
}
}
#[automatically_derived]
impl askama::FastWritable for Foo {
#[inline]
fn write_into<AskamaW>(
@ -143,7 +146,7 @@ fn compare_ex(
}
}
fn jinja_to_rust(jinja: &str, fields: &[(&str, &str)], prefix: &str) -> syn::Result<syn::File> {
fn jinja_to_rust(jinja: &str, fields: &[(&str, &str)], prefix: &str) -> syn::File {
let jinja = format!(
r##"#[template(source = {jinja:?}, ext = "txt")]
{prefix}
@ -156,7 +159,7 @@ struct Foo {{ {} }}"##,
);
let generated = build_template(&syn::parse_str::<syn::DeriveInput>(&jinja).unwrap()).unwrap();
let generated = match generated.parse() {
match syn::parse2(generated.clone()) {
Ok(generated) => generated,
Err(err) => panic!(
"\n\
@ -168,8 +171,7 @@ struct Foo {{ {} }}"##,
\n\
{err}"
),
};
syn::parse2(generated)
}
}
#[test]
@ -822,7 +824,7 @@ fn test_code_in_comment() {
struct Tmpl;
"#;
let ast = syn::parse_str(ts).unwrap();
let generated = build_template(&ast).unwrap();
let generated = build_template(&ast).unwrap().to_string();
assert!(generated.contains("Hello world!"));
assert!(!generated.contains("compile_error"));
@ -835,7 +837,7 @@ fn test_code_in_comment() {
struct Tmpl;
"#;
let ast = syn::parse_str(ts).unwrap();
let generated = build_template(&ast).unwrap();
let generated = build_template(&ast).unwrap().to_string();
assert!(generated.contains("Hello\nworld!"));
assert!(!generated.contains("compile_error"));
@ -848,7 +850,7 @@ fn test_code_in_comment() {
struct Tmpl;
"#;
let ast = syn::parse_str(ts).unwrap();
let generated = build_template(&ast).unwrap();
let generated = build_template(&ast).unwrap().to_string();
assert!(generated.contains("Hello\nworld!"));
assert!(!generated.contains("compile_error"));
@ -865,7 +867,7 @@ fn test_code_in_comment() {
struct Tmpl;
"#;
let ast = syn::parse_str(ts).unwrap();
let generated = build_template(&ast).unwrap();
let generated = build_template(&ast).unwrap().to_string();
assert!(generated.contains("Hello\nworld!"));
assert!(!generated.contains("compile_error"));
@ -875,7 +877,7 @@ fn test_code_in_comment() {
struct Tmpl;
";
let ast = syn::parse_str(ts).unwrap();
let generated = build_template(&ast).unwrap();
let generated = build_template(&ast).unwrap().to_string();
assert!(generated.contains("Hello\nworld!"));
assert!(!generated.contains("compile_error"));
@ -906,7 +908,7 @@ fn test_code_in_comment() {
struct BlockOnBlock;
";
let ast = syn::parse_str(ts).unwrap();
let generated = build_template(&ast).unwrap();
let generated = build_template(&ast).unwrap().to_string();
assert!(!generated.contains("compile_error"));
}
@ -1147,12 +1149,12 @@ fn test_concat() {
fn extends_with_whitespace_control() {
const CONTROL: &[&str] = &["", "\t", "-", "+", "~"];
let expected = jinja_to_rust(r#"front {% extends "a.html" %} back"#, &[], "").unwrap();
let expected = jinja_to_rust(r#"front {% extends "a.html" %} back"#, &[], "");
let expected = unparse(&expected);
for front in CONTROL {
for back in CONTROL {
let src = format!(r#"front {{%{front} extends "a.html" {back}%}} back"#);
let actual = jinja_to_rust(&src, &[], "").unwrap();
let actual = jinja_to_rust(&src, &[], "");
let actual = unparse(&actual);
assert_eq!(expected, actual, "source: {src:?}");
}

View File

@ -3,7 +3,7 @@
#![deny(unreachable_pub)]
#![allow(clippy::vec_box)] // intentional, less copying
pub mod ascii_str;
mod ascii_str;
pub mod expr;
pub mod node;
mod target;
@ -1344,139 +1344,95 @@ const PRIMITIVE_TYPES: &[&str] = &{
list
};
pub const MAX_RUST_KEYWORD_LEN: usize = 8;
pub const MAX_RUST_RAW_KEYWORD_LEN: usize = MAX_RUST_KEYWORD_LEN + 2;
const MAX_RUST_KEYWORD_LEN: usize = 8;
pub const RUST_KEYWORDS: &[&[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]]; MAX_RUST_KEYWORD_LEN + 1] = &{
const NO_KWS: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &[];
const KW2: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &[
AsciiStr::new_sized("r#as"),
AsciiStr::new_sized("r#do"),
AsciiStr::new_sized("r#fn"),
AsciiStr::new_sized("r#if"),
AsciiStr::new_sized("r#in"),
const RUST_KEYWORDS: &[&[[AsciiChar; MAX_RUST_KEYWORD_LEN]]; MAX_RUST_KEYWORD_LEN + 1] = &{
const NO_KWS: &[[AsciiChar; MAX_RUST_KEYWORD_LEN]] = &[];
const KW2: &[[AsciiChar; MAX_RUST_KEYWORD_LEN]] = &[
AsciiStr::new_sized("as"),
AsciiStr::new_sized("do"),
AsciiStr::new_sized("fn"),
AsciiStr::new_sized("if"),
AsciiStr::new_sized("in"),
];
const KW3: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &[
AsciiStr::new_sized("r#box"),
AsciiStr::new_sized("r#dyn"),
AsciiStr::new_sized("r#for"),
AsciiStr::new_sized("r#gen"),
AsciiStr::new_sized("r#let"),
AsciiStr::new_sized("r#mod"),
AsciiStr::new_sized("r#mut"),
AsciiStr::new_sized("r#pub"),
AsciiStr::new_sized("r#ref"),
AsciiStr::new_sized("r#try"),
AsciiStr::new_sized("r#use"),
const KW3: &[[AsciiChar; MAX_RUST_KEYWORD_LEN]] = &[
AsciiStr::new_sized("box"),
AsciiStr::new_sized("dyn"),
AsciiStr::new_sized("for"),
AsciiStr::new_sized("gen"),
AsciiStr::new_sized("let"),
AsciiStr::new_sized("mod"),
AsciiStr::new_sized("mut"),
AsciiStr::new_sized("pub"),
AsciiStr::new_sized("ref"),
AsciiStr::new_sized("try"),
AsciiStr::new_sized("use"),
];
const KW4: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &[
AsciiStr::new_sized("r#else"),
AsciiStr::new_sized("r#enum"),
AsciiStr::new_sized("r#impl"),
AsciiStr::new_sized("r#move"),
AsciiStr::new_sized("r#priv"),
AsciiStr::new_sized("r#true"),
AsciiStr::new_sized("r#type"),
const KW4: &[[AsciiChar; MAX_RUST_KEYWORD_LEN]] = &[
AsciiStr::new_sized("else"),
AsciiStr::new_sized("enum"),
AsciiStr::new_sized("impl"),
AsciiStr::new_sized("loop"),
AsciiStr::new_sized("move"),
AsciiStr::new_sized("priv"),
AsciiStr::new_sized("self"),
AsciiStr::new_sized("Self"),
AsciiStr::new_sized("true"),
AsciiStr::new_sized("type"),
];
const KW5: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &[
AsciiStr::new_sized("r#async"),
AsciiStr::new_sized("r#await"),
AsciiStr::new_sized("r#break"),
AsciiStr::new_sized("r#const"),
AsciiStr::new_sized("r#crate"),
AsciiStr::new_sized("r#false"),
AsciiStr::new_sized("r#final"),
AsciiStr::new_sized("r#macro"),
AsciiStr::new_sized("r#match"),
AsciiStr::new_sized("r#trait"),
AsciiStr::new_sized("r#where"),
AsciiStr::new_sized("r#while"),
AsciiStr::new_sized("r#yield"),
const KW5: &[[AsciiChar; MAX_RUST_KEYWORD_LEN]] = &[
AsciiStr::new_sized("async"),
AsciiStr::new_sized("await"),
AsciiStr::new_sized("break"),
AsciiStr::new_sized("const"),
AsciiStr::new_sized("crate"),
AsciiStr::new_sized("false"),
AsciiStr::new_sized("final"),
AsciiStr::new_sized("macro"),
AsciiStr::new_sized("match"),
AsciiStr::new_sized("super"),
AsciiStr::new_sized("trait"),
AsciiStr::new_sized("union"),
AsciiStr::new_sized("where"),
AsciiStr::new_sized("while"),
AsciiStr::new_sized("yield"),
];
const KW6: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &[
AsciiStr::new_sized("r#become"),
AsciiStr::new_sized("r#extern"),
AsciiStr::new_sized("r#return"),
AsciiStr::new_sized("r#static"),
AsciiStr::new_sized("r#struct"),
AsciiStr::new_sized("r#typeof"),
AsciiStr::new_sized("r#unsafe"),
const KW6: &[[AsciiChar; MAX_RUST_KEYWORD_LEN]] = &[
AsciiStr::new_sized("become"),
AsciiStr::new_sized("extern"),
AsciiStr::new_sized("return"),
AsciiStr::new_sized("static"),
AsciiStr::new_sized("struct"),
AsciiStr::new_sized("typeof"),
AsciiStr::new_sized("unsafe"),
AsciiStr::new_sized("caller"),
];
const KW7: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &[
AsciiStr::new_sized("r#unsized"),
AsciiStr::new_sized("r#virtual"),
const KW7: &[[AsciiChar; MAX_RUST_KEYWORD_LEN]] = &[
AsciiStr::new_sized("unsized"),
AsciiStr::new_sized("virtual"),
];
const KW8: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &[
AsciiStr::new_sized("r#abstract"),
AsciiStr::new_sized("r#continue"),
AsciiStr::new_sized("r#override"),
const KW8: &[[AsciiChar; MAX_RUST_KEYWORD_LEN]] = &[
AsciiStr::new_sized("abstract"),
AsciiStr::new_sized("continue"),
AsciiStr::new_sized("override"),
];
[NO_KWS, NO_KWS, KW2, KW3, KW4, KW5, KW6, KW7, KW8]
};
// These are only used in the parser, which is why they're private.
const KWS_PARSER: &[&[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]]; MAX_RUST_KEYWORD_LEN + 1] = &{
const KW4: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &{
let mut result = [AsciiStr::new_sized("r#"); RUST_KEYWORDS[4].len() + 3];
let mut i = 0;
while i < RUST_KEYWORDS[4].len() {
result[i] = RUST_KEYWORDS[4][i];
i += 1;
}
result[result.len() - 3] = AsciiStr::new_sized("r#loop");
result[result.len() - 2] = AsciiStr::new_sized("r#self");
result[result.len() - 1] = AsciiStr::new_sized("r#Self");
result
};
const KW5: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &{
let mut result = [AsciiStr::new_sized("r#"); RUST_KEYWORDS[5].len() + 2];
let mut i = 0;
while i < RUST_KEYWORDS[5].len() {
result[i] = RUST_KEYWORDS[5][i];
i += 1;
}
result[result.len() - 2] = AsciiStr::new_sized("r#super");
result[result.len() - 1] = AsciiStr::new_sized("r#union");
result
};
const KW6: &[[AsciiChar; MAX_RUST_RAW_KEYWORD_LEN]] = &{
let mut result = [AsciiStr::new_sized("r#"); RUST_KEYWORDS[6].len() + 1];
let mut i = 0;
while i < RUST_KEYWORDS[6].len() {
result[i] = RUST_KEYWORDS[6][i];
i += 1;
}
result[result.len() - 1] = AsciiStr::new_sized("r#caller");
result
};
[
RUST_KEYWORDS[0],
RUST_KEYWORDS[1],
RUST_KEYWORDS[2],
RUST_KEYWORDS[3],
KW4,
KW5,
KW6,
RUST_KEYWORDS[7],
RUST_KEYWORDS[8],
]
};
fn is_rust_keyword(ident: &str) -> bool {
pub fn is_rust_keyword(ident: &str) -> bool {
let ident_len = ident.len();
if ident_len > MAX_RUST_KEYWORD_LEN {
return false;
}
let kws = KWS_PARSER[ident.len()];
let kws = RUST_KEYWORDS[ident.len()];
let mut padded_ident = [0; MAX_RUST_KEYWORD_LEN];
padded_ident[..ident_len].copy_from_slice(ident.as_bytes());
// Since the individual buckets are quite short, a linear search is faster than a binary search.
for probe in kws {
if padded_ident == *AsciiChar::slice_as_bytes(probe[2..].try_into().unwrap()) {
if padded_ident == *AsciiChar::slice_as_bytes(probe) {
return true;
}
}
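// Illustrative lookups (not part of the change): the identifier is zero-padded to
// MAX_RUST_KEYWORD_LEN bytes and compared against the bucket for its length, so
//
//     is_rust_keyword("loop")    // padded to b"loop\0\0\0\0", matches an entry in RUST_KEYWORDS[4]
//     is_rust_keyword("looped")  // checked against RUST_KEYWORDS[6], no match -> false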

View File

@ -275,6 +275,7 @@ fn filter_block_conditions() {
// The output of `|upper` is not marked as `|safe`, so the output of `|paragraphbreaks` gets
// escaped. The '&' in the input is not marked as `|safe`, so it should get escaped, twice.
#[test]
#[expect(unused_variables)] // `canary` inside the filter block is intentionally unused
fn filter_nested_filter_blocks() {
#[derive(Template)]
#[template(

View File

@ -33,6 +33,7 @@ fn underscore_ident1() {
}
// Ensures that variables can be named `_`.
#[expect(clippy::redundant_pattern_matching)] // We want to test if `Some(_)` is recognized.
#[test]
fn underscore_ident2() {
#[derive(Template)]