diff --git a/askama/Cargo.toml b/askama/Cargo.toml index f70902b34..8af117cec 100644 --- a/askama/Cargo.toml +++ b/askama/Cargo.toml @@ -33,6 +33,7 @@ with-mendes = ["askama_derive/with-mendes"] with-rocket = ["askama_derive/with-rocket"] with-tide = ["askama_derive/with-tide"] with-warp = ["askama_derive/with-warp"] +i18n = ["askama_derive/i18n", "fluent-templates"] # deprecated mime = [] @@ -48,6 +49,7 @@ percent-encoding = { version = "2.1.0", optional = true } serde = { version = "1.0", optional = true, features = ["derive"] } serde_json = { version = "1.0", optional = true } serde_yaml = { version = "0.9", optional = true } +fluent-templates = { version = "0.8.0", optional = true } [package.metadata.docs.rs] features = ["config", "humansize", "num-traits", "serde-json", "serde-yaml"] diff --git a/askama/src/i18n.rs b/askama/src/i18n.rs new file mode 100644 index 000000000..1869b2383 --- /dev/null +++ b/askama/src/i18n.rs @@ -0,0 +1,74 @@ +//! Module for compile time checked localization +//! +//! # Example: +//! +//! [Fluent Translation List](https://projectfluent.org/) resource file `i18n/es-MX/basic.ftl`: +//! +//! ```ftl +//! greeting = ¡Hola, { $name }! +//! ``` +//! +//! Askama HTML template `templates/example.html`: +//! +//! ```html +//!

+//!     {{ localize("greeting", name: name) }}

+//! ``` +//! +//! Rust usage: +//! ```ignore +//! use askama::i18n::{langid, Locale}; +//! use askama::Template; +//! +//! askama::i18n::load!(LOCALES); +//! +//! #[derive(Template)] +//! #[template(path = "example.html")] +//! struct ExampleTemplate<'a> { +//! #[locale] +//! loc: Locale<'a>, +//! name: &'a str, +//! } +//! +//! let template = ExampleTemplate { +//! loc: Locale::new(langid!("es-MX"), &LOCALES), +//! name: "Hilda", +//! }; +//! +//! // "

+//! //     ¡Hola, Hilda!

" +//! template.render().unwrap(); +//! ``` + +use std::collections::HashMap; +use std::iter::FromIterator; + +// Re-export conventiently as `askama::i18n::load!()`. +// Proc-macro crates can only export macros from their root namespace. +/// Load locales at compile time. See example above for usage. +pub use askama_derive::i18n_load as load; + +pub use fluent_templates::{self, fluent_bundle::FluentValue, fs::langid, LanguageIdentifier}; +use fluent_templates::{Loader, StaticLoader}; + +pub struct Locale<'a> { + loader: &'a StaticLoader, + language: LanguageIdentifier, +} + +impl Locale<'_> { + pub fn new(language: LanguageIdentifier, loader: &'static StaticLoader) -> Self { + Self { loader, language } + } + + pub fn translate<'a>( + &self, + msg_id: &str, + args: impl IntoIterator)>, + ) -> Option { + let args = HashMap::<&str, FluentValue<'_>>::from_iter(args); + let args = match args.is_empty() { + true => None, + false => Some(&args), + }; + self.loader.lookup_complete(&self.language, msg_id, args) + } +} diff --git a/askama/src/lib.rs b/askama/src/lib.rs index a98989e85..cb80b5d94 100644 --- a/askama/src/lib.rs +++ b/askama/src/lib.rs @@ -66,6 +66,8 @@ mod error; pub mod filters; pub mod helpers; +#[cfg(feature = "i18n")] +pub mod i18n; use std::fmt; diff --git a/askama_derive/Cargo.toml b/askama_derive/Cargo.toml index 5b33e6d7a..4f5c39ab2 100644 --- a/askama_derive/Cargo.toml +++ b/askama_derive/Cargo.toml @@ -29,6 +29,7 @@ with-mendes = [] with-rocket = [] with-tide = [] with-warp = [] +i18n = ["fluent-syntax", "fluent-templates", "serde", "basic-toml"] [dependencies] mime = "0.3" @@ -39,3 +40,5 @@ quote = "1" serde = { version = "1.0", optional = true, features = ["derive"] } syn = "2" basic-toml = { version = "0.1.1", optional = true } +fluent-syntax = { version = "0.11.0", optional = true, default-features = false } +fluent-templates = { version = "0.8.0", optional = true, default-features = false } diff --git a/askama_derive/src/generator.rs 
b/askama_derive/src/generator.rs index f990330a7..2f4595209 100644 --- a/askama_derive/src/generator.rs +++ b/askama_derive/src/generator.rs @@ -1376,9 +1376,40 @@ impl<'a> Generator<'a> { Expr::RustMacro(ref path, args) => self.visit_rust_macro(buf, path, args), Expr::Try(ref expr) => self.visit_try(buf, expr.as_ref())?, Expr::Tuple(ref exprs) => self.visit_tuple(buf, exprs)?, + Expr::Localize(ref msg_id, ref args) => self.visit_localize(buf, msg_id, args)?, }) } + fn visit_localize( + &mut self, + buf: &mut Buffer, + msg_id: &Expr<'_>, + args: &[(&str, Expr<'_>)], + ) -> Result { + let localizer = + self.input.localizer.as_deref().ok_or( + "You need to annotate a field with #[locale] to use the localize() function.", + )?; + + buf.write(&format!( + "self.{}.translate(", + normalize_identifier(localizer) + )); + self.visit_expr(buf, msg_id)?; + buf.writeln(", [")?; + buf.indent(); + for (k, v) in args { + buf.write(&format!("({:?}, ::askama::i18n::FluentValue::from(", k)); + self.visit_expr(buf, v)?; + buf.writeln(")),")?; + } + buf.dedent()?; + // Safe to unwrap, as `msg_id` is checked at compile time. 
+ buf.write("]).unwrap()"); + + Ok(DisplayWrap::Unwrapped) + } + fn visit_try( &mut self, buf: &mut Buffer, diff --git a/askama_derive/src/i18n.rs b/askama_derive/src/i18n.rs new file mode 100644 index 000000000..d9d6eaa3f --- /dev/null +++ b/askama_derive/src/i18n.rs @@ -0,0 +1,378 @@ +use std::collections::{HashMap, HashSet}; +use std::fmt::Display; +use std::fs::{DirEntry, OpenOptions}; +use std::io::Read; +use std::path::{Path, PathBuf}; +use std::str::FromStr; + +use basic_toml::from_str; +use fluent_syntax::ast::{ + Expression, InlineExpression, PatternElement, Resource, Variant, VariantKey, +}; +use fluent_syntax::parser::parse_runtime; +use fluent_templates::lazy_static::lazy_static; +use fluent_templates::loader::build_fallbacks; +use fluent_templates::LanguageIdentifier; +use proc_macro::TokenStream; +use proc_macro2::{Ident, TokenStream as TokenStream2}; +use quote::quote_spanned; +use serde::Deserialize; +use syn::parse::{Parse, ParseStream}; +use syn::spanned::Spanned; +use syn::{parse2, Visibility}; + +use crate::CompileError; + +type FileResource = (PathBuf, Resource); + +macro_rules! 
mk_static { + ($(let $ident:ident: $ty:ty = $expr:expr;)*) => { + $( + let $ident = { + let value: Option<$ty> = Some($expr); + unsafe { + static mut VALUE: Option<$ty> = None; + VALUE = value; + match &VALUE { + Some(value) => value, + None => unreachable!(), + } + } + }; + )* + }; +} + +struct Variable { + vis: Visibility, + name: Ident, +} + +impl Parse for Variable { + fn parse(input: ParseStream<'_>) -> syn::Result { + let vis = input.parse().unwrap_or(Visibility::Inherited); + let name = input.parse()?; + Ok(Variable { vis, name }) + } +} + +struct Configuration { + pub(crate) fallback: LanguageIdentifier, + pub(crate) use_isolating: bool, + pub(crate) core_locales: Option, + pub(crate) locales: Vec<(LanguageIdentifier, Vec)>, + pub(crate) fallbacks: &'static HashMap>, + pub(crate) assets_dir: PathBuf, +} + +#[derive(Default, Deserialize)] +struct I18nConfig { + #[serde(default)] + pub(crate) fallback_language: Option, + #[serde(default)] + pub(crate) fluent: Option, +} + +#[derive(Default, Deserialize)] +struct I18nFluent { + #[serde(default)] + pub(crate) assets_dir: Option, + #[serde(default)] + pub(crate) core_locales: Option, + #[serde(default)] + pub(crate) use_isolating: Option, +} + +fn format_err(path: &Path, err: impl Display) -> String { + format!("error processing {:?}: {}", path, err) +} + +fn read_resource(path: PathBuf) -> Result { + let mut buf = String::new(); + OpenOptions::new() + .read(true) + .open(&path) + .map_err(|err| format_err(&path, err))? 
+ .read_to_string(&mut buf) + .map_err(|err| format_err(&path, err))?; + + let resource = match parse_runtime(buf) { + Ok(resource) => resource, + Err((_, err_vec)) => return Err(format_err(&path, err_vec.first().unwrap())), + }; + Ok((path, resource)) +} + +fn read_lang_dir( + entry: Result, +) -> Result)>, String> { + let entry = match entry { + Ok(entry) => entry, + Err(_) => return Ok(None), + }; + + let language = entry + .file_name() + .to_str() + .and_then(|s| LanguageIdentifier::from_str(s).ok()); + let language: LanguageIdentifier = match language { + Some(language) => language, + None => return Ok(None), + }; + + let dir_iter = match entry.path().read_dir() { + Ok(dir_iter) => dir_iter, + Err(_) => return Ok(None), + }; + + let mut resources = vec![]; + for entry in dir_iter.flatten() { + let path = entry.path(); + if path.to_str().map(|s| s.ends_with(".ftl")).unwrap_or(false) { + resources.push(read_resource(path)?); + }; + } + if resources.is_empty() { + return Ok(None); + } + + resources.sort_by(|(l, _), (r, _)| Path::cmp(l, r)); + Ok(Some((language, resources))) +} + +fn read_configuration() -> Result { + let root = PathBuf::from(std::env::var("CARGO_MANIFEST_DIR").unwrap()); + let root = root.canonicalize().unwrap_or(root); + + let i18n_toml = root.join("i18n.toml"); + let config = match i18n_toml.exists() { + false => I18nConfig::default(), + true => { + let mut buf = String::new(); + OpenOptions::new() + .read(true) + .open(&i18n_toml) + .map_err(|err| format_err(&i18n_toml, err))? + .read_to_string(&mut buf) + .map_err(|err| format_err(&i18n_toml, err))?; + from_str(&buf).map_err(|err| format_err(&i18n_toml, err))? 
+ } + }; + let fluent = config.fluent.unwrap_or_default(); + + let fallback = config.fallback_language.as_deref().unwrap_or("en"); + let fallback: LanguageIdentifier = match fallback.parse() { + Ok(fallback) => fallback, + Err(err) => { + return Err(format!( + "not a valid LanguageIdentifier {:?} for fallback_language: {}", + err, fallback, + )) + } + }; + + let core_locales = match fluent.core_locales { + Some(path) => { + let path = match path.is_absolute() { + true => path, + false => root.join(path), + }; + if path.to_str().is_none() { + return Err(format!( + "core_locales path contains illegal UTF-8 characters: {:?}", + path, + )); + }; + Some(read_resource(path)?) + } + None => None, + }; + + let assets_dir = match fluent.assets_dir { + Some(path) if path.is_absolute() => todo!(), + Some(path) => root.join(&path), + None => root.join("i18n"), + }; + let mut locales = { + let mut locales = vec![]; + for entry in assets_dir + .read_dir() + .map_err(|err| format_err(&assets_dir, err))? + { + if let Some(datum) = read_lang_dir(entry)? { + locales.push(datum); + } + } + locales + }; + locales.sort_by(|(l1, _), (l2, _)| LanguageIdentifier::cmp(l1, l2)); + + mk_static! { + let locales_: Vec = locales.iter().map(|(l, _)| l.clone()).collect(); + let fallbacks: HashMap> = build_fallbacks( + locales_, + ); + }; + + Ok(Configuration { + fallback, + use_isolating: fluent.use_isolating.unwrap_or(false), + core_locales, + locales, + fallbacks, + assets_dir, + }) +} + +fn get_i18n_config() -> Result<&'static Configuration, CompileError> { + lazy_static! 
{ + static ref CONFIGURATION: Result = read_configuration(); + } + match &*CONFIGURATION { + Ok(configuration) => Ok(configuration), + Err(err) => Err(err.as_str().into()), + } +} + +pub(crate) fn load(input: TokenStream) -> Result { + let configuration = get_i18n_config()?; + + let input: TokenStream2 = input.into(); + let span = input.span(); + let variable: Variable = match parse2(input) { + Ok(variable) => variable, + Err(err) => return Err(format!("could not parse i18n_load!(…): {}", err).into()), + }; + + let vis = variable.vis; + let name = variable.name; + let assets_dir = configuration.assets_dir.to_str().unwrap(); + let fallback = configuration.fallback.to_string(); + let core_locales = configuration.core_locales.as_ref().map(|(s, _)| { + let s = s.to_str().unwrap(); + quote_spanned!(span => core_locales: #s,) + }); + let customise = match configuration.use_isolating { + false => Some(quote_spanned!(span => customise: |b| b.set_use_isolating(false),)), + true => None, + }; + + let ts = quote_spanned! { + span => + mod __askama_i18n_macro { + mod fluent_templates { + pub use ::askama::i18n::fluent_templates::*; + } + ::askama::i18n::fluent_templates::static_loader! 
{ + pub static #name = { + locales: #assets_dir, + fallback_language: #fallback, + #core_locales + #customise + }; + } + } + #vis use __askama_i18n_macro::#name; + }; + Ok(ts.into()) +} + +pub(crate) fn arguments_of(msg_id: &str) -> Result, CompileError> { + let config = get_i18n_config()?; + let entry = config.fallbacks[&config.fallback] + .iter() + .filter_map(|l1| { + config + .locales + .binary_search_by(|(l2, _)| LanguageIdentifier::cmp(l2, l1)) + .ok() + }) + .flat_map(|index| &config.locales[index].1) + .chain(config.core_locales.iter()) + .flat_map(|(_, resource)| &resource.body) + .filter_map(|entry| match entry { + fluent_syntax::ast::Entry::Message(entry) => Some(entry), + _ => None, + }) + .find(|entry| entry.id.name == msg_id) + .ok_or_else(|| CompileError::from(format!("msg_id {:?} not found", msg_id)))?; + + let keys = entry + .value + .iter() + .flat_map(|v| v.elements.iter()) + .filter_map(|p| match p { + PatternElement::Placeable { expression } => Some(expression), + _ => None, + }) + .flat_map(expr_to_key) + .collect(); + Ok(keys) +} + +fn expr_to_key(expr: &'static Expression) -> Vec<&'static str> { + let (selector, variants): (&InlineExpression, &[Variant]) = match expr { + Expression::Select { selector, variants } => (selector, variants), + Expression::Inline(selector) => (selector, &[]), + }; + + let variant_keys = variants.iter().filter_map(|v| match &v.key { + VariantKey::Identifier { name } => Some(name.as_str()), + _ => None, + }); + + let variant_values = variants + .iter() + .flat_map(|v| v.value.elements.iter()) + .filter_map(|v| match v { + PatternElement::Placeable { expression } => Some(expression), + _ => None, + }) + .flat_map(expr_to_key); + + let selector_keys = inline_expr_to_key(selector); + + let mut v = vec![]; + v.extend(variant_keys); + v.extend(variant_values); + v.extend(selector_keys); + v +} + +fn inline_expr_to_key(selector: &'static InlineExpression) -> Vec<&'static str> { + let mut v = vec![]; + 
v.extend(selector_placeable(selector)); + v.extend(selector_variable(selector)); + v.extend(selector_function(selector)); + v +} + +fn selector_placeable(e: &'static InlineExpression) -> impl Iterator { + let e = match e { + InlineExpression::Placeable { expression } => Some(expression), + _ => None, + }; + e.into_iter().flat_map(|e| expr_to_key(e)) +} + +fn selector_variable(e: &'static InlineExpression) -> impl Iterator { + let id = match e { + InlineExpression::VariableReference { id } => Some(id.name.as_str()), + _ => None, + }; + id.into_iter() +} + +fn selector_function(e: &'static InlineExpression) -> impl Iterator { + let arguments = match e { + InlineExpression::FunctionReference { arguments, .. } => Some(arguments), + _ => None, + }; + arguments.into_iter().flat_map(|a| { + a.named + .iter() + .map(|n| &n.value) + .chain(&a.positional) + .flat_map(inline_expr_to_key) + }) +} diff --git a/askama_derive/src/input.rs b/askama_derive/src/input.rs index 47d51bd89..3194700b4 100644 --- a/askama_derive/src/input.rs +++ b/askama_derive/src/input.rs @@ -17,6 +17,7 @@ pub(crate) struct TemplateInput<'a> { pub(crate) ext: Option, pub(crate) mime_type: String, pub(crate) path: PathBuf, + pub(crate) localizer: Option, } impl TemplateInput<'_> { @@ -48,6 +49,39 @@ impl TemplateInput<'_> { return Err("must include 'ext' attribute when using 'source' attribute".into()) } }; + let localizer = match ast.data { + syn::Data::Struct(syn::DataStruct { + fields: syn::Fields::Named(ref fields), + .. + }) => { + let mut localizers = + fields + .named + .iter() + .filter(|&f| f.ident.is_some()) + .flat_map( + |f| match f.attrs.iter().any(|a| a.path().is_ident("locale")) { + true => Some(f.ident.as_ref()?.to_string()), + false => None, + }, + ); + match localizers.next() { + Some(localizer) => { + if !cfg!(feature = "i18n") { + return Err( + "You need to activate the \"i18n\" feature to use #[locale]." 
+ .into(), + ); + } else if localizers.next().is_some() { + return Err("You cannot mark more than one field as #[locale].".into()); + } + Some(localizer) + } + None => None, + } + } + _ => None, + }; // Validate syntax let syntax = syntax.map_or_else( @@ -95,6 +129,7 @@ impl TemplateInput<'_> { ext, mime_type, path, + localizer, }) } diff --git a/askama_derive/src/lib.rs b/askama_derive/src/lib.rs index 0683e719c..b931db31b 100644 --- a/askama_derive/src/lib.rs +++ b/askama_derive/src/lib.rs @@ -10,14 +10,28 @@ use proc_macro2::Span; mod config; mod generator; mod heritage; +#[cfg(feature = "i18n")] +mod i18n; mod input; mod parser; -#[proc_macro_derive(Template, attributes(template))] +#[proc_macro_derive(Template, attributes(template, locale))] pub fn derive_template(input: TokenStream) -> TokenStream { generator::derive_template(input) } +#[proc_macro] +pub fn i18n_load(_input: TokenStream) -> TokenStream { + #[cfg(feature = "i18n")] + match i18n::load(_input) { + Ok(ts) => ts, + Err(err) => err.into_compile_error(), + } + + #[cfg(not(feature = "i18n"))] + CompileError::from(r#"Activate the "i18n" feature to use i18n_load!()."#).into_compile_error() +} + #[derive(Debug, Clone)] struct CompileError { msg: Cow<'static, str>, diff --git a/askama_derive/src/parser/expr.rs b/askama_derive/src/parser/expr.rs index 1d4ea89ee..36f5ad150 100644 --- a/askama_derive/src/parser/expr.rs +++ b/askama_derive/src/parser/expr.rs @@ -33,6 +33,8 @@ pub(crate) enum Expr<'a> { Call(Box>, Vec>), RustMacro(Vec<&'a str>, &'a str), Try(Box>), + #[allow(dead_code)] + Localize(Box>, Vec<(&'a str, Expr<'a>)>), } impl Expr<'_> { @@ -108,6 +110,9 @@ impl Expr<'_> { } Expr::Group(arg) => arg.is_cacheable(), Expr::Tuple(args) => args.iter().all(|arg| arg.is_cacheable()), + Expr::Localize(msg_id, args) => { + msg_id.is_cacheable() && args.iter().all(|(_, arg)| arg.is_cacheable()) + } // We have too little information to tell if the expression is pure: Expr::Call(_, _) => false, 
Expr::RustMacro(_, _) => false, @@ -331,9 +336,72 @@ expr_prec_layer!(expr_compare, expr_bor, "==", "!=", ">=", ">", "<=", "<"); expr_prec_layer!(expr_and, expr_compare, "&&"); expr_prec_layer!(expr_or, expr_and, "||"); +#[cfg(not(feature = "i18n"))] +fn expr_localize(i: &str) -> IResult<&str, Expr<'_>> { + let (i, _) = pair(tag("localize"), ws(tag("(")))(i)?; + eprintln!(r#"Activate the "i18n" feature to use {{ localize() }}."#); + Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag))) +} + +#[cfg(feature = "i18n")] +fn expr_localize(i: &str) -> IResult<&str, Expr<'_>> { + fn localize_args(mut i: &str) -> IResult<&str, Vec<(&str, Expr<'_>)>> { + let mut args = Vec::<(&str, Expr<'_>)>::new(); + + let mut p = opt(tuple((ws(tag(",")), identifier, ws(tag(":")), expr_any))); + while let (j, Some((_, k, _, v))) = p(i)? { + if args.iter().any(|&(a, _)| a == k) { + eprintln!("Duplicated key: {:?}", k); + return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag))); + } + + args.push((k, v)); + i = j; + } + + let (i, _) = opt(tag(","))(i)?; + Ok((i, args)) + } + + let (j, (_, _, (msg_id, args, _))) = tuple(( + tag("localize"), + ws(tag("(")), + cut(tuple((expr_any, localize_args, ws(tag(")"))))), + ))(i)?; + + if let Expr::StrLit(msg_id) = msg_id { + let mut msg_args = match crate::i18n::arguments_of(msg_id) { + Ok(args) => args, + Err(err) => { + eprintln!("{}", err.msg); + return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag))); + } + }; + for &(call_arg, _) in &args { + if !msg_args.remove(call_arg) { + eprintln!( + "Fluent template {:?} does not contain argument {:?}", + msg_id, call_arg, + ); + return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag))); + } + } + if !msg_args.is_empty() { + eprintln!( + "Missing argument(s) {:?} to fluent template {:?}", + msg_args, msg_id, + ); + return Err(nom::Err::Failure(error_position!(i, ErrorKind::Tag))); + } + } + + Ok((j, Expr::Localize(msg_id.into(), args))) +} + fn expr_any(i: &str) -> 
IResult<&str, Expr<'_>> { let range_right = |i| pair(ws(alt((tag("..="), tag("..")))), opt(expr_or))(i); alt(( + expr_localize, map(range_right, |(op, right)| { Expr::Range(op, None, right.map(Box::new)) }), diff --git a/askama_derive/src/parser/tests.rs b/askama_derive/src/parser/tests.rs index 801e78733..0cf520dde 100644 --- a/askama_derive/src/parser/tests.rs +++ b/askama_derive/src/parser/tests.rs @@ -711,3 +711,71 @@ fn test_missing_space_after_kw() { "unable to parse template:\n\n\"{%leta=b%}\"" )); } + +#[cfg(feature = "i18n")] +#[test] +fn test_parse_localize() { + macro_rules! map { + ($($k:expr => $v:expr),* $(,)?) => {{ + use std::iter::{Iterator, IntoIterator}; + Iterator::collect(IntoIterator::into_iter([$(($k, $v),)*])) + }}; + } + assert_eq!( + super::parse(r#"{{ localize(1, v: 32 + 7) }}"#, &Syntax::default()).unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Localize( + Expr::NumLit("1").into(), + map!( + "v" => { + Expr::BinOp("+", Expr::NumLit("32").into(), Expr::NumLit("7").into()) + } + ), + ) + )], + ); + assert_eq!( + super::parse( + r#"{{ localize(1, b: "b", c: "c", d: "d") }}"#, + &Syntax::default(), + ) + .unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Localize( + Expr::NumLit("1").into(), + map!( + "b" => Expr::StrLit("b"), + "c" => Expr::StrLit("c"), + "d" => Expr::StrLit("d"), + ), + ) + )], + ); + assert_eq!( + super::parse( + r#"{{ localize(1, v: localize(2, v: 32 + 7) ) }}"#, + &Syntax::default(), + ) + .unwrap(), + vec![Node::Expr( + Ws(None, None), + Expr::Localize( + Expr::NumLit("1").into(), + map!( + "v" => Expr::Localize( + Expr::NumLit("2").into(), + map!( + "v" => Expr::BinOp( + "+", + Expr::NumLit("32").into(), + Expr::NumLit("7").into(), + ), + ), + ), + ), + ), + )], + ); +}