2020-06-04 16:39:33 -04:00
|
|
|
use proc_macro2::Span;
|
2020-07-14 17:13:37 -04:00
|
|
|
use quote::ToTokens;
|
2020-07-14 14:21:56 -04:00
|
|
|
use syn::parse::{Parse, ParseStream as SynParseStream, Result as ParseResult};
|
|
|
|
use syn::spanned::Spanned;
|
|
|
|
use syn::{BinOp, Block, Expr};
|
2020-06-04 16:39:33 -04:00
|
|
|
|
|
|
|
use crate::error::*;
|
|
|
|
use crate::parser::{ParseStream, Token, TokenKind};
|
|
|
|
|
2020-07-14 14:21:56 -04:00
|
|
|
/// A filter appearing after the `|` operator in a buffered code block
/// (split off from the expression by the `Parse` impl for `CodeBlock`).
enum Filter {
    /// Bare filter name with no arguments, e.g. `expr | upper`.
    Ident(syn::Ident),
    /// Filter invocation with extra arguments, e.g. `expr | truncate(10)`.
    Call(syn::ExprCall),
}
|
|
|
|
|
|
|
|
impl Spanned for Filter {
|
|
|
|
fn span(&self) -> Span {
|
|
|
|
match *self {
|
|
|
|
Filter::Ident(ref i) => i.span(),
|
|
|
|
Filter::Call(ref c) => c.span(),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// A parsed code block: an expression plus an optional trailing filter
/// (`expr | filter`), produced by the `Parse` impl below.
struct CodeBlock {
    // Left-hand expression of the block. Only read when `filter` is
    // `Some` (see `write_buffered_code`); for plain blocks the original
    // token text is emitted instead, hence the dead_code allowance.
    #[allow(dead_code)]
    expr: Box<Expr>,
    // The filter to apply, when the `|` syntax was used.
    filter: Option<Filter>,
}
|
|
|
|
|
|
|
|
impl Parse for CodeBlock {
|
|
|
|
fn parse(s: SynParseStream) -> ParseResult<Self> {
|
|
|
|
let main = s.parse::<Expr>()?;
|
|
|
|
|
|
|
|
let code_block = match main {
|
2020-07-14 17:13:37 -04:00
|
|
|
Expr::Binary(b) if matches!(b.op, BinOp::BitOr(_)) => {
|
2020-07-14 14:21:56 -04:00
|
|
|
match *b.right {
|
|
|
|
Expr::Call(c) => {
|
|
|
|
if let Expr::Path(ref p) = *c.func {
|
|
|
|
if p.path.get_ident().is_some() {
|
|
|
|
CodeBlock {
|
|
|
|
expr: b.left,
|
|
|
|
filter: Some(Filter::Call(c)),
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
return Err(syn::Error::new_spanned(
|
|
|
|
p,
|
|
|
|
"Invalid filter name",
|
|
|
|
));
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// if function in right side is not a path, fallback to
|
|
|
|
// normal evaluation block
|
|
|
|
CodeBlock {
|
|
|
|
expr: b.left,
|
|
|
|
filter: None,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Expr::Path(p) => {
|
|
|
|
if let Some(i) = p.path.get_ident() {
|
|
|
|
CodeBlock {
|
|
|
|
expr: b.left,
|
|
|
|
filter: Some(Filter::Ident(i.clone())),
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
return Err(syn::Error::new_spanned(
|
|
|
|
p,
|
|
|
|
"Invalid filter name",
|
|
|
|
));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
return Err(syn::Error::new_spanned(b, "Expected filter"));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => CodeBlock {
|
|
|
|
expr: Box::new(main),
|
|
|
|
filter: None,
|
|
|
|
},
|
|
|
|
};
|
|
|
|
|
|
|
|
Ok(code_block)
|
|
|
|
}
|
|
|
|
}
|
2020-06-04 16:39:33 -04:00
|
|
|
|
|
|
|
/// One contiguous mapping between a range in the original template and
/// the corresponding range in the generated Rust source.
#[derive(Clone)]
pub struct SourceMapEntry {
    /// Start offset of the range in the original template
    /// (as reported by `Token::offset`).
    pub original: usize,
    /// Start offset of the range in the generated source.
    pub new: usize,
    /// Length of the mapped range.
    pub length: usize,
}
|
|
|
|
|
|
|
|
/// Maps offsets in the generated Rust source back to offsets in the
/// original template. Entries are pushed in emission order as the
/// generated source grows.
#[derive(Default)]
pub struct SourceMap {
    entries: Vec<SourceMapEntry>,
}
|
|
|
|
|
|
|
|
impl SourceMap {
|
2020-06-07 04:58:52 -04:00
|
|
|
// #[inline]
|
|
|
|
// pub fn entries(&self) -> &[SourceMapEntry] {
|
|
|
|
// &*self.entries
|
|
|
|
// }
|
2020-06-04 16:39:33 -04:00
|
|
|
|
|
|
|
pub fn reverse_mapping(&self, offset: usize) -> Option<usize> {
|
|
|
|
// find entry which satisfies entry.new <= offset < entry.new + entry.length
|
|
|
|
let idx = self
|
|
|
|
.entries
|
|
|
|
.iter()
|
|
|
|
.position(|entry| offset < entry.new + entry.length && entry.new <= offset)?;
|
|
|
|
|
|
|
|
let entry = &self.entries[idx];
|
|
|
|
debug_assert!(entry.new <= offset);
|
|
|
|
debug_assert!(offset < entry.new + entry.length);
|
|
|
|
|
|
|
|
Some(entry.original + offset - entry.new)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Accumulates the generated Rust source and its source map while
/// consuming template tokens.
struct SourceBuilder {
    // Global escaping switch; combined with each buffered token's own
    // flag in `write_buffered_code`.
    escape: bool,
    // The generated Rust code. `new()` seeds it with the opening "{\n";
    // `finalize()` appends the closing "\n}".
    source: String,
    // Offset mapping from `source` back to the original template.
    source_map: SourceMap,
}
|
|
|
|
|
|
|
|
impl SourceBuilder {
    /// Creates a builder whose output starts with an opening brace, so the
    /// finished source parses as a single `syn::Block`.
    fn new(escape: bool) -> SourceBuilder {
        SourceBuilder {
            escape,
            source: String::from("{\n"),
            source_map: SourceMap::default(),
        }
    }

    /// Pre-allocates `additional` bytes in the output buffer.
    fn reserve(&mut self, additional: usize) {
        self.source.reserve(additional);
    }

    /// Appends the token text verbatim and records a source-map entry for
    /// it. NOTE: the entry must be built before the push so that `new`
    /// captures the offset where the token will start.
    fn write_token<'a>(&mut self, token: &Token<'a>) {
        let entry = SourceMapEntry {
            original: token.offset(),
            new: self.source.len(),
            length: token.as_str().len(),
        };
        self.source_map.entries.push(entry);
        self.source.push_str(token.as_str());
    }

    /// Emits a raw code token (`<% ... %>`) followed by a newline.
    // TODO: automatically add missing tokens (e.g. ';', '{')
    fn write_code<'a>(&mut self, token: &Token<'a>) -> Result<(), Error> {
        self.write_token(token);
        self.source.push('\n');
        Ok(())
    }

    /// Emits a text token as a `render_text!` call. The text is embedded
    /// as a Rust string literal via `Debug` formatting, which performs the
    /// necessary escaping.
    fn write_text<'a>(&mut self, token: &Token<'a>) -> Result<(), Error> {
        use std::fmt::Write;

        self.source.push_str("__sf_rt::render_text!(__sf_buf, ");
        // write text token with Debug::fmt
        write!(self.source, "{:?}", token.as_str()).unwrap();
        self.source.push_str(");\n");
        Ok(())
    }

    /// Emits a buffered code token (`<%= ... %>` / unescaped variant) as a
    /// `render!`/`render_escaped!` call, expanding an optional `| filter`
    /// suffix into a `sailfish::runtime::filter::<name>(...)` invocation.
    ///
    /// On a Rust syntax error in the token, the returned error's offset is
    /// translated back into template coordinates.
    fn write_buffered_code<'a>(
        &mut self,
        token: &Token<'a>,
        escape: bool,
    ) -> Result<(), Error> {
        // parse and split off filter
        let code_block = syn::parse_str::<CodeBlock>(token.as_str()).map_err(|e| {
            let span = e.span();
            let mut err = make_error!(ErrorKind::RustSyntaxError(e));
            // Map the span inside the token back to a template offset.
            err.offset = into_offset(token.as_str(), span).map(|p| token.offset() + p);
            err
        })?;
        // Escape only when both the builder-wide flag and the token's own
        // flag agree.
        let method = if self.escape && escape {
            "render_escaped"
        } else {
            "render"
        };

        self.source.push_str("__sf_rt::");
        self.source.push_str(method);
        self.source.push_str("!(__sf_buf, ");

        if let Some(filter) = code_block.filter {
            let expr_str = code_block.expr.into_token_stream().to_string();
            let (name, extra_args) = match filter {
                Filter::Ident(i) => (i.to_string(), None),
                Filter::Call(c) => (
                    c.func.into_token_stream().to_string(),
                    Some(c.args.into_token_stream().to_string()),
                ),
            };

            self.source.push_str("sailfish::runtime::filter::");
            self.source.push_str(&*name);
            self.source.push('(');

            // arguments to filter function
            {
                self.source.push_str("&(");
                // Map the re-stringified expression back to the start of
                // the original token.
                let entry = SourceMapEntry {
                    original: token.offset(),
                    new: self.source.len(),
                    length: expr_str.len(),
                };
                self.source_map.entries.push(entry);
                self.source.push_str(&expr_str);
                self.source.push(')');

                if let Some(extra_args) = extra_args {
                    self.source.push_str(", ");
                    self.source.push_str(&*extra_args);
                }
            }

            self.source.push(')');
        } else {
            // No filter: emit the token verbatim so the source map points
            // at the user's own spelling of the expression.
            self.write_token(token);
        }

        self.source.push_str(");\n");

        Ok(())
    }

    /// Consumes the token stream, dispatching each token to the
    /// appropriate writer. Consecutive text tokens (and comments between
    /// them) are merged into a single `render_text!` call.
    pub fn feed_tokens<'a>(&mut self, token_iter: ParseStream<'a>) -> Result<(), Error> {
        let mut it = token_iter.peekable();
        while let Some(token) = it.next() {
            let token = token?;
            match token.kind() {
                TokenKind::Code => self.write_code(&token)?,
                TokenKind::Comment => {}
                TokenKind::BufferedCode { escape } => {
                    self.write_buffered_code(&token, escape)?
                }
                TokenKind::Text => {
                    // concatenate repeated text token
                    let offset = token.offset();
                    let mut concatenated = String::new();
                    concatenated.push_str(token.as_str());

                    // Peeked Err tokens stop the merge here and are
                    // propagated on the next outer iteration.
                    while let Some(&Ok(ref next_token)) = it.peek() {
                        match next_token.kind() {
                            TokenKind::Text => {
                                concatenated.push_str(next_token.as_str());
                                it.next();
                            }
                            TokenKind::Comment => {
                                it.next();
                            }
                            _ => break,
                        }
                    }

                    let new_token = Token::new(&*concatenated, offset, TokenKind::Text);
                    self.write_text(&new_token)?;
                }
            }
        }

        Ok(())
    }

    /// Closes the generated block and parses it as a `syn::Block`.
    ///
    /// On a syntax error, the error location is mapped back through the
    /// source map so it points into the original template.
    pub fn finalize(mut self) -> Result<TranslatedSource, Error> {
        self.source.push_str("\n}");
        // Force the fallback (non-compiler) span implementation so that
        // `span.start()` yields real line/column data below.
        proc_macro2::fallback::force();
        match syn::parse_str::<Block>(&*self.source) {
            Ok(ast) => Ok(TranslatedSource {
                ast,
                source_map: self.source_map,
            }),
            Err(synerr) => {
                let span = synerr.span();
                // generated-source offset -> template offset
                let original_offset = into_offset(&*self.source, span)
                    .and_then(|o| self.source_map.reverse_mapping(o));

                let mut err =
                    make_error!(ErrorKind::RustSyntaxError(synerr), source = self.source);

                err.offset = original_offset;

                Err(err)
            }
        }
    }
}
|
|
|
|
|
|
|
|
fn into_offset(source: &str, span: Span) -> Option<usize> {
|
|
|
|
let lc = span.start();
|
|
|
|
if lc.line > 0 {
|
|
|
|
Some(
|
|
|
|
source
|
|
|
|
.lines()
|
|
|
|
.take(lc.line - 1)
|
|
|
|
.fold(0, |s, e| s + e.len() + 1)
|
|
|
|
+ lc.column,
|
|
|
|
)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-07-14 14:21:56 -04:00
|
|
|
/// Result of a successful translation: the generated code plus the
/// source map tying it back to the template.
pub struct TranslatedSource {
    /// The generated code, parsed as a `syn::Block`.
    pub ast: Block,
    /// Mapping from generated-source offsets back to template offsets.
    pub source_map: SourceMap,
}
|
|
|
|
|
|
|
|
// translate tokens into Rust code
#[derive(Clone, Debug, Default)]
pub struct Translator {
    // Whether buffered blocks get escaped output; `new()` turns this on
    // (note the `Default` derive leaves it `false`).
    escape: bool,
}
|
|
|
|
|
|
|
|
impl Translator {
|
|
|
|
#[inline]
|
|
|
|
pub fn new() -> Self {
|
|
|
|
Self { escape: true }
|
|
|
|
}
|
|
|
|
|
|
|
|
#[inline]
|
|
|
|
pub fn escape(mut self, new: bool) -> Self {
|
|
|
|
self.escape = new;
|
|
|
|
self
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn translate<'a>(
|
|
|
|
&self,
|
|
|
|
token_iter: ParseStream<'a>,
|
|
|
|
) -> Result<TranslatedSource, Error> {
|
|
|
|
let original_source = token_iter.original_source;
|
|
|
|
|
2020-07-14 17:13:37 -04:00
|
|
|
let mut ps = SourceBuilder::new(self.escape);
|
|
|
|
ps.reserve(original_source.len());
|
2020-07-14 14:21:56 -04:00
|
|
|
ps.feed_tokens(token_iter)?;
|
|
|
|
|
|
|
|
Ok(ps.finalize()?)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-06-04 16:39:33 -04:00
|
|
|
#[cfg(test)]
mod tests {
    use super::*;
    use crate::parser::Parser;

    // Smoke test: running a template through both `SourceBuilder` directly
    // and the public `Translator` must succeed without error.
    #[test]
    fn translate() {
        // Template mixing raw code blocks, a `<%%` sequence, and a
        // buffered block containing a line comment.
        let src = "<% pub fn sample() { %> <%% <%=//%>\n1%><% } %>";
        let lexer = Parser::new();
        let token_iter = lexer.parse(src);
        let mut ps = SourceBuilder {
            escape: true,
            source: String::with_capacity(token_iter.original_source.len()),
            source_map: SourceMap::default(),
        };
        ps.feed_tokens(token_iter.clone()).unwrap();
        Translator::new().translate(token_iter).unwrap();
    }
}
|