xshell_macros/lib.rs

//! Private implementation details of `xshell`.

#![deny(missing_debug_implementations)]
#![deny(rust_2018_idioms)]

use std::iter;

use proc_macro::{Delimiter, Group, Literal, Span, TokenStream, TokenTree};

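/// Expands to a chain of method calls that builds an `xshell` command.
/// Errors are reported via `compile_error!` instead of panicking, so the
/// caller gets a proper diagnostic.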
#[doc(hidden)]
#[proc_macro]
pub fn __cmd(macro_arg: TokenStream) -> TokenStream {
    try_cmd(macro_arg).unwrap_or_else(|msg| parse_ts(&format!("compile_error!({:?})", msg)))
}
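
// An illustrative sketch of the expansion (not verbatim output): assuming the
// caller passes a callable `f` as the first token, the way `xshell::cmd!`
// does,
//
//     __cmd!(f "git switch {branch} {flags...}")
//
// produces roughly
//
//     f("git").arg("switch").arg(&(branch)).args(flags)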

type Result<T> = std::result::Result<T, String>;

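/// Does the real work of `__cmd`: the input must be exactly two token trees,
/// the command expression followed by the format string literal.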
fn try_cmd(macro_arg: TokenStream) -> Result<TokenStream> {
    let (cmd, literal) = {
        let mut iter = macro_arg.into_iter();
        let cmd = iter.next().unwrap();
        let literal = iter.next().unwrap();
        assert!(iter.next().is_none());
        (cmd, literal)
    };

    let literal = match into_literal(&literal) {
        Some(it) => it,
        None => return Err("expected a plain string literal".to_string()),
    };

    let literal_text = literal.to_string();
    if !literal_text.starts_with('"') {
        return Err("expected a plain string literal".to_string());
    }

    let mut args = shell_lex(literal_text.as_str(), literal.span());

    let mut res = TokenStream::new();

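    // The first token names the program: `cmd` is emitted verbatim with the
    // parenthesized program right after it, so together they form a call
    // expression.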
    {
        let (_joined_to_prev, splat, program) =
            args.next().ok_or_else(|| "command can't be empty".to_string())??;
        if splat {
            return Err("can't splat program name".to_string());
        }
        res.extend(Some(cmd));
        res.extend(program);
    }

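    // Every remaining token becomes a chained method call; which method
    // depends on whether the token was glued to the previous one and whether
    // it is a `{var...}` splat.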
    let mut prev_spat = false;
    for arg in args {
        let (joined_to_prev, splat, arg) = arg?;
        if prev_spat && joined_to_prev {
            return Err(format!(
                "can't combine splat with concatenation, add spaces around `{{{}...}}`",
                trim_decorations(&res.into_iter().last().unwrap().to_string()),
            ));
        }
        prev_spat = splat;

        let method = match (joined_to_prev, splat) {
            (false, false) => ".arg",
            (false, true) => ".args",
            (true, false) => ".__extend_arg",
            (true, true) => {
                return Err(format!(
                    "can't combine splat with concatenation, add spaces around `{{{}...}}`",
                    trim_decorations(&arg.to_string()),
                ))
            }
        };

        res.extend(parse_ts(method));
        res.extend(arg);
    }

    Ok(res)
}

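/// Extracts the string literal, unwrapping one level of invisible
/// (`Delimiter::None`) grouping, which the literal acquires when it reaches
/// this macro through a `macro_rules!` metavariable.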
fn into_literal(ts: &TokenTree) -> Option<Literal> {
    match ts {
        TokenTree::Literal(l) => Some(l.clone()),
        TokenTree::Group(g) => match g.delimiter() {
            Delimiter::None => match g.stream().into_iter().collect::<Vec<_>>().as_slice() {
                [TokenTree::Literal(l)] => Some(l.clone()),
                _ => None,
            },
            Delimiter::Parenthesis | Delimiter::Brace | Delimiter::Bracket => None,
        },
        _ => None,
    }
}

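/// Strips the first and last character: the quotes of a string, the braces
/// of an interpolation, or the parentheses of an already-emitted argument.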
fn trim_decorations(s: &str) -> &str {
    &s[1..s.len() - 1]
}

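/// Lexes the format string into `(joined_to_prev, splat, tokens)` triples,
/// where `tokens` is a parenthesized argument ready to be spliced after a
/// method name. Non-splat interpolations are emitted by reference, `(&(x))`,
/// while splats are emitted by value, `(x)`, so `.args` can consume the
/// iterator.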
fn shell_lex(
    cmd: &str,
    call_site: Span,
) -> impl Iterator<Item = Result<(bool, bool, TokenStream)>> + '_ {
    tokenize(cmd).map(move |token| {
        let token = token?;
        let mut splat = false;
        let ts = match token.kind {
            TokenKind::Word => parse_ts(&format!("(\"{}\")", token.text)),
            TokenKind::String => parse_ts(&format!("(\"{}\")", trim_decorations(token.text))),
            TokenKind::Interpolation { splat: s } => {
                splat = s;
                let text = trim_decorations(token.text);
                let text = &text[..text.len() - (if splat { "...".len() } else { 0 })];
                if !(text.chars().all(|c| c.is_ascii_alphanumeric() || c == '_')) {
                    return Err(format!(
                        "can only interpolate simple variables, got this expression instead: `{}`",
                        text
                    ));
                }
                let ts = if splat { format!("({})", text) } else { format!("(&({}))", text) };
                respan(parse_ts(&ts), call_site)
            }
        };
        Ok((token.joined_to_prev, splat, ts))
    })
}

/// Like `str::trim_matches`, except it strips at most one match from each end.
fn strip_matches<'a>(s: &'a str, pattern: &str) -> &'a str {
    s.strip_prefix(pattern).unwrap_or(s).strip_suffix(pattern).unwrap_or(s)
}

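/// Splits the literal's source text (after removing its surrounding quotes)
/// into tokens, tracking for each one whether it was glued to the previous
/// token without intervening whitespace.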
fn tokenize(cmd: &str) -> impl Iterator<Item = Result<Token<'_>>> + '_ {
    let mut cmd = strip_matches(cmd, "\"");

    iter::from_fn(move || {
        let old_len = cmd.len();
        cmd = cmd.trim_start();
        let joined_to_prev = old_len == cmd.len();
        if cmd.is_empty() {
            return None;
        }
        let (len, kind) = match next_token(cmd) {
            Ok(it) => it,
            Err(err) => {
                cmd = "";
                return Some(Err(err));
            }
        };
        let token = Token { joined_to_prev, text: &cmd[..len], kind };
        cmd = &cmd[len..];
        Some(Ok(token))
    })
}

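/// One token of the format string. `joined_to_prev` records that no
/// whitespace separated it from the preceding token.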
#[derive(Debug)]
struct Token<'a> {
    joined_to_prev: bool,
    text: &'a str,
    kind: TokenKind,
}
#[derive(Debug)]
enum TokenKind {
    Word,
    String,
    Interpolation { splat: bool },
}

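/// Measures the next token: a `{...}` interpolation (with an optional `...`
/// splat suffix before the closing brace), a `'...'` single-quoted string, or
/// a bare word running up to whitespace, `'`, or `{`.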
fn next_token(s: &str) -> Result<(usize, TokenKind)> {
    if s.starts_with('{') {
        let len = s.find('}').ok_or_else(|| "unclosed `{` in command".to_string())? + 1;
        let splat = s[..len].ends_with("...}");
        return Ok((len, TokenKind::Interpolation { splat }));
    }
    if s.starts_with('\'') {
        let len = s[1..].find('\'').ok_or_else(|| "unclosed `'` in command".to_string())? + 2;
        return Ok((len, TokenKind::String));
    }
    let len =
        s.find(|it: char| it.is_ascii_whitespace() || it == '\'' || it == '{').unwrap_or(s.len());
    Ok((len, TokenKind::Word))
}

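/// Re-spans every identifier to `span` so that interpolated variable names
/// resolve at the macro call site and diagnostics point there; groups are
/// rebuilt recursively so nested identifiers are covered too.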
fn respan(ts: TokenStream, span: Span) -> TokenStream {
    let mut res = TokenStream::new();
    for tt in ts {
        let tt = match tt {
            TokenTree::Ident(mut ident) => {
                ident.set_span(ident.span().resolved_at(span).located_at(span));
                TokenTree::Ident(ident)
            }
            TokenTree::Group(group) => {
                TokenTree::Group(Group::new(group.delimiter(), respan(group.stream(), span)))
            }
            _ => tt,
        };
        res.extend(Some(tt))
    }
    res
}

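/// Parses a snippet of Rust source into a `TokenStream`.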
fn parse_ts(s: &str) -> TokenStream {
    s.parse().unwrap()
}
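
// Not part of the original crate: a minimal sketch of tests for the
// string-level tokenizer, which, unlike the proc-macro entry points above,
// needs no live macro-expansion context to run.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tokenizes_words_strings_and_interpolations() {
        let tokens = tokenize(r#""ls 'a b' --color={mode} {args...}""#)
            .collect::<Result<Vec<_>>>()
            .unwrap();
        let texts: Vec<_> = tokens.iter().map(|t| t.text).collect();
        assert_eq!(texts, ["ls", "'a b'", "--color=", "{mode}", "{args...}"]);
        // `{mode}` follows `--color=` with no space, so it is joined...
        assert!(tokens[3].joined_to_prev);
        // ...while `--color=` itself comes after a space.
        assert!(!tokens[2].joined_to_prev);
    }

    #[test]
    fn reports_unclosed_interpolation() {
        let err = tokenize(r#""echo {oops""#).collect::<Result<Vec<_>>>().unwrap_err();
        assert_eq!(err, "unclosed `{` in command");
    }
}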