#![doc(
    html_root_url = "https://docs.rs/pest_derive",
    html_logo_url = "https://raw.githubusercontent.com/pest-parser/pest/master/pest-logo.svg",
    html_favicon_url = "https://raw.githubusercontent.com/pest-parser/pest/master/pest-logo.svg"
)]
#![warn(missing_docs, rust_2018_idioms, unused_qualifications)]
#![recursion_limit = "256"]
#[macro_use]
extern crate quote;

use std::env;
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;

use proc_macro2::TokenStream;
use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};

#[macro_use]
mod macros;
mod docs;
mod generator;

use pest_meta::parser::{self, rename_meta_rule, Rule};
use pest_meta::{optimizer, unwrap_or_report, validator};

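/// Processes the derive/proc macro input and generates the corresponding parser
/// based on the parsed grammar. If `include_grammar` is set to true, the grammar
/// file is included in the generated parser.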
pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
    let ast: DeriveInput = syn::parse2(input).unwrap();
    let (name, generics, contents) = parse_derive(ast);

    let mut data = String::new();
    let mut path = None;

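    // Concatenate every grammar source into a single buffer, remembering the
    // last file path seen so it can be handed on to the generator.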
    for content in contents {
        let (_data, _path) = match content {
            GrammarSource::File(ref path) => {
                let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());

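                // Use the path as given relative to CARGO_MANIFEST_DIR if it
                // exists; otherwise fall back to the crate's `src/` directory.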
                let path = if Path::new(&root).join(path).exists() {
                    Path::new(&root).join(path)
                } else {
                    Path::new(&root).join("src/").join(path)
                };

                let file_name = match path.file_name() {
                    Some(file_name) => file_name,
                    None => panic!("grammar attribute should point to a file"),
                };

                let data = match read_file(&path) {
                    Ok(data) => data,
                    Err(error) => panic!("error opening {:?}: {}", file_name, error),
                };
                (data, Some(path.clone()))
            }
            GrammarSource::Inline(content) => (content, None),
        };

        data.push_str(&_data);
        if _path.is_some() {
            path = _path;
        }
    }

    let pairs = match parser::parse(Rule::grammar_rules, &data) {
        Ok(pairs) => pairs,
        Err(error) => panic!("error parsing \n{}", error.renamed_rules(rename_meta_rule)),
    };

    let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
    let doc_comment = docs::consume(pairs.clone());
    let ast = unwrap_or_report(parser::consume_rules(pairs));
    let optimized = optimizer::optimize(ast);

    generator::generate(
        name,
        &generics,
        path,
        optimized,
        defaults,
        &doc_comment,
        include_grammar,
    )
}

fn read_file<P: AsRef<Path>>(path: P) -> io::Result<String> {
    let mut file = File::open(path.as_ref())?;
    let mut string = String::new();
    file.read_to_string(&mut string)?;
    Ok(string)
}

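/// Where a grammar comes from: a path to a `.pest` file or an inline string.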
#[derive(Debug, PartialEq)]
enum GrammarSource {
    File(String),
    Inline(String),
}

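// Splits the derive input into the type name, its generics, and every grammar
// source declared via a `#[grammar = "..."]` or `#[grammar_inline = "..."]` attribute.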
fn parse_derive(ast: DeriveInput) -> (Ident, Generics, Vec<GrammarSource>) {
    let name = ast.ident;
    let generics = ast.generics;

    let grammar: Vec<&Attribute> = ast
        .attrs
        .iter()
        .filter(|attr| match attr.parse_meta() {
            Ok(Meta::NameValue(name_value)) => {
                name_value.path.is_ident("grammar") || name_value.path.is_ident("grammar_inline")
            }
            _ => false,
        })
        .collect();

    if grammar.is_empty() {
        panic!("a grammar file needs to be provided with the #[grammar = \"PATH\"] or #[grammar_inline = \"GRAMMAR CONTENTS\"] attribute");
    }

    let mut grammar_sources = Vec::with_capacity(grammar.len());
    for attr in grammar {
        grammar_sources.push(get_attribute(attr))
    }

    (name, generics, grammar_sources)
}

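// Converts a single `#[grammar = "..."]` or `#[grammar_inline = "..."]`
// attribute into the corresponding `GrammarSource`.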
fn get_attribute(attr: &Attribute) -> GrammarSource {
    match attr.parse_meta() {
        Ok(Meta::NameValue(name_value)) => match name_value.lit {
            Lit::Str(string) => {
                if name_value.path.is_ident("grammar") {
                    GrammarSource::File(string.value())
                } else {
                    GrammarSource::Inline(string.value())
                }
            }
            _ => panic!("grammar attribute must be a string"),
        },
        _ => panic!("grammar attribute must be of the form `grammar = \"...\"`"),
    }
}

#[cfg(test)]
mod tests {
    use super::parse_derive;
    use super::GrammarSource;

    #[test]
    fn derive_inline_file() {
        let definition = "
            #[other_attr]
            #[grammar_inline = \"GRAMMAR\"]
            pub struct MyParser<'a, T>;
        ";
        let ast = syn::parse_str(definition).unwrap();
        let (_, _, filenames) = parse_derive(ast);
        assert_eq!(filenames, [GrammarSource::Inline("GRAMMAR".to_string())]);
    }

    #[test]
    fn derive_ok() {
        let definition = "
            #[other_attr]
            #[grammar = \"myfile.pest\"]
            pub struct MyParser<'a, T>;
        ";
        let ast = syn::parse_str(definition).unwrap();
        let (_, _, filenames) = parse_derive(ast);
        assert_eq!(filenames, [GrammarSource::File("myfile.pest".to_string())]);
    }

    #[test]
    fn derive_multiple_grammars() {
        let definition = "
            #[other_attr]
            #[grammar = \"myfile1.pest\"]
            #[grammar = \"myfile2.pest\"]
            pub struct MyParser<'a, T>;
        ";
        let ast = syn::parse_str(definition).unwrap();
        let (_, _, filenames) = parse_derive(ast);
        assert_eq!(
            filenames,
            [
                GrammarSource::File("myfile1.pest".to_string()),
                GrammarSource::File("myfile2.pest".to_string())
            ]
        );
    }

    #[test]
    #[should_panic(expected = "grammar attribute must be a string")]
    fn derive_wrong_arg() {
        let definition = "
            #[other_attr]
            #[grammar = 1]
            pub struct MyParser<'a, T>;
        ";
        let ast = syn::parse_str(definition).unwrap();
        parse_derive(ast);
    }

    #[test]
    #[should_panic(
        expected = "a grammar file needs to be provided with the #[grammar = \"PATH\"] or #[grammar_inline = \"GRAMMAR CONTENTS\"] attribute"
    )]
    fn derive_no_grammar() {
        let definition = "
            #[other_attr]
            pub struct MyParser<'a, T>;
        ";
        let ast = syn::parse_str(definition).unwrap();
        parse_derive(ast);
    }

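    // Checks that `///` doc comments in the grammar are propagated onto the
    // generated `Rule` enum and its variants.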
    #[test]
    fn test_generate_doc() {
        let input = quote! {
            #[derive(Parser)]
            #[grammar = "../tests/test.pest"]
            pub struct TestParser;
        };

        let token = super::derive_parser(input, true);

        let expected = quote! {
            #[doc = "A parser for JSON file.\nAnd this is a example for JSON parser.\n\n    indent-4-space\n"]
            #[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
            #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]

            pub enum Rule {
                #[doc = "Matches foo str, e.g.: `foo`"]
                r#foo,
                #[doc = "Matches bar str\n\n  Indent 2, e.g: `bar` or `foobar`"]
                r#bar,
                r#bar1,
                #[doc = "Matches dar\n\nMatch dar description\n"]
                r#dar
            }
        };

        assert!(
            token.to_string().contains(expected.to_string().as_str()),
            "{}\n\nExpected to contain:\n{}",
            token,
            expected
        );
    }
}