Skip to content

Commit

Permalink
Rewrite the line_docs generator to use a HashMap keyed by rule_name.
Browse files Browse the repository at this point in the history
  • Loading branch information
huacnlee committed Jan 18, 2023
1 parent 9043679 commit 90abcb4
Show file tree
Hide file tree
Showing 3 changed files with 101 additions and 76 deletions.
88 changes: 44 additions & 44 deletions generator/src/generator.rs
Expand Up @@ -7,6 +7,7 @@
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.

use std::collections::HashMap;
use std::path::PathBuf;

use proc_macro2::TokenStream;
Expand All @@ -18,17 +19,31 @@ use pest_meta::ast::*;
use pest_meta::optimizer::*;

#[derive(Debug)]
pub(crate) struct DocComment<'a> {
pub(crate) grammar_docs: Vec<&'a str>,
pub(crate) line_docs: Vec<Vec<&'a str>>,
pub(crate) rules: Vec<Rule>,
pub(crate) struct DocComment {
    /// The grammar-level doc comment; multiple doc lines are joined with `\n`.
    ///
    /// e.g.
    ///
    /// ```ignore
    /// "grammar doc 1\ngrammar doc 2"
    /// ```
    grammar_doc: String,
    /// Maps each rule name to that rule's doc comment; multiple doc lines
    /// for one rule are joined with `\n`.
    ///
    /// e.g.
    ///
    /// ```ignore
    /// { "foo": "line doc 1\nline doc 2", "bar": "line doc 3" }
    /// ```
    line_docs: HashMap<String, String>,
}

impl DocComment<'_> {
fn line_docs_for_rule(&self, rule_name: &str) -> Option<String> {
let idx = self.rules.iter().position(|r| r.name == rule_name)?;

self.line_docs.get(idx).map(|comments| comments.join("\n"))
impl DocComment {
pub fn new(grammar_doc: String, line_docs: HashMap<String, String>) -> Self {
Self {
grammar_doc,
line_docs,
}
}
}

Expand All @@ -38,7 +53,7 @@ pub(crate) fn generate(
path: Option<PathBuf>,
rules: Vec<OptimizedRule>,
defaults: Vec<&str>,
doc_comment: &DocComment<'_>,
doc_comment: &DocComment,
include_grammar: bool,
) -> TokenStream {
let uses_eoi = defaults.iter().any(|name| *name == "EOI");
Expand Down Expand Up @@ -197,32 +212,28 @@ fn generate_include(name: &Ident, path: &str) -> TokenStream {
}
}

fn generate_enum(
rules: &[OptimizedRule],
doc_comment: &DocComment<'_>,
uses_eoi: bool,
) -> TokenStream {
fn generate_enum(rules: &[OptimizedRule], doc_comment: &DocComment, uses_eoi: bool) -> TokenStream {
let rules = rules.iter().map(|rule| {
let rule_name = format_ident!("r#{}", rule.name);

let comments = doc_comment.line_docs_for_rule(&rule.name);
let comments = comments.unwrap_or_else(|| "".to_owned());
if comments.is_empty() {
let line_doc = doc_comment.line_docs.get(&rule.name);
if line_doc.is_none() {
quote! {
#rule_name
}
} else {
let doc = line_doc.unwrap();
quote! {
#[doc = #comments]
#[doc = #doc]
#rule_name
}
}
});

let grammar_docs = doc_comment.grammar_docs.join("\n");
let grammar_doc = &doc_comment.grammar_doc;
if uses_eoi {
quote! {
#[doc = #grammar_docs]
#[doc = #grammar_doc]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
Expand All @@ -232,7 +243,7 @@ fn generate_enum(
}
} else {
quote! {
#[doc = #grammar_docs]
#[doc = #grammar_doc]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub enum Rule {
Expand Down Expand Up @@ -709,14 +720,12 @@ mod tests {
expr: OptimizedExpr::Ident("g".to_owned()),
}];

let mut line_docs = HashMap::new();
line_docs.insert("f".to_owned(), "This is rule comment".to_owned());

let doc_comment = &DocComment {
grammar_docs: vec!["Rule doc", "hello"],
line_docs: vec![vec!["This is rule comment"]],
rules: vec![Rule {
name: "f".to_owned(),
ty: RuleType::Normal,
expr: Expr::Ident("g".to_owned()),
}],
grammar_doc: "Rule doc\nhello".to_owned(),
line_docs,
};

assert_eq!(
Expand Down Expand Up @@ -1009,7 +1018,7 @@ mod tests {
}

#[test]
fn generate_complete() {
fn test_generate_complete() {
let name = Ident::new("MyParser", Span::call_site());
let generics = Generics::default();

Expand All @@ -1026,21 +1035,12 @@ mod tests {
},
];

let mut line_docs = HashMap::new();
line_docs.insert("if".to_owned(), "If statement".to_owned());

let doc_comment = &DocComment {
line_docs: vec![vec![], vec!["If statement"]],
grammar_docs: vec!["This is Rule doc", "This is second line"],
rules: vec![
Rule {
name: "a".to_owned(),
ty: RuleType::Silent,
expr: Expr::Str("b".to_owned()),
},
Rule {
name: "if".to_owned(),
ty: RuleType::Silent,
expr: Expr::Str("b".to_owned()),
},
],
line_docs,
grammar_doc: "This is Rule doc\nThis is second line".to_owned(),
};

let defaults = vec!["ANY"];
Expand Down
88 changes: 56 additions & 32 deletions generator/src/lib.rs
Expand Up @@ -21,6 +21,7 @@
#[macro_use]
extern crate quote;

use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read};
Expand Down Expand Up @@ -93,31 +94,28 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
Err(error) => panic!("error parsing \n{}", error.renamed_rules(rename_meta_rule)),
};

let grammar_docs = consume_grammar_doc(pairs.clone());
let grammar_doc = consume_grammar_doc(pairs.clone());
let line_docs = consume_line_docs(pairs.clone());

let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
let ast = unwrap_or_report(parser::consume_rules(pairs));
let optimized = optimizer::optimize(ast.clone());

let doc_comment = DocComment {
grammar_docs,
line_docs,
rules: ast,
};
let doc_comment = &DocComment::new(grammar_doc, line_docs);

generator::generate(
name,
&generics,
path,
optimized,
defaults,
&doc_comment,
doc_comment,
include_grammar,
)
}

fn consume_grammar_doc(pairs: Pairs<'_, Rule>) -> Vec<&'_ str> {
/// Consume the grammar doc into a single String, with multiple doc lines joined by `\n`.
fn consume_grammar_doc<'a>(pairs: Pairs<'_, Rule>) -> String {
let mut docs = vec![];
for pair in pairs {
if pair.as_rule() == Rule::grammar_doc {
Expand All @@ -126,31 +124,52 @@ fn consume_grammar_doc(pairs: Pairs<'_, Rule>) -> Vec<&'_ str> {
}
}

docs
docs.join("\n")
}

fn consume_line_docs(pairs: Pairs<'_, Rule>) -> Vec<Vec<&'_ str>> {
let mut docs = vec![];
/// Consume line docs into a `HashMap<rule_name, doc>`.
///
/// For example, given a `test.pest`:
///
/// ```ignore
/// /// Line doc 1
/// foo = {}
///
/// /// Line doc 2
/// /// Line doc 3
/// bar = {}
/// ```
///
/// this returns `{ "foo": "Line doc 1", "bar": "Line doc 2\nLine doc 3" }`.
fn consume_line_docs<'a>(pairs: Pairs<'_, Rule>) -> HashMap<String, String> {
let mut docs: HashMap<String, String> = HashMap::new();
let mut comments = vec![];

for pair in pairs {
if pair.as_rule() == Rule::grammar_rule {
let rule = pair.as_rule();

if rule == Rule::grammar_rule {
if let Some(inner) = pair.into_inner().next() {
if inner.as_rule() == Rule::line_doc {
let inner_doc = inner.into_inner().next().unwrap();
comments.push(inner_doc.as_str());
continue;
} else {
docs.push(comments);
comments = vec![];
// grammar_rule > line_doc | identifier
match inner.as_rule() {
Rule::line_doc => {
// line_doc > inner_doc
match inner.into_inner().next() {
Some(inner_doc) => comments.push(inner_doc.as_str()),
None => (),
}
}
Rule::identifier => {
if comments.len() > 0 {
let rule_name = inner.as_str().to_owned();
docs.insert(rule_name, comments.join("\n"));
comments = vec![];
}
}
_ => (),
}
}
}

if !comments.is_empty() {
docs.push(comments);
comments = vec![];
}
}

docs
Expand Down Expand Up @@ -214,6 +233,8 @@ fn get_attribute(attr: &Attribute) -> GrammarSource {

#[cfg(test)]
mod tests {
use std::collections::HashMap;

use super::consume_line_docs;
use super::parse_derive;
use super::GrammarSource;
Expand Down Expand Up @@ -296,15 +317,18 @@ mod tests {
};

let line_docs = consume_line_docs(pairs);
assert_eq!(
vec![
vec!["Matches foo str, e.g.: `foo`"],
vec!["Matches bar str,", " Indent 2, e.g: `bar` or `foobar`"],
vec![],
vec!["Matches dar", "Match dar description"]
],
line_docs

let mut expected = HashMap::new();
expected.insert("foo".to_owned(), "Matches foo str, e.g.: `foo`".to_owned());
expected.insert(
"bar".to_owned(),
"Matches bar str,\n Indent 2, e.g: `bar` or `foobar`".to_owned(),
);
expected.insert(
"dar".to_owned(),
"Matches dar\nMatch dar description".to_owned(),
);
assert_eq!(expected, line_docs);
}

#[test]
Expand Down
1 change: 1 addition & 0 deletions grammars/src/grammars/json.pest
Expand Up @@ -8,6 +8,7 @@
// modified, or distributed except according to those terms.

//! A parser for JSON file.
//!
//! And this is an example of a JSON parser.
json = { SOI ~ (object | array) ~ EOI }

Expand Down

0 comments on commit 90abcb4

Please sign in to comment.