Skip to content

Commit

Permalink
Move DocComment methods into docs.rs
Browse files Browse the repository at this point in the history
  • Loading branch information
huacnlee committed Jan 18, 2023
1 parent 63985fc commit 4572232
Show file tree
Hide file tree
Showing 3 changed files with 134 additions and 127 deletions.
127 changes: 127 additions & 0 deletions generator/src/docs.rs
@@ -0,0 +1,127 @@
use pest::iterators::Pairs;
use pest_meta::parser::Rule;
use std::collections::HashMap;

/// Documentation comments extracted from a pest grammar file:
/// the file-level (`//!`-style) grammar doc plus per-rule line docs.
#[derive(Debug)]
pub(crate) struct DocComment {
    /// Multi-line grammar doc, (joined with `\n`)
    ///
    /// e.g.
    ///
    /// ```ignore
    /// "grammar doc 1\ngrammar doc 2"
    /// ```
    pub grammar_doc: String,
    /// HashMap rule name and doc comments (joined with `\n`)
    ///
    /// e.g.
    ///
    /// ```ignore
    /// { "foo": "line doc 1\nline doc 2", "bar": "line doc 3" }
    /// ```
    pub line_docs: HashMap<String, String>,
}

/// Extract all documentation comments from the parsed grammar `pairs`
/// and bundle them into a [`DocComment`].
///
/// The grammar-level doc is gathered first (from a clone of the pair
/// stream), then the per-rule line docs consume the stream itself.
pub(crate) fn consume(pairs: Pairs<'_, Rule>) -> DocComment {
    DocComment {
        grammar_doc: consume_grammar_doc(pairs.clone()),
        line_docs: consume_line_docs(pairs),
    }
}

/// Collect every `grammar_doc` comment in `pairs` into one `String`,
/// with individual doc lines joined by `\n`.
fn consume_grammar_doc(pairs: Pairs<'_, Rule>) -> String {
    let docs: Vec<&str> = pairs
        .filter(|pair| pair.as_rule() == Rule::grammar_doc)
        // grammar_doc > inner doc text; a grammar_doc pair always has
        // exactly one inner pair, so unwrap cannot fail here.
        .map(|pair| pair.into_inner().next().unwrap().as_str())
        .collect();

    docs.join("\n")
}

/// Consume line docs into `HashMap<rule_name, doc>`, with multi-line
/// docs joined by `\n`.
///
/// For example, a `test.pest` containing:
///
/// ```ignore
/// /// Line doc 1
/// foo = {}
///
/// /// Line doc 2
/// /// Line doc 3
/// bar = {}
/// ```
///
/// will return `{ "foo": "Line doc 1", "bar": "Line doc 2\nLine doc 3" }`
fn consume_line_docs(pairs: Pairs<'_, Rule>) -> HashMap<String, String> {
    let mut docs: HashMap<String, String> = HashMap::new();
    // Accumulates consecutive line-doc texts until the next rule
    // identifier is seen, at which point they are attached to that rule.
    let mut comments = vec![];

    for pair in pairs {
        let rule = pair.as_rule();

        if rule == Rule::grammar_rule {
            if let Some(inner) = pair.into_inner().next() {
                // grammar_rule > line_doc | identifier
                match inner.as_rule() {
                    Rule::line_doc => {
                        // Strip the `///` marker: the inner pair is the doc text only.
                        if let Some(inner_doc) = inner.into_inner().next() {
                            comments.push(inner_doc.as_str())
                        }
                    }
                    Rule::identifier => {
                        // A rule name ends the current doc run: attach the
                        // buffered comments to it and start a fresh buffer.
                        if !comments.is_empty() {
                            let rule_name = inner.as_str().to_owned();
                            docs.insert(rule_name, comments.join("\n"));
                            comments = vec![];
                        }
                    }
                    _ => (),
                }
            }
        }
    }

    docs
}

#[cfg(test)]
mod tests {
    use std::collections::HashMap;

    use pest_meta::parser;
    use pest_meta::parser::Rule;

    /// End-to-end check of `super::consume` against the fixture grammar
    /// in `tests/test.pest`: verifies both the per-rule line docs and
    /// the joined file-level grammar doc.
    #[test]
    fn test_doc_comment() {
        let pairs = match parser::parse(Rule::grammar_rules, include_str!("../tests/test.pest")) {
            Ok(pairs) => pairs,
            Err(_) => panic!("error parsing tests/test.pest"),
        };

        let doc_comment = super::consume(pairs);

        // Expected rule-name -> joined line-doc pairs from the fixture;
        // multi-line docs are joined with `\n` and keep inner indentation.
        let mut expected = HashMap::new();
        expected.insert("foo".to_owned(), "Matches foo str, e.g.: `foo`".to_owned());
        expected.insert(
            "bar".to_owned(),
            "Matches bar str,\n  Indent 2, e.g: `bar` or `foobar`".to_owned(),
        );
        expected.insert(
            "dar".to_owned(),
            "Matches dar\nMatch dar description".to_owned(),
        );
        assert_eq!(expected, doc_comment.line_docs);

        // Grammar-level doc lines are joined with `\n`, preserving blank
        // lines and leading indentation from the source comments.
        assert_eq!(
            "A parser for JSON file.\nAnd this is a example for JSON parser.\n\n  indent-4-space",
            doc_comment.grammar_doc
        );
    }
}
35 changes: 4 additions & 31 deletions generator/src/generator.rs
Expand Up @@ -7,7 +7,6 @@
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.

use std::collections::HashMap;
use std::path::PathBuf;

use proc_macro2::TokenStream;
Expand All @@ -18,34 +17,7 @@ use pest::unicode::unicode_property_names;
use pest_meta::ast::*;
use pest_meta::optimizer::*;

#[derive(Debug)]
pub(crate) struct DocComment {
/// Multi-line grammar doc, (joined with `\n`)
///
/// e.g.
///
/// ```ignore
/// "grammar doc 1\ngrammar doc 2"
/// ```
grammar_doc: String,
/// HashMap rule name and doc comments (joined with `\n`)
///
/// e.g.
///
/// ```ignore
/// { "foo": "line doc 1\nline doc 2", "bar": "line doc 3" }
/// ```
line_docs: HashMap<String, String>,
}

impl DocComment {
pub fn new(grammar_doc: String, line_docs: HashMap<String, String>) -> Self {
Self {
grammar_doc,
line_docs,
}
}
}
use crate::docs::DocComment;

pub(crate) fn generate(
name: Ident,
Expand Down Expand Up @@ -705,10 +677,11 @@ fn option_type() -> TokenStream {

#[cfg(test)]
mod tests {
use proc_macro2::Span;

use super::*;

use proc_macro2::Span;
use std::collections::HashMap;

#[test]
fn rule_enum_simple() {
let rules = vec![OptimizedRule {
Expand Down
99 changes: 3 additions & 96 deletions generator/src/lib.rs
Expand Up @@ -21,25 +21,22 @@
#[macro_use]
extern crate quote;

use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read};
use std::path::Path;

use pest::iterators::Pairs;
use proc_macro2::TokenStream;
use syn::{Attribute, DeriveInput, Generics, Ident, Lit, Meta};

#[macro_use]
mod macros;
mod docs;
mod generator;

use pest_meta::parser::{self, rename_meta_rule, Rule};
use pest_meta::{optimizer, unwrap_or_report, validator};

use generator::DocComment;

/// Processes the derive/proc macro input and generates the corresponding parser based
/// on the parsed grammar. If `include_grammar` is set to true, it'll generate an explicit
/// "include_str" statement (done in pest_derive, but turned off in the local bootstrap).
Expand Down Expand Up @@ -94,85 +91,22 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
Err(error) => panic!("error parsing \n{}", error.renamed_rules(rename_meta_rule)),
};

let grammar_doc = consume_grammar_doc(pairs.clone());
let line_docs = consume_line_docs(pairs.clone());

let defaults = unwrap_or_report(validator::validate_pairs(pairs.clone()));
let doc_comment = docs::consume(pairs.clone());
let ast = unwrap_or_report(parser::consume_rules(pairs));
let optimized = optimizer::optimize(ast);

let doc_comment = &DocComment::new(grammar_doc, line_docs);

generator::generate(
name,
&generics,
path,
optimized,
defaults,
doc_comment,
&doc_comment,
include_grammar,
)
}

/// Consume grammar doc into String, multi-line joined with `\n`
fn consume_grammar_doc(pairs: Pairs<'_, Rule>) -> String {
let mut docs = vec![];
for pair in pairs {
if pair.as_rule() == Rule::grammar_doc {
let inner_doc = pair.into_inner().next().unwrap();
docs.push(inner_doc.as_str());
}
}

docs.join("\n")
}

/// Consume line docs into HashMap<rule_name, doc>
///
/// Example a `test.pest`:
///
/// ```ignore
/// /// Line doc 1
/// foo = {}
///
/// /// Line doc 2
/// /// Line doc 3
/// bar = {}
/// ```
///
/// Will returns `{ "foo": "This is line comment", "bar": "Line doc 2\n/// Line doc 3" }`
fn consume_line_docs(pairs: Pairs<'_, Rule>) -> HashMap<String, String> {
let mut docs: HashMap<String, String> = HashMap::new();
let mut comments = vec![];

for pair in pairs {
let rule = pair.as_rule();

if rule == Rule::grammar_rule {
if let Some(inner) = pair.into_inner().next() {
// grammar_rule > line_doc | identifier
match inner.as_rule() {
Rule::line_doc => {
if let Some(inner_doc) = inner.into_inner().next() {
comments.push(inner_doc.as_str())
}
}
Rule::identifier => {
if !comments.is_empty() {
let rule_name = inner.as_str().to_owned();
docs.insert(rule_name, comments.join("\n"));
comments = vec![];
}
}
_ => (),
}
}
}
}

docs
}

fn read_file<P: AsRef<Path>>(path: P) -> io::Result<String> {
let mut file = File::open(path.as_ref())?;
let mut string = String::new();
Expand Down Expand Up @@ -231,14 +165,9 @@ fn get_attribute(attr: &Attribute) -> GrammarSource {

#[cfg(test)]
mod tests {
use std::collections::HashMap;

use super::consume_line_docs;
use super::parse_derive;
use super::GrammarSource;

use pest_meta::parser::{self, Rule};

#[test]
fn derive_inline_file() {
let definition = "
Expand Down Expand Up @@ -307,28 +236,6 @@ mod tests {
parse_derive(ast);
}

#[test]
fn test_consume_line_docs() {
let pairs = match parser::parse(Rule::grammar_rules, include_str!("../tests/test.pest")) {
Ok(pairs) => pairs,
Err(_) => panic!("error parsing tests/test.pest"),
};

let line_docs = consume_line_docs(pairs);

let mut expected = HashMap::new();
expected.insert("foo".to_owned(), "Matches foo str, e.g.: `foo`".to_owned());
expected.insert(
"bar".to_owned(),
"Matches bar str,\n Indent 2, e.g: `bar` or `foobar`".to_owned(),
);
expected.insert(
"dar".to_owned(),
"Matches dar\nMatch dar description".to_owned(),
);
assert_eq!(expected, line_docs);
}

#[test]
fn test_generate_doc() {
let input = quote! {
Expand Down

0 comments on commit 4572232

Please sign in to comment.