Commit

Merge branch 'master' into master
01mf02 authored Oct 4, 2021
2 parents 5113e5b + 9acced9 commit 338373c
Showing 9 changed files with 57 additions and 42 deletions.
25 changes: 8 additions & 17 deletions .github/workflows/ci.yml
@@ -24,6 +24,7 @@ jobs:
with:
toolchain: 1.51.0 # Pinned warnings
components: rustfmt, clippy
default: true
- name: Install gcc
run: sudo apt-get update && sudo apt-get install -y gcc
- name: Bootstraping Grammars - Building
@@ -103,11 +104,10 @@ jobs:
- name: Install Rust Nightly
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2021-01-01
toolchain: nightly-2021-08-01
profile: minimal
components: llvm-tools-preview
default: true
- name: Install gcc
run: sudo apt-get update && sudo apt-get install -y gcc libssl-dev pkg-config
- name: Bootstraping Grammars - Building
uses: actions-rs/cargo@v1
with:
@@ -118,23 +118,14 @@
with:
command: run
args: --package pest_bootstrap
- name: Install Tarpaulin
uses: actions-rs/install@v0.1
with:
crate: cargo-tarpaulin
version: latest
use-tool-cache: true
- name: Run Tarpaulin
env:
RUSTFLAGS: --cfg procmacro2_semver_exempt
uses: actions-rs/cargo@v1
with:
command: tarpaulin
args: --verbose --out Xml
- name: Install cargo-llvm-cov
run: curl -LsSf https://github.com/taiki-e/cargo-llvm-cov/releases/latest/download/cargo-llvm-cov-x86_64-unknown-linux-gnu.tar.gz | tar xzf - -C ~/.cargo/bin
- name: Generate code coverage
run: cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info
- name: Upload Results to Codecov
uses: codecov/codecov-action@v1
with:
file: ./cobertura.xml
file: lcov.info
flags: unittests
name: pest-ci-coverage
fail_ci_if_error: false
7 changes: 5 additions & 2 deletions README.md
@@ -143,13 +143,14 @@ Digit: 2
* [pest_meta](https://github.com/pest-parser/pest/blob/master/meta/src/grammar.pest) (bootstrapped)
* [AshPaper](https://github.com/shnewto/ashpaper)
* [brain](https://github.com/brain-lang/brain)
* [Chelone](https://github.com/Aaronepower/chelone)
* [cicada](https://github.com/mitnk/cicada)
* [comrak](https://github.com/kivikakk/comrak)
* [elastic-rs](https://github.com/cch123/elastic-rs)
* [graphql-parser](https://github.com/Keats/graphql-parser)
* [handlebars-rust](https://github.com/sunng87/handlebars-rust)
* [hexdino](https://github.com/Luz/hexdino)
* [Huia](https://gitlab.com/jimsy/huia/)
* [insta](https://github.com/mitsuhiko/insta)
* [jql](https://github.com/yamafaktory/jql)
* [json5-rs](https://github.com/callum-oakley/json5-rs)
* [mt940](https://github.com/svenstaro/mt940-rs)
@@ -164,8 +165,10 @@ Digit: 2
* [ukhasnet-parser](https://github.com/adamgreig/ukhasnet-parser)
* [ZoKrates](https://github.com/ZoKrates/ZoKrates)
* [Vector](https://github.com/timberio/vector)
* [AutoCorrect](https://github.com/huacnlee/autocorrect)
* [yaml-peg](https://github.com/aofdev/yaml-peg)

## Special thanks

A special round of applause goes to prof. Marius Minea for his guidance and all
pest contributors, some of which being none other than my friends.
pest contributors, some of which being none other than my friends.
20 changes: 5 additions & 15 deletions generator/src/generator.rs
@@ -7,8 +7,6 @@
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.

use std::path::PathBuf;

use proc_macro2::{Span, TokenStream};
use quote::{ToTokens, TokenStreamExt};
use syn::{self, Generics, Ident};
@@ -21,7 +19,7 @@ use pest_meta::UNICODE_PROPERTY_NAMES;
pub fn generate(
name: Ident,
generics: &Generics,
path: Option<PathBuf>,
path: Option<String>,
rules: Vec<OptimizedRule>,
defaults: Vec<&str>,
include_grammar: bool,
@@ -31,7 +29,7 @@ pub fn generate(
let builtins = generate_builtin_rules();
let include_fix = if include_grammar {
match path {
Some(ref path) => generate_include(&name, path.to_str().expect("non-Unicode path")),
Some(ref path) => generate_include(&name, path),
None => quote!(),
}
} else {
@@ -171,14 +169,9 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
// Needed because Cargo doesn't watch for changes in grammars.
fn generate_include(name: &Ident, path: &str) -> TokenStream {
let const_name = Ident::new(&format!("_PEST_GRAMMAR_{}", name), Span::call_site());
// Need to make this relative to the current directory since the path to the file
// is derived from the CARGO_MANIFEST_DIR environment variable
let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
current_dir.push(path);
let relative_path = current_dir.to_str().expect("path contains invalid unicode");
quote! {
#[allow(non_upper_case_globals)]
const #const_name: &'static str = include_str!(#relative_path);
const #const_name: &'static str = include_str!(#path);
}
}

@@ -967,16 +960,13 @@ mod tests {
expr: OptimizedExpr::Str("b".to_owned()),
}];
let defaults = vec!["ANY"];
let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
current_dir.push("test.pest");
let test_path = current_dir.to_str().expect("path contains invalid unicode");
let result = result_type();
let box_ty = box_type();
assert_eq!(
generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, true).to_string(),
generate(name, &generics, Some(String::from("test.pest")), rules, defaults, true).to_string(),
quote! {
#[allow(non_upper_case_globals)]
const _PEST_GRAMMAR_MyParser: &'static str = include_str!(#test_path);
const _PEST_GRAMMAR_MyParser: &'static str = include_str!("test.pest");

#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
#[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
10 changes: 5 additions & 5 deletions generator/src/lib.rs
@@ -39,19 +39,19 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let (name, generics, content) = parse_derive(ast);

let (data, path) = match content {
GrammarSource::File(ref path) => {
GrammarSource::File(path) => {
let root = env::var("CARGO_MANIFEST_DIR").unwrap_or_else(|_| ".".into());
let path = Path::new(&root).join("src/").join(&path);
let file_name = match path.file_name() {
let full_path = Path::new(&root).join("src/").join(&path);
let file_name = match full_path.file_name() {
Some(file_name) => file_name,
None => panic!("grammar attribute should point to a file"),
};

let data = match read_file(&path) {
let data = match read_file(&full_path) {
Ok(data) => data,
Err(error) => panic!("error opening {:?}: {}", file_name, error),
};
(data, Some(path.clone()))
(data, Some(path))
}
GrammarSource::Inline(content) => (content, None),
};
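
Note: for context, a minimal sketch of the call site this code serves, with the parser name and grammar file assumed for illustration. The attribute path is read from $CARGO_MANIFEST_DIR/src/, and after this change the short relative path (rather than a full PathBuf) is what gets forwarded to the generator:

// Assumed example crate: the grammar attribute below resolves to
// $CARGO_MANIFEST_DIR/src/ident.pest when the file is read, while the
// relative string "ident.pest" is passed on for the include_str! constant.
use pest_derive::Parser;

#[derive(Parser)]
#[grammar = "ident.pest"]
struct IdentParser;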
29 changes: 29 additions & 0 deletions pest/src/error.rs
@@ -196,6 +196,35 @@ impl<R: RuleType> Error<R> {
self
}

/// Returns the path set using [`Error::with_path()`].
///
/// # Examples
///
/// ```
/// # use pest::error::{Error, ErrorVariant};
/// # use pest::Position;
/// # #[allow(non_camel_case_types)]
/// # #[allow(dead_code)]
/// # #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
/// # enum Rule {
/// # open_paren,
/// # closed_paren
/// # }
/// # let input = "";
/// # let pos = Position::from_start(input);
/// # let error = Error::new_from_pos(
/// # ErrorVariant::ParsingError {
/// # positives: vec![Rule::open_paren],
/// # negatives: vec![Rule::closed_paren]
/// # },
/// # pos);
/// let error = error.with_path("file.rs");
/// assert_eq!(Some("file.rs"), error.path());
/// ```
pub fn path(&self) -> Option<&str> {
self.path.as_deref()
}

/// Renames all `Rule`s if this is a [`ParsingError`]. It does nothing when called on a
/// [`CustomError`].
///
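
Note: a small usage sketch of the new accessor; the reporting helper is hypothetical and only shows how path() pairs with with_path():

// Hypothetical reporting helper around the new accessor.
fn report<R: pest::RuleType>(error: &pest::error::Error<R>) {
    match error.path() {
        // path() returns whatever was previously attached via with_path().
        Some(path) => eprintln!("parse error in {}:\n{}", path, error),
        None => eprintln!("parse error:\n{}", error),
    }
}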
2 changes: 2 additions & 0 deletions pest/src/iterators/pair.rs
@@ -9,6 +9,8 @@

use alloc::format;
use alloc::rc::Rc;
#[cfg(feature = "pretty-print")]
use alloc::string::String;
use alloc::vec::Vec;
use core::fmt;
use core::hash::{Hash, Hasher};
2 changes: 1 addition & 1 deletion pest/src/lib.rs
@@ -7,7 +7,7 @@
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
#![no_std]
#![cfg_attr(feature = "const_prec_climber", feature(const_fn))]
#![cfg_attr(feature = "const_prec_climber", feature(const_fn_trait_bound))]

//! # pest. The Elegant Parser
//!
2 changes: 1 addition & 1 deletion pest/src/position.rs
@@ -17,7 +17,7 @@ use core::str;
use span;

/// A cursor position in a `&str` which provides useful methods to manually parse that string.
#[derive(Clone)]
#[derive(Clone, Copy)]
pub struct Position<'i> {
input: &'i str,
/// # Safety:
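
Note: with Copy derived, a Position can now be duplicated implicitly instead of needing a clone; a minimal sketch:

use pest::Position;

fn main() {
    let input = "hello";
    let start = Position::from_start(input);
    let copy = start; // implicit copy now that Position: Copy
    assert_eq!(start.pos(), copy.pos()); // the original stays usable
}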
2 changes: 1 addition & 1 deletion pest/src/span.rs
@@ -18,7 +18,7 @@ use position;
///
/// [two `Position`s]: struct.Position.html#method.span
/// [`Pair`]: ../iterators/struct.Pair.html#method.span
#[derive(Clone)]
#[derive(Clone, Copy)]
pub struct Span<'i> {
input: &'i str,
/// # Safety
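
Note: likewise for Span; a short sketch, with the AST node type invented for illustration, of keeping spans by value now that Span is Copy:

use pest::Span;

// Hypothetical node that stores its source span by value; it can derive
// Copy only because Span itself is now Copy.
#[derive(Clone, Copy)]
struct Node<'i> {
    span: Span<'i>,
}

fn main() {
    let input = "hello";
    let span = Span::new(input, 0, 5).expect("span within bounds");
    let node = Node { span };
    let duplicate = node; // copies the whole node, span included
    assert_eq!(node.span.as_str(), duplicate.span.as_str());
}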
