Skip to content

Commit 18aaddb

Browse files
authored
Merge pull request #9 from Yuta1004/#4_RemoveLexIterator
#4 LexIterator 削除
2 parents 26c7eef + b250277 commit 18aaddb

File tree

18 files changed

+191

-140

lines changed

Cargo.toml

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -6,16 +6,16 @@ edition = "2021"
66
[dependencies]
77
anyhow = { workspace = true }
88
thiserror = { workspace = true }
9-
core = { workspace = true }
10-
algorithm = { workspace = true }
9+
pgen_core = { workspace = true }
10+
pgen_algorithm = { workspace = true }
1111

1212
[dev-dependencies]
1313
serde = { workspace = true }
1414
serde_json = "1.0.117"
1515

1616
[features]
1717
default = []
18-
derive = ["core/derive"]
18+
derive = ["pgen_core/derive"]
1919

2020
[workspace]
2121
resolver = "2"
@@ -31,5 +31,5 @@ thiserror = "1.0.58"
3131
serde = "1.0.197"
3232
regex = "1.10.4"
3333
regex-macro = "0.2.0"
34-
core = { path = "./crates/core" }
35-
algorithm = { path = "./crates/algorithm" }
34+
pgen_core = { package = "core", path = "./crates/core" }
35+
pgen_algorithm = { package = "algorithm", path = "./crates/algorithm" }

crates/algorithm_lr1/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,4 +8,4 @@ anyhow = { workspace = true }
88
thiserror = { workspace = true }
99
serde = { workspace = true, features = ["derive"] }
1010
itertools = "0.12.1"
11-
core = { path = "../core", features = ["derive"] }
11+
pgen_core = { package = "core", path = "../core", features = ["derive"] }

crates/algorithm_lr1/src/builder.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ use std::marker::PhantomData;
55
use serde::{Serialize, Deserialize};
66
use itertools::Itertools;
77

8-
use core::cfg::{TokenSet, Syntax, Rule, RuleElem, RuleSet};
8+
use pgen_core::cfg::{TokenSet, Syntax, Rule, RuleElem, RuleSet};
99

1010
#[derive(Debug, Serialize, Deserialize)]
1111
pub(super) enum LRAction<S> {

crates/algorithm_lr1/src/driver.rs

Lines changed: 17 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
1-
use core::cfg::{TokenSet, Syntax};
2-
use core::lex::LexIterator;
1+
use pgen_core::cfg::{TokenSet, Syntax};
2+
use pgen_core::lex::Token;
33

4-
use super::builder::{LRAction, LR1Configure};
4+
use crate::error::ParseError;
5+
use crate::builder::{LRAction, LR1Configure};
56

67
pub(super) struct LR1Driver<'a, 'b, T, S> (&'b LR1Configure<'a, T, S>)
78
where
@@ -19,7 +20,7 @@ where
1920

2021
pub fn run<'c>(
2122
&self,
22-
lexer: &mut impl LexIterator<'a, 'c, T>,
23+
lexer: &mut impl Iterator<Item = Token<'a, 'c, T>>,
2324
) -> anyhow::Result<()> {
2425
let mut stack = vec![0];
2526
loop {
@@ -29,31 +30,31 @@ where
2930
let action = match input {
3031
Some(token) => (
3132
self.0.action_table[top].get(&token.kind).unwrap(),
32-
Some(token.as_str()),
33+
Some(token),
3334
),
3435
None => (
3536
&self.0.eof_action_table[top],
3637
None
3738
),
3839
};
39-
match action.0 {
40-
LRAction::Shift(new_state) => {
40+
match action {
41+
(LRAction::Shift(new_state), _) => {
4142
stack.push(*new_state);
4243
break;
4344
}
44-
LRAction::Reduce(_, goto, elems_cnt) => {
45+
(LRAction::Reduce(_, goto, elems_cnt), _) => {
4546
stack.truncate(stack.len() - elems_cnt);
4647
stack.push(self.0.goto_table[stack[stack.len() - 1]][*goto]);
4748
}
48-
LRAction::None => {
49-
let pos = lexer.pos();
50-
let pos = match action.1 {
51-
Some(raw) => (pos.0, pos.1 - (raw.len() as u32)),
52-
None => pos,
53-
};
54-
return Err(anyhow::anyhow!("Error at {:?}", pos).into());
49+
(LRAction::Accept, _) => {
50+
return Ok(());
51+
}
52+
(LRAction::None, Some(token)) => {
53+
return Err(ParseError::new_unexpected_token(token).into());
54+
}
55+
(LRAction::None, None) => {
56+
return Err(ParseError::UnexpectedEOF.into());
5557
}
56-
LRAction::Accept => return Ok(()),
5758
}
5859
}
5960
}

crates/algorithm_lr1/src/error.rs

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
1+
use thiserror::Error;
2+
3+
use pgen_core::error::ParseError as SuperParseError;
4+
use pgen_core::cfg::TokenSet;
5+
use pgen_core::lex::Token;
6+
7+
#[derive(Debug, Error)]
8+
pub enum ParseError {
9+
#[error("Unexpected token {actual:?} found")]
10+
UnexpectedToken {
11+
actual: String,
12+
},
13+
#[error("Unexpected EOF")]
14+
UnexpectedEOF,
15+
}
16+
17+
impl ParseError {
18+
pub fn new_unexpected_token<'a, T>(expected: Token<'a, '_, T>) -> SuperParseError
19+
where
20+
T: TokenSet<'a>,
21+
{
22+
let err = ParseError::UnexpectedToken {
23+
actual: format!("{:?}", expected.kind),
24+
};
25+
SuperParseError::from(err).with(expected)
26+
}
27+
}

crates/algorithm_lr1/src/lib.rs

Lines changed: 7 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
1+
mod error;
12
mod builder;
23
mod driver;
34

45
use serde::{Serialize, Deserialize};
56

6-
use core::cfg::{TokenSet, Syntax};
7-
use core::lex::LexIterator;
8-
use core::parse::ParserImpl;
7+
use pgen_core::cfg::{TokenSet, Syntax};
8+
use pgen_core::lex::Token;
9+
use pgen_core::parse::ParserImpl;
910

1011
use builder::LR1Configure;
1112
use driver::LR1Driver;
@@ -31,16 +32,16 @@ where
3132

3233
fn parse<'b>(
3334
&self,
34-
mut lexer: impl LexIterator<'a, 'b, T>,
35+
mut lexer: impl Iterator<Item = Token<'a, 'b, T>>,
3536
) -> anyhow::Result<Self::Output> {
3637
LR1Driver::new(&self.0).run(&mut lexer)
3738
}
3839
}
3940

4041
#[cfg(test)]
4142
mod test {
42-
use core::cfg::{TokenSet, Syntax, Rule, RuleElem};
43-
use core::Parser;
43+
use pgen_core::cfg::{TokenSet, Syntax, Rule, RuleElem};
44+
use pgen_core::Parser;
4445

4546
use super::LR1;
4647

crates/core/Cargo.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,8 @@ anyhow = { workspace = true }
88
thiserror = { workspace = true }
99
serde = { workspace = true, features = ["derive"]}
1010
regex = { workspace = true }
11-
derive = { package = "core_derive", path = "../core_derive", optional = true }
11+
pgen_core_derive = { package = "core_derive", path = "../core_derive", optional = true }
1212

1313
[features]
1414
default = []
15-
derive = ["dep:derive"]
15+
derive = ["dep:pgen_core_derive"]

crates/core/src/cfg.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ mod token;
22
mod syntax;
33

44
#[cfg(feature = "derive")]
5-
pub use derive::{TokenSet, Syntax};
5+
pub use pgen_core_derive::{TokenSet, Syntax};
66

77
pub use token::TokenSet;
88
pub use syntax::{Syntax, Rule, RuleElem, RuleSet};

crates/core/src/cfg/syntax.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,13 @@
11
use std::collections::HashMap;
2+
use std::fmt::Debug;
23
use std::hash::Hash;
34
use std::marker::PhantomData;
45

56
use super::token::TokenSet;
67

78
pub trait Syntax<'a>
89
where
9-
Self: Clone + Copy + Sized,
10+
Self: Debug + Clone + Copy + Sized,
1011
{
1112
type TokenSet: TokenSet<'a>;
1213

crates/core/src/cfg/token.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,11 @@
1+
use std::fmt::Debug;
12
use std::hash::Hash;
23

34
use regex::Regex;
45

56
pub trait TokenSet<'a>
67
where
7-
Self: Copy + Clone + Hash + Eq,
8+
Self: Debug + Copy + Clone + Hash + Eq,
89
{
910
fn ignore_str() -> &'a str;
1011
fn enum_iter() -> impl Iterator<Item = Self>;

0 commit comments

Comments (0)