@@ -41,9 +41,9 @@ impl Lexer {

 struct LexDriver<'a, 'b, T: TokenSet<'a>> {
     // Regex
+    regex_istr: Regex,
     regex_set: RegexSet,
     regex_map: Vec<(Regex, T)>,
-    regex_istr: Regex,

     // State
     input: &'b str,
@@ -57,18 +57,16 @@ impl<'a, 'b, T: TokenSet<'a>> TryFrom<&'b str> for LexDriver<'a, 'b, T> {
     type Error = anyhow::Error;

     fn try_from(input: &'b str) -> anyhow::Result<Self> {
-        let regex_map = T::try_into()?;
-        let regex_set = regex_map
-            .iter()
-            .map(|(_, token)| T::to_regex(&token))
-            .collect::<Vec<_>>();
-        let regex_set = RegexSet::new(regex_set)?;
         let regex_istr = Regex::new(T::ignore_str())?;
+        let regex_set = T::try_into_regexset()?;
+        let regex_map = T::into_iter()
+            .map(|token| Ok((token.into_regex()?, token)))
+            .collect::<anyhow::Result<Vec<_>>>()?;

         Ok(LexDriver {
+            regex_istr,
             regex_set,
             regex_map,
-            regex_istr,
             input,
             pos: 0,
             tokenset: PhantomData,
@@ -126,11 +124,11 @@ mod test {
             r"^[ \t\n]+"
         }

-        fn enum_iter() -> Box<dyn Iterator<Item = Self>> {
-            Box::new(vec![TestToken::Num, TestToken::Plus].into_iter())
+        fn into_iter() -> impl Iterator<Item = Self> {
+            vec![TestToken::Num, TestToken::Plus].into_iter()
         }

-        fn to_regex(&self) -> &'static str {
+        fn into_regex_str(&self) -> &'static str {
             match self {
                 TestToken::Num => r"^[1-9][0-9]*",
                 TestToken::Plus => r"^\+",
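
For orientation, here is a minimal sketch of the `TokenSet` trait surface that `LexDriver::try_from` depends on after this change. Only the method names (`ignore_str`, `into_iter`, `into_regex_str`, `into_regex`, `try_into_regexset`) appear in the diff; the exact signatures, bounds, and the choice to make the compiling helpers provided defaults are assumptions.

```rust
use anyhow::Result;
use regex::{Regex, RegexSet};

// Sketch only, not part of the commit: the trait shape implied by the driver code above.
trait TokenSet<'a>: Sized {
    /// Pattern for input the lexer should skip (whitespace in the test token set).
    fn ignore_str() -> &'static str;

    /// Every token variant, in matching priority order.
    fn into_iter() -> impl Iterator<Item = Self>;

    /// Regex source string for one variant.
    fn into_regex_str(&self) -> &'static str;

    /// Compile a single variant's pattern (assumed provided method).
    fn into_regex(&self) -> Result<Regex> {
        Ok(Regex::new(self.into_regex_str())?)
    }

    /// Compile all variants into one `RegexSet` (assumed provided method).
    fn try_into_regexset() -> Result<RegexSet> {
        Ok(RegexSet::new(Self::into_iter().map(|t| t.into_regex_str()))?)
    }
}
```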