@@ -72,7 +72,7 @@ pub enum TokenError {
 
 #[derive(Debug)]
 pub struct TokenStream<'src> {
-    tokens: Vec<(Token<'src>, usize, usize)>,
+    tokens: Vec<(Token<'src>, u32, u32)>,
 }
 
 #[derive(Debug, PartialEq)]
@@ -108,8 +108,8 @@ impl Token<'_> {
     const OPERATORS: &'static str = "=+-*/%&|^!<>.";
     const BRACKETS: &'static str = "()[]{}";
 
-    fn parse(source: &str, start_of_file: bool) -> Result<(Token, usize), TokenError> {
-        let mut chars = source.char_indices().peekable();
+    fn parse(source: &str, start_of_file: bool) -> Result<(Token, u32), TokenError> {
+        let mut chars = source.char_indices().map(|(i, c)| (i as u32, c)).peekable();
         'chr: while let Some((start, c)) = chars.next() {
             return match c {
                 '#' => {
@@ -126,7 +126,7 @@ impl Token<'_> {
                         return match chars.next() {
                             Some((_, '\t')) => continue,
                             Some((_, ' ')) => Err(TokenError::SpaceInIndent),
-                            Some(r) => Ok((Token::Indent(i), start + (i + 1 - s) as usize)),
+                            Some(r) => Ok((Token::Indent(i), start + (i + 1 - s) as u32)),
                             None => Err(TokenError::Empty),
                         };
                     }
@@ -145,7 +145,7 @@ impl Token<'_> {
145145 '"' => loop {
146146 if let Some ( ( i, c) ) = chars. next ( ) {
147147 if c == '"' {
148- let s = & source[ start + 1 ..i] ;
148+ let s = & source[ start as usize + 1 ..i as usize ] ;
149149 break Ok ( ( Token :: String ( s) , i + 1 ) ) ;
150150 }
151151 } else {
@@ -192,14 +192,15 @@ impl Token<'_> {
                     }
                 }
                 _ if c.is_digit(10) => {
+                    let start = start as usize;
                     let mut dot_encountered = false;
                     let mut prev_was_dot = false;
                     loop {
                         if let Some((i, c)) = chars.next() {
                             if !c.is_alphanumeric() && c != '_' {
                                 if dot_encountered || c != '.' {
                                     let i = if prev_was_dot { i - 1 } else { i };
-                                    let s = &source[start..i];
+                                    let s = &source[start..i as usize];
                                     break Ok((Token::Number(s), i));
                                 } else {
                                     dot_encountered = true;
@@ -210,22 +211,23 @@ impl Token<'_> {
                             }
                         } else {
                             let s = &source[start..];
-                            break Ok((Token::Number(s), source.len()));
+                            break Ok((Token::Number(s), source.len() as u32));
                         }
                     }
                 }
                 _ => {
+                    let start = start as usize;
                     let (s, i) = loop {
                         if let Some((i, c)) = chars.next() {
                             if c.is_whitespace()
                                 || Self::OPERATORS.contains(c)
                                 || Self::BRACKETS.contains(c)
                                 || c == ','
                             {
-                                break (&source[start..i], i);
+                                break (&source[start..i as usize], i);
                             }
                         } else {
-                            break (&source[start..], source.len());
+                            break (&source[start..], source.len() as u32);
                         }
                     };
                     Ok((
@@ -242,7 +244,7 @@ impl Token<'_> {
242244 "pass" => Token :: Pass ,
243245 _ => Token :: Name ( s) ,
244246 } ,
245- i,
247+ i as u32 ,
246248 ) )
247249 }
248250 } ;
@@ -260,13 +262,17 @@ impl<'src> TokenStream<'src> {
         loop {
             match Token::parse(source, start) {
                 Ok((tk, len)) => {
-                    if let Token::Indent(i) = tk {
+                    let prev_col = if let Token::Indent(i) = tk {
                         line += 1;
-                        column = 0;
-                    }
-                    column += len;
-                    tokens.push((tk, line, column));
-                    source = &source[len..];
+                        column = i as u32 + 1;
+                        1
+                    } else {
+                        let c = column;
+                        column += len;
+                        c
+                    };
+                    tokens.push((tk, line, prev_col));
+                    source = &source[len as usize..];
                     start = false;
                 }
                 Err(e) => {
@@ -281,12 +287,12 @@ impl<'src> TokenStream<'src> {
         }
     }
 
-    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Token<'src>, usize, usize)> + '_ {
+    pub fn iter(&self) -> impl DoubleEndedIterator<Item = (Token<'src>, u32, u32)> + '_ {
         self.tokens.iter().cloned()
     }
 
     /// Removes redundant tokens, such as multiple Indents in a row. It also shrinks the vec
-    fn remove_redundant(tokens: &mut Vec<(Token, usize, usize)>) {
+    fn remove_redundant(tokens: &mut Vec<(Token, u32, u32)>) {
         // Remove trailing newlines
         while let Some((Token::Indent(_), ..)) = tokens.last() {
             tokens.pop().unwrap();
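
As a quick check of the changed `iter` signature, a minimal caller might look like the sketch below. It relies only on `iter()` now yielding `(Token<'src>, u32, u32)` tuples (token, line, column) as shown in the diff; the helper name `dump_tokens` is hypothetical, and it assumes `Token` implements `Debug` (a `#[derive(Debug, PartialEq)]` appears earlier in this file).

    // Hypothetical caller, sketched against the `iter` signature in this diff.
    // Each yielded item is (Token<'src>, u32, u32): the token, its line, its column.
    fn dump_tokens(stream: &TokenStream<'_>) {
        for (token, line, column) in stream.iter() {
            // Positions are stored as u32 instead of usize after this change.
            println!("{line}:{column} {token:?}");
        }
    }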