@@ -19,6 +19,7 @@ use html5ever::tokenizer::{CharacterTokens, EOFToken, NullCharacterToken, ParseError};
 use html5ever::tokenizer::{CommentToken, DoctypeToken, TagToken, Token};
 use html5ever::tokenizer::{Doctype, EndTag, StartTag, Tag};
 use html5ever::tokenizer::{TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts};
+use html5ever::TokenizerResult;
 use html5ever::{ns, Attribute, LocalName, QualName};
 use serde_json::{Map, Value};
 use std::cell::RefCell;
@@ -147,15 +148,19 @@ impl TokenSink for TokenLogger {
 
 fn tokenize(input: Vec<StrTendril>, opts: TokenizerOpts) -> (Vec<Token>, Vec<TestError>) {
     let sink = TokenLogger::new(opts.exact_errors);
-    let tok = Tokenizer::new(sink, opts);
+    let tokenizer = Tokenizer::new(sink, opts);
+
     let buffer = BufferQueue::default();
     for chunk in input.into_iter() {
         buffer.push_back(chunk);
-        let _ = tok.feed(&buffer);
     }
-    let _ = tok.feed(&buffer);
-    tok.end();
-    tok.sink.get_tokens()
+
+    while tokenizer.feed(&buffer) != TokenizerResult::Done {
+        // Ignore any script tags...
+    }
+
+    tokenizer.end();
+    tokenizer.sink.get_tokens()
 }
 
 trait JsonExt: Sized {
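For context, the loop-until-`Done` pattern above is how a caller drives the tokenizer under this API: `feed` returns a `TokenizerResult`, and a `Script(_)` value interrupts tokenization at a `</script>` boundary so a scripting host could run the script before feeding resumes. Below is a minimal standalone sketch of that pattern; `PrintSink` and the input string are illustrative only, and the import paths and `&self`-based `TokenSink` signature are assumed from the API shape visible in this diff and html5ever's bundled examples.

```rust
use html5ever::tendril::StrTendril;
use html5ever::tokenizer::{
    BufferQueue, Token, TokenSink, TokenSinkResult, Tokenizer, TokenizerOpts,
};
use html5ever::TokenizerResult;

// Hypothetical sink for illustration: prints each token it receives.
struct PrintSink;

impl TokenSink for PrintSink {
    type Handle = ();

    // Assumes the interior-mutability TokenSink API (`&self`) that this
    // test file targets (note its `use std::cell::RefCell`).
    fn process_token(&self, token: Token, _line_number: u64) -> TokenSinkResult<()> {
        println!("{token:?}");
        TokenSinkResult::Continue
    }
}

fn main() {
    let tokenizer = Tokenizer::new(PrintSink, TokenizerOpts::default());

    let buffer = BufferQueue::default();
    buffer.push_back(StrTendril::from_slice("<p>hello</p>"));

    // Pump until the tokenizer has drained the queue. A `Script(_)` result
    // marks a </script> boundary where scripts would execute before feeding
    // resumes; with no scripting here, we simply keep looping.
    while tokenizer.feed(&buffer) != TokenizerResult::Done {}

    tokenizer.end();
}
```

Keeping the feed loop separate from the chunk loop, as the new `tokenize` does, means the queue is filled once and then drained to completion, rather than interleaving a `feed` after every `push_back`.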