Skip to content

Commit e34874d

Browse files
committed
Remove some trailing whitespace
1 parent f0decc5 commit e34874d

File tree: 1 file changed (+10 lines, −10 lines).

html5ever/src/tokenizer/mod.rs

Lines changed: 10 additions & 10 deletions.
Diff columns: original file line number | diff line number | diff line change.
@@ -1090,7 +1090,7 @@ impl<Sink: TokenSink> Tokenizer<Sink> {
10901090
'\t' | '\n' | '\x0C' | ' ' => (),
10911091
'\0' => go!(self: error; create_doctype; push_doctype_name '\u{fffd}'; to DoctypeName),
10921092
'>' => go!(self: error; create_doctype; force_quirks; emit_doctype; to Data),
1093-
c => go!(self: create_doctype; push_doctype_name (c.to_ascii_lowercase());
1093+
c => go!(self: create_doctype; push_doctype_name (c.to_ascii_lowercase());
10941094
to DoctypeName),
10951095
}},
10961096

@@ -1418,15 +1418,15 @@ mod test {
14181418

14191419
use {LocalName};
14201420

1421-
// LinesMatch implements the TokenSink trait. It is used for testing to see
1421+
// LinesMatch implements the TokenSink trait. It is used for testing to see
14221422
// if current_line is being updated when process_token is called. The lines
14231423
// vector is a collection of the line numbers that each token is on.
14241424
struct LinesMatch {
14251425
tokens: Vec<Token>,
14261426
current_str: StrTendril,
14271427
lines: Vec<(Token, u64)>,
14281428
}
1429-
1429+
14301430
impl LinesMatch {
14311431
fn new() -> LinesMatch {
14321432
LinesMatch {
@@ -1451,11 +1451,11 @@ mod test {
14511451
}
14521452

14531453
impl TokenSink for LinesMatch {
1454-
1454+
14551455
type Handle = ();
14561456

14571457
fn process_token(&mut self, token: Token, line_number: u64) -> TokenSinkResult<Self::Handle> {
1458-
1458+
14591459
match token {
14601460
CharacterTokens(b) => {
14611461
self.current_str.push_slice(&b);
@@ -1491,7 +1491,7 @@ mod test {
14911491
}
14921492
}
14931493

1494-
// Take in tokens, process them, and return vector with line
1494+
// Take in tokens, process them, and return vector with line
14951495
// numbers that each token is on
14961496
fn tokenize(input: Vec<StrTendril>, opts: TokenizerOpts) -> Vec<(Token, u64)> {
14971497
let sink = LinesMatch::new();
@@ -1549,8 +1549,8 @@ mod test {
15491549
let vector = vec![StrTendril::from("<a>\n"), StrTendril::from("<b>\n"),
15501550
StrTendril::from("</b>\n"), StrTendril::from("</a>\n")];
15511551
let expected = vec![(create_tag(StrTendril::from("a"), StartTag), 1),
1552-
(create_tag(StrTendril::from("b"), StartTag), 2),
1553-
(create_tag(StrTendril::from("b"), EndTag), 3),
1552+
(create_tag(StrTendril::from("b"), StartTag), 2),
1553+
(create_tag(StrTendril::from("b"), EndTag), 3),
15541554
(create_tag(StrTendril::from("a"), EndTag), 4)];
15551555
let results = tokenize(vector, opts);
15561556
assert_eq!(results, expected);
@@ -1568,8 +1568,8 @@ mod test {
15681568
let vector = vec![StrTendril::from("<a>\r\n"), StrTendril::from("<b>\r\n"),
15691569
StrTendril::from("</b>\r\n"), StrTendril::from("</a>\r\n")];
15701570
let expected = vec![(create_tag(StrTendril::from("a"), StartTag), 1),
1571-
(create_tag(StrTendril::from("b"), StartTag), 2),
1572-
(create_tag(StrTendril::from("b"), EndTag), 3),
1571+
(create_tag(StrTendril::from("b"), StartTag), 2),
1572+
(create_tag(StrTendril::from("b"), EndTag), 3),
15731573
(create_tag(StrTendril::from("a"), EndTag), 4)];
15741574
let results = tokenize(vector, opts);
15751575
assert_eq!(results, expected);

Comments (0)