37 changes: 23 additions & 14 deletions src/anchor_permanence.rs
@@ -87,73 +87,82 @@ mod tests {
use super::*;
use crate::dom_utils;
use crate::parser::{parse_document_async, tests::serialize_for_test};
use std::io;

#[tokio::test]
async fn removes_script_from_head() {
let document = parse_document_async(r#"<!DOCTYPE html>
async fn removes_script_from_head() -> io::Result<()> {
let parsed = parse_document_async(r#"<!DOCTYPE html>
<html><head><script type="text/required-ids">a b c</script></head><body><div id="a"></div><p id="b"></p><section id="c"></section></body></html>
"#.as_bytes()).await.unwrap();
"#.as_bytes()).await?;
let document = parsed.document().clone();
let mut processor = Processor::new();
dom_utils::scan_dom(&document, &mut |h| processor.visit(h));
processor.apply().unwrap();
let serialized = serialize_for_test(&[document]);
assert!(!serialized.contains("text/required-ids"));
Ok(())
}

#[tokio::test]
async fn no_script_present_noop() {
let document = parse_document_async(
async fn no_script_present_noop() -> io::Result<()> {
let parsed = parse_document_async(
r#"<!DOCTYPE html>
<html><head></head><body></body></html>
"#
.as_bytes(),
)
.await
.unwrap();
.await?;
let document = parsed.document().clone();
let before = serialize_for_test(&[document.clone()]);
let mut processor = Processor::new();
dom_utils::scan_dom(&document, &mut |h| processor.visit(h));
processor.apply().unwrap();
assert_eq!(before, serialize_for_test(&[document]));
Ok(())
}

#[tokio::test]
async fn whitespace_splitting() {
async fn whitespace_splitting() -> io::Result<()> {
// Includes indentation, multiple spaces, and newlines in the script content.
let document = parse_document_async(r#"<!DOCTYPE html><html><head><script type="text/required-ids">
let parsed = parse_document_async(r#"<!DOCTYPE html><html><head><script type="text/required-ids">
foo bar
baz
qux
</script></head><body><div id="foo"></div><div id="bar"></div><div id="baz"></div><div id="qux"></div></body></html>
"#.as_bytes()).await.unwrap();
"#.as_bytes()).await?;
let document = parsed.document().clone();
let mut processor = Processor::new();
dom_utils::scan_dom(&document, &mut |h| processor.visit(h));
processor.apply().unwrap();
let serialized = serialize_for_test(&[document]);
assert!(!serialized.contains("text/required-ids"));
Ok(())
}

#[tokio::test]
async fn errors_on_missing_ids() {
let document = parse_document_async(r#"<!DOCTYPE html>
async fn errors_on_missing_ids() -> io::Result<()> {
let parsed = parse_document_async(r#"<!DOCTYPE html>
<html><head><script type="text/required-ids">foo bar baz</script></head><body><div id="foo"></div></body></html>
"#.as_bytes()).await.unwrap();
"#.as_bytes()).await?;
let document = parsed.document().clone();
let mut processor = Processor::new();
dom_utils::scan_dom(&document, &mut |h| processor.visit(h));
let err = processor.apply().expect_err("expected missing IDs error");
assert!(
err.to_string()
.contains("Missing required IDs for anchor permanence: bar, baz")
);
Ok(())
}

#[tokio::test]
#[should_panic(expected = "multiple required-ids scripts encountered")]
async fn panics_on_multiple_required_ids_scripts() {
let document = parse_document_async(r#"<!DOCTYPE html><html><head>
let parsed = parse_document_async(r#"<!DOCTYPE html><html><head>
<script type="text/required-ids">a b</script>
<script type="text/required-ids">c d</script>
</head><body><div id="a"></div><div id="b"></div><div id="c"></div><div id="d"></div></body></html>"#.as_bytes()).await.unwrap();
let document = parsed.document().clone();
let mut processor = Processor::new();
dom_utils::scan_dom(&document, &mut |h| processor.visit(h));
}
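The anchor_permanence tests above are converted from unwrapping every fallible call to returning `io::Result<()>`, so parse failures propagate with `?` and each test ends with `Ok(())` (the other test modules in this PR already use that signature). As a self-contained illustration of the pattern, using only tokio's own I/O traits rather than any project code:

```rust
use std::io;
use tokio::io::AsyncReadExt;

// Illustration only: a #[tokio::test] may return io::Result<()>, letting the
// body use `?` on fallible async calls instead of .unwrap().
#[tokio::test]
async fn propagates_errors_with_question_mark() -> io::Result<()> {
    // &[u8] implements tokio's AsyncRead, so it stands in for any async source.
    let mut input: &[u8] = b"<!DOCTYPE html>";
    let mut buf = Vec::new();
    input.read_to_end(&mut buf).await?; // an error here fails the test via `?`
    assert_eq!(buf, b"<!DOCTYPE html>".to_vec());
    Ok(())
}
```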
15 changes: 10 additions & 5 deletions src/annotate_attributes.rs
@@ -311,7 +311,7 @@ mod tests {
// before and after the attributes table, to demonstrate that this is
// not sensitive to which order they occur in (i.e., these could be
// reordered in the HTML spec).
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<h3>The a element</h3>
@@ -333,6 +333,7 @@
<dd><code data-x="attr-area-href">href</code>
</dl>
"#.trim().as_bytes()).await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply().await?;
@@ -368,7 +369,7 @@ mod tests {
async fn test_variant() -> io::Result<()> {
// This checks that <!-- variant --> and <!-- or: --> work correctly.
// i.e., the variant description is used where requested
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<h3>The a element</h3>
@@ -386,6 +387,7 @@
<dd><code data-x="attr-area-href">href</code><!-- variant -->
</dl>
"#.trim().as_bytes()).await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply().await?;
@@ -415,7 +417,7 @@ mod tests {
#[tokio::test]
async fn test_special_semantics() -> io::Result<()> {
// Checks that the special rules for using : instead of an em dash work.
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<h3>The a element</h3>
@@ -428,6 +430,7 @@
<tr><th><code data-x>name</code><td><code data-x="attr-a-name">a</code><td>Anchor name
</tbody></table>
"#.trim().as_bytes()).await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply().await?;
@@ -451,7 +454,7 @@ mod tests {
#[tokio::test]
async fn test_special_semantics_multiple() -> io::Result<()> {
// Checks that the special rules for joining any special semantics with a ; work.
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<h3>The a element</h3>
@@ -465,6 +468,7 @@
<tr><th><code data-x>name</code><td><code data-x="attr-a-name">a</code><td>Name of the anchor
</tbody></table>
"#.trim().as_bytes()).await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply().await?;
@@ -490,7 +494,7 @@ mod tests {
async fn test_identical_links() -> io::Result<()> {
// This checks the same identifier can be linked multiple times without
// repeating the description.
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<h3>The img element</h3>
@@ -508,6 +512,7 @@
<tr><th><code data-x>width</code><td><code data-x="attr-dim-width">img</code>; <code data-x="attr-dim-width">video</code><td>Horizontal dimension
</tbody></table>
"#.trim().as_bytes()).await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply().await?;
12 changes: 8 additions & 4 deletions src/boilerplate.rs
@@ -166,10 +166,11 @@ mod tests {
"<tr><td>en<td>English",
)
.await?;
let document = parse_document_async(
let parsed = parse_document_async(
"<!DOCTYPE html><table><!--BOILERPLATE languages--></table>".as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new(boilerplate_dir.path(), Path::new("."));
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply().await?;
@@ -188,10 +189,11 @@ mod tests {
"data:text/html,Hello, world!",
)
.await?;
let document = parse_document_async(
let parsed = parse_document_async(
"<!DOCTYPE html><a href=\"<!--BOILERPLATE data.url-->\">hello</a>".as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new(boilerplate_dir.path(), Path::new("."));
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply().await?;
@@ -208,9 +210,10 @@ mod tests {
tokio::fs::write(example_dir.path().join("ex1"), "first").await?;
tokio::fs::write(example_dir.path().join("ex2"), "second").await?;
tokio::fs::write(example_dir.path().join("ignored"), "bad").await?;
let document =
let parsed =
parse_document_async("<!DOCTYPE html><pre>EXAMPLE ex1</pre><pre><code class=html>\nEXAMPLE ex2 </code></pre><p>EXAMPLE ignored</p>".as_bytes())
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new(Path::new("."), example_dir.path());
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply().await?;
@@ -229,7 +232,8 @@ mod tests {
"<!DOCTYPE html><body><pre>EXAMPLE ../foo</pre>",
];
for example in bad_path_examples {
let document = parse_document_async(example.as_bytes()).await?;
let parsed = parse_document_async(example.as_bytes()).await?;
let document = parsed.document().clone();
let mut proc = Processor::new(Path::new("."), Path::new("."));
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
let result = proc.apply().await;
27 changes: 18 additions & 9 deletions src/interface_index.rs
@@ -186,7 +186,7 @@ mod tests {

#[tokio::test]
async fn test_two_interfaces_in_one_block() -> io::Result<()> {
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<pre><code class=idl>
@@ -199,6 +199,7 @@ INSERT INTERFACES HERE
.as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply()?;
Expand All @@ -216,7 +217,7 @@ interface <dfn interface="">HTMLBlinkElement</dfn> { ... }

#[tokio::test]
async fn test_two_interfaces_in_separate_blocks() -> io::Result<()> {
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<pre><code class=idl>
@@ -231,6 +232,7 @@ INSERT INTERFACES HERE
.as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply()?;
Expand All @@ -250,7 +252,7 @@ interface <dfn interface="">HTMLBlinkElement</dfn> { ... }

#[tokio::test]
async fn interface_with_partial() -> io::Result<()> {
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<pre><code class=idl>
@@ -265,6 +267,7 @@ INSERT INTERFACES HERE
.as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply()?;
Expand All @@ -284,7 +287,7 @@ partial interface <span id="HTMLMarqueeElement-partial">HTMLMarqueeElement</span

#[tokio::test]
async fn interface_with_two_partials() -> io::Result<()> {
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<pre><code class=idl>
@@ -298,6 +301,7 @@ INSERT INTERFACES HERE
.as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply()?;
Expand All @@ -316,7 +320,7 @@ partial interface <span id="HTMLMarqueeElement-partial-2">HTMLMarqueeElement</sp

#[tokio::test]
async fn only_partials() -> io::Result<()> {
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<pre><code class=idl>
@@ -329,6 +333,7 @@ INSERT INTERFACES HERE
.as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply()?;
Expand All @@ -346,7 +351,7 @@ partial interface <span id="HTMLMarqueeElement-partial-2">HTMLMarqueeElement</sp

#[tokio::test]
async fn marker_before() -> io::Result<()> {
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
INSERT INTERFACES HERE
@@ -358,6 +363,7 @@ interface <dfn interface>HTMLMarqueeElement</dfn> { ... }
.as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
proc.apply()?;
Expand All @@ -376,7 +382,8 @@ interface <dfn interface="">HTMLMarqueeElement</dfn> { ... }

#[tokio::test]
async fn no_marker() -> io::Result<()> {
let document = parse_document_async("<!DOCTYPE html>".as_bytes()).await?;
let parsed = parse_document_async("<!DOCTYPE html>".as_bytes()).await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
let result = proc.apply();
@@ -386,11 +393,12 @@ interface <dfn interface="">HTMLMarqueeElement</dfn> { ... }

#[tokio::test]
async fn duplicate_marker() -> io::Result<()> {
let document = parse_document_async(
let parsed = parse_document_async(
"<!DOCTYPE html><div>INSERT INTERFACES HERE</div><div>INSERT INTERFACES HERE</div>"
.as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
let result = proc.apply();
@@ -400,7 +408,7 @@ interface <dfn interface="">HTMLMarqueeElement</dfn> { ... }

#[tokio::test]
async fn duplicate_dfn() -> io::Result<()> {
let document = parse_document_async(
let parsed = parse_document_async(
r#"
<!DOCTYPE html>
<pre><code class=idl>
@@ -411,6 +419,7 @@ interface <dfn interface>HTMLMarqueeElement</dfn> { ... }
.as_bytes(),
)
.await?;
let document = parsed.document().clone();
let mut proc = Processor::new();
dom_utils::scan_dom(&document, &mut |h| proc.visit(h));
let result = proc.apply();
6 changes: 4 additions & 2 deletions src/main.rs
@@ -51,7 +51,8 @@ async fn run_preprocess() -> io::Result<()> {
// Because parsing can jump around the tree a little, it's most reasonable
// to just parse the whole document before doing any processing. Even for
// the HTML standard, this doesn't take too long.
let document = parser::parse_document_async(tokio::io::stdin()).await?;
let parsed = parser::parse_document_async(tokio::io::stdin()).await?;
let document = parsed.document().clone();

let mut boilerplate = boilerplate::Processor::new(cache_dir.clone(), source_dir.join("demos"));
let mut represents = represents::Processor::new();
@@ -92,7 +93,8 @@ async fn run_preprocess() -> io::Result<()> {

// The steps and considerations here are similar to run_preprocess.
async fn run_postprocess() -> io::Result<()> {
let document = parser::parse_document_async(tokio::io::stdin()).await?;
let parsed = parser::parse_document_async(tokio::io::stdin()).await?;
let document = parsed.document().clone();

let mut anchor_permanence = anchor_permanence::Processor::new();

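Every updated call site, in the tests and in main.rs alike, now follows the same two-step shape: `parse_document_async` returns a parse-result object, and the DOM handle is taken with `parsed.document().clone()`. The wrapper type itself lives in src/parser.rs, which is not part of this diff, so the sketch below is only an inference from those call sites; the struct name, its field, and the rcdom-style `Handle` alias are assumptions. If the handle really is an `Rc`-based rcdom handle, the `.clone()` is a cheap reference-count bump rather than a deep copy of the tree.

```rust
// Hypothetical reconstruction, inferred from the call sites in this diff.
// The real definitions are in src/parser.rs and may differ in names and fields.
use std::rc::Rc;

struct Node; // stand-in for the DOM node type (rcdom's Node in the real crate)
type Handle = Rc<Node>; // rcdom-style handle: cloning only bumps a refcount

pub struct ParsedDocument {
    document: Handle,
    // plausibly also parse errors or other metadata gathered during parsing
}

impl ParsedDocument {
    /// Borrow the parsed DOM root; callers clone the Handle when they need an
    /// owned reference, which is what every call site in this PR does.
    pub fn document(&self) -> &Handle {
        &self.document
    }
}
```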