diff --git a/sources/academy/webscraping/scraping_basics_javascript2/index.md b/sources/academy/webscraping/scraping_basics_javascript2/index.md
index 0cc5b5569a..c9025b2f68 100644
--- a/sources/academy/webscraping/scraping_basics_javascript2/index.md
+++ b/sources/academy/webscraping/scraping_basics_javascript2/index.md
@@ -9,11 +9,20 @@ unlisted: true
 ---
 
 import DocCardList from '@theme/DocCardList';
+import UrlConditional from '@site/src/components/UrlConditional';
 
 **Learn how to use JavaScript to extract information from websites in this practical course, starting from the absolute basics.**
 
 ---
+
+<UrlConditional fragment="old-js-course">
+
+:::tip
+You're seeing this because your URL includes `#old-js-course`.
+:::
+
+</UrlConditional>
 
 In this course we'll use JavaScript to create an application for watching prices. It'll be able to scrape all product pages of an e-commerce website and record prices. Data from several runs of such program would be useful for seeing trends in price changes, detecting discounts, etc.
 
 ![E-commerce listing on the left, JSON with data on the right](./images/scraping.webp)
diff --git a/src/components/UrlConditional.jsx b/src/components/UrlConditional.jsx
new file mode 100644
index 0000000000..b46680fc65
--- /dev/null
+++ b/src/components/UrlConditional.jsx
@@ -0,0 +1,9 @@
+import { useLocation } from '@docusaurus/router';
+
+// Renders its children only when the current URL hash equals `#${fragment}`.
+// Usage: <UrlConditional fragment="old-js-course">…</UrlConditional>
+export default function UrlConditional({ fragment, children }) {
+  const location = useLocation();
+  const shouldShow = location.hash === `#${fragment}`;
+  return shouldShow ? <>{children}</> : null;
+}