@@ -21,7 +21,7 @@ To access the default dataset, we can use the [`Dataset`](https://crawlee.dev/a

``` JavaScript
// dataset.js
- import { Dataset, } from 'crawlee';
+ import { Dataset, Configuration } from 'crawlee';

// Crawlee automatically deletes data from its previous runs.
// We can turn this off by setting 'purgeOnStart' to false.
@@ -43,7 +43,7 @@ Let's say we wanted to print the title for each product that is more expensive t

``` JavaScript
// dataset.js
- import { Dataset } from 'crawlee';
+ import { Dataset, Configuration } from 'crawlee';

Configuration.getGlobalConfig().set('purgeOnStart', false);

@@ -82,7 +82,7 @@ Now that you have a token, you can upload your local dataset to the Apify platfo

``` JavaScript
// dataset.js
- import { Dataset } from 'crawlee';
+ import { Dataset, Configuration } from 'crawlee';
import { ApifyClient } from 'apify-client';

Configuration.getGlobalConfig().set('purgeOnStart', false);
@@ -116,7 +116,7 @@ The full code, to do this in one go, looks like this:

``` JavaScript
// dataset.js
- import { Dataset } from 'crawlee';
+ import { Dataset, Configuration } from 'crawlee';
import { ApifyClient } from 'apify-client';
import { writeFileSync } from 'fs';

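
For context on why every hunk touches only the import line: each of these snippets already calls `Configuration.getGlobalConfig().set('purgeOnStart', false)` but imported only `Dataset`, so running them would throw `ReferenceError: Configuration is not defined`. Below is a minimal sketch of the corrected pattern; the `Dataset.open()` and `getData()` calls are illustrative additions for this sketch, not part of the diff.

``` JavaScript
// dataset.js (illustrative sketch, not the full tutorial file)
import { Dataset, Configuration } from 'crawlee';

// Keep data from previous runs instead of purging the storage on start.
Configuration.getGlobalConfig().set('purgeOnStart', false);

// Open the default dataset and read what earlier crawler runs stored.
const dataset = await Dataset.open();
const { items } = await dataset.getData();
console.log(`Loaded ${items.length} items from the default dataset.`);
```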