@@ -125,9 +125,9 @@ def test_cachedownloader_cached_file(tmp_path, monkeypatch, default_response):
 
 
 @pytest.mark.parametrize("disable_cache", (True, False))
-def test_cachedownloader_on_success(cache_dir, disable_cache):
+def test_cachedownloader_on_success(get_download_cache_loc, disable_cache):
     add_default_response()
-    f = cache_dir / "check_jsonschema" / "downloads" / "schema1.json"
+    f = get_download_cache_loc("schema1.json")
     cd = CacheDownloader(disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
@@ -151,7 +151,9 @@ def test_cachedownloader_using_alternate_target_dir(cache_dir):
 
 @pytest.mark.parametrize("disable_cache", (True, False))
 @pytest.mark.parametrize("failures", (1, 2, requests.ConnectionError))
-def test_cachedownloader_succeeds_after_few_errors(cache_dir, disable_cache, failures):
+def test_cachedownloader_succeeds_after_few_errors(
+    get_download_cache_loc, disable_cache, failures
+):
     if isinstance(failures, int):
         for _i in range(failures):
             responses.add(
@@ -168,7 +170,7 @@ def test_cachedownloader_succeeds_after_few_errors(cache_dir, disable_cache, failures):
             match_querystring=None,
         )
     add_default_response()
-    f = cache_dir / "check_jsonschema" / "downloads" / "schema1.json"
+    f = get_download_cache_loc("schema1.json")
     cd = CacheDownloader(disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
@@ -184,7 +186,7 @@ def test_cachedownloader_succeeds_after_few_errors(cache_dir, disable_cache, fai
 @pytest.mark.parametrize("disable_cache", (True, False))
 @pytest.mark.parametrize("connection_error", (True, False))
 def test_cachedownloader_fails_after_many_errors(
-    cache_dir, disable_cache, connection_error
+    get_download_cache_loc, disable_cache, connection_error
 ):
     for _i in range(10):
         if connection_error:
@@ -202,7 +204,7 @@ def test_cachedownloader_fails_after_many_errors(
                 match_querystring=None,
             )
     add_default_response()  # never reached, the 11th response
-    f = cache_dir / "check_jsonschema" / "downloads" / "schema1.json"
+    f = get_download_cache_loc("schema1.json")
     cd = CacheDownloader(disable_cache=disable_cache).bind(
         "https://example.com/schema1.json"
     )
@@ -213,7 +215,7 @@ def test_cachedownloader_fails_after_many_errors(
 
 
 @pytest.mark.parametrize("disable_cache", (True, False))
-def test_cachedownloader_retries_on_bad_data(cache_dir, disable_cache):
+def test_cachedownloader_retries_on_bad_data(get_download_cache_loc, disable_cache):
     responses.add(
         "GET",
         "https://example.com/schema1.json",
@@ -222,7 +224,7 @@ def test_cachedownloader_retries_on_bad_data(cache_dir, disable_cache):
         match_querystring=None,
     )
     add_default_response()
-    f = cache_dir / "check_jsonschema" / "downloads" / "schema1.json"
+    f = get_download_cache_loc("schema1.json")
     cd = CacheDownloader(
         disable_cache=disable_cache,
     ).bind(
@@ -245,20 +247,19 @@ def test_cachedownloader_retries_on_bad_data(cache_dir, disable_cache):
     "failure_mode", ("header_missing", "header_malformed", "time_overflow")
 )
 def test_cachedownloader_handles_bad_lastmod_header(
-    monkeypatch, cache_dir, file_exists, failure_mode
+    monkeypatch,
+    get_download_cache_loc,
+    inject_cached_download,
+    file_exists,
+    failure_mode,
 ):
+    uri = "https://example.com/schema1.json"
     if failure_mode == "header_missing":
-        responses.add(
-            "GET",
-            "https://example.com/schema1.json",
-            headers={},
-            json={},
-            match_querystring=None,
-        )
+        responses.add("GET", uri, headers={}, json={}, match_querystring=None)
     elif failure_mode == "header_malformed":
         responses.add(
             "GET",
-            "https://example.com/schema1.json",
+            uri,
             headers={"Last-Modified": "Jan 2000 00:00:01"},
             json={},
             match_querystring=None,
@@ -274,47 +275,47 @@ def fake_mktime(*args):
         raise NotImplementedError
 
     original_file_contents = b'{"foo": "bar"}'
-    (cache_dir / "check_jsonschema" / "downloads").mkdir(parents=True)
-    f = cache_dir / "check_jsonschema" / "downloads" / "schema1.json"
+    file_path = get_download_cache_loc(uri)
 
+    assert not file_path.exists()
     if file_exists:
-        f.write_bytes(original_file_contents)
-    else:
-        assert not f.exists()
+        inject_cached_download(uri, original_file_contents)
 
-    cd = CacheDownloader().bind("https://example.com/schema1.json", filename=str(f))
+    cd = CacheDownloader().bind(uri)
 
     # if the file already existed, it will not be overwritten by the cachedownloader
     # so the returned value for both the downloader and a direct file read should be the
     # original contents
     if file_exists:
         with cd.open() as fp:
             assert fp.read() == original_file_contents
-        assert f.read_bytes() == original_file_contents
+        assert file_path.read_bytes() == original_file_contents
     # otherwise, the file will have been created with new content
     # both reads will show that new content
     else:
         with cd.open() as fp:
             assert fp.read() == b"{}"
-        assert f.read_bytes() == b"{}"
+        assert file_path.read_bytes() == b"{}"
 
     # at the end, the file always exists on disk
-    assert f.exists()
+    assert file_path.exists()
 
 
-def test_cachedownloader_validation_is_not_invoked_on_hit(monkeypatch, cache_dir):
+def test_cachedownloader_validation_is_not_invoked_on_hit(
+    monkeypatch, inject_cached_download
+):
     """
     Regression test for https://github.com/python-jsonschema/check-jsonschema/issues/453
 
     This was a bug in which the validation callback was invoked eagerly, even on a cache
     hit. As a result, cache hits did not demonstrate their expected performance gain.
     """
+    uri = "https://example.com/schema1.json"
+
     # 1: construct some perfectly good data (it doesn't really matter what it is)
     add_default_response()
     # 2: put equivalent data on disk
-    (cache_dir / "check_jsonschema" / "downloads").mkdir(parents=True)
-    f = cache_dir / "check_jsonschema" / "downloads" / "schema1.json"
-    f.write_text("{}")
+    inject_cached_download(uri, "{}")
 
     # 3: construct a validator which marks that it ran in a variable
     validator_ran = False
@@ -327,7 +328,6 @@ def dummy_validate_bytes(data):
     # and use the above validation method
     cd = CacheDownloader().bind(
         "https://example.com/schema1.json",
-        filename=str(f),
         validation_callback=dummy_validate_bytes,
     )
 
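
The updated tests rely on two fixtures whose definitions are not part of this diff and presumably live in the suite's conftest.py: get_download_cache_loc, which maps a schema URI (or bare filename) to its expected location in the download cache, and inject_cached_download, which pre-populates that location with given bytes or text. The following is a minimal sketch of what they might look like, assuming the existing cache_dir fixture and the cache_dir / "check_jsonschema" / "downloads" layout that the removed lines hard-coded; the exact signatures and filename derivation are assumptions, not taken from this diff.

# Hypothetical sketch (not part of this diff) of the fixtures used above.
import pytest


@pytest.fixture
def get_download_cache_loc(cache_dir):
    def _get(uri):
        # Assumption: downloads are cached under the same layout the removed
        # lines used, keyed by the last path segment of the URI or filename.
        return cache_dir / "check_jsonschema" / "downloads" / uri.split("/")[-1]

    return _get


@pytest.fixture
def inject_cached_download(get_download_cache_loc):
    def _inject(uri, content):
        # Pre-populate the cache as though a previous run had downloaded `uri`.
        path = get_download_cache_loc(uri)
        path.parent.mkdir(parents=True, exist_ok=True)
        if isinstance(content, bytes):
            path.write_bytes(content)
        else:
            path.write_text(content)

    return _inject

Centralizing the path logic in fixtures like these means the tests no longer hard-code the cache layout, so a later change to where downloads are stored only needs to touch the fixtures, not every test.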