import contextlib
import fileinput
import os
import re
import signal
import sys
import threading
import typing
import urllib.parse

from github_job_summary import JobSummary
from subdomains import Subdomains
from curl_wrapper import CurlExitCodes
from url_checker import UrlChecker
1513
1614"""
2220
# Timeout in seconds for joining background work before shutdown.
# NOTE(review): the name suggests a thread join timeout (this file imports
# threading), but the consumer is outside this chunk — confirm against usage.
JOIN_TIMEOUT_SEC: typing.Final[int] = 120
2422
# Per-URL expected (curl exit code, HTTP status) pairs, passed to
# UrlChecker(expectations=...) in main(). These endpoints intentionally
# answer with non-2xx statuses (e.g. a token endpoint rejecting a bare GET),
# so a CurlExitCodes.HTTP_RETURNED_ERROR with the listed status presumably
# counts as "URL is alive" rather than broken — confirm exact semantics in
# UrlChecker. The status component is Optional so an entry can match on the
# curl exit code alone.
EXIT_CODE_EXPECTATIONS: dict[str, tuple[int, int | None]] = {
    "https://api.aspose.cloud/connect/token": (CurlExitCodes.HTTP_RETURNED_ERROR, 400),
    "https://api.aspose.cloud/v3.0": (CurlExitCodes.HTTP_RETURNED_ERROR, 404),
    "https://api.aspose.cloud/v4.0": (CurlExitCodes.HTTP_RETURNED_ERROR, 404),
    "https://api.aspose.cloud/v4.0/": (CurlExitCodes.HTTP_RETURNED_ERROR, 404),
    "https://id.aspose.cloud/connect/token": (CurlExitCodes.HTTP_RETURNED_ERROR, 400),
    # TODO: Temporary fix
    "https://dashboard.aspose.cloud/applications": (CurlExitCodes.HTTP_RETURNED_ERROR, 404),
}
3432
3533REGEX_TO_IGNORE : list [re .Pattern [str ]] = [
@@ -159,7 +157,7 @@ def text_extractor(files: list[str]) -> typing.Generator[tuple[str, str], None,
159157
160158def main (files : list [str ]) -> int :
161159 url_checker = UrlChecker (
162- expectations = CURL_EXIT_CODES_AND_HTTP_CODES ,
160+ expectations = EXIT_CODE_EXPECTATIONS ,
163161 )
164162
165163 with url_checker .start () as checker :
@@ -174,8 +172,8 @@ def main(files: list[str]) -> int:
174172 if res .ok :
175173 JOB_SUMMARY .add_success (res .url )
176174 else :
177- files = EXTRACTED_URLS_WITH_FILES .get (res .url , [])
178- JOB_SUMMARY .add_error (f"Broken URL '{ res .url } ': { res .stderr } Files: { files } " )
175+ src_files = EXTRACTED_URLS_WITH_FILES .get (res .url , [])
176+ JOB_SUMMARY .add_error (f"Broken URL '{ res .url } ': { res .stderr } Files: { src_files } " )
179177
180178 JOB_SUMMARY .finalize ("Checked {total} failed **{failed}**\n Good={success}" )
181179 if JOB_SUMMARY .has_errors :
0 commit comments