PROW_URL = ""
final_job_list = []

+def request_url(url, verify=False, timeout=15):
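+    '''
+    GET the given url, retrying up to 3 times with a 2-second delay between
+    attempts. Returns the successful response; HTTP error statuses and request
+    failures are retried and the last exception is re-raised once retries run out.
+    '''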
+    retries = 3
+    delay = 2
+    for attempt in range(1, retries + 1):
+        try:
+            response = requests.get(url, verify=verify, timeout=timeout)
+            response.raise_for_status()
+            return response  # Success
+        except (requests.RequestException, requests.Timeout) as e:
+            print(f"Attempt {attempt} failed: {e}")
+            if attempt < retries:
+                print(f"Retrying in {delay} seconds...")
+                time.sleep(delay)
+            else:
+                print("Max retries reached. Request failed.")
+                raise
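+# Illustrative usage only (hypothetical job path, not part of this change):
+#   resp = request_url(constants.PROW_VIEW_URL + "/<spy-link>/finished.json", verify=False, timeout=15)
+#   result = json.loads(resp.text)["result"]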

def fetch_release_date(release):
    '''
@@ -21,10 +38,10 @@ def fetch_release_date(release):

    try:
        url = constants.STABLE_RELEASE_URL + release
-        response = requests.get(url, verify=False, timeout=15)
+        response = request_url(url, verify=False, timeout=15)
        if response.status_code == 404:
            url = constants.DEV_PREVIEW_RELEASE_URL + release
-            response = requests.get(url, verify=False, timeout=15)
+            response = request_url(url, verify=False, timeout=15)
            if response.status_code == 404:
                print(f"Failed to get the release page. {response.text}")
                sys.exit(1)
@@ -42,7 +59,7 @@ def fetch_release_date(release):
        a_tag = form.find("a", href=True)
        if a_tag and "changelog" in a_tag["href"]:
            changelog_url = constants.RELEASE_BASE_URL + a_tag["href"]
-            changelog_resp = requests.get(changelog_url, verify=False, timeout=15)
+            changelog_resp = request_url(changelog_url, verify=False, timeout=15)
            if changelog_resp.status_code == 200:
                lines = changelog_resp.text.splitlines()
                for line in lines:
@@ -66,7 +83,7 @@ def fetch_build_time(url):
    '''
    Returns the created time (HH:MM) and date (YYYY-MM-DD) of the release in IST
    '''
-    response = requests.get(url, verify=False, timeout=15)
+    response = request_url(url, verify=False, timeout=15)
    response.raise_for_status()
    buildtime = json.loads(response.text)
    timestamp_str = buildtime["metadata"]["creationTimestamp"]
@@ -147,7 +164,7 @@ def get_jobs(prow_link):
    url = PROW_URL + prow_link

    try:
-        response = requests.get(url, verify=False, timeout=15)
+        response = request_url(url, verify=False, timeout=15)

        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
@@ -203,7 +220,7 @@ def get_n_recent_jobs(prow_link,n):
    url = PROW_URL + prow_link

    try:
-        response = requests.get(url, verify=False, timeout=15)
+        response = request_url(url, verify=False, timeout=15)

        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
@@ -252,7 +269,7 @@ def check_job_status(spy_link):
    '''
    job_status_url = constants.PROW_VIEW_URL + spy_link[8:] + '/finished.json'
    try:
-        response = requests.get(job_status_url, verify=False, timeout=15)
+        response = request_url(job_status_url, verify=False, timeout=15)
        if response.status_code == 200:
            cluster_status = json.loads(response.text)
            return cluster_status["result"]
@@ -282,14 +299,14 @@ def cluster_deploy_status(spy_link):
    mce_install_log_url = constants.PROW_VIEW_URL + spy_link[8:] + '/artifacts/' + job_type + '/hypershift-mce-install/finished.json'

    try:
-        response = requests.get(mce_install_log_url, verify=False, timeout=15)
+        response = request_url(mce_install_log_url, verify=False, timeout=15)
        if response.status_code == 200:
            cluster_status = json.loads(response.text)
            cluster_result = "MCE-INSTALL " + cluster_status["result"]
            if cluster_status["result"] == "SUCCESS":
                # check mce-power-create status also
                mce_power_log_url = constants.PROW_VIEW_URL + spy_link[8:] + '/artifacts/' + job_type + '/hypershift-mce-power-create-nodepool/finished.json'
-                response = requests.get(mce_power_log_url, verify=False, timeout=15)
+                response = request_url(mce_power_log_url, verify=False, timeout=15)
                if response.status_code == 200:
                    cluster_status = json.loads(response.text)
                    cluster_result += "\n MCE-POWER-CREATE " + cluster_status["result"]
@@ -321,7 +338,7 @@ def cluster_deploy_status(spy_link):
    job_log_url = constants.PROW_VIEW_URL + spy_link[8:] + '/artifacts/' + job_type + '/upi-install-' + job_platform + '/finished.json'

    try:
-        response = requests.get(job_log_url, verify=False, timeout=15)
+        response = request_url(job_log_url, verify=False, timeout=15)
        if response.status_code == 200:

            cluster_status = json.loads(response.text)
@@ -348,7 +365,7 @@ def cluster_creation_error_analysis(spylink):
    job_log_url = constants.PROW_VIEW_URL + spylink[8:] + '/artifacts/' + job_type + '/ipi-install-' + job_platform + '-install/build-log.txt'

    try:
-        response = requests.get(job_log_url, verify=False)
+        response = request_url(job_log_url, verify=False)

        if response.status_code == 200:

@@ -392,7 +409,7 @@ def check_if_gather_libvirt_dir_exists(spy_link,job_type):
    base_artifacts_dir_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type

    try:
-        response = requests.get(base_artifacts_dir_url, verify=False, timeout=15)
+        response = request_url(base_artifacts_dir_url, verify=False, timeout=15)
        gather_libvirt_dir_re = re.compile('gather-libvirt')
        gather_libvirt_dir_re_match = gather_libvirt_dir_re.search(response.text, re.MULTILINE | re.DOTALL)

@@ -409,7 +426,7 @@ def check_if_gather_libvirt_dir_exists(spy_link,job_type):
def check_hypervisor_error(spy_link):
    build_log_url = constants.PROW_VIEW_URL + spy_link[8:] + '/build-log.txt'
    try:
-        response = requests.get(build_log_url, verify=False, timeout=15)
+        response = request_url(build_log_url, verify=False, timeout=15)
        hypervisor_re = re.compile(constants.HYPERVISOR_CONNECTION_ERROR)
        hypervisor_re_match = hypervisor_re.search(response.text)
        if hypervisor_re_match is not None:
@@ -427,17 +444,11 @@ def check_if_sensitive_info_exposed(spy_link):

    build_log_url = constants.PROW_VIEW_URL + spy_link[8:] + '/build-log.txt'
    try:
-        response = requests.get(build_log_url, verify=False, timeout=15)
-        senstive_info_re = re.compile('This file contained potentially sensitive information and has been removed.')
-        senstive_info_re_match = senstive_info_re.search(response.text)
-        if senstive_info_re_match is not None:
-            return True
-        else:
-            return False
-    except requests.Timeout:
-        return "Request timed out"
-    except requests.RequestException:
-        return "Error while sending request to url"
+        response = request_url(build_log_url, verify=False, timeout=15)
+        sensitive_info_re = re.compile('This file contained potentially sensitive information and has been removed.')
+        return bool(sensitive_info_re.search(response.text))
+    except (requests.Timeout, requests.RequestException) as e:
+        raise RuntimeError(f"Error in check_if_sensitive_info_exposed: {e}")

def get_node_status(spy_link):

@@ -470,7 +481,7 @@ def get_node_status(spy_link):


    try:
-        node_log_response = requests.get(node_log_url, verify=False, timeout=15)
+        node_log_response = request_url(node_log_url, verify=False, timeout=15)
        if "NAME" in node_log_response.text:
            if version > 4.15 and job_platform == "libvirt":
                workers = "compute-"
@@ -506,7 +517,7 @@ def check_node_crash(spy_link):
    crash_log_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/ipi-conf-debug-kdump-gather-logs/artifacts/"

    try:
-        crash_log_response = requests.get(crash_log_url, verify=False, timeout=15)
+        crash_log_response = request_url(crash_log_url, verify=False, timeout=15)
        if "kdump.tar" in crash_log_response.text:
            print("*********************************")
            print("ERROR- Crash observed in the job")
@@ -594,7 +605,7 @@ def get_quota_and_nightly(spy_link):
    build_log_url = constants.PROW_VIEW_URL + spy_link[8:] + "/build-log.txt"
    for attempt in range(1, max_retries + 1):
        try:
-            build_log_response = requests.get(build_log_url, verify=False, timeout=15)
+            build_log_response = request_url(build_log_url, verify=False, timeout=15)
            if 'ppc64le' in spy_link:
                if job_platform == "libvirt":
                    job_platform += "-ppc64le-s2s"
@@ -695,7 +706,7 @@ def get_failed_monitor_testcases(spy_link,job_type):
    test_log_junit_dir_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/openshift-e2e-libvirt-test/artifacts/junit/"

    try:
-        response = requests.get(test_log_junit_dir_url, verify=False, timeout=15)
+        response = request_url(test_log_junit_dir_url, verify=False, timeout=15)

        if response.status_code == 200:
            monitor_test_failure_summary_filename_re = re.compile(r'(test-failures-summary_monitor_2[^.]*\.json)')
@@ -704,7 +715,7 @@ def get_failed_monitor_testcases(spy_link,job_type):
            if monitor_test_failure_summary_filename_match is not None:
                monitor_test_failure_summary_filename_str = monitor_test_failure_summary_filename_match.group(1)
                test_log_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/openshift-e2e-libvirt-test/artifacts/junit/" + monitor_test_failure_summary_filename_str
-                response_2 = requests.get(test_log_url, verify=False, timeout=15)
+                response_2 = request_url(test_log_url, verify=False, timeout=15)
                if response_2.status_code == 200:
                    data = response_2.json()
                    for tc in data['Tests']:
@@ -745,7 +756,7 @@ def get_failed_monitor_testcases_from_xml(spy_link,job_type):
    test_type = "openshift-e2e-libvirt-test"
    test_log_junit_dir_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/" + test_type + "/artifacts/junit/"
    try:
-        response = requests.get(test_log_junit_dir_url, verify=False, timeout=15)
+        response = request_url(test_log_junit_dir_url, verify=False, timeout=15)

        if response.status_code == 200:
            test_failure_summary_filename_re = re.compile(r'(e2e-monitor-tests__2[^.]*\.xml)')
@@ -754,7 +765,7 @@ def get_failed_monitor_testcases_from_xml(spy_link,job_type):
            if test_failure_summary_filename_match is not None:
                test_failure_summary_filename_str = test_failure_summary_filename_match.group(1)
                test_log_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/" + test_type + "/artifacts/junit/" + test_failure_summary_filename_str
-                response = requests.get(test_log_url, verify=False, timeout=15)
+                response = request_url(test_log_url, verify=False, timeout=15)
                if response.status_code == 200:
                    root = ET.fromstring(response.content)
                    for idx, testcase in enumerate(root.iter('testcase')):
@@ -838,7 +849,7 @@ def get_failed_e2e_testcases(spy_link,job_type):
    test_type = "openshift-e2e-libvirt-test"
    test_log_junit_dir_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/" + test_type + "/artifacts/junit/"
    try:
-        response = requests.get(test_log_junit_dir_url, verify=False, timeout=15)
+        response = request_url(test_log_junit_dir_url, verify=False, timeout=15)

        if response.status_code == 200:
            test_failure_summary_filename_re = re.compile(r'(test-failures-summary_2[^.]*\.json)')
@@ -847,7 +858,7 @@ def get_failed_e2e_testcases(spy_link,job_type):
            if test_failure_summary_filename_match is not None:
                test_failure_summary_filename_str = test_failure_summary_filename_match.group(1)
                test_log_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/" + test_type + "/artifacts/junit/" + test_failure_summary_filename_str
-                response_2 = requests.get(test_log_url, verify=False, timeout=15)
+                response_2 = request_url(test_log_url, verify=False, timeout=15)
                if response_2.status_code == 200:
                    data = response_2.json()
                    for tc in data['Tests']:
@@ -893,13 +904,13 @@ def get_junit_symptom_detection_testcase_failures(spy_link,job_type):
    test_log_junit_dir_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/"
    symptom_detection_failed_testcase = []
    try:
-        response = requests.get(test_log_junit_dir_url, verify=False, timeout=15)
+        response = request_url(test_log_junit_dir_url, verify=False, timeout=15)
        if response.status_code == 200:
            junit_failure_summary_filename_re = re.compile('junit_symptoms.xml')
            junit_failure_summary_filename_match = junit_failure_summary_filename_re.search(response.text, re.MULTILINE | re.DOTALL)
            if junit_failure_summary_filename_match is not None:
                test_log_junit_url = constants.PROW_VIEW_URL + spy_link[8:] + "/artifacts/" + job_type + "/artifacts/junit/junit_symptoms.xml"
-                response_2 = requests.get(test_log_junit_url, verify=False, timeout=15)
+                response_2 = request_url(test_log_junit_url, verify=False, timeout=15)
                root = ET.fromstring(response_2.content)
                for testcase in root.findall('.//testcase'):
                    testcase_name = testcase.get('name')
@@ -982,7 +993,7 @@ def check_ts_exe_status(spylink,jobtype):
    test_type = "openshift-e2e-libvirt-test"
    test_exe_status_url = constants.PROW_VIEW_URL + spylink[8:] + "/artifacts/" + jobtype + "/" + test_type + "/finished.json"
    try:
-        response = requests.get(test_exe_status_url, verify=False, timeout=15)
+        response = request_url(test_exe_status_url, verify=False, timeout=15)
        if response.status_code == 200:
            cluster_status = json.loads(response.text)
            return cluster_status["result"]
@@ -1102,7 +1113,7 @@ def get_jobs_with_date(prowci_url,start_date,end_date):
    url = PROW_URL + prowci_url

    try:
-        response = requests.get(url, verify=False, timeout=15)
+        response = request_url(url, verify=False, timeout=15)


        if response.status_code == 200:
@@ -1183,7 +1194,7 @@ def get_next_page_first_build_date(ci_next_page_spylink,end_date):
    ci_next_page_link = PROW_URL + ci_next_page_spylink

    try:
-        response = requests.get(ci_next_page_link, verify=False, timeout=15)
+        response = request_url(ci_next_page_link, verify=False, timeout=15)
        if response.status_code == 200:
            soup = BeautifulSoup(response.text, 'html.parser')
            script_elements = soup.find_all('script')
@@ -1262,7 +1273,11 @@ def get_brief_job_info(build_list,prow_ci_name,zone=None,job_filter='All'):
            continue
        build_status = check_job_status(build)
        cluster_status = cluster_deploy_status(build)
-        sensitive_info_expose_status = check_if_sensitive_info_exposed(build)
+        try:
+            sensitive_info_expose_status = check_if_sensitive_info_exposed(build)
+        except RuntimeError as e:
+            print(f"Warning: Could not check sensitive info exposure: {e}")
+            sensitive_info_expose_status = False
        i = i + 1
        job_dict = {}
        job_dict["Job"] = prow_ci_name
@@ -1340,6 +1355,18 @@ def get_detailed_job_info(build_list, prow_ci_name, zone=None, job_filter="all")
            print("Build start time:", time)
        except (requests.exceptions.RequestException, KeyError, ValueError) as e:
            print("Error fetching build time:", e)
1358+ < << << << HEAD
1359+ == == == =
1360+ build_status = check_job_status (build )
1361+ try :
1362+ sensitive_info_expose_status = check_if_sensitive_info_exposed (build )
1363+ except RuntimeError as e :
1364+ print (f"Warning: Could not check sensitive info exposure: { e } " )
1365+ sensitive_info_expose_status = False # or decide what default behavior you want
1366+ << << << < Updated upstream
1367+ >> >> > >> 2 bb7328 (refactor exception handling )
1368+ == == == =
1369+ >> >> >> > Stashed changes
13431370
13441371 build_status = check_job_status (build )
13451372 sensitive_info_expose_status = check_if_sensitive_info_exposed (build )