@@ -8,7 +8,7 @@
 from codeflash.cli_cmds.console import logger
 from codeflash.code_utils.code_utils import custom_addopts, get_run_tmp_file
 from codeflash.code_utils.compat import IS_POSIX, SAFE_SYS_EXECUTABLE
-from codeflash.code_utils.config_consts import get_total_looping_time
+from codeflash.code_utils.config_consts import TOTAL_LOOPING_TIME_EFFECTIVE
 from codeflash.code_utils.coverage_utils import prepare_coverage_files
 from codeflash.models.models import TestFiles, TestType

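The first hunk swaps a helper-function import for a module-level constant. A minimal sketch of how `config_consts.py` might relate the two names (the env-var override and the 10-second baseline are illustrative assumptions, not the repo's actual values):

import os

TOTAL_LOOPING_TIME: float = 10.0  # assumed baseline looping budget, in seconds

def get_total_looping_time() -> float:
    # old helper: re-reads an optional override on every call
    override = os.environ.get("CODEFLASH_TOTAL_LOOPING_TIME")  # hypothetical env var
    return float(override) if override else TOTAL_LOOPING_TIME

# new constant: the override is resolved once, at import time
TOTAL_LOOPING_TIME_EFFECTIVE: float = get_total_looping_time()

With the value frozen into a constant, every default that references it is identical and cheap to read, at the cost of not reflecting changes made after import.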
@@ -37,7 +37,6 @@ def run_behavioral_tests(
     pytest_timeout: int | None = None,
     pytest_cmd: str = "pytest",
     verbose: bool = False,
-    pytest_target_runtime_seconds: int = get_total_looping_time(),
     enable_coverage: bool = False,
 ) -> tuple[Path, subprocess.CompletedProcess, Path | None, Path | None]:
     if test_framework == "pytest":
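Dropping `pytest_target_runtime_seconds: int = get_total_looping_time()` also removes a classic Python pitfall: default values are evaluated once, when the `def` statement executes, not on each call. A self-contained illustration of the language rule (unrelated to the codeflash code itself):

import time

def stamp(created_at: float = time.time()):  # evaluated once, at definition time
    return created_at

first = stamp()
time.sleep(0.1)
second = stamp()
assert first == second  # both calls see the same frozen default

Whether this pitfall motivated the removal is an assumption; either way the parameter apparently had no effect on the behavioral run, since the loop flags below are pinned to 1.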
@@ -66,7 +65,6 @@ def run_behavioral_tests(
         "--codeflash_loops_scope=session",
         "--codeflash_min_loops=1",
         "--codeflash_max_loops=1",
-        f"--codeflash_seconds={pytest_target_runtime_seconds}",  # TODO : This is unnecessary, update the plugin to not ask for this
     ]

     result_file_path = get_run_tmp_file(Path("pytest_results.xml"))
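The `--codeflash_seconds` flag is dropped from the behavioral command line; the inline TODO notes the plugin should stop requiring it. A hedged sketch of what the plugin-side counterpart could look like (hypothetical conftest/plugin code; only the option names are taken from the flags above):

def pytest_addoption(parser):
    group = parser.getgroup("codeflash")
    group.addoption("--codeflash_loops_scope", default="session")
    group.addoption("--codeflash_min_loops", type=int, default=1)
    group.addoption("--codeflash_max_loops", type=int, default=1)
    # a default here lets callers omit --codeflash_seconds entirely
    group.addoption("--codeflash_seconds", type=float, default=10.0)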
@@ -151,7 +149,6 @@ def run_line_profile_tests(
     cwd: Path,
     test_framework: str,
     *,
-    pytest_target_runtime_seconds: float = get_total_looping_time(),
     verbose: bool = False,
     pytest_timeout: int | None = None,
     pytest_min_loops: int = 5,  # noqa: ARG001
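Because the bare `*` makes every following parameter keyword-only, deleting `pytest_target_runtime_seconds` from this signature cannot silently reshuffle positional arguments at call sites; any stale caller fails loudly instead. A short demonstration of the rule:

def f(a, *, b=1, c=2):
    return a + b + c

f(1, c=5)   # fine: keyword-only arguments are passed by name
# f(1, 5)   # TypeError: f() takes 1 positional argument but 2 were given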
@@ -186,7 +183,6 @@ def run_line_profile_tests(
         "--codeflash_loops_scope=session",
         "--codeflash_min_loops=1",
         "--codeflash_max_loops=1",
-        f"--codeflash_seconds={pytest_target_runtime_seconds}",
     ]
     result_file_path = get_run_tmp_file(Path("pytest_results.xml"))
     result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"]
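For context, the surviving result args enable pytest's built-in JUnit XML report, and `-o junit_logging=all` embeds captured stdout/stderr/log output in that XML. A minimal sketch of executing such an arg list (the command assembly and the `tests/` path are assumptions; the real function builds a much larger command):

import subprocess
from pathlib import Path

result_file_path = Path("/tmp/pytest_results.xml")  # stand-in for get_run_tmp_file(...)
result_args = [f"--junitxml={result_file_path.as_posix()}", "-o", "junit_logging=all"]

completed = subprocess.run(
    ["python", "-m", "pytest", *result_args, "tests/"],
    capture_output=True,
    text=True,
    check=False,  # a non-zero exit code just means failing tests, not a crash
)
print(completed.returncode)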
@@ -237,7 +233,7 @@ def run_benchmarking_tests(
     cwd: Path,
     test_framework: str,
     *,
-    pytest_target_runtime_seconds: float = get_total_looping_time(),
+    pytest_target_runtime_seconds: float = TOTAL_LOOPING_TIME_EFFECTIVE,
     verbose: bool = False,
     pytest_timeout: int | None = None,
     pytest_min_loops: int = 5,
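Unlike the other two functions, `run_benchmarking_tests` keeps the knob and only changes its default to the precomputed constant, so per-run overrides still work. A trimmed-down, runnable stand-in showing the resulting behavior (the 10.0 value is an assumption):

TOTAL_LOOPING_TIME_EFFECTIVE = 10.0  # assumed value, resolved once at import time

def run_benchmarking_tests(*, pytest_target_runtime_seconds: float = TOTAL_LOOPING_TIME_EFFECTIVE) -> float:
    # stand-in for the real function: just report the time budget it would use
    return pytest_target_runtime_seconds

assert run_benchmarking_tests() == 10.0
assert run_benchmarking_tests(pytest_target_runtime_seconds=2.5) == 2.5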