Commit 0ab5fca

marcorudolphflex authored and yaugenst-flex committed
feat(tidy3d): FXC-3886-cache-logging-verbosity-control
1 parent be656b0 · commit 0ab5fca

File tree

3 files changed: +153 -49 lines changed
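
The commit threads one verbose flag through the cache-aware entry points touched below: tidy3d.web.run, load_simulation_if_cached, and the Batch upload/monitor paths. A minimal usage sketch of the resulting behavior (hedged: sim and the task name are illustrative placeholders, not part of this commit):

    from tidy3d.web import run
    from tidy3d.web.api.webapi import load_simulation_if_cached

    # sim = td.Simulation(...)  # placeholder: any simulation you would normally submit

    # First run seeds the local cache; verbose=True logs progress to the console.
    sim_data = run(sim, task_name="cache_demo", verbose=True)

    # Later lookups can stay quiet with verbose=False; with verbose=True a
    # "Loading simulation from local cache ..." line (with a web-UI link) is logged.
    cached = load_simulation_if_cached(sim, verbose=False)
    if cached is None:
        cached = run(sim, task_name="cache_demo", verbose=False)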

tests/test_web/test_local_cache.py

Lines changed: 83 additions & 1 deletion

@@ -1,20 +1,26 @@
 from __future__ import annotations

+import importlib
+import io
 import os
+import re
 from pathlib import Path
+from types import SimpleNamespace

 import pytest
+from rich.console import Console

 import tidy3d as td
 from tests.test_components.autograd.test_autograd import ALL_KEY, get_functions, params0
 from tests.test_web.test_webapi_mode import make_mode_sim
 from tidy3d import config
 from tidy3d.config import get_manager
-from tidy3d.web import Job, common, run_async
+from tidy3d.web import Job, common, run, run_async
 from tidy3d.web.api import webapi as web
 from tidy3d.web.api.container import Batch, WebContainer
 from tidy3d.web.api.webapi import load_simulation_if_cached
 from tidy3d.web.cache import CACHE_ARTIFACT_NAME, clear, get_cache_entry_dir, resolve_local_cache
+from tidy3d.web.core.task_core import BatchTask

 common.CONNECTION_RETRY_TIME = 0.1

@@ -139,6 +145,8 @@ def _fake_status(self):
     monkeypatch.setattr(
         web, "load_simulation", lambda task_id, *args, **kwargs: TASK_TO_SIM[task_id]
     )
+    monkeypatch.setattr(BatchTask, "is_batch", lambda *args, **kwargs: "success")
+    monkeypatch.setattr(BatchTask, "detail", lambda *args: SimpleNamespace(status="success"))
     return counters

@@ -267,6 +275,79 @@ def _test_run_cache_hit_async(monkeypatch, basic_simulation, tmp_path):
     assert len(cache) == 3


+def _test_verbosity(monkeypatch, basic_simulation):
+    _CSI_RE = re.compile(r"\x1b\[[0-?]*[ -/]*[@-~]")  # ANSI CSI
+    _OSC8_RE = re.compile(r"\x1b\]8;.*?(?:\x1b\\|\x07)", re.DOTALL)  # OSC-8 hyperlinks
+
+    def _normalize_console_text(s: str) -> str:
+        s = _OSC8_RE.sub("", s)  # drop hyperlinks
+        s = _CSI_RE.sub("", s)  # drop colors, etc.
+        s = s.replace("\r", "")
+        s = re.sub(r"[ \t]+", " ", s)  # collapse runs of spaces/tabs
+        return s.strip()
+
+    buf = io.StringIO()
+    test_console = Console(file=buf, force_terminal=True)
+
+    counters = _patch_run_pipeline(monkeypatch)
+    cache = resolve_local_cache(use_cache=True)
+    cache.clear()
+    _reset_fake_maps()
+    _reset_counters(counters)
+    sim2 = basic_simulation.updated_copy(shutoff=1e-4)
+    sim3 = basic_simulation.updated_copy(shutoff=1e-3)
+
+    run(basic_simulation, verbose=True)  # seed cache
+
+    log_mod = importlib.import_module("tidy3d.log")
+
+    # --- swap handler console (and restore later) ---
+    if "console" not in log_mod.log.handlers:
+        log_mod.set_logging_console()
+    orig_console = log_mod.log.handlers["console"].console
+    log_mod.log.handlers["console"].console = test_console
+    try:
+        buf.truncate(0)
+        buf.seek(0)
+
+        log_mod.get_logging_console().log("test")
+        assert "test" in buf.getvalue()
+
+        buf.truncate(0)
+        buf.seek(0)
+
+        # test for load_simulation_if_cached
+        sim_data = load_simulation_if_cached(basic_simulation, verbose=True)
+        assert sim_data is not None
+        assert "Loading simulation from" in buf.getvalue(), (
+            f"Expected 'Loading simulation from' in log, got '{buf.getvalue()}'"
+        )
+
+        buf.truncate(0)
+        buf.seek(0)
+        load_simulation_if_cached(basic_simulation, verbose=False)
+        assert sim_data is not None
+        assert buf.getvalue().strip() == "", f"Expected empty log, got '{buf.getvalue()}'"
+
+        # test for batched runs
+        buf.truncate(0)
+        buf.seek(0)
+        run([basic_simulation, sim3], verbose=True)
+        txt = _normalize_console_text(buf.getvalue())
+        assert "Got 1 simulation from cache" in txt, (
+            f"Expected 'Got 1 simulation from cache' in log, got '{buf.getvalue()}'"
+        )
+
+        buf.truncate(0)
+        buf.seek(0)
+        run([basic_simulation, sim2], verbose=False)
+        assert buf.getvalue().strip() == "", f"Expected empty log, got '{buf.getvalue()}'"
+
+    finally:
+        # explicit restore so nothing leaks
+        log_mod.log.handlers["console"].console = orig_console
+
+
 def _test_job_run_cache(monkeypatch, basic_simulation, tmp_path):
     counters = _patch_run_pipeline(monkeypatch)
     cache = resolve_local_cache(use_cache=True)

@@ -436,3 +517,4 @@ def test_cache_sequential(monkeypatch, tmp_path, tmp_path_factory, basic_simulat
     _test_autograd_cache(monkeypatch)
     _test_configure_cache_roundtrip(monkeypatch, tmp_path)
     _test_mode_solver_caching(monkeypatch, tmp_path)
+    _test_verbosity(monkeypatch, basic_simulation)
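
A side note on the capture strategy in the new test: the Rich console is created with force_terminal=True, so the captured output carries ANSI styling and OSC-8 hyperlink escapes and may be re-wrapped, which is why the normalization helper runs before the substring asserts. A standalone sketch of the same idea (independent of tidy3d, using only rich and the standard library):

    import io
    import re

    from rich.console import Console

    # Capture Rich output in-memory; force_terminal=True keeps escape sequences.
    buf = io.StringIO()
    console = Console(file=buf, force_terminal=True)
    console.log("[bold]Got 1 simulation from cache.[/bold]")

    raw = buf.getvalue()
    plain = re.sub(r"\x1b\[[0-?]*[ -/]*[@-~]", "", raw)  # strip CSI (color/style) codes
    plain = re.sub(r"[ \t]+", " ", plain).strip()  # collapse whitespace
    assert "Got 1 simulation from cache." in plain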

tidy3d/web/api/container.py

Lines changed: 61 additions & 45 deletions

@@ -18,7 +18,14 @@

 import pydantic.v1 as pd
 from pydantic.v1 import PrivateAttr
-from rich.progress import BarColumn, Progress, TaskProgressColumn, TextColumn, TimeElapsedColumn
+from rich.progress import (
+    BarColumn,
+    Progress,
+    TaskID,
+    TaskProgressColumn,
+    TextColumn,
+    TimeElapsedColumn,
+)

 from tidy3d.components.base import Tidy3dBaseModel, cached_property
 from tidy3d.components.mode.mode_solver import ModeSolver

@@ -331,7 +338,7 @@ def load_if_cached(self) -> bool:
             simulation=self.simulation,
             path=stash_path,
             reduce_simulation=self.reduce_simulation,
-            verbose=getattr(self, "verbose", True),
+            verbose=self.verbose,
         )

         if restored is None:

@@ -945,27 +952,42 @@ def upload(self) -> None:
         """Upload a series of tasks associated with this ``Batch`` using multi-threading."""
         self._check_folder(self.folder_name)
         with ThreadPoolExecutor(max_workers=self.num_workers) as executor:
-            futures = [executor.submit(job.upload) for _, job in self.jobs.items()]
+            jobs_from_cache = [job for job in self.jobs.values() if job.load_if_cached]
+            jobs_to_upload = [job for job in self.jobs.values() if not job.load_if_cached]
+            futures = [executor.submit(job.upload) for job in jobs_to_upload]

             # progressbar (number of tasks uploaded)
             if self.verbose:
                 console = get_logging_console()
+                n_cached = len(jobs_from_cache)
+                if n_cached > 0:
+                    console.log(
+                        f"Got {n_cached} simulation{'s' if n_cached > 1 else ''} from cache."
+                    )
+
+            if len(futures) == 0:  # got all jobs from cache
+                return
+
+            if self.verbose:
                 progress_columns = (
                     TextColumn("[progress.description]{task.description}"),
                     BarColumn(),
                     TaskProgressColumn(),
                     TimeElapsedColumn(),
                 )
                 with Progress(*progress_columns, console=console, transient=False) as progress:
-                    pbar_message = f"Uploading data for {self.num_jobs} tasks"
-                    pbar = progress.add_task(pbar_message, total=self.num_jobs)
+                    pbar_message = f"Uploading data for {len(jobs_to_upload)} task{'s' if len(jobs_to_upload) > 1 else ''}"
+                    pbar = progress.add_task(pbar_message, total=len(jobs_to_upload))
                     completed = 0
                     for _ in concurrent.futures.as_completed(futures):
                         completed += 1
                         progress.update(pbar, completed=completed)

                     progress.refresh()
                     time.sleep(BATCH_PROGRESS_REFRESH_TIME)
+            else:
+                for _ in concurrent.futures.as_completed(futures):
+                    pass

     def get_info(self) -> dict[TaskName, TaskInfo]:
         """Get information about each task in the :class:`Batch`.

@@ -1138,6 +1160,8 @@ def pbar_description(
         postprocess_started_tasks: set[str] = set()

         try:
+            console = None
+            progress_columns = []
             if self.verbose:
                 console = get_logging_console()
                 self.estimate_cost()

@@ -1152,34 +1176,38 @@ def pbar_description(
                     TimeElapsedColumn(),
                 )

-                with Progress(*progress_columns, console=console, transient=False) as progress:
-                    pbar_tasks: dict[str, int] = {}
-                    for task_name, job in self.jobs.items():
-                        schedule_download(job)
+            with Progress(
+                *progress_columns, console=console, transient=False, disable=not self.verbose
+            ) as progress:
+                pbar_tasks: dict[str, TaskID] = {}
+                for task_name, job in self.jobs.items():
+                    schedule_download(job)
+                    if self.verbose:
                         status = job.status
                         completed = STATE_PROGRESS_PERCENTAGE.get(status, 0)
                         desc = pbar_description(task_name, status, max_name_length, 0)
                         pbar_tasks[task_name] = progress.add_task(
                             desc, total=COMPLETED_PERCENT, completed=completed
                         )

-                    while any(check_continue_condition(job) for job in self.jobs.values()):
-                        for task_name, job in self.jobs.items():
-                            status = job.status
-
-                            # auto-start postprocess for modeler jobs when run finishes
-                            if (
-                                web._is_modeler_batch(job.task_id)
-                                and status == "run_success"
-                                and job.task_id not in postprocess_started_tasks
-                            ):
-                                job.postprocess_start(
-                                    worker_group=postprocess_worker_group, verbose=True
-                                )
-                                postprocess_started_tasks.add(job.task_id)
+                while any(check_continue_condition(job) for job in self.jobs.values()):
+                    for task_name, job in self.jobs.items():
+                        status = job.status
+
+                        # auto-start postprocess for modeler jobs when run finishes
+                        if (
+                            web._is_modeler_batch(job.task_id)
+                            and status == "run_success"
+                            and job.task_id not in postprocess_started_tasks
+                        ):
+                            job.postprocess_start(
+                                worker_group=postprocess_worker_group, verbose=True
+                            )
+                            postprocess_started_tasks.add(job.task_id)

-                            schedule_download(job)
+                        schedule_download(job)

+                        if self.verbose:
                             # choose display status & percent
                             if status != "run_success":
                                 display_status = status

@@ -1196,13 +1224,17 @@ def pbar_description(
                             pbar = pbar_tasks[task_name]
                             desc = pbar_description(task_name, display_status, max_name_length, 0)
                             progress.update(pbar, description=desc, completed=pct)
-
+                    if self.verbose:
                         progress.refresh()
                         time.sleep(BATCH_PROGRESS_REFRESH_TIME)
+                    else:
+                        time.sleep(web.REFRESH_TIME)

-                    # final render to terminal state for all bars
-                    for task_name, job in self.jobs.items():
-                        schedule_download(job)
+                # final render to terminal state for all bars
+                for task_name, job in self.jobs.items():
+                    schedule_download(job)
+
+                    if self.verbose:
                         status = job.status
                         if status != "run_success":
                             display_status = status

@@ -1222,25 +1254,9 @@ def pbar_description(
                         desc = pbar_description(task_name, display_status, max_name_length, 0)
                         progress.update(pbar, description=desc, completed=pct)

+                if self.verbose:
                     progress.refresh()
                     console.log("Batch complete.")
-            else:
-                # quiet mode
-                while any(check_continue_condition(job) for job in self.jobs.values()):
-                    for job in self.jobs.values():
-                        if (
-                            web._is_modeler_batch(job.task_id)
-                            and job.status == "run_success"
-                            and job.task_id not in postprocess_started_tasks
-                        ):
-                            job.postprocess_start(
-                                worker_group=postprocess_worker_group, verbose=False
-                            )
-                            postprocess_started_tasks.add(job.task_id)
-
-                        schedule_download(job)
-
-                    time.sleep(web.REFRESH_TIME)
         finally:
             if download_executor is not None:
                 try:
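
The monitor-path rewrite above replaces the separate quiet-mode loop with a single loop that passes disable=not self.verbose to rich's Progress, so the progress bars simply render nothing when verbosity is off. A self-contained sketch of that pattern (generic rich usage, not tidy3d code):

    import time

    from rich.progress import BarColumn, Progress, TextColumn

    def process(items, verbose: bool = True) -> None:
        # One code path for both modes: disable=True turns Progress into a no-op renderer.
        with Progress(
            TextColumn("[progress.description]{task.description}"),
            BarColumn(),
            disable=not verbose,
        ) as progress:
            task = progress.add_task("working", total=len(items))
            for _ in items:
                time.sleep(0.01)  # stand-in for real work
                progress.update(task, advance=1)

    process(range(5), verbose=False)  # prints nothing
    process(range(5), verbose=True)   # renders a progress bar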

tidy3d/web/api/webapi.py

Lines changed: 9 additions & 3 deletions

@@ -387,10 +387,10 @@ def restore_simulation_if_cached(
     cached_task_id = entry.metadata.get("task_id")
     cached_workflow_type = entry.metadata.get("workflow_type")
     if cached_task_id is not None and cached_workflow_type is not None and verbose:
-        console = get_logging_console() if verbose else None
+        console = get_logging_console()
         url, _ = _get_task_urls(cached_workflow_type, simulation, cached_task_id)
         console.log(
-            f"Loaded simulation from local cache.\nView cached task using web UI at [link={url}]'{url}'[/link]."
+            f"Loading simulation from local cache. View cached task using web UI at [link={url}]'{url}'[/link]."
         )
     return retrieved_simulation_path

@@ -399,6 +399,7 @@ def load_simulation_if_cached(
     simulation: WorkflowType,
     path: Optional[PathLike] = None,
     reduce_simulation: Literal["auto", True, False] = "auto",
+    verbose: bool = True,
 ) -> Optional[WorkflowDataType]:
     """
     Load simulation results directly from the local cache, if available.

@@ -412,17 +413,22 @@ def load_simulation_if_cached(
     reduce_simulation : Literal["auto", True, False] = "auto"
         Whether to use a reduced simulation when checking the cache. If "auto",
         reduction is applied automatically for mode solvers.
+    verbose : bool = True
+        If True, logs a message including a link to the cached task in the web UI on loading.

     Returns
     -------
     Optional[WorkflowDataType]
         The loaded simulation data if found in cache, otherwise None.
     """
-    restored_path = restore_simulation_if_cached(simulation, path, reduce_simulation)
+    restored_path = restore_simulation_if_cached(
+        simulation, path, reduce_simulation, verbose=verbose
+    )
     if restored_path is not None:
         data = load(
             task_id=None,
             path=str(restored_path),
+            verbose=verbose,
         )
         if isinstance(simulation, ModeSolver):
             simulation._patch_data(data=data)