
Commit 53f0257

feat: poly commands should respect any existing project-specific exclude pattern (#387)
* refactor(hatch): move exclude pattern collector to the toml brick
* feat(toml): collect exclude patterns from Hatch, PDM and Poetry
* wip: use exclude list
* wip: collect imports to exclude
* wip: create report with bricks and libs to exclude
* feat(uv): get uv build backend configured exclude patterns
* test: check - report
* test: check - report
* test: toml, collect exclude patterns
* bump hatch brick hook to 1.5.3
* bump PDM build hook to 1.3.4
* bump PDM workspace hook to 1.3.4
* bump Poetry plugin to 1.44.0
* bump CLI to 1.37.0
* feat: libs, excluded third-party imports
1 parent 7a10007 commit 53f0257

File tree

19 files changed: +398, -62 lines


components/polylith/check/report.py

Lines changed: 66 additions & 11 deletions
@@ -42,7 +42,22 @@ def print_missing_deps(diff: Set[str], project_name: str) -> None:
 
     missing = ", ".join(sorted(diff))
 
-    console.print(f":thinking_face: Cannot locate [data]{missing}[/] in [proj]{project_name}[/]")
+    console.print(
+        f":thinking_face: Cannot locate [data]{missing}[/] in [proj]{project_name}[/]"
+    )
+
+
+def print_excluded_deps(excluded: Set[str], project_name: str) -> None:
+    if not excluded:
+        return
+
+    console = Console(theme=theme.poly_theme)
+
+    skipped = ", ".join(sorted(excluded))
+
+    console.print(
+        f":information: Marked as excluded in [proj]{project_name}[/]: [data]{skipped}[/]"
+    )
 
 
 def print_unused_bricks(bricks: Set[str], project_name: str) -> None:
@@ -54,7 +69,25 @@ def print_unused_bricks(bricks: Set[str], project_name: str) -> None:
     unused = ", ".join(sorted(bricks))
     verb = "Are" if len(bricks) > 1 else "Is"
 
-    console.print(f":mag_right: {verb} [comp]{unused}[/] needed in [proj]{project_name}[/]?")
+    console.print(
+        f":mag_right: {verb} [comp]{unused}[/] needed in [proj]{project_name}[/]?"
+    )
+
+
+def extract_collected_imports(
+    ns: str, imports_in_bases: dict, imports_in_components: dict
+) -> dict:
+    brick_imports = {
+        "bases": grouping.extract_brick_imports(imports_in_bases, ns),
+        "components": grouping.extract_brick_imports(imports_in_components, ns),
+    }
+
+    third_party_imports = {
+        "bases": libs.extract_third_party_imports(imports_in_bases, ns),
+        "components": libs.extract_third_party_imports(imports_in_components, ns),
+    }
+
+    return {"brick_imports": brick_imports, "third_party_imports": third_party_imports}
 
 
 def collect_all_imports(root: Path, ns: str, project_data: dict) -> dict:
@@ -67,17 +100,39 @@ def collect_all_imports(root: Path, ns: str, project_data: dict) -> dict:
     all_imports_in_bases = imports.fetch_all_imports(bases_paths)
     all_imports_in_components = imports.fetch_all_imports(components_paths)
 
-    brick_imports = {
-        "bases": grouping.extract_brick_imports(all_imports_in_bases, ns),
-        "components": grouping.extract_brick_imports(all_imports_in_components, ns),
-    }
+    return extract_collected_imports(
+        ns, all_imports_in_bases, all_imports_in_components
+    )
 
-    third_party_imports = {
-        "bases": libs.extract_third_party_imports(all_imports_in_bases, ns),
-        "components": libs.extract_third_party_imports(all_imports_in_components, ns),
-    }
 
-    return {"brick_imports": brick_imports, "third_party_imports": third_party_imports}
+def collect_imports_to_exclude(root: Path, ns: str, project_data: dict) -> dict:
+    exclude = project_data["exclude"]
+
+    if not exclude:
+        return {}
+
+    bases = set(project_data.get("bases", []))
+    components = set(project_data.get("components", []))
+
+    bases_paths = workspace.paths.collect_bases_paths(root, ns, bases)
+    components_paths = workspace.paths.collect_components_paths(root, ns, components)
+
+    excludes_in_bases = imports.fetch_excluded_imports(bases_paths, exclude)
+    excludes_in_components = imports.fetch_excluded_imports(components_paths, exclude)
+
+    return extract_collected_imports(ns, excludes_in_bases, excludes_in_components)
+
+
+def create_exclude_report(collected_excludes: dict) -> dict:
+    fallback: dict = {"bases": {}, "components": {}}
+
+    collected_bricks = collected_excludes.get("brick_imports", fallback)
+    collected_third_party = collected_excludes.get("third_party_imports", fallback)
+
+    bricks = collect.to_flattened_imports(collected_bricks)
+    third_party = collect.to_flattened_imports(collected_third_party)
+
+    return {"brick_exclude": bricks, "libs_exclude": third_party}
 
 
 def create_report(
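For orientation, the new collect_imports_to_exclude and create_exclude_report functions operate on the per-project data dict; a rough sketch of the shape they expect, with the keys taken from the diff above and purely hypothetical values:

# Hypothetical project_data, shaped after the keys used in check/report.py above.
project_data = {
    "name": "my_project",
    "deps": {},                   # parsed project dependencies (shape omitted here)
    "bases": ["my_api"],          # bricks packaged by this project
    "components": ["reporting"],
    "exclude": ["dev_*.py"],      # the new project-specific exclude patterns
}

With an empty exclude list, collect_imports_to_exclude returns {} and the check behaves exactly as before this change.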

components/polylith/commands/check.py

Lines changed: 19 additions & 3 deletions
@@ -78,6 +78,7 @@ def run_each(
 ) -> Tuple[bool, dict]:
     is_quiet = options["quiet"]
     is_strict = options["strict"]
+    is_verbose = options["verbose"]
 
     name = project_data["name"]
     deps = project_data["deps"]
@@ -86,6 +87,7 @@ def run_each(
     from_lock_file = libs.is_from_lock_file(deps)
 
     collected_imports = check.report.collect_all_imports(root, ns, project_data)
+    collected_excludes = check.report.collect_imports_to_exclude(root, ns, project_data)
     collected_libs = distributions.known_aliases_and_sub_dependencies(
         deps,
         alias,
@@ -100,15 +102,29 @@ def run_each(
         is_strict or from_lock_file,
     )
 
-    res = all([not details["brick_diff"], not details["libs_diff"]])
+    exclude_details = check.report.create_exclude_report(collected_excludes)
+
+    brick_diff = details["brick_diff"]
+    libs_diff = details["libs_diff"]
+    brick_exclude = exclude_details["brick_exclude"]
+    libs_exclude = exclude_details["libs_exclude"]
+
+    missing_bricks = brick_diff.difference(brick_exclude)
+    missing_libs = libs_diff.difference(libs_exclude)
+
+    res = all([not missing_bricks, not missing_libs])
 
     if not is_quiet:
-        check.report.print_missing_deps(details["brick_diff"], name)
-        check.report.print_missing_deps(details["libs_diff"], name)
+        check.report.print_missing_deps(missing_bricks, name)
+        check.report.print_missing_deps(missing_libs, name)
 
     if is_strict and not is_quiet:
         check.report.print_unused_bricks(details["unused_bricks"], name)
 
+    if is_verbose:
+        check.report.print_excluded_deps(brick_exclude, name)
+        check.report.print_excluded_deps(libs_exclude, name)
+
     return res, details
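The new filtering in run_each is plain set arithmetic; a minimal, self-contained illustration (all names are invented):

brick_diff = {"logger", "dev_tools"}    # bricks imported but missing from the project
libs_diff = {"rich", "dev-only-lib"}    # third-party imports missing from the deps
brick_exclude = {"dev_tools"}           # matched a configured exclude pattern
libs_exclude = {"dev-only-lib"}

missing_bricks = brick_diff.difference(brick_exclude)  # {"logger"}
missing_libs = libs_diff.difference(libs_exclude)      # {"rich"}

res = all([not missing_bricks, not missing_libs])      # False: real gaps remain

Only the genuinely missing bricks and libraries are reported; the excluded ones are printed separately when the verbose option is set.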

components/polylith/hatch/hooks/bricks.py

Lines changed: 1 addition & 10 deletions
@@ -34,15 +34,6 @@ def filtered_bricks(data: dict, version: str) -> dict:
     return bricks
 
 
-def collect_configured_exclude_patterns(data: dict, target_name: str) -> set:
-    entry = data.get("tool", {}).get("hatch", {}).get("build", {})
-    target = entry.get("targets", {}).get(target_name, {})
-
-    exclude = target.get("exclude", [])
-
-    return set(exclude)
-
-
 def copy_bricks(bricks: dict, work_dir: Path, exclude_patterns: Set[str]) -> List[Path]:
     return [
         parsing.copy_brick(source, brick, work_dir, exclude_patterns)
@@ -76,7 +67,7 @@ def initialize(self, version: str, build_data: Dict[str, Any]) -> None:
         ns = parsing.parse_brick_namespace_from_path(bricks)
         top_ns = core.get_top_namespace(data, self.config)
         work_dir = core.get_work_dir(self.config)
-        exclude_patterns = collect_configured_exclude_patterns(data, self.target_name)
+        exclude_patterns = toml.collect_configured_exclude_patterns(data, self.target_name)
 
         if not top_ns and not exclude_patterns:
             build_data[include_key] = bricks
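The collector deleted here moves into the toml brick (per the commit message); a standalone sketch of what the Hatch variant does, using hypothetical pattern values:

# Parsed pyproject.toml data, as the build hook receives it; the patterns are examples.
data = {
    "tool": {
        "hatch": {
            "build": {
                "targets": {"wheel": {"exclude": ["dev_*.py", "*/experimental/*"]}}
            }
        }
    }
}


def hatch_exclude_patterns(data: dict, target_name: str) -> set:
    # Mirrors the removed collect_configured_exclude_patterns above.
    entry = data.get("tool", {}).get("hatch", {}).get("build", {})
    target = entry.get("targets", {}).get(target_name, {})
    return set(target.get("exclude", []))


print(hatch_exclude_patterns(data, "wheel"))
# {'dev_*.py', '*/experimental/*'} (set order may vary)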
components/polylith/imports/__init__.py

Lines changed: 12 additions & 2 deletions

@@ -1,3 +1,13 @@
-from polylith.imports.parser import extract_top_ns, fetch_all_imports, list_imports
+from polylith.imports.parser import (
+    extract_top_ns,
+    fetch_all_imports,
+    fetch_excluded_imports,
+    list_imports,
+)
 
-__all__ = ["extract_top_ns", "fetch_all_imports", "list_imports"]
+__all__ = [
+    "extract_top_ns",
+    "fetch_all_imports",
+    "fetch_excluded_imports",
+    "list_imports",
+]
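With the re-export in place, the new helper is reachable through the package API; a usage sketch where the paths and the pattern are placeholders:

from pathlib import Path

from polylith.imports import fetch_excluded_imports

# Brick paths as collected by the workspace helpers; the result is keyed by the
# directory name and holds the imports found in files matching an exclude pattern.
brick_paths = {Path("components/my_ns/reporting")}
excluded = fetch_excluded_imports(brick_paths, {"dev_*.py"})
# e.g. {"reporting": {"rich", "my_ns.dev_tools"}} if dev_*.py files import those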

components/polylith/imports/parser.py

Lines changed: 32 additions & 5 deletions
@@ -1,4 +1,5 @@
 import ast
+from collections.abc import Iterable
 from functools import lru_cache
 from pathlib import Path
 from typing import List, Set
@@ -43,18 +44,26 @@ def extract_imports(path: Path) -> List[str]:
     return [i for node in ast.walk(tree) for i in parse_imports(node) if i is not None]
 
 
+def extract_and_flatten(py_modules: Iterable) -> Set[str]:
+    extracted = (extract_imports(m) for m in py_modules)
+    flattened = (i for imports in extracted for i in imports)
+
+    return set(flattened)
+
+
 def is_python_file(path: Path) -> bool:
     return path.is_file() and path.suffix == ".py"
 
 
+def find_files(path: Path) -> Iterable:
+    return [path] if is_python_file(path) else path.rglob("*.py")
+
+
 @lru_cache(maxsize=None)
 def list_imports(path: Path) -> Set[str]:
-    py_modules = [path] if is_python_file(path) else path.rglob("*.py")
-
-    extracted = (extract_imports(m) for m in py_modules)
-    flattened = (i for imports in extracted for i in imports)
+    py_modules = find_files(path)
 
-    return set(flattened)
+    return extract_and_flatten(py_modules)
 
 
 def fetch_all_imports(paths: Set[Path]) -> dict:
@@ -63,6 +72,24 @@ def fetch_all_imports(paths: Set[Path]) -> dict:
     return {k: v for row in rows for k, v in row.items()}
 
 
+def should_exclude(path: Path, excludes: Set[str]):
+    return any(path.match(pattern) for pattern in excludes)
+
+
+def list_excluded_imports(path: Path, excludes: Set[str]) -> Set[str]:
+    py_modules = find_files(path)
+
+    filtered = [p for p in py_modules if should_exclude(p, excludes)]
+
+    return extract_and_flatten(filtered)
+
+
+def fetch_excluded_imports(paths: Set[Path], excludes: Set[str]) -> dict:
+    rows = [{p.name: list_excluded_imports(p, excludes)} for p in paths]
+
+    return {k: v for row in rows for k, v in row.items()}
+
+
 def extract_top_ns_from_imports(imports: Set[str]) -> Set:
     return {imp.split(".")[0] for imp in imports}
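should_exclude delegates to pathlib's PurePath.match, which matches glob-style patterns against a path from the right; a quick illustration with made-up file paths (output shown for a POSIX path flavour):

from pathlib import Path

excludes = {"dev_*.py", "experimental/*.py"}

paths = [
    Path("components/my_ns/reporting/dev_helpers.py"),
    Path("components/my_ns/reporting/core.py"),
    Path("bases/my_ns/api/experimental/routes.py"),
]

for p in paths:
    print(p, any(p.match(pattern) for pattern in excludes))
# components/my_ns/reporting/dev_helpers.py True
# components/my_ns/reporting/core.py False
# bases/my_ns/api/experimental/routes.py True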

components/polylith/libs/grouping.py

Lines changed: 15 additions & 3 deletions
@@ -3,7 +3,7 @@
 from typing import Set
 
 from polylith import configuration
-from polylith.imports import extract_top_ns, fetch_all_imports
+from polylith.imports import extract_top_ns, fetch_all_imports, fetch_excluded_imports
 from polylith.libs.stdlib import standard_libs
 
 
@@ -41,9 +41,21 @@ def extract_third_party_imports(all_imports: dict, top_ns: str) -> dict:
     return exclude_empty(with_third_party)
 
 
-def get_third_party_imports(root: Path, paths: Set[Path]) -> dict:
+def get_third_party_imports(
+    root: Path, paths: Set[Path], project_data: dict
+) -> dict:
     top_ns = configuration.get_namespace_from_config(root)
 
     all_imports = fetch_all_imports(paths)
 
-    return extract_third_party_imports(all_imports, top_ns)
+    third_party = extract_third_party_imports(all_imports, top_ns)
+
+    exclude = project_data["exclude"]
+
+    if not exclude:
+        return third_party
+
+    excluded = fetch_excluded_imports(paths, exclude)
+    excluded_third_party = extract_third_party_imports(excluded, top_ns)
+
+    return {k: v for k, v in third_party.items() if k not in excluded_third_party}
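Worth noting: the final comprehension drops a brick's entry from the third-party report entirely when any of its excluded files contributed third-party imports, rather than subtracting individual imports; a small sketch with invented brick names:

third_party = {
    "reporting": {"rich", "tomlkit"},
    "experimental_api": {"httpx"},
}
excluded_third_party = {"experimental_api": {"httpx"}}

filtered = {k: v for k, v in third_party.items() if k not in excluded_third_party}
# {'reporting': {'rich', 'tomlkit'}}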

components/polylith/libs/report.py

Lines changed: 4 additions & 2 deletions
@@ -19,8 +19,10 @@ def get_third_party_imports(root: Path, ns: str, project_data: dict) -> dict:
     bases_paths = workspace.paths.collect_bases_paths(root, ns, bases)
     components_paths = workspace.paths.collect_components_paths(root, ns, components)
 
-    bases_imports = grouping.get_third_party_imports(root, bases_paths)
-    components_imports = grouping.get_third_party_imports(root, components_paths)
+    bases_imports = grouping.get_third_party_imports(root, bases_paths, project_data)
+    components_imports = grouping.get_third_party_imports(
+        root, components_paths, project_data
+    )
 
     return {"bases": bases_imports, "components": components_imports}

components/polylith/project/get.py

Lines changed: 1 addition & 0 deletions
@@ -139,6 +139,7 @@ def get_packages_for_projects(root: Path) -> List[dict]:
             "path": d["path"],
             "type": d["type"],
             "deps": toml.get_project_dependencies(d["toml"]),
+            "exclude": toml.collect_configured_exclude_patterns(d["toml"]),
         }
         for d in toml_files
     ]

components/polylith/repo/__init__.py

Lines changed: 2 additions & 0 deletions
@@ -9,6 +9,7 @@
     is_pdm,
     is_pep_621_ready,
     is_poetry,
+    is_uv,
     load_workspace_config,
     projects_dir,
     readme_file,
@@ -27,6 +28,7 @@
     "is_pdm",
     "is_pep_621_ready",
     "is_poetry",
+    "is_uv",
     "load_workspace_config",
     "projects_dir",
     "readme_file",

components/polylith/repo/repo.py

Lines changed: 4 additions & 0 deletions
@@ -130,5 +130,9 @@ def is_pdm(pyproject: dict) -> bool:
     return has_build_requires(pyproject, "pdm")
 
 
+def is_uv(pyproject: dict) -> bool:
+    return has_build_requires(pyproject, "uv_build")
+
+
 def is_pep_621_ready(pyproject: dict) -> bool:
     return pyproject.get("project", {}).get("name") is not None
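A hypothetical parsed pyproject.toml that the new is_uv check is intended to recognize, assuming has_build_requires matches the backend name against the build-system requires entries (the project table shows what the unchanged is_pep_621_ready looks for):

pyproject = {
    "build-system": {"requires": ["uv_build>=0.5.0"], "build-backend": "uv_build"},
    "project": {"name": "my_project", "version": "0.1.0"},
}

# repo.is_uv(pyproject)            -> expected True (requires contains "uv_build")
# repo.is_pep_621_ready(pyproject) -> True, since project.name is set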
