Skip to content

Commit f74ce8e

Browse files
authored
vcspull add: Fix more issues with --no-merge (#482)
why:
- `vcspull add --no-merge` rewrote configs by flattening duplicate sections
- users lost repositories when opting out of automatic merging

what:
- extend loader to expose ordered duplicate items for downstream writers
- add duplicate-safe YAML writer that replays top-level sections faithfully
- update CLI add flow to write through the duplicate-aware helper under `--no-merge`
- broaden regression coverage for duplicate workspace roots in loader and CLI

verification:
- uv run ruff check . --fix --show-fixes
- uv run ruff format .
- uv run mypy
- uv run py.test
2 parents 34ed07b + f75d438 commit f74ce8e

File tree

10 files changed

+331
-17
lines changed

10 files changed

+331
-17
lines changed

CHANGES

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,15 @@ $ uvx --from 'vcspull' --prerelease allow vcspull
3333

3434
_Upcoming changes will be written here._
3535

36+
### Bug Fixes
37+
38+
#### `vcspull add --no-merge` preserves duplicate workspace roots (#482)
39+
40+
- Duplicate workspace sections are no longer flattened when adding a repository
41+
with `--no-merge`; all previously configured repositories stay intact.
42+
- The configuration loader now exposes ordered duplicate entries so CLI writes
43+
can target the correct section without data loss.
44+
3645
## vcspull v1.44.0 (2025-11-02)
3746

3847
### Improvements

src/vcspull/_internal/config_reader.py

Lines changed: 29 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -228,6 +228,7 @@ def __init__(self, stream: str) -> None:
228228
super().__init__(stream)
229229
self.top_level_key_values: dict[t.Any, list[t.Any]] = {}
230230
self._mapping_depth = 0
231+
self.top_level_items: list[tuple[t.Any, t.Any]] = []
231232

232233

233234
def _duplicate_tracking_construct_mapping(
@@ -248,7 +249,9 @@ def _duplicate_tracking_construct_mapping(
248249
value = construct(value_node)
249250

250251
if loader._mapping_depth == 1:
251-
loader.top_level_key_values.setdefault(key, []).append(copy.deepcopy(value))
252+
duplicated_value = copy.deepcopy(value)
253+
loader.top_level_key_values.setdefault(key, []).append(duplicated_value)
254+
loader.top_level_items.append((copy.deepcopy(key), duplicated_value))
252255

253256
mapping[key] = value
254257

@@ -270,20 +273,27 @@ def __init__(
270273
content: RawConfigData,
271274
*,
272275
duplicate_sections: dict[str, list[t.Any]] | None = None,
276+
top_level_items: list[tuple[str, t.Any]] | None = None,
273277
) -> None:
274278
super().__init__(content)
275279
self._duplicate_sections = duplicate_sections or {}
280+
self._top_level_items = top_level_items or []
276281

277282
@property
278283
def duplicate_sections(self) -> dict[str, list[t.Any]]:
279284
"""Mapping of top-level keys to the list of duplicated values."""
280285
return self._duplicate_sections
281286

287+
@property
288+
def top_level_items(self) -> list[tuple[str, t.Any]]:
289+
"""Ordered list of top-level items, including duplicates."""
290+
return copy.deepcopy(self._top_level_items)
291+
282292
@classmethod
283293
def _load_yaml_with_duplicates(
284294
cls,
285295
content: str,
286-
) -> tuple[dict[str, t.Any], dict[str, list[t.Any]]]:
296+
) -> tuple[dict[str, t.Any], dict[str, list[t.Any]], list[tuple[str, t.Any]]]:
287297
loader = _DuplicateTrackingSafeLoader(content)
288298

289299
try:
@@ -306,33 +316,42 @@ def _load_yaml_with_duplicates(
306316
if len(values) > 1
307317
}
308318

309-
return loaded, duplicate_sections
319+
top_level_items = [
320+
(t.cast("str", key), copy.deepcopy(value))
321+
for key, value in loader.top_level_items
322+
]
323+
324+
return loaded, duplicate_sections, top_level_items
310325

311326
@classmethod
312327
def _load_from_path(
313328
cls,
314329
path: pathlib.Path,
315-
) -> tuple[dict[str, t.Any], dict[str, list[t.Any]]]:
330+
) -> tuple[dict[str, t.Any], dict[str, list[t.Any]], list[tuple[str, t.Any]]]:
316331
if path.suffix.lower() in {".yaml", ".yml"}:
317332
content = path.read_text(encoding="utf-8")
318333
return cls._load_yaml_with_duplicates(content)
319334

320-
return ConfigReader._from_file(path), {}
335+
return ConfigReader._from_file(path), {}, []
321336

322337
@classmethod
323338
def from_file(cls, path: pathlib.Path) -> DuplicateAwareConfigReader:
324-
content, duplicate_sections = cls._load_from_path(path)
325-
return cls(content, duplicate_sections=duplicate_sections)
339+
content, duplicate_sections, top_level_items = cls._load_from_path(path)
340+
return cls(
341+
content,
342+
duplicate_sections=duplicate_sections,
343+
top_level_items=top_level_items,
344+
)
326345

327346
@classmethod
328347
def _from_file(cls, path: pathlib.Path) -> dict[str, t.Any]:
329-
content, _ = cls._load_from_path(path)
348+
content, _, _ = cls._load_from_path(path)
330349
return content
331350

332351
@classmethod
333352
def load_with_duplicates(
334353
cls,
335354
path: pathlib.Path,
336-
) -> tuple[dict[str, t.Any], dict[str, list[t.Any]]]:
355+
) -> tuple[dict[str, t.Any], dict[str, list[t.Any]], list[tuple[str, t.Any]]]:
337356
reader = cls.from_file(path)
338-
return reader.content, reader.duplicate_sections
357+
return reader.content, reader.duplicate_sections, reader.top_level_items

src/vcspull/cli/add.py

Lines changed: 56 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
from __future__ import annotations
44

5+
import copy
56
import logging
67
import pathlib
78
import subprocess
@@ -18,6 +19,7 @@
1819
merge_duplicate_workspace_roots,
1920
normalize_workspace_roots,
2021
save_config_yaml,
22+
save_config_yaml_with_items,
2123
workspace_root_label,
2224
)
2325
from vcspull.util import contract_user_home
@@ -335,13 +337,15 @@ def add_repo(
335337
# Load existing config
336338
raw_config: dict[str, t.Any]
337339
duplicate_root_occurrences: dict[str, list[t.Any]]
340+
top_level_items: list[tuple[str, t.Any]]
338341
display_config_path = contract_user_home(config_file_path)
339342

340343
if config_file_path.exists() and config_file_path.is_file():
341344
try:
342345
(
343346
raw_config,
344347
duplicate_root_occurrences,
348+
top_level_items,
345349
) = DuplicateAwareConfigReader.load_with_duplicates(config_file_path)
346350
except TypeError:
347351
log.exception(
@@ -357,11 +361,32 @@ def add_repo(
357361
else:
358362
raw_config = {}
359363
duplicate_root_occurrences = {}
364+
top_level_items = []
360365
log.info(
361366
"Config file %s not found. A new one will be created.",
362367
display_config_path,
363368
)
364369

370+
config_items: list[tuple[str, t.Any]] = (
371+
[(label, copy.deepcopy(section)) for label, section in top_level_items]
372+
if top_level_items
373+
else [(label, copy.deepcopy(section)) for label, section in raw_config.items()]
374+
)
375+
376+
def _aggregate_items(items: list[tuple[str, t.Any]]) -> dict[str, t.Any]:
377+
aggregated: dict[str, t.Any] = {}
378+
for label, section in items:
379+
if isinstance(section, dict):
380+
workspace_section = aggregated.setdefault(label, {})
381+
for repo_name, repo_config in section.items():
382+
workspace_section[repo_name] = copy.deepcopy(repo_config)
383+
else:
384+
aggregated[label] = copy.deepcopy(section)
385+
return aggregated
386+
387+
if not merge_duplicates:
388+
raw_config = _aggregate_items(config_items)
389+
365390
duplicate_merge_conflicts: list[str] = []
366391
duplicate_merge_changes = 0
367392
duplicate_merge_details: list[tuple[str, int]] = []
@@ -416,14 +441,18 @@ def add_repo(
416441
cwd = pathlib.Path.cwd()
417442
home = pathlib.Path.home()
418443

444+
aggregated_config = (
445+
raw_config if merge_duplicates else _aggregate_items(config_items)
446+
)
447+
419448
if merge_duplicates:
420449
(
421450
raw_config,
422451
workspace_map,
423452
merge_conflicts,
424453
merge_changes,
425454
) = normalize_workspace_roots(
426-
raw_config,
455+
aggregated_config,
427456
cwd=cwd,
428457
home=home,
429458
)
@@ -435,7 +464,7 @@ def add_repo(
435464
merge_conflicts,
436465
_merge_changes,
437466
) = normalize_workspace_roots(
438-
raw_config,
467+
aggregated_config,
439468
cwd=cwd,
440469
home=home,
441470
)
@@ -458,15 +487,24 @@ def add_repo(
458487
)
459488
workspace_map[workspace_path] = workspace_label
460489
raw_config.setdefault(workspace_label, {})
490+
if not merge_duplicates:
491+
config_items.append((workspace_label, {}))
461492

462493
if workspace_label not in raw_config:
463494
raw_config[workspace_label] = {}
495+
if not merge_duplicates:
496+
config_items.append((workspace_label, {}))
464497
elif not isinstance(raw_config[workspace_label], dict):
465498
log.error(
466499
"Workspace root '%s' in configuration is not a dictionary. Aborting.",
467500
workspace_label,
468501
)
469502
return
503+
workspace_sections: list[tuple[int, dict[str, t.Any]]] = [
504+
(idx, section)
505+
for idx, (label, section) in enumerate(config_items)
506+
if label == workspace_label and isinstance(section, dict)
507+
]
470508

471509
# Check if repo already exists
472510
if name in raw_config[workspace_label]:
@@ -517,7 +555,18 @@ def add_repo(
517555
return
518556

519557
# Add the repository in verbose format
520-
raw_config[workspace_label][name] = {"repo": url}
558+
new_repo_entry = {"repo": url}
559+
if merge_duplicates:
560+
raw_config[workspace_label][name] = new_repo_entry
561+
else:
562+
target_section: dict[str, t.Any]
563+
if workspace_sections:
564+
_, target_section = workspace_sections[-1]
565+
else:
566+
target_section = {}
567+
config_items.append((workspace_label, target_section))
568+
target_section[name] = copy.deepcopy(new_repo_entry)
569+
raw_config[workspace_label][name] = copy.deepcopy(new_repo_entry)
521570

522571
# Save or preview config
523572
if dry_run:
@@ -540,7 +589,10 @@ def add_repo(
540589
)
541590
else:
542591
try:
543-
save_config_yaml(config_file_path, raw_config)
592+
if merge_duplicates:
593+
save_config_yaml(config_file_path, raw_config)
594+
else:
595+
save_config_yaml_with_items(config_file_path, config_items)
544596
log.info(
545597
"%s✓%s Successfully added %s'%s'%s (%s%s%s) to %s%s%s under '%s%s%s'.",
546598
Fore.GREEN,

src/vcspull/cli/discover.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -203,6 +203,7 @@ def discover_repos(
203203
(
204204
raw_config,
205205
duplicate_root_occurrences,
206+
_top_level_items,
206207
) = DuplicateAwareConfigReader.load_with_duplicates(config_file_path)
207208
except TypeError:
208209
log.exception(

src/vcspull/cli/fmt.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -172,7 +172,7 @@ def format_single_config(
172172

173173
# Load existing config
174174
try:
175-
raw_config, duplicate_root_occurrences = (
175+
raw_config, duplicate_root_occurrences, _top_level_items = (
176176
DuplicateAwareConfigReader.load_with_duplicates(config_file_path)
177177
)
178178
except TypeError:

src/vcspull/config.py

Lines changed: 24 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -271,7 +271,7 @@ def load_configs(
271271
file = pathlib.Path(file)
272272
assert isinstance(file, pathlib.Path)
273273

274-
config_content, duplicate_roots = (
274+
config_content, duplicate_roots, _top_level_items = (
275275
DuplicateAwareConfigReader.load_with_duplicates(file)
276276
)
277277

@@ -481,6 +481,29 @@ def save_config_yaml(config_file_path: pathlib.Path, data: dict[t.Any, t.Any]) -
481481
config_file_path.write_text(yaml_content, encoding="utf-8")
482482

483483

484+
def save_config_yaml_with_items(
485+
config_file_path: pathlib.Path,
486+
items: list[tuple[str, t.Any]],
487+
) -> None:
488+
"""Persist configuration data while preserving duplicate top-level sections."""
489+
documents: list[str] = []
490+
491+
for label, section in items:
492+
dumped = ConfigReader._dump(
493+
fmt="yaml",
494+
content={label: section},
495+
indent=2,
496+
).rstrip()
497+
if dumped:
498+
documents.append(dumped)
499+
500+
yaml_content = "\n".join(documents)
501+
if yaml_content:
502+
yaml_content += "\n"
503+
504+
config_file_path.write_text(yaml_content, encoding="utf-8")
505+
506+
484507
def merge_duplicate_workspace_root_entries(
485508
label: str,
486509
occurrences: list[t.Any],

0 commit comments

Comments
 (0)