ruff check --fix --exit-zero $(ARGS) ${PY_CHANGED_FILES}; \
fi
-.PHONY: lint/
-lint/: lint/ruff lint/pylint lint/mypy
-lint/: ##H@@ Lint with ruff, pylint, and mypy
-
.PHONY: lint
-lint: lint/
+lint: ruff pylint mypy
+lint: ##H@@ Lint with ruff, pylint, and mypy
+
-.PHONY: lint/ruff
-lint/ruff:
+.PHONY: ruff
+ruff:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# ruff (lint)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ruff check ${PY_CHANGED_FILES}; \
fi
-.PHONY: lint/pylint
-lint/pylint:
+.PHONY: pylint
+pylint:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# pylint
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
pylint -j 0 ${PY_CHANGED_FILES}; \
fi
-.PHONY: lint/mypy
-lint/mypy:
+.PHONY: mypy
+mypy:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# mypy
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
mypy ${PY_CHANGED_FILES}; \
fi
-.PHONY: pylint
-pylint: lint/pylint
-
-.PHONY: mypy
-mypy: lint/mypy
-
.PHONY: clean
clean: ##H@@ Clean up build files/cache
self,
cache_path,
backend="filesystem",
+ serializer="json",
expire_after=expire_after,
allowable_codes=(200, 204),
cache_control=cache_control, # Enable HTTP conditional requests (ETag/Last-Modified)
allow_to_fetch_missing=(not offline_mode), # prevent fetch on miss
)
+ print(
+ f"DEBUG: CachedSession initialized. Backend=filesystem, Path={cache_path}, Offline={offline_mode}"
+ )
# Re-apply default headers as CSession.__init__ might have wiped them
with self.lock:
self.headers.update(self.DEFAULT_HEADERS)
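For context, a rough stand-alone equivalent of this constructor call, assuming the wrapped backend is requests-cache (the cache path and TTL below are illustrative, and the project-specific `allow_to_fetch_missing` flag is omitted):

```python
import requests_cache

# Minimal sketch, assuming requests-cache underneath; serializer="json" keeps
# cached responses human-readable on disk instead of the default pickle format.
session = requests_cache.CachedSession(
    "cache/familysearch",        # hypothetical cache path
    backend="filesystem",
    serializer="json",
    expire_after=3600,           # illustrative TTL in seconds
    allowable_codes=(200, 204),
    cache_control=True,          # honor ETag/Last-Modified revalidation
)
```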
import time
import xml.etree.ElementTree as ET
from datetime import datetime
+from enum import Enum
from typing import Any, BinaryIO, Dict, Iterable, List, Optional, Set, Tuple, Union
# global imports
from .utils import GEONAME_FEATURE_MAP, cont
+class ParentRelType(str, Enum):
+ """Parental relationship type in FamilySearch (PEDI)"""
+
+ BIRTH = "birth"
+ ADOPTED = "adopted"
+ STEP = "step"
+ FOSTER = "foster"
+
+ @staticmethod
+ def from_fs_type(facts: list | None) -> "ParentRelType | None":
+ """Parse from FamilySearch fact/relationship type"""
+ if not facts:
+ return None
+ for fact in facts:
+ ftype = fact.get("type", "")
+ if ftype == "http://gedcomx.org/BiologicalParent":
+ return ParentRelType.BIRTH
+ if ftype == "http://gedcomx.org/StepParent":
+ return ParentRelType.STEP
+ if ftype == "http://gedcomx.org/AdoptiveParent":
+ return ParentRelType.ADOPTED
+ if ftype == "http://gedcomx.org/FosterParent":
+ return ParentRelType.FOSTER
+ return None
+
+
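A minimal usage sketch of the new `ParentRelType.from_fs_type` helper, assuming FamilySearch relationship facts arrive as plain dicts carrying a GEDCOM-X `type` URI (the sample payload is illustrative):

```python
facts = [{"type": "http://gedcomx.org/AdoptiveParent"}]  # illustrative fact dict

rel = ParentRelType.from_fs_type(facts)
assert rel is ParentRelType.ADOPTED
assert rel.value == "adopted"  # str-valued enum, written as-is into the PEDI line

# Missing or unrecognized fact types fall back to None.
assert ParentRelType.from_fs_type(None) is None
assert ParentRelType.from_fs_type([{"type": "http://gedcomx.org/Unknown"}]) is None
```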
class Indi:
"""GEDCOM individual class
:param fid: FamilySearch id
self.tree = tree
self.num_prefix = "I"
self.origin_file: Optional[str] = None
- self.famc: Set["Fam"] = set()
+ self.famc: Dict["Fam", Optional[ParentRelType]] = {}
self.fams: Set["Fam"] = set()
self.famc_fid: Set[str] = set()
self.fams_fid: Set[str] = set()
),
None,
)
- source = (
- self.tree.ensure_source(source_data)
- if self.tree and source_data
- else None
- )
+ if self.tree:
+ if source_data:
+ source = self.tree.ensure_source(source_data)
+ else:
+ existing_source = self.tree.sources.get(source_id)
+ if existing_source:
+ source = existing_source
+ else:
+ source = self.tree.ensure_source({"id": source_id})
+ else:
+ source = None
if source and self.tree:
citation = self.tree.ensure_citation(quote, source)
self.citations.add(citation)
"""add family fid (for spouse or parent)"""
self.fams.add(fam)
- def add_famc(self, fam: "Fam"):
+ def add_famc(self, fam: "Fam", rel_type: Optional[ParentRelType] = None):
"""add family fid (for child)"""
- self.famc.add(fam)
+ self.famc[fam] = rel_type
def get_notes(self):
"""retrieve individual notes"""
self.sealing_child.print(file)
for fam in sorted(self.fams, key=lambda x: x.id or ""):
file.write("1 FAMS @F%s@\n" % fam.id)
- for fam in sorted(self.famc, key=lambda x: x.id or ""):
+ for fam in sorted(self.famc.keys(), key=lambda x: x.id or ""):
file.write("1 FAMC @F%s@\n" % fam.id)
+ val = self.famc[fam]
+ if val:
+ file.write("2 PEDI %s\n" % val.value)
# print(f'Fams Ids: {self.fams_ids}, {self.fams_fid}, {self.fams_num}', file=sys.stderr)
# for num in self.fams_ids:
# print(f'Famc Ids: {self.famc_ids}', file=sys.stderr)
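To make the new PEDI handling concrete, a small sketch of the emission loop above, using hypothetical stand-in `Fam` objects; entries stored via the plain `add_famc(fam)` call (rel_type=None) produce a bare FAMC line:

```python
from io import StringIO

class _Fam:                      # hypothetical stand-in; only .id is needed here
    def __init__(self, id):
        self.id = id

famc = {_Fam("1"): ParentRelType.ADOPTED, _Fam("2"): None}

out = StringIO()
for fam in sorted(famc.keys(), key=lambda x: x.id or ""):
    out.write("1 FAMC @F%s@\n" % fam.id)
    val = famc[fam]
    if val:                      # PEDI is emitted only when the type is known
        out.write("2 PEDI %s\n" % val.value)

print(out.getvalue(), end="")
# 1 FAMC @F1@
# 2 PEDI adopted
# 1 FAMC @F2@
```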
def merge_names(target_set, source_set):
# Combine all names and sort deterministically
all_names = list(target_set) + list(source_set)
- all_names.sort(key=lambda x: (
- str(x),
- x.given or "",
- x.surname or "",
- x.prefix or "",
- x.suffix or "",
- x.kind or "",
- str(x.alternative) if hasattr(x, 'alternative') else "",
- x.note.text if hasattr(x, 'note') and x.note else "",
- ))
+ all_names.sort(
+ key=lambda nm: (
+ str(nm),
+ nm.given or "",
+ nm.surname or "",
+ nm.prefix or "",
+ nm.suffix or "",
+ nm.kind or "",
+ str(nm.alternative) if hasattr(nm, "alternative") else "",
+ nm.note.text if hasattr(nm, "note") and nm.note else "",
+ )
+ )
# Rebuild target_set keeping first occurrence by string
target_set.clear()
seen = set()
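The rebuild loop is cut off at this hunk boundary; a minimal sketch of the intended "keep first occurrence per string key" pattern, using plain strings as stand-ins for Name objects:

```python
def dedup_keep_first(names):
    """Keep only the first item per distinct str() key, preserving the sorted order."""
    kept, seen = [], set()
    for name in names:           # names are assumed to be pre-sorted deterministically
        key = str(name)
        if key not in seen:
            seen.add(key)
            kept.append(name)
    return kept

assert dedup_keep_first(["Ada", "Ada", "Marie"]) == ["Ada", "Marie"]
```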
for chil_fid in fam.chil_fid:
if chil_fid in tree.indi:
fam.children.add(tree.indi[chil_fid])
- tree.indi[chil_fid].famc.add(fam)
+ tree.indi[chil_fid].add_famc(fam)
# compute number for family relationships and print GEDCOM file
tree.reset_num()
-Subproject commit cefbd8dbd42cbb85209bae8e242e57add0c0e520
+Subproject commit b3084953c34c25c3867bcc1e000dfb32e59ae6f5
expectations = load_expectations()
exp_ada = expectations.get("EXPECTED_ADA_LINES", 0)
exp_marie = expectations.get("EXPECTED_MARIE_LINES", 0)
- exp_merged = expectations.get("EXPECTED_MERGED_LINES", 0)
# 2. Setup Cache
setup_cache()
# Check merged file with exact diff (no line count tolerance)
diff_result = subprocess.run(
- ["git", "diff", "--no-index", "--exit-code", "--color=always", str(merged), str(ARTIFACTS_DIR / "merged_scientists.ged")],
+ [
+ "git",
+ "diff",
+ "--no-index",
+ "--exit-code",
+ "--color=always",
+ str(merged),
+ str(ARTIFACTS_DIR / "merged_scientists.ged"),
+ ],
+ check=False,
)
if diff_result.returncode != 0:
- print(f"❌ Merged file differs from artifact (see diff above)")
+ print("❌ Merged file differs from artifact (see diff above)")
+ print("Diff Stat:")
+ subprocess.run(
+ [
+ "git",
+ "diff",
+ "--no-index",
+ "--stat",
+ str(merged),
+ str(ARTIFACTS_DIR / "merged_scientists.ged"),
+ ],
+ check=False,
+ )
failed = True
else:
print(f"✓ Merged file matches artifact exactly ({l_merged} lines).")