Nutra Git (v1) - nutratech/cli.git/commitdiff
wip trim some code
author: Shane Jaroch <chown_tee@proton.me>
Sun, 11 Jan 2026 11:16:12 +0000 (06:16 -0500)
committer: Shane Jaroch <chown_tee@proton.me>
Sun, 11 Jan 2026 11:16:12 +0000 (06:16 -0500)
ntclient/argparser/funcs.py
ntclient/persistence/csv_manager.py [new file with mode: 0644]
ntclient/services/logs.py [new file with mode: 0644]

index 600559823d2e72024c6e26885e845086f716d0de..921b4614010e14e19f06173772d71aeb03a2cfe4 100644 (file)
@@ -15,6 +15,7 @@ from tabulate import tabulate
 
 import ntclient.services.analyze
 import ntclient.services.bugs
+import ntclient.services.logs
 import ntclient.services.recipe.recipe
 import ntclient.services.usda
 from ntclient.services import calculate as calc
@@ -370,3 +371,19 @@ def bugs_report(args: argparse.Namespace) -> tuple:
     """Report bugs"""
     n_submissions = ntclient.services.bugs.submit_bugs()
     return 0, n_submissions
+
+
+def log_add(args: argparse.Namespace) -> tuple:
+    """CLI wrapper for `log add`.
+
+    Forwards args.food_id, args.grams and args.date to the logs service;
+    args.date may be None (the service then defaults to today's log file).
+    Returns (exit_code, data) per this module's handler convention.
+    """
+    ntclient.services.logs.log_add(args.food_id, args.grams, args.date)
+    return 0, []
+
+def log_view(args: argparse.Namespace) -> tuple:
+    """CLI wrapper for `log view`.
+
+    Prints the raw entries of the log for args.date (None => today).
+    Returns (exit_code, data) per this module's handler convention.
+    """
+    ntclient.services.logs.log_view(args.date)
+    return 0, []
+
+def log_analyze(args: argparse.Namespace) -> tuple:
+    """CLI wrapper for `log analyze`.
+
+    Runs the full nutrient analysis on the log for args.date (None => today).
+    Returns (exit_code, data) per this module's handler convention.
+    """
+    ntclient.services.logs.log_analyze(args.date)
+    return 0, []
diff --git a/ntclient/persistence/csv_manager.py b/ntclient/persistence/csv_manager.py
new file mode 100644 (file)
index 0000000..43f1846
--- /dev/null
@@ -0,0 +1,41 @@
+"""
+CSV Persistence Manager
+Handles reading and writing to daily log CSV files.
+"""
+
+import csv
+import os
+from typing import Dict, List, Union
+
+
+def ensure_log_exists(log_path: str) -> None:
+    """Create the log file (and its parent directory) with a header row,
+    doing nothing if the file already exists.
+    """
+    if not os.path.exists(log_path):
+        # Parent directory may or may not exist yet; exist_ok avoids a race-y check
+        os.makedirs(os.path.dirname(log_path), exist_ok=True)
+        with open(log_path, "w", newline="", encoding="utf-8") as f:
+            writer = csv.writer(f)
+            # Header names must match the keys read_log's DictReader yields
+            writer.writerow(["id", "grams"])
+
+
+def append_to_log(log_path: str, food_id: int, grams: float) -> None:
+    """Append one (food_id, grams) row to the log at log_path,
+    creating the file with headers first if it does not exist.
+    """
+    ensure_log_exists(log_path)
+    # Append mode: duplicate food_ids are allowed and preserved as separate rows
+    with open(log_path, "a", newline="", encoding="utf-8") as f:
+        writer = csv.writer(f)
+        writer.writerow([food_id, grams])
+
+
+def read_log(log_path: str) -> List[Dict[str, Union[str, float]]]:
+    """Read a log file and return its rows as a list of dicts keyed by header.
+
+    Returns [] when the file is absent or contains no usable rows.
+    NOTE(review): csv.DictReader yields str values only, so the declared
+    float in the return type is aspirational — callers convert themselves.
+    """
+    if not os.path.exists(log_path):
+        return []
+
+    with open(log_path, "r", encoding="utf-8") as f:
+        # Filter out comments/empty lines if necessary, matching existing logic
+        rows = [row for row in f if not row.startswith("#") and row.strip()]
+        if not rows:
+            return []
+
+        # First surviving row is treated as the header ("id", "grams")
+        reader = csv.DictReader(rows)
+        # Check if empty (headers only or truly empty) - DictReader handles headers
+        return list(reader)
diff --git a/ntclient/services/logs.py b/ntclient/services/logs.py
new file mode 100644 (file)
index 0000000..58a2a67
--- /dev/null
@@ -0,0 +1,95 @@
+"""
+Logs Service
+Business logic for managing daily food logs.
+"""
+
+import datetime
+import os
+from typing import Optional
+
+from tabulate import tabulate
+
+from ntclient import NUTRA_HOME
+from ntclient.persistence.csv_manager import append_to_log, read_log
+from ntclient.persistence.sql.usda.funcs import sql_food_details
+from ntclient.services.analyze import day_analyze
+
+
+def get_log_path(date_str: Optional[str] = None) -> str:
+    """
+    Returns the absolute path to the log file for the given date.
+    Defaults to today's date if date_str is None.
+    Expected date format: YYYY-MM-DD (or similar valid filename)
+
+    NOTE(review): date_str is interpolated into a filename unvalidated; a
+    value containing path separators would escape NUTRA_HOME — confirm the
+    CLI layer restricts the format before hardening here.
+    """
+    if not date_str:
+        date_str = datetime.date.today().isoformat()
+
+    # Sanitize inputs strictly if necessary, but assuming basic CLI usage for now
+    filename = f"{date_str}.csv"
+    return os.path.join(NUTRA_HOME, filename)
+
+
+def log_add(food_id: int, grams: float, date_str: Optional[str] = None) -> None:
+    """
+    Adds a food entry to the recurring daily log.
+    Validates that the food_id exists in the USDA database.
+
+    Prints an error and returns (no exception) when food_id is unknown.
+    """
+    # Validate Food ID
+    food_details = sql_food_details({food_id})
+    if not food_details:
+        print(f"ERROR: Food ID {food_id} not found in database.")
+        return
+
+    log_path = get_log_path(date_str)
+    append_to_log(log_path, food_id, grams)
+
+    # Feedback
+    # index [2] is presumably the food description column — verify against
+    # the sql_food_details row layout (log_view makes the same assumption)
+    food_name = food_details[0][2]
+    # Truncate long names to keep the confirmation line readable
+    if len(food_name) > 40:
+        food_name = food_name[:37] + "..."
+    print(
+        f"Added: {grams}g of '{food_name}' ({food_id}) to {os.path.basename(log_path)}"
+    )
+
+
+def log_view(date_str: Optional[str] = None) -> None:
+    """
+    Views the raw entries of a log file.
+
+    Prints a placeholder message and returns when the log has no entries;
+    otherwise prints a tabulated (ID, Food, Grams) listing enriched with
+    food names looked up from the USDA database.
+    """
+    log_path = get_log_path(date_str)
+    entries = read_log(log_path)
+
+    if not entries:
+        print(f"No log entries found for {os.path.basename(log_path)}")
+        return
+
+    # Enrich with food names for display
+    # entries is list of dicts like {'id': '1001', 'grams': '100'}
+    food_ids = {int(e["id"]) for e in entries if e["id"]}
+    # Map id -> description; [0]/[2] column meanings assumed, matching log_add
+    food_des = {x[0]: x[2] for x in sql_food_details(food_ids)}
+
+    table_data = []
+    for e in entries:
+        # DictReader values are strings; convert for display
+        fid = int(e["id"])
+        grams = float(e["grams"])
+        # Fall back gracefully if the id is missing from the USDA lookup
+        name = food_des.get(fid, "Unknown Food")
+        if len(name) > 50:
+            name = name[:47] + "..."
+        table_data.append([fid, name, grams])
+
+    print(f"\nLog: {os.path.basename(log_path)}")
+    print(tabulate(table_data, headers=["ID", "Food", "Grams"], tablefmt="simple"))
+
+
+def log_analyze(date_str: Optional[str] = None) -> None:
+    """
+    Runs full analysis on the log file.
+
+    Prints a not-found message and returns when no log exists for the date;
+    otherwise delegates to the existing day_analyze service, which accepts
+    a list of CSV file paths.
+    """
+    log_path = get_log_path(date_str)
+    if not os.path.exists(log_path):
+        print(f"Log file not found: {log_path}")
+        return
+
+    # Reuse existing analysis logic
+    day_analyze([log_path])