Optimize get_journal_entries with server-side date filtering
Use Fava's 'time' query parameter to filter entries on the server instead of fetching all entries and filtering in Python. This reduces: - Data transfer (only relevant entries are sent) - Memory usage (no need to hold all entries) - Processing time (no Python-side date parsing/filtering) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
parent
49d18c3e73
commit
addf4cd05f
1 changed file with 26 additions and 47 deletions
|
|
@@ -866,6 +866,9 @@ class FavaClient:
|
||||||
"""
|
"""
|
||||||
Get journal entries from Fava (with entry hashes), optionally filtered by date.
|
Get journal entries from Fava (with entry hashes), optionally filtered by date.
|
||||||
|
|
||||||
|
Uses Fava's server-side 'time' parameter for efficient date filtering,
|
||||||
|
avoiding the need to fetch all entries and filter in Python.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
days: If provided, only return entries from the last N days.
|
days: If provided, only return entries from the last N days.
|
||||||
If None, returns all entries (default behavior).
|
If None, returns all entries (default behavior).
|
||||||
|
|
@@ -890,59 +893,35 @@ class FavaClient:
|
||||||
# Get entries in custom date range
|
# Get entries in custom date range
|
||||||
custom = await fava.get_journal_entries(start_date="2024-01-01", end_date="2024-01-31")
|
custom = await fava.get_journal_entries(start_date="2024-01-01", end_date="2024-01-31")
|
||||||
"""
|
"""
|
||||||
try:
|
|
||||||
async with httpx.AsyncClient(timeout=self.timeout) as client:
|
|
||||||
response = await client.get(f"{self.base_url}/journal")
|
|
||||||
response.raise_for_status()
|
|
||||||
result = response.json()
|
|
||||||
entries = result.get("data", [])
|
|
||||||
logger.info(f"Fava /journal returned {len(entries)} entries")
|
|
||||||
|
|
||||||
# Filter by date range or days
|
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Build query parameters for server-side filtering
|
||||||
|
params = {}
|
||||||
|
|
||||||
# Use date range if both start_date and end_date are provided
|
# Use date range if both start_date and end_date are provided
|
||||||
if start_date and end_date:
|
if start_date and end_date:
|
||||||
try:
|
# Fava uses "YYYY-MM-DD - YYYY-MM-DD" format for time ranges
|
||||||
filter_start = datetime.strptime(start_date, "%Y-%m-%d").date()
|
params["time"] = f"{start_date} - {end_date}"
|
||||||
filter_end = datetime.strptime(end_date, "%Y-%m-%d").date()
|
logger.info(f"Querying journal with date range: {start_date} to {end_date}")
|
||||||
filtered_entries = []
|
|
||||||
for e in entries:
|
|
||||||
entry_date_str = e.get("date")
|
|
||||||
if entry_date_str:
|
|
||||||
try:
|
|
||||||
entry_date = datetime.strptime(entry_date_str, "%Y-%m-%d").date()
|
|
||||||
if filter_start <= entry_date <= filter_end:
|
|
||||||
filtered_entries.append(e)
|
|
||||||
except (ValueError, TypeError):
|
|
||||||
# Include entries with invalid dates (shouldn't happen)
|
|
||||||
filtered_entries.append(e)
|
|
||||||
logger.info(f"Filtered to {len(filtered_entries)} entries between {start_date} and {end_date}")
|
|
||||||
entries = filtered_entries
|
|
||||||
except ValueError as e:
|
|
||||||
logger.error(f"Invalid date format: {e}")
|
|
||||||
# Return all entries if date parsing fails
|
|
||||||
|
|
||||||
# Fall back to days filter if no date range provided
|
# Fall back to days filter if no date range provided
|
||||||
elif days is not None:
|
elif days is not None:
|
||||||
cutoff_date = (datetime.now() - timedelta(days=days)).date()
|
cutoff_date = (datetime.now() - timedelta(days=days)).date()
|
||||||
filtered_entries = []
|
today = datetime.now().date()
|
||||||
for e in entries:
|
params["time"] = f"{cutoff_date.isoformat()} - {today.isoformat()}"
|
||||||
entry_date_str = e.get("date")
|
logger.info(f"Querying journal for last {days} days (from {cutoff_date})")
|
||||||
if entry_date_str:
|
|
||||||
try:
|
|
||||||
entry_date = datetime.strptime(entry_date_str, "%Y-%m-%d").date()
|
|
||||||
if entry_date >= cutoff_date:
|
|
||||||
filtered_entries.append(e)
|
|
||||||
except (ValueError, TypeError):
|
|
||||||
# Include entries with invalid dates (shouldn't happen)
|
|
||||||
filtered_entries.append(e)
|
|
||||||
logger.info(f"Filtered to {len(filtered_entries)} entries from last {days} days (cutoff: {cutoff_date})")
|
|
||||||
entries = filtered_entries
|
|
||||||
|
|
||||||
# Log transactions with "Lightning payment" in narration
|
async with httpx.AsyncClient(timeout=self.timeout) as client:
|
||||||
lightning_entries = [e for e in entries if "Lightning payment" in e.get("narration", "")]
|
response = await client.get(f"{self.base_url}/journal", params=params)
|
||||||
logger.info(f"Found {len(lightning_entries)} Lightning payment entries in journal")
|
response.raise_for_status()
|
||||||
|
result = response.json()
|
||||||
|
entries = result.get("data", [])
|
||||||
|
|
||||||
|
if params:
|
||||||
|
logger.info(f"Fava /journal returned {len(entries)} entries (filtered)")
|
||||||
|
else:
|
||||||
|
logger.info(f"Fava /journal returned {len(entries)} entries (all)")
|
||||||
|
|
||||||
return entries
|
return entries
|
||||||
|
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue