R1 foundation - Phase 1 live build

This commit is contained in:
2026-02-28 03:33:33 +00:00
commit f36ea194f3
45 changed files with 4009 additions and 0 deletions

0
core/__init__.py Normal file
View File

213
core/base_repository.py Normal file
View File

@@ -0,0 +1,213 @@
from __future__ import annotations
"""
BaseRepository: generic CRUD operations for all entities.
Uses raw SQL via SQLAlchemy text() - no ORM models needed.
Every method automatically filters is_deleted=false unless specified.
"""
from uuid import UUID
from datetime import datetime, timezone
from typing import Any
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
class BaseRepository:
    """Generic async CRUD operations for a single table.

    Uses raw SQL via SQLAlchemy ``text()`` — no ORM models needed. Table and
    column names are interpolated into SQL strings, so every interpolated name
    is validated as a plain identifier (``_safe_ident``) to block SQL
    injection; all *values* travel as bound parameters. Read methods filter
    ``is_deleted = false`` unless ``include_deleted=True`` is passed.
    """

    # Sentinel filter value: ``{"col": NOT_NULL}`` emits ``col IS NOT NULL``.
    NOT_NULL = "__notnull__"

    # Fields where an explicit None in update() means "set column to NULL"
    # rather than "leave unchanged".
    NULLABLE_FIELDS = {
        "description", "notes", "body", "area_id", "project_id",
        "parent_id", "release_id", "due_date", "deadline", "tags",
        "context", "folder_id", "meeting_id", "completed_at",
        "waiting_for_contact_id", "waiting_since", "color",
    }

    def __init__(self, table: str, db: AsyncSession):
        # The table name is interpolated into every statement, so validate it
        # even though it normally comes from application code, not users.
        self.table = self._safe_ident(table)
        self.db = db

    @staticmethod
    def _safe_ident(name: str) -> str:
        """Return *name* unchanged if it is a plain identifier.

        Raises:
            ValueError: if *name* is not a bare identifier (guards every spot
                where a name is interpolated into SQL text).
        """
        if not isinstance(name, str) or not name.isidentifier():
            raise ValueError(f"unsafe SQL identifier: {name!r}")
        return name

    def _build_where(
        self,
        filters: dict | None,
        include_deleted: bool,
    ) -> tuple[str, dict[str, Any]]:
        """Build a WHERE clause and bound-parameter dict from *filters*.

        ``None`` values become ``IS NULL``; the ``NOT_NULL`` sentinel becomes
        ``IS NOT NULL``; anything else becomes a bound equality test. Shared
        by list() and count() so both support the same filter semantics.
        """
        clauses: list[str] = []
        params: dict[str, Any] = {}
        if not include_deleted:
            clauses.append("is_deleted = false")
        for i, (key, value) in enumerate((filters or {}).items()):
            column = self._safe_ident(key)
            if value is None:
                clauses.append(f"{column} IS NULL")
            elif value == self.NOT_NULL:
                clauses.append(f"{column} IS NOT NULL")
            else:
                param_name = f"f_{i}"
                clauses.append(f"{column} = :{param_name}")
                params[param_name] = value
        where_sql = " AND ".join(clauses) if clauses else "1=1"
        return where_sql, params

    async def list(
        self,
        filters: dict | None = None,
        sort: str = "sort_order",
        sort_dir: str = "ASC",
        page: int = 1,
        per_page: int = 50,
        include_deleted: bool = False,
    ) -> list[dict]:
        """List rows with optional filtering, sorting, pagination."""
        where_sql, params = self._build_where(filters, include_deleted)
        # Whitelist the direction and validate the sort column — both are
        # interpolated into the statement and must never carry raw input.
        direction = "DESC" if str(sort_dir).upper() == "DESC" else "ASC"
        query = text(f"""
            SELECT * FROM {self.table}
            WHERE {where_sql}
            ORDER BY {self._safe_ident(sort)} {direction}
            LIMIT :limit OFFSET :offset
        """)
        params["limit"] = per_page
        # Clamp so page=0 (or negative) cannot produce a negative OFFSET.
        params["offset"] = max(page - 1, 0) * per_page
        result = await self.db.execute(query, params)
        return [dict(row._mapping) for row in result]

    async def count(
        self,
        filters: dict | None = None,
        include_deleted: bool = False,
    ) -> int:
        """Count rows matching filters (same sentinel semantics as list())."""
        where_sql, params = self._build_where(filters, include_deleted)
        query = text(f"SELECT count(*) FROM {self.table} WHERE {where_sql}")
        result = await self.db.execute(query, params)
        return result.scalar() or 0

    async def get(self, id: UUID | str, include_deleted: bool = False) -> dict | None:
        """Get a single row by ID.

        Honors the module's soft-delete contract: deleted rows are hidden
        unless ``include_deleted=True`` (previously they leaked through).
        """
        deleted_sql = "" if include_deleted else " AND is_deleted = false"
        query = text(f"SELECT * FROM {self.table} WHERE id = :id{deleted_sql}")
        result = await self.db.execute(query, {"id": str(id)})
        row = result.first()
        return dict(row._mapping) if row else None

    async def create(self, data: dict) -> dict:
        """Insert a new row. Auto-sets created_at, updated_at, is_deleted.

        None values are dropped (so column defaults apply) except for the
        free-text fields, where an explicit NULL is meaningful.
        """
        data = {
            k: v for k, v in data.items()
            if v is not None or k in ("description", "notes", "body")
        }
        data.setdefault("is_deleted", False)
        now = datetime.now(timezone.utc)
        data.setdefault("created_at", now)
        data.setdefault("updated_at", now)
        columns = ", ".join(self._safe_ident(k) for k in data)
        placeholders = ", ".join(f":{k}" for k in data)
        query = text(f"""
            INSERT INTO {self.table} ({columns})
            VALUES ({placeholders})
            RETURNING *
        """)
        result = await self.db.execute(query, data)
        row = result.first()
        return dict(row._mapping) if row else data

    async def update(self, id: UUID | str, data: dict) -> dict | None:
        """Update a row by ID. Auto-sets updated_at.

        None values are dropped unless the key is in NULLABLE_FIELDS, where
        an explicit None clears the column to NULL.
        """
        data["updated_at"] = datetime.now(timezone.utc)
        clean_data = {
            k: v for k, v in data.items()
            if v is not None or k in self.NULLABLE_FIELDS
        }
        if not clean_data:
            return await self.get(id)
        set_clauses = ", ".join(f"{self._safe_ident(k)} = :{k}" for k in clean_data)
        clean_data["id"] = str(id)
        query = text(f"""
            UPDATE {self.table}
            SET {set_clauses}
            WHERE id = :id
            RETURNING *
        """)
        result = await self.db.execute(query, clean_data)
        row = result.first()
        return dict(row._mapping) if row else None

    async def soft_delete(self, id: UUID | str) -> bool:
        """Soft delete: set is_deleted=true, deleted_at=now().

        Returns True only when a live row was actually marked deleted.
        """
        query = text(f"""
            UPDATE {self.table}
            SET is_deleted = true, deleted_at = :now, updated_at = :now
            WHERE id = :id AND is_deleted = false
            RETURNING id
        """)
        now = datetime.now(timezone.utc)
        result = await self.db.execute(query, {"id": str(id), "now": now})
        return result.first() is not None

    async def restore(self, id: UUID | str) -> bool:
        """Restore a soft-deleted row. Returns True if a row was restored."""
        query = text(f"""
            UPDATE {self.table}
            SET is_deleted = false, deleted_at = NULL, updated_at = :now
            WHERE id = :id AND is_deleted = true
            RETURNING id
        """)
        now = datetime.now(timezone.utc)
        result = await self.db.execute(query, {"id": str(id), "now": now})
        return result.first() is not None

    async def permanent_delete(self, id: UUID | str) -> bool:
        """Hard delete. Admin only. Returns True if a row was removed."""
        query = text(f"DELETE FROM {self.table} WHERE id = :id RETURNING id")
        result = await self.db.execute(query, {"id": str(id)})
        return result.first() is not None

    async def bulk_soft_delete(self, ids: list[str]) -> int:
        """Soft delete multiple rows; returns the number actually deleted."""
        if not ids:
            return 0
        now = datetime.now(timezone.utc)
        placeholders = ", ".join(f":id_{i}" for i in range(len(ids)))
        params: dict[str, Any] = {f"id_{i}": str(id) for i, id in enumerate(ids)}
        params["now"] = now
        query = text(f"""
            UPDATE {self.table}
            SET is_deleted = true, deleted_at = :now, updated_at = :now
            WHERE id IN ({placeholders}) AND is_deleted = false
        """)
        result = await self.db.execute(query, params)
        return result.rowcount

    async def list_deleted(self) -> list[dict]:
        """List all soft-deleted rows. Used by Admin > Trash."""
        query = text(f"""
            SELECT * FROM {self.table}
            WHERE is_deleted = true
            ORDER BY deleted_at DESC
        """)
        result = await self.db.execute(query)
        return [dict(row._mapping) for row in result]

    async def reorder(self, id_order: list[str]) -> None:
        """Update sort_order from list position (gaps of 10 for inserts)."""
        for i, id in enumerate(id_order):
            await self.db.execute(
                text(f"UPDATE {self.table} SET sort_order = :order WHERE id = :id"),
                {"order": (i + 1) * 10, "id": str(id)},
            )

47
core/database.py Normal file
View File

@@ -0,0 +1,47 @@
"""
Database connection and session management.
Async SQLAlchemy 2.0 with asyncpg driver.
"""
import os
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy import text
# Connection URL; the default targets the docker-compose Postgres service
# ("lifeos-db"). Override via the DATABASE_URL environment variable.
# NOTE(review): the default embeds dev credentials — acceptable for local
# compose only; production must always set DATABASE_URL.
DATABASE_URL = os.getenv(
    "DATABASE_URL",
    "postgresql+asyncpg://postgres:postgres@lifeos-db:5432/lifeos_dev"
)

# Shared async engine. SQL echo is on only when ENVIRONMENT=development;
# pool_pre_ping revalidates pooled connections the server may have dropped.
engine = create_async_engine(
    DATABASE_URL,
    echo=os.getenv("ENVIRONMENT") == "development",
    pool_size=5,
    max_overflow=10,
    pool_pre_ping=True,
)

# Session factory; expire_on_commit=False keeps loaded row data usable
# after commit without triggering a refresh round-trip.
async_session_factory = async_sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
)
async def get_db():
    """FastAPI dependency: yield an async database session.

    Commits after the request handler completes; rolls back and re-raises
    on any exception so the error propagates to FastAPI's handlers. The
    ``async with`` block already closes the session on exit, so the former
    explicit ``finally: await session.close()`` was redundant and has been
    removed.
    """
    async with async_session_factory() as session:
        try:
            yield session
            await session.commit()
        except Exception:
            await session.rollback()
            raise
async def check_db():
    """Health check: verify connectivity by round-tripping ``SELECT 1``."""
    async with async_session_factory() as session:
        probe = await session.execute(text("SELECT 1"))
        return probe.scalar() == 1

72
core/sidebar.py Normal file
View File

@@ -0,0 +1,72 @@
"""
Sidebar navigation data builder.
Loads domains > areas > projects hierarchy for the sidebar tree.
"""
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
def _group_rows(rows: list[dict], key: str) -> dict[str, list[dict]]:
    """Group rows by ``str(row[key])``; rows with a NULL key are skipped.

    Keys are stringified so UUID objects and string UUIDs compare equal.
    """
    grouped: dict[str, list[dict]] = {}
    for row in rows:
        if row.get(key) is not None:
            grouped.setdefault(str(row[key]), []).append(row)
    return grouped


async def get_sidebar_data(db: AsyncSession) -> dict:
    """Build full sidebar navigation data.

    Returns a dict with the domains > areas > projects tree plus badge
    counts for unprocessed captures and today's incomplete focus items.
    """
    # Domains
    result = await db.execute(text("""
        SELECT id, name, color FROM domains
        WHERE is_deleted = false ORDER BY sort_order, name
    """))
    domains = [dict(r._mapping) for r in result]

    # Areas grouped by domain
    result = await db.execute(text("""
        SELECT id, domain_id, name FROM areas
        WHERE is_deleted = false ORDER BY sort_order, name
    """))
    areas = [dict(r._mapping) for r in result]

    # Projects grouped by domain/area (archived ones hidden from sidebar)
    result = await db.execute(text("""
        SELECT id, domain_id, area_id, name, status FROM projects
        WHERE is_deleted = false AND status != 'archived'
        ORDER BY sort_order, name
    """))
    projects = [dict(r._mapping) for r in result]

    # Counts for badges
    result = await db.execute(text("""
        SELECT count(*) FROM capture WHERE is_deleted = false AND processed = false
    """))
    capture_count = result.scalar() or 0
    result = await db.execute(text("""
        SELECT count(*) FROM daily_focus
        WHERE is_deleted = false AND focus_date = CURRENT_DATE AND completed = false
    """))
    focus_count = result.scalar() or 0

    # Build tree structure. Group once up front instead of re-scanning the
    # full areas/projects lists for every domain.
    areas_by_domain = _group_rows(areas, "domain_id")
    projects_by_domain = _group_rows(projects, "domain_id")
    domain_tree = []
    for d in domains:
        did = str(d["id"])
        d_areas = areas_by_domain.get(did, [])
        d_projects = projects_by_domain.get(did, [])
        # Projects under areas
        for a in d_areas:
            aid = str(a["id"])
            a["projects"] = [
                p for p in d_projects
                if p.get("area_id") is not None and str(p["area_id"]) == aid
            ]
        domain_tree.append({
            "id": d["id"],
            "name": d["name"],
            # Fix: the SELECT always returns the "color" key, so a NULL color
            # arrives as None and dict.get's default never applied; use `or`
            # so NULL falls back to the default sidebar color.
            "color": d.get("color") or "#4F6EF7",
            "areas": d_areas,
            # Projects directly under domain (no area)
            "standalone_projects": [p for p in d_projects if p.get("area_id") is None],
        })
    return {
        "domain_tree": domain_tree,
        "capture_count": capture_count,
        "focus_count": focus_count,
    }