Compare commits
6 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 38c2bd702a | |||
| a11f3a7abb | |||
| 4ac99403bd | |||
| 87f7ad0dc4 | |||
| 4c27cf4bd0 | |||
| 7d4a03fd79 |
13 changed files with 767 additions and 2 deletions
|
|
@ -1 +1,3 @@
|
|||
__version__ = "0.8.0"
|
||||
__version__ = "0.10.0"
|
||||
|
||||
from circuitforge_core.community import CommunityDB, CommunityPost, SharedStore
|
||||
|
|
|
|||
8
circuitforge_core/community/__init__.py
Normal file
8
circuitforge_core/community/__init__.py
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
# circuitforge_core/community/__init__.py
# MIT License
"""Public surface of the community module: connection pool, post model, store base."""

from .db import CommunityDB
from .models import CommunityPost
from .store import SharedStore

# Explicit public API: these three names are what products import.
__all__ = ["CommunityDB", "CommunityPost", "SharedStore"]
|
||||
117
circuitforge_core/community/db.py
Normal file
117
circuitforge_core/community/db.py
Normal file
|
|
@ -0,0 +1,117 @@
|
|||
# circuitforge_core/community/db.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib.resources
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
import psycopg2
|
||||
from psycopg2.pool import ThreadedConnectionPool
|
||||
|
||||
logger = logging.getLogger(__name__)

# Pool sizing: one shared pool per process, borrowed by all store instances.
_MIN_CONN = 1
_MAX_CONN = 10


class CommunityDB:
    """Shared PostgreSQL connection pool + migration runner for the community module.

    Products instantiate one CommunityDB at startup and pass it to SharedStore
    subclasses. The pool is thread-safe (ThreadedConnectionPool).

    Usage:
        db = CommunityDB.from_env()  # reads COMMUNITY_DB_URL
        db.run_migrations()
        store = MyProductStore(db)
        db.close()  # at shutdown
    """

    def __init__(self, dsn: str | None) -> None:
        """Create the connection pool.

        Args:
            dsn: PostgreSQL connection string (required).

        Raises:
            ValueError: if *dsn* is None or empty.
        """
        if not dsn:
            raise ValueError(
                "CommunityDB requires a DSN. "
                "Set COMMUNITY_DB_URL or pass dsn= explicitly."
            )
        self._pool = ThreadedConnectionPool(_MIN_CONN, _MAX_CONN, dsn=dsn)
        logger.debug("CommunityDB pool created (min=%d, max=%d)", _MIN_CONN, _MAX_CONN)

    @classmethod
    def from_env(cls) -> "CommunityDB":
        """Construct from the COMMUNITY_DB_URL environment variable."""
        import os

        return cls(dsn=os.environ.get("COMMUNITY_DB_URL"))

    # ------------------------------------------------------------------
    # Connection management
    # ------------------------------------------------------------------

    def getconn(self):
        """Borrow a connection from the pool. Must be returned via putconn()."""
        return self._pool.getconn()

    def putconn(self, conn) -> None:
        """Return a borrowed connection to the pool."""
        self._pool.putconn(conn)

    def close(self) -> None:
        """Close all pool connections. Call at application shutdown."""
        self._pool.closeall()
        logger.debug("CommunityDB pool closed")

    # ------------------------------------------------------------------
    # Migration runner
    # ------------------------------------------------------------------

    def _discover_migrations(self) -> list[Path]:
        """Return the sorted list of .sql migration files from the migrations package.

        Filters on the entry *name* (not the full stringified path), so a
        ".sql" substring in a parent directory cannot produce false positives.
        Sorted by filename, which the NNN_ prefix convention makes numeric order.
        """
        pkg = importlib.resources.files("circuitforge_core.community.migrations")
        return sorted(
            (Path(str(entry)) for entry in pkg.iterdir() if entry.name.endswith(".sql")),
            key=lambda p: p.name,
        )

    def run_migrations(self) -> None:
        """Apply all community migration SQL files in numeric order.

        Uses a simple applied-migrations ledger table to avoid re-running
        already applied migrations. Idempotent: safe to call at every startup.

        Each migration's SQL and its ledger row are committed together, so a
        crash mid-run can never record a migration that did not fully apply.
        """
        conn = self.getconn()
        try:
            with conn.cursor() as cur:
                # Ledger tracking which migration files have been applied.
                cur.execute("""
                    CREATE TABLE IF NOT EXISTS _community_migrations (
                        filename TEXT PRIMARY KEY,
                        applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
                    )
                """)
                conn.commit()

                for migration_file in self._discover_migrations():
                    name = migration_file.name
                    cur.execute(
                        "SELECT 1 FROM _community_migrations WHERE filename = %s",
                        (name,),
                    )
                    if cur.fetchone():
                        logger.debug("Migration %s already applied, skipping", name)
                        continue

                    sql = migration_file.read_text()
                    logger.info("Applying community migration: %s", name)
                    cur.execute(sql)
                    cur.execute(
                        "INSERT INTO _community_migrations (filename) VALUES (%s)",
                        (name,),
                    )
                    # Commit per migration: the DDL and its ledger row are one
                    # atomic unit, and earlier migrations stay applied even if
                    # a later file fails.
                    conn.commit()
        except Exception:
            # Roll back whatever the in-flight migration left uncommitted.
            conn.rollback()
            raise
        finally:
            self.putconn(conn)
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
-- 001_community_posts.sql
-- Community posts table: published meal plans, recipe successes, and bloopers.
-- Applies to: cf_community PostgreSQL database (hosted by cf-orch).
-- BSL boundary: this schema is MIT (data layer, no inference).
--
-- One table holds all three post types; which optional column groups are
-- populated depends on post_type (see CHECK below).

CREATE TABLE IF NOT EXISTS community_posts (
    id BIGSERIAL PRIMARY KEY,
    -- slug is the unique public lookup key (see SharedStore.get_post_by_slug).
    slug TEXT NOT NULL UNIQUE,
    pseudonym TEXT NOT NULL,
    post_type TEXT NOT NULL CHECK (post_type IN ('plan', 'recipe_success', 'recipe_blooper')),
    published TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    title TEXT NOT NULL,
    description TEXT,
    photo_url TEXT,

    -- Plan slots (JSON array: [{day, meal_type, recipe_id, recipe_name}])
    slots JSONB NOT NULL DEFAULT '[]',

    -- Recipe result fields
    recipe_id BIGINT,
    recipe_name TEXT,
    -- NOTE: CHECK already passes on NULL, so "level IS NULL OR" is redundant
    -- but harmless; kept for explicitness.
    level SMALLINT CHECK (level IS NULL OR level BETWEEN 1 AND 4),
    outcome_notes TEXT,

    -- Element snapshot (denormalized from corpus at publish time)
    seasoning_score REAL,
    richness_score REAL,
    brightness_score REAL,
    depth_score REAL,
    aroma_score REAL,
    structure_score REAL,
    texture_profile TEXT,

    -- Dietary / allergen / flavor
    dietary_tags JSONB NOT NULL DEFAULT '[]',
    allergen_flags JSONB NOT NULL DEFAULT '[]',
    flavor_molecules JSONB NOT NULL DEFAULT '[]',

    -- USDA FDC (Food Data Central) macros
    fat_pct REAL,
    protein_pct REAL,
    moisture_pct REAL,

    -- Source product identifier
    source_product TEXT NOT NULL DEFAULT 'kiwi'
);

-- Indexes for common filter patterns
CREATE INDEX IF NOT EXISTS idx_community_posts_published ON community_posts (published DESC);
CREATE INDEX IF NOT EXISTS idx_community_posts_post_type ON community_posts (post_type);
CREATE INDEX IF NOT EXISTS idx_community_posts_source ON community_posts (source_product);

-- GIN index for dietary/allergen JSONB array containment queries
CREATE INDEX IF NOT EXISTS idx_community_posts_dietary_tags ON community_posts USING GIN (dietary_tags);
CREATE INDEX IF NOT EXISTS idx_community_posts_allergen_flags ON community_posts USING GIN (allergen_flags);
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
-- 002_community_post_reactions.sql
-- Reserved: community post reactions (thumbs-up, saves count).
-- Not yet implemented -- this migration is a stub to reserve the sequence number.
-- Applies to: cf_community PostgreSQL database (hosted by cf-orch).

-- Placeholder: no-op. Will be replaced when reactions feature is designed.
-- SELECT 1 gives the migration runner one statement to execute so this file is
-- recorded in the applied-migrations ledger like any real migration.
SELECT 1;
|
||||
0
circuitforge_core/community/migrations/__init__.py
Normal file
0
circuitforge_core/community/migrations/__init__.py
Normal file
94
circuitforge_core/community/models.py
Normal file
94
circuitforge_core/community/models.py
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
# circuitforge_core/community/models.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
PostType = Literal["plan", "recipe_success", "recipe_blooper"]
|
||||
CreativityLevel = Literal[1, 2, 3, 4]
|
||||
|
||||
_VALID_POST_TYPES: frozenset[str] = frozenset(["plan", "recipe_success", "recipe_blooper"])
|
||||
|
||||
|
||||
def _validate_score(name: str, value: float) -> float:
|
||||
if not (0.0 <= value <= 1.0):
|
||||
raise ValueError(f"{name} must be between 0.0 and 1.0, got {value!r}")
|
||||
return value
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class CommunityPost:
|
||||
"""Immutable snapshot of a published community post.
|
||||
|
||||
Lists (dietary_tags, allergen_flags, flavor_molecules, slots) are stored as
|
||||
tuples to enforce immutability. Pass lists -- they are converted in __post_init__.
|
||||
"""
|
||||
|
||||
# Identity
|
||||
slug: str
|
||||
pseudonym: str
|
||||
post_type: PostType
|
||||
published: datetime
|
||||
title: str
|
||||
|
||||
# Optional content
|
||||
description: str | None
|
||||
photo_url: str | None
|
||||
|
||||
# Plan slots -- list[dict] for post_type="plan"
|
||||
slots: tuple
|
||||
|
||||
# Recipe result fields -- for post_type="recipe_success" | "recipe_blooper"
|
||||
recipe_id: int | None
|
||||
recipe_name: str | None
|
||||
level: CreativityLevel | None
|
||||
outcome_notes: str | None
|
||||
|
||||
# Element snapshot
|
||||
seasoning_score: float
|
||||
richness_score: float
|
||||
brightness_score: float
|
||||
depth_score: float
|
||||
aroma_score: float
|
||||
structure_score: float
|
||||
texture_profile: str
|
||||
|
||||
# Dietary/allergen/flavor
|
||||
dietary_tags: tuple
|
||||
allergen_flags: tuple
|
||||
flavor_molecules: tuple
|
||||
|
||||
# USDA FDC (Food Data Central) macros (optional -- may not be available for all recipes)
|
||||
fat_pct: float | None
|
||||
protein_pct: float | None
|
||||
moisture_pct: float | None
|
||||
|
||||
def __new__(cls, **kwargs):
|
||||
# Convert lists to tuples before frozen dataclass assignment
|
||||
for key in ("slots", "dietary_tags", "allergen_flags", "flavor_molecules"):
|
||||
if key in kwargs and isinstance(kwargs[key], list):
|
||||
kwargs[key] = tuple(kwargs[key])
|
||||
return object.__new__(cls)
|
||||
|
||||
def __init__(self, **kwargs):
|
||||
# Convert lists to tuples
|
||||
for key in ("slots", "dietary_tags", "allergen_flags", "flavor_molecules"):
|
||||
if key in kwargs and isinstance(kwargs[key], list):
|
||||
kwargs[key] = tuple(kwargs[key])
|
||||
for f in self.__dataclass_fields__:
|
||||
object.__setattr__(self, f, kwargs[f])
|
||||
self.__post_init__()
|
||||
|
||||
def __post_init__(self) -> None:
|
||||
if self.post_type not in _VALID_POST_TYPES:
|
||||
raise ValueError(
|
||||
f"post_type must be one of {sorted(_VALID_POST_TYPES)}, got {self.post_type!r}"
|
||||
)
|
||||
for score_name in (
|
||||
"seasoning_score", "richness_score", "brightness_score",
|
||||
"depth_score", "aroma_score", "structure_score",
|
||||
):
|
||||
_validate_score(score_name, getattr(self, score_name))
|
||||
208
circuitforge_core/community/store.py
Normal file
208
circuitforge_core/community/store.py
Normal file
|
|
@ -0,0 +1,208 @@
|
|||
# circuitforge_core/community/store.py
|
||||
# MIT License
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from .models import CommunityPost
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .db import CommunityDB
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _row_to_post(row: dict) -> CommunityPost:
    """Convert a psycopg2 row dict to a CommunityPost.

    JSONB columns (slots, dietary_tags, allergen_flags, flavor_molecules) come
    back from psycopg2 as Python lists already -- no json.loads() needed.
    """
    # Element scores may be NULL in the database; default each to 0.0.
    scores = {
        column: row[column] or 0.0
        for column in (
            "seasoning_score", "richness_score", "brightness_score",
            "depth_score", "aroma_score", "structure_score",
        )
    }
    return CommunityPost(
        slug=row["slug"],
        pseudonym=row["pseudonym"],
        post_type=row["post_type"],
        published=row["published"],
        title=row["title"],
        description=row.get("description"),
        photo_url=row.get("photo_url"),
        slots=row.get("slots") or [],
        recipe_id=row.get("recipe_id"),
        recipe_name=row.get("recipe_name"),
        level=row.get("level"),
        outcome_notes=row.get("outcome_notes"),
        texture_profile=row.get("texture_profile") or "",
        dietary_tags=row.get("dietary_tags") or [],
        allergen_flags=row.get("allergen_flags") or [],
        flavor_molecules=row.get("flavor_molecules") or [],
        fat_pct=row.get("fat_pct"),
        protein_pct=row.get("protein_pct"),
        moisture_pct=row.get("moisture_pct"),
        **scores,
    )
|
||||
|
||||
|
||||
def _cursor_to_dict(cur, row) -> dict:
|
||||
"""Convert a psycopg2 row tuple to a dict using cursor.description."""
|
||||
if isinstance(row, dict):
|
||||
return row
|
||||
return {desc[0]: val for desc, val in zip(cur.description, row)}
|
||||
|
||||
|
||||
class SharedStore:
    """Base class for product community stores.

    Subclass this in each product:
        class KiwiCommunityStore(SharedStore):
            def list_posts_for_week(self, week_start: str) -> list[CommunityPost]: ...

    All methods return new objects (immutable pattern). Never mutate rows in-place.
    """

    def __init__(self, db: "CommunityDB") -> None:
        # Shared pool; every method borrows a connection and returns it in finally.
        self._db = db

    # ------------------------------------------------------------------
    # Reads
    # ------------------------------------------------------------------

    def get_post_by_slug(self, slug: str) -> CommunityPost | None:
        """Fetch one post by its unique slug, or None when no row matches."""
        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    "SELECT * FROM community_posts WHERE slug = %s LIMIT 1",
                    (slug,),
                )
                row = cur.fetchone()
                if row is None:
                    return None
                return _row_to_post(_cursor_to_dict(cur, row))
        finally:
            self._db.putconn(conn)

    def list_posts(
        self,
        limit: int = 20,
        offset: int = 0,
        post_type: str | None = None,
        dietary_tags: list[str] | None = None,
        allergen_exclude: list[str] | None = None,
        source_product: str | None = None,
    ) -> list[CommunityPost]:
        """Paginated post list with optional filters.

        dietary_tags: JSONB containment -- posts must include ALL listed tags.
        allergen_exclude: posts must NOT include any listed flag.
        """
        import json

        conn = self._db.getconn()
        try:
            conditions = []
            params: list = []

            if post_type:
                conditions.append("post_type = %s")
                params.append(post_type)
            if dietary_tags:
                conditions.append("dietary_tags @> %s::jsonb")
                params.append(json.dumps(dietary_tags))
            if allergen_exclude:
                # BUG FIX: PostgreSQL defines no && operator for jsonb, so the
                # previous "allergen_flags && %s::jsonb" raised
                # "operator does not exist: jsonb && jsonb" at runtime.
                # jsonb ?| text[] is true when any of the given strings appears
                # as a top-level element of the jsonb array; psycopg2 adapts
                # the Python list to a text[] parameter.
                conditions.append("NOT (allergen_flags ?| %s)")
                params.append(allergen_exclude)
            if source_product:
                conditions.append("source_product = %s")
                params.append(source_product)

            where = ("WHERE " + " AND ".join(conditions)) if conditions else ""
            params.extend([limit, offset])

            with conn.cursor() as cur:
                cur.execute(
                    f"SELECT * FROM community_posts {where} "
                    "ORDER BY published DESC LIMIT %s OFFSET %s",
                    params,
                )
                rows = cur.fetchall()
                return [_row_to_post(_cursor_to_dict(cur, r)) for r in rows]
        finally:
            self._db.putconn(conn)

    # ------------------------------------------------------------------
    # Writes
    # ------------------------------------------------------------------

    def insert_post(self, post: CommunityPost, *, source_product: str = "kiwi") -> CommunityPost:
        """Insert a new community post. Returns the inserted post (unchanged -- slug is the key).

        source_product: identifier of the product inserting the post. Defaults
        to "kiwi" for backward compatibility (it was previously hard-coded in
        this shared base class).
        """
        import json

        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO community_posts (
                        slug, pseudonym, post_type, published, title, description, photo_url,
                        slots, recipe_id, recipe_name, level, outcome_notes,
                        seasoning_score, richness_score, brightness_score,
                        depth_score, aroma_score, structure_score, texture_profile,
                        dietary_tags, allergen_flags, flavor_molecules,
                        fat_pct, protein_pct, moisture_pct, source_product
                    ) VALUES (
                        %s, %s, %s, %s, %s, %s, %s,
                        %s::jsonb, %s, %s, %s, %s,
                        %s, %s, %s, %s, %s, %s, %s,
                        %s::jsonb, %s::jsonb, %s::jsonb,
                        %s, %s, %s, %s
                    )
                    """,
                    (
                        post.slug, post.pseudonym, post.post_type,
                        post.published, post.title, post.description, post.photo_url,
                        json.dumps(list(post.slots)),
                        post.recipe_id, post.recipe_name, post.level, post.outcome_notes,
                        post.seasoning_score, post.richness_score, post.brightness_score,
                        post.depth_score, post.aroma_score, post.structure_score,
                        post.texture_profile,
                        json.dumps(list(post.dietary_tags)),
                        json.dumps(list(post.allergen_flags)),
                        json.dumps(list(post.flavor_molecules)),
                        post.fat_pct, post.protein_pct, post.moisture_pct,
                        source_product,
                    ),
                )
                conn.commit()
                return post
        except Exception:
            conn.rollback()
            raise
        finally:
            self._db.putconn(conn)

    def delete_post(self, slug: str, pseudonym: str) -> bool:
        """Hard-delete a post. Only succeeds if pseudonym matches the author.

        Returns True if a row was deleted, False if no matching row found.
        """
        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    "DELETE FROM community_posts WHERE slug = %s AND pseudonym = %s",
                    (slug, pseudonym),
                )
                conn.commit()
                return cur.rowcount > 0
        except Exception:
            conn.rollback()
            raise
        finally:
            self._db.putconn(conn)
|
||||
|
|
@ -4,13 +4,14 @@ build-backend = "setuptools.build_meta"
|
|||
|
||||
[project]
|
||||
name = "circuitforge-core"
|
||||
version = "0.9.0"
|
||||
version = "0.10.0"
|
||||
description = "Shared scaffold for CircuitForge products (MIT)"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"pyyaml>=6.0",
|
||||
"requests>=2.31",
|
||||
"openai>=1.0",
|
||||
"psycopg2>=2.9",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
|
|
|
|||
0
tests/community/__init__.py
Normal file
0
tests/community/__init__.py
Normal file
64
tests/community/test_db.py
Normal file
64
tests/community/test_db.py
Normal file
|
|
@ -0,0 +1,64 @@
|
|||
# tests/community/test_db.py
# Unit tests for CommunityDB: DSN validation, pool lifecycle, migration discovery.
import os  # NOTE(review): unused in this module (monkeypatch handles the env) -- candidate for removal.
import pytest
from unittest.mock import MagicMock, patch
from circuitforge_core.community.db import CommunityDB


@pytest.fixture
def mock_pool():
    """Patch psycopg2.pool.ThreadedConnectionPool to avoid needing a real PG instance."""
    with patch("circuitforge_core.community.db.ThreadedConnectionPool") as mock_cls:
        mock_instance = MagicMock()
        mock_cls.return_value = mock_instance
        # Yield both the patched class (to assert construction) and the pool
        # instance (to assert calls such as closeall()).
        yield mock_cls, mock_instance


def test_community_db_requires_url():
    """A missing DSN must raise ValueError whose message mentions COMMUNITY_DB_URL."""
    with pytest.raises(ValueError, match="COMMUNITY_DB_URL"):
        CommunityDB(dsn=None)


def test_community_db_init_creates_pool(mock_pool):
    """Constructing with a DSN must create exactly one connection pool."""
    mock_cls, _ = mock_pool
    CommunityDB(dsn="postgresql://user:pass@localhost/cf_community")
    mock_cls.assert_called_once()


def test_community_db_close_puts_pool(mock_pool):
    """close() must close all pooled connections via closeall()."""
    _, mock_instance = mock_pool
    db = CommunityDB(dsn="postgresql://user:pass@localhost/cf_community")
    db.close()
    mock_instance.closeall.assert_called_once()


def test_community_db_migration_files_discovered():
    """Migration runner must find at least 001 and 002 SQL files."""
    # __new__ bypasses __init__, so no DSN or pool is needed for discovery.
    db = CommunityDB.__new__(CommunityDB)
    files = db._discover_migrations()
    names = [f.name for f in files]
    assert any("001" in n for n in names)
    assert any("002" in n for n in names)
    # Must be sorted numerically
    assert files == sorted(files, key=lambda p: p.name)


def test_community_db_run_migrations_executes_sql(mock_pool):
    """run_migrations() against a mocked pool must issue SQL through the cursor."""
    _, mock_instance = mock_pool
    mock_conn = MagicMock()
    mock_cur = MagicMock()
    mock_instance.getconn.return_value = mock_conn
    # Wire `with conn.cursor() as cur:` to hand back our mock cursor.
    mock_conn.cursor.return_value.__enter__.return_value = mock_cur
    mock_cur.fetchone.return_value = None  # no migrations applied yet

    db = CommunityDB(dsn="postgresql://user:pass@localhost/cf_community")
    db.run_migrations()

    # At least one execute call must have happened
    assert mock_cur.execute.called


def test_community_db_from_env(monkeypatch, mock_pool):
    """from_env() must build an instance from the COMMUNITY_DB_URL env var."""
    monkeypatch.setenv("COMMUNITY_DB_URL", "postgresql://u:p@host/db")
    db = CommunityDB.from_env()
    assert db is not None
||||
94
tests/community/test_models.py
Normal file
94
tests/community/test_models.py
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
# tests/community/test_models.py
# Unit tests for CommunityPost: construction, validation, and immutability.
import pytest
from datetime import datetime, timezone
from circuitforge_core.community.models import CommunityPost


def make_post(**overrides) -> CommunityPost:
    """Build a valid plan-type CommunityPost; override any field per test."""
    base = dict(
        slug="kiwi-plan-test-2026-04-12-pasta-week",
        pseudonym="PastaWitch",
        post_type="plan",
        published=datetime(2026, 4, 12, 12, 0, 0, tzinfo=timezone.utc),
        title="Pasta Week",
        description="Seven days of carbs",
        photo_url=None,
        slots=[{"day": 0, "meal_type": "dinner", "recipe_id": 1, "recipe_name": "Spaghetti"}],
        recipe_id=None,
        recipe_name=None,
        level=None,
        outcome_notes=None,
        seasoning_score=0.7,
        richness_score=0.6,
        brightness_score=0.3,
        depth_score=0.5,
        aroma_score=0.4,
        structure_score=0.8,
        texture_profile="chewy",
        dietary_tags=["vegetarian"],
        allergen_flags=["gluten"],
        flavor_molecules=[1234, 5678],
        fat_pct=12.5,
        protein_pct=10.0,
        moisture_pct=55.0,
    )
    return CommunityPost(**{**base, **overrides})


def test_community_post_immutable():
    frozen = make_post()
    with pytest.raises((AttributeError, TypeError)):
        frozen.title = "changed"  # type: ignore


def test_community_post_slug_uri_compatible():
    slug = make_post(slug="kiwi-plan-test-2026-04-12-pasta-week").slug
    assert " " not in slug
    assert slug == slug.lower()


def test_community_post_type_valid():
    # All three declared post types must construct without error.
    for kind in ("plan", "recipe_success", "recipe_blooper"):
        make_post(post_type=kind)


def test_community_post_type_invalid():
    with pytest.raises(ValueError):
        make_post(post_type="garbage")


def test_community_post_scores_range():
    edge = make_post(seasoning_score=1.0, richness_score=0.0)
    assert 0.0 <= edge.seasoning_score <= 1.0
    assert 0.0 <= edge.richness_score <= 1.0


def test_community_post_scores_out_of_range():
    # Both above-range and below-range scores must be rejected.
    for bad in (dict(seasoning_score=1.5), dict(richness_score=-0.1)):
        with pytest.raises(ValueError):
            make_post(**bad)


def test_community_post_dietary_tags_immutable():
    assert isinstance(make_post(dietary_tags=["vegan"]).dietary_tags, tuple)


def test_community_post_allergen_flags_immutable():
    assert isinstance(make_post(allergen_flags=["nuts", "dairy"]).allergen_flags, tuple)


def test_community_post_flavor_molecules_immutable():
    assert isinstance(make_post(flavor_molecules=[1, 2, 3]).flavor_molecules, tuple)


def test_community_post_optional_fields_none():
    sparse = make_post(photo_url=None, recipe_id=None, fat_pct=None)
    assert sparse.photo_url is None
    assert sparse.recipe_id is None
    assert sparse.fat_pct is None
||||
115
tests/community/test_store.py
Normal file
115
tests/community/test_store.py
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
# tests/community/test_store.py
# Unit tests for SharedStore using a fully mocked CommunityDB / connection / cursor.
import pytest
from unittest.mock import MagicMock, patch  # NOTE(review): `patch` is unused in this module.
from datetime import datetime, timezone
from circuitforge_core.community.store import SharedStore
from circuitforge_core.community.models import CommunityPost


def make_post_row() -> dict:
    """Return a realistic community_posts row as a column-name -> value dict."""
    return {
        "id": 1,
        "slug": "kiwi-plan-test-pasta-week",
        "pseudonym": "PastaWitch",
        "post_type": "plan",
        "published": datetime(2026, 4, 12, 12, 0, 0, tzinfo=timezone.utc),
        "title": "Pasta Week",
        "description": None,
        "photo_url": None,
        "slots": [{"day": 0, "meal_type": "dinner", "recipe_id": 1, "recipe_name": "Spaghetti"}],
        "recipe_id": None,
        "recipe_name": None,
        "level": None,
        "outcome_notes": None,
        "seasoning_score": 0.7,
        "richness_score": 0.6,
        "brightness_score": 0.3,
        "depth_score": 0.5,
        "aroma_score": 0.4,
        "structure_score": 0.8,
        "texture_profile": "chewy",
        "dietary_tags": ["vegetarian"],
        "allergen_flags": ["gluten"],
        "flavor_molecules": [1234],
        "fat_pct": 12.5,
        "protein_pct": 10.0,
        "moisture_pct": 55.0,
        "source_product": "kiwi",
    }


@pytest.fixture
def mock_db():
    """Mock CommunityDB whose pooled connection yields a mock cursor."""
    db = MagicMock()
    conn = MagicMock()
    cur = MagicMock()
    db.getconn.return_value = conn
    # SharedStore uses `with conn.cursor() as cur:` -- wire the context manager.
    conn.cursor.return_value.__enter__.return_value = cur
    return db, conn, cur


def test_shared_store_get_post_by_slug(mock_db):
    """A matching row must come back as a populated CommunityPost."""
    db, conn, cur = mock_db
    cur.fetchone.return_value = make_post_row()
    # _cursor_to_dict only looks at description[i][0], so 1-tuples suffice.
    cur.description = [(col,) for col in make_post_row().keys()]

    store = SharedStore(db)
    post = store.get_post_by_slug("kiwi-plan-test-pasta-week")

    assert post is not None
    assert isinstance(post, CommunityPost)
    assert post.slug == "kiwi-plan-test-pasta-week"
    assert post.pseudonym == "PastaWitch"


def test_shared_store_get_post_by_slug_not_found(mock_db):
    """An absent slug must return None, not raise."""
    db, conn, cur = mock_db
    cur.fetchone.return_value = None

    store = SharedStore(db)
    post = store.get_post_by_slug("does-not-exist")
    assert post is None


def test_shared_store_list_posts_returns_list(mock_db):
    """list_posts must map every fetched row to a CommunityPost."""
    db, conn, cur = mock_db
    row = make_post_row()
    cur.fetchall.return_value = [row]
    cur.description = [(col,) for col in row.keys()]

    store = SharedStore(db)
    posts = store.list_posts(limit=10, offset=0)

    assert isinstance(posts, list)
    assert len(posts) == 1
    assert posts[0].slug == "kiwi-plan-test-pasta-week"


def test_shared_store_delete_post(mock_db):
    """rowcount == 1 means the owner's delete succeeded."""
    db, conn, cur = mock_db
    cur.rowcount = 1

    store = SharedStore(db)
    deleted = store.delete_post(slug="kiwi-plan-test-pasta-week", pseudonym="PastaWitch")
    assert deleted is True


def test_shared_store_delete_post_wrong_owner(mock_db):
    """rowcount == 0 (pseudonym mismatch) must report False."""
    db, conn, cur = mock_db
    cur.rowcount = 0

    store = SharedStore(db)
    deleted = store.delete_post(slug="kiwi-plan-test-pasta-week", pseudonym="WrongUser")
    assert deleted is False


def test_shared_store_returns_connection_on_error(mock_db):
    """The borrowed connection must go back to the pool even when the query raises."""
    db, conn, cur = mock_db
    cur.fetchone.side_effect = Exception("DB error")

    store = SharedStore(db)
    with pytest.raises(Exception, match="DB error"):
        store.get_post_by_slug("any-slug")

    # Connection must be returned to pool even on error
    db.putconn.assert_called_once_with(conn)
|
||||
Loading…
Reference in a new issue