Compare commits
14 commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 01ed48808b | |||
| a2c768c635 | |||
| f7bf121aef | |||
| 8fa8216161 | |||
| b9b601aa23 | |||
| 433207d3c5 | |||
| 56fb6be4b1 | |||
| 0598801aaa | |||
| ffb95a5a30 | |||
| f74457d11f | |||
| d78310d4fd | |||
| a189511760 | |||
| 2e9e3fdc4b | |||
| 3082318e0d |
20 changed files with 1223 additions and 2 deletions
48
CHANGELOG.md
48
CHANGELOG.md
|
|
@ -6,6 +6,54 @@ Versions follow [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
## [0.10.0] — 2026-04-12
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
**`circuitforge_core.community`** — shared community signal module (BSL 1.1, closes #44)
|
||||||
|
|
||||||
|
Provides the PostgreSQL-backed infrastructure for the cross-product community fine-tuning signal pipeline. Products write signals; the training pipeline reads them.
|
||||||
|
|
||||||
|
- `CommunityDB` — psycopg2 connection pool with `run_migrations()`. Picks up all `.sql` files from `circuitforge_core/community/migrations/` in filename order. Safe to call on every startup (idempotent `CREATE TABLE IF NOT EXISTS`).
|
||||||
|
- `CommunityPost` — frozen dataclass capturing a user-authored community post with a snapshot of the originating product item (`element_snapshot` as a tuple of key-value pairs for immutability).
|
||||||
|
- `SharedStore` — base class for product-specific community stores. Provides typed `pg_read()` and `pg_write()` helpers that products subclass without re-implementing connection management.
|
||||||
|
- Migration 001: `community_posts` schema (id, product, item_id, pseudonym, title, body, element_snapshot JSONB, created_at).
|
||||||
|
- Migration 002: `community_reactions` stub (post_id FK, pseudonym, reaction_type, created_at).
|
||||||
|
- `psycopg2-binary` added to `[community]` optional extras in `pyproject.toml`.
|
||||||
|
- All community classes exported from `circuitforge_core.community`.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.9.0] — 2026-04-10
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
**`circuitforge_core.text`** — OpenAI-compatible `/v1/chat/completions` endpoint and pipeline crystallization engine.
|
||||||
|
|
||||||
|
**`circuitforge_core.pipeline`** — multimodal pipeline with staged output crystallization. Products queue draft outputs for human review before committing.
|
||||||
|
|
||||||
|
**`circuitforge_core.stt`** — speech-to-text module. `FasterWhisperBackend` for local transcription via `faster-whisper`. Managed FastAPI app mountable in any product.
|
||||||
|
|
||||||
|
**`circuitforge_core.tts`** — text-to-speech module. `ChatterboxTurbo` backend for local synthesis. Managed FastAPI app.
|
||||||
|
|
||||||
|
**Accessibility preferences** — `preferences` module extended with structured accessibility fields (motion reduction, high contrast, font size, focus highlight) under `accessibility.*` key path.
|
||||||
|
|
||||||
|
**LLM output corrections router** — `make_corrections_router()` for collecting LLM output corrections in any product. Stores corrections in product SQLite for future fine-tuning.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [0.8.0] — 2026-04-08
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
**`circuitforge_core.vision`** — cf-vision managed service shim. Routes vision inference requests to a local cf-vision worker (moondream2 / SigLIP). Closes #43.
|
||||||
|
|
||||||
|
**`circuitforge_core.api.feedback`** — `make_feedback_router()` shared Forgejo issue-filing router. Products mount it under `/api/feedback`; requires `FORGEJO_API_TOKEN`. Closes #30.
|
||||||
|
|
||||||
|
**License validation** — `CF_LICENSE_KEY` validation via Heimdall REST API. Products call `validate_license(key, product)` to gate premium features. Closes #26.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
## [0.7.0] — 2026-04-04
|
## [0.7.0] — 2026-04-04
|
||||||
|
|
||||||
### Added
|
### Added
|
||||||
|
|
|
||||||
21
LICENSE
Normal file
21
LICENSE
Normal file
|
|
@ -0,0 +1,21 @@
|
||||||
|
MIT License
|
||||||
|
|
||||||
|
Copyright (c) 2026 CircuitForge LLC
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
@ -1 +1,8 @@
|
||||||
__version__ = "0.8.0"
|
__version__ = "0.10.0"
|
||||||
|
|
||||||
|
try:
|
||||||
|
from circuitforge_core.community import CommunityDB, CommunityPost, SharedStore
|
||||||
|
__all__ = ["CommunityDB", "CommunityPost", "SharedStore"]
|
||||||
|
except ImportError:
|
||||||
|
# psycopg2 not installed — install with: pip install circuitforge-core[community]
|
||||||
|
pass
|
||||||
|
|
|
||||||
9
circuitforge_core/community/__init__.py
Normal file
9
circuitforge_core/community/__init__.py
Normal file
|
|
@ -0,0 +1,9 @@
|
||||||
|
# circuitforge_core/community/__init__.py
|
||||||
|
# MIT License
|
||||||
|
|
||||||
|
from .models import CommunityPost
|
||||||
|
from .db import CommunityDB
|
||||||
|
from .store import SharedStore
|
||||||
|
from .snipe_store import SellerTrustSignal, SnipeCommunityStore
|
||||||
|
|
||||||
|
__all__ = ["CommunityDB", "CommunityPost", "SharedStore", "SellerTrustSignal", "SnipeCommunityStore"]
|
||||||
117
circuitforge_core/community/db.py
Normal file
117
circuitforge_core/community/db.py
Normal file
|
|
@ -0,0 +1,117 @@
|
||||||
|
# circuitforge_core/community/db.py
|
||||||
|
# MIT License
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import importlib.resources
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import psycopg2
|
||||||
|
from psycopg2.pool import ThreadedConnectionPool
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)

# Pool sizing handed to ThreadedConnectionPool: minimum connections kept open
# and the maximum the pool will hand out before blocking/raising.
_MIN_CONN = 1
_MAX_CONN = 10
|
||||||
|
|
||||||
|
|
||||||
|
class CommunityDB:
|
||||||
|
"""Shared PostgreSQL connection pool + migration runner for the community module.
|
||||||
|
|
||||||
|
Products instantiate one CommunityDB at startup and pass it to SharedStore
|
||||||
|
subclasses. The pool is thread-safe (ThreadedConnectionPool).
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
db = CommunityDB.from_env() # reads COMMUNITY_DB_URL
|
||||||
|
db.run_migrations()
|
||||||
|
store = MyProductStore(db)
|
||||||
|
db.close() # at shutdown
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, dsn: str | None) -> None:
|
||||||
|
if not dsn:
|
||||||
|
raise ValueError(
|
||||||
|
"CommunityDB requires a DSN. "
|
||||||
|
"Set COMMUNITY_DB_URL or pass dsn= explicitly."
|
||||||
|
)
|
||||||
|
self._pool = ThreadedConnectionPool(_MIN_CONN, _MAX_CONN, dsn=dsn)
|
||||||
|
logger.debug("CommunityDB pool created (min=%d, max=%d)", _MIN_CONN, _MAX_CONN)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_env(cls) -> "CommunityDB":
|
||||||
|
"""Construct from the COMMUNITY_DB_URL environment variable."""
|
||||||
|
import os
|
||||||
|
dsn = os.environ.get("COMMUNITY_DB_URL")
|
||||||
|
return cls(dsn=dsn)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Connection management
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def getconn(self):
|
||||||
|
"""Borrow a connection from the pool. Must be returned via putconn()."""
|
||||||
|
return self._pool.getconn()
|
||||||
|
|
||||||
|
def putconn(self, conn) -> None:
|
||||||
|
"""Return a borrowed connection to the pool."""
|
||||||
|
self._pool.putconn(conn)
|
||||||
|
|
||||||
|
def close(self) -> None:
|
||||||
|
"""Close all pool connections. Call at application shutdown."""
|
||||||
|
self._pool.closeall()
|
||||||
|
logger.debug("CommunityDB pool closed")
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Migration runner
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
def _discover_migrations(self) -> list[Path]:
|
||||||
|
"""Return sorted list of .sql migration files from the community migrations dir."""
|
||||||
|
pkg = importlib.resources.files("circuitforge_core.community.migrations")
|
||||||
|
files = sorted(
|
||||||
|
[Path(str(p)) for p in pkg.iterdir() if str(p).endswith(".sql")],
|
||||||
|
key=lambda p: p.name,
|
||||||
|
)
|
||||||
|
return files
|
||||||
|
|
||||||
|
def run_migrations(self) -> None:
|
||||||
|
"""Apply all community migration SQL files in numeric order.
|
||||||
|
|
||||||
|
Uses a simple applied-migrations table to avoid re-running already
|
||||||
|
applied migrations. Idempotent.
|
||||||
|
"""
|
||||||
|
conn = self.getconn()
|
||||||
|
try:
|
||||||
|
with conn.cursor() as cur:
|
||||||
|
cur.execute("""
|
||||||
|
CREATE TABLE IF NOT EXISTS _community_migrations (
|
||||||
|
filename TEXT PRIMARY KEY,
|
||||||
|
applied_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
)
|
||||||
|
""")
|
||||||
|
conn.commit()
|
||||||
|
|
||||||
|
for migration_file in self._discover_migrations():
|
||||||
|
name = migration_file.name
|
||||||
|
cur.execute(
|
||||||
|
"SELECT 1 FROM _community_migrations WHERE filename = %s",
|
||||||
|
(name,),
|
||||||
|
)
|
||||||
|
if cur.fetchone():
|
||||||
|
logger.debug("Migration %s already applied, skipping", name)
|
||||||
|
continue
|
||||||
|
|
||||||
|
sql = migration_file.read_text()
|
||||||
|
logger.info("Applying community migration: %s", name)
|
||||||
|
cur.execute(sql)
|
||||||
|
cur.execute(
|
||||||
|
"INSERT INTO _community_migrations (filename) VALUES (%s)",
|
||||||
|
(name,),
|
||||||
|
)
|
||||||
|
conn.commit()
|
||||||
|
except Exception:
|
||||||
|
conn.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
self.putconn(conn)
|
||||||
|
|
@ -0,0 +1,55 @@
|
||||||
|
-- 001_community_posts.sql
-- Community posts table: published meal plans, recipe successes, and bloopers.
-- Applies to: cf_community PostgreSQL database (hosted by cf-orch).
-- BSL boundary: this schema is MIT (data layer, no inference).

CREATE TABLE IF NOT EXISTS community_posts (
    id BIGSERIAL PRIMARY KEY,
    -- Stable public identifier; looked up by SharedStore.get_post_by_slug().
    slug TEXT NOT NULL UNIQUE,
    pseudonym TEXT NOT NULL,
    post_type TEXT NOT NULL CHECK (post_type IN ('plan', 'recipe_success', 'recipe_blooper')),
    published TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    title TEXT NOT NULL,
    description TEXT,
    photo_url TEXT,

    -- Plan slots (JSON array: [{day, meal_type, recipe_id, recipe_name}])
    slots JSONB NOT NULL DEFAULT '[]',

    -- Recipe result fields
    recipe_id BIGINT,
    recipe_name TEXT,
    -- NULL allowed; when set, must be within the 1-4 creativity range.
    level SMALLINT CHECK (level IS NULL OR level BETWEEN 1 AND 4),
    outcome_notes TEXT,

    -- Element snapshot (denormalized from corpus at publish time)
    seasoning_score REAL,
    richness_score REAL,
    brightness_score REAL,
    depth_score REAL,
    aroma_score REAL,
    structure_score REAL,
    texture_profile TEXT,

    -- Dietary / allergen / flavor
    dietary_tags JSONB NOT NULL DEFAULT '[]',
    allergen_flags JSONB NOT NULL DEFAULT '[]',
    flavor_molecules JSONB NOT NULL DEFAULT '[]',

    -- USDA FDC macros
    fat_pct REAL,
    protein_pct REAL,
    moisture_pct REAL,

    -- Source product identifier
    source_product TEXT NOT NULL DEFAULT 'kiwi'
);

-- Indexes for common filter patterns
CREATE INDEX IF NOT EXISTS idx_community_posts_published ON community_posts (published DESC);
CREATE INDEX IF NOT EXISTS idx_community_posts_post_type ON community_posts (post_type);
CREATE INDEX IF NOT EXISTS idx_community_posts_source ON community_posts (source_product);

-- GIN index for dietary/allergen JSONB array containment queries
CREATE INDEX IF NOT EXISTS idx_community_posts_dietary_tags ON community_posts USING GIN (dietary_tags);
CREATE INDEX IF NOT EXISTS idx_community_posts_allergen_flags ON community_posts USING GIN (allergen_flags);
||||||
|
|
@ -0,0 +1,7 @@
|
||||||
|
-- 002_community_post_reactions.sql
-- Reserved: community post reactions (thumbs-up, saves count).
-- Not yet implemented -- this migration is a stub to reserve the sequence number.
-- Applies to: cf_community PostgreSQL database (hosted by cf-orch).

-- Placeholder: no-op. Will be replaced when reactions feature is designed.
-- NOTE(review): the migration runner tracks applied migrations by filename
-- only, so rewriting this file later will NOT re-run it on databases that
-- already applied the stub — the real reactions DDL should ship as a new
-- migration file instead.
SELECT 1;
|
||||||
|
|
@ -0,0 +1,26 @@
|
||||||
|
-- Seller trust signals: confirmed scammer / confirmed legitimate outcomes from Snipe.
-- Separate table from community_posts (Kiwi-specific) — seller signals are a
-- structurally different domain and should not overload the recipe post schema.
-- Applies to: cf_community PostgreSQL database (hosted by cf-orch).
-- BSL boundary: table schema is MIT; signal ingestion route in cf-orch is BSL 1.1.

CREATE TABLE IF NOT EXISTS seller_trust_signals (
    id BIGSERIAL PRIMARY KEY,
    platform TEXT NOT NULL DEFAULT 'ebay',
    platform_seller_id TEXT NOT NULL,
    confirmed_scam BOOLEAN NOT NULL,
    signal_source TEXT NOT NULL, -- 'blocklist_add' | 'community_vote' | 'resolved'
    flags JSONB NOT NULL DEFAULT '[]', -- red flag keys at time of signal
    source_product TEXT NOT NULL DEFAULT 'snipe',
    recorded_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

-- No PII: platform_seller_id is the public eBay username or platform ID only.
-- Deliberately NOT unique: a seller accrues multiple signals over time and
-- consumers aggregate them (e.g. SnipeCommunityStore.scam_signal_count).
CREATE INDEX IF NOT EXISTS idx_seller_trust_platform_id
    ON seller_trust_signals (platform, platform_seller_id);

CREATE INDEX IF NOT EXISTS idx_seller_trust_confirmed
    ON seller_trust_signals (confirmed_scam);

CREATE INDEX IF NOT EXISTS idx_seller_trust_recorded
    ON seller_trust_signals (recorded_at DESC);
|
||||||
|
|
@ -0,0 +1,19 @@
|
||||||
|
-- 004_community_categories.sql
-- MIT License
-- Shared eBay category tree published by credentialed Snipe instances.
-- Credentialless instances pull from this table during refresh().
-- Privacy: only public eBay category metadata (IDs, names, paths) — no user data.

CREATE TABLE IF NOT EXISTS community_categories (
    id SERIAL PRIMARY KEY,
    platform TEXT NOT NULL DEFAULT 'ebay',
    category_id TEXT NOT NULL,
    name TEXT NOT NULL,
    full_path TEXT NOT NULL,
    source_product TEXT NOT NULL DEFAULT 'snipe',
    published_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
    -- Conflict target for the ON CONFLICT (platform, category_id) upsert in
    -- SnipeCommunityStore.publish_categories().
    UNIQUE (platform, category_id)
);

CREATE INDEX IF NOT EXISTS idx_community_cat_name
    ON community_categories (platform, name);
|
||||||
2
circuitforge_core/community/migrations/__init__.py
Normal file
2
circuitforge_core/community/migrations/__init__.py
Normal file
|
|
@ -0,0 +1,2 @@
|
||||||
|
# Community module migrations
|
||||||
|
# These SQL files are shipped with circuitforge-core so cf-orch can locate them via importlib.resources.
|
||||||
87
circuitforge_core/community/models.py
Normal file
87
circuitforge_core/community/models.py
Normal file
|
|
@ -0,0 +1,87 @@
|
||||||
|
# circuitforge_core/community/models.py
|
||||||
|
# MIT License
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
# Closed set of post kinds accepted by CommunityPost.post_type — mirrors the
# CHECK constraint on community_posts.post_type.
PostType = Literal["plan", "recipe_success", "recipe_blooper"]
# Creativity level attached to recipe results; valid values are 1-4, matching
# the SMALLINT CHECK on community_posts.level.
CreativityLevel = Literal[1, 2, 3, 4]

# Runtime counterpart of PostType: Literal annotations are not enforced at
# runtime, so CommunityPost.__post_init__ validates against this frozenset.
_VALID_POST_TYPES: frozenset[str] = frozenset(["plan", "recipe_success", "recipe_blooper"])
|
||||||
|
def _validate_score(name: str, value: float) -> float:
|
||||||
|
if not (0.0 <= value <= 1.0):
|
||||||
|
raise ValueError(f"{name} must be between 0.0 and 1.0, got {value!r}")
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class CommunityPost:
    """Immutable snapshot of a published community post.

    The list-like fields (slots, dietary_tags, allergen_flags,
    flavor_molecules) are held as tuples so instances stay deeply immutable;
    callers may pass plain lists, which __post_init__ converts.
    """

    # Identity
    slug: str
    pseudonym: str
    post_type: PostType
    published: datetime
    title: str

    # Optional content
    description: str | None
    photo_url: str | None

    # Plan slots -- list[dict] for post_type="plan"
    slots: tuple

    # Recipe result fields -- for post_type="recipe_success" | "recipe_blooper"
    recipe_id: int | None
    recipe_name: str | None
    level: CreativityLevel | None
    outcome_notes: str | None

    # Element snapshot
    seasoning_score: float
    richness_score: float
    brightness_score: float
    depth_score: float
    aroma_score: float
    structure_score: float
    texture_profile: str

    # Dietary/allergen/flavor
    dietary_tags: tuple
    allergen_flags: tuple
    flavor_molecules: tuple

    # USDA FDC macros (optional -- may not be available for all recipes)
    fat_pct: float | None
    protein_pct: float | None
    moisture_pct: float | None

    def __post_init__(self) -> None:
        # The dataclass is frozen, so object.__setattr__ is the only way to
        # swap a caller-supplied list for its tuple equivalent.
        for field_name in ("slots", "dietary_tags", "allergen_flags", "flavor_molecules"):
            current = getattr(self, field_name)
            if isinstance(current, list):
                object.__setattr__(self, field_name, tuple(current))

        # post_type is checked at runtime because Literal is advisory only.
        if self.post_type not in _VALID_POST_TYPES:
            raise ValueError(
                f"post_type must be one of {sorted(_VALID_POST_TYPES)}, got {self.post_type!r}"
            )

        # Every element score must sit in the unit interval.
        score_fields = (
            "seasoning_score", "richness_score", "brightness_score",
            "depth_score", "aroma_score", "structure_score",
        )
        for score_field in score_fields:
            _validate_score(score_field, getattr(self, score_field))
||||||
253
circuitforge_core/community/snipe_store.py
Normal file
253
circuitforge_core/community/snipe_store.py
Normal file
|
|
@ -0,0 +1,253 @@
|
||||||
|
# circuitforge_core/community/snipe_store.py
|
||||||
|
# MIT License
|
||||||
|
"""Snipe community store — publishes seller trust signals to the shared community DB.
|
||||||
|
|
||||||
|
Snipe products subclass SharedStore here to write seller trust signals
|
||||||
|
(confirmed scammer / confirmed legitimate) to the cf_community PostgreSQL.
|
||||||
|
These signals aggregate across all Snipe users to power the cross-user
|
||||||
|
seller trust classifier fine-tuning corpus.
|
||||||
|
|
||||||
|
Privacy: only platform_seller_id (public eBay username/ID) and flag keys
|
||||||
|
are written. No PII is stored.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
from circuitforge_core.community import CommunityDB
|
||||||
|
from circuitforge_core.community.snipe_store import SnipeCommunityStore
|
||||||
|
|
||||||
|
db = CommunityDB.from_env()
|
||||||
|
store = SnipeCommunityStore(db, source_product="snipe")
|
||||||
|
store.publish_seller_signal(
|
||||||
|
platform_seller_id="ebay-username",
|
||||||
|
confirmed_scam=True,
|
||||||
|
signal_source="blocklist_add",
|
||||||
|
flags=["new_account", "suspicious_price"],
|
||||||
|
)
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
from .store import SharedStore
|
||||||
|
|
||||||
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass(frozen=True)
class SellerTrustSignal:
    """Immutable snapshot of a recorded seller trust signal."""

    id: int                   # BIGSERIAL primary key from seller_trust_signals
    platform: str             # auction platform, e.g. "ebay"
    platform_seller_id: str   # public seller username/platform ID only — no PII
    confirmed_scam: bool      # True = confirmed bad actor, False = confirmed legitimate
    signal_source: str        # 'blocklist_add' | 'community_vote' | 'resolved'
    flags: tuple              # red-flag keys active when the signal was recorded
    source_product: str       # originating product, e.g. "snipe"
    recorded_at: datetime     # server-side NOW() timestamp from the insert
|
||||||
|
|
||||||
|
|
||||||
|
class SnipeCommunityStore(SharedStore):
    """Community store for Snipe — seller trust signal publishing and querying."""

    def __init__(self, db, source_product: str = "snipe") -> None:
        # Same wiring as SharedStore; defaults source_product to "snipe" so
        # every row written by this store is attributed to this product.
        super().__init__(db, source_product=source_product)

    def publish_seller_signal(
        self,
        platform_seller_id: str,
        confirmed_scam: bool,
        signal_source: str,
        flags: list[str] | None = None,
        platform: str = "ebay",
    ) -> SellerTrustSignal:
        """Record a seller trust outcome in the shared community DB.

        Args:
            platform_seller_id: Public eBay username or platform ID (no PII).
            confirmed_scam: True = confirmed bad actor; False = confirmed legitimate.
            signal_source: Origin of the signal.
                'blocklist_add' — user explicitly added to local blocklist
                'community_vote' — consensus threshold reached from multiple reports
                'resolved' — seller resolved as legitimate over time
            flags: List of red-flag keys active at signal time (e.g. ["new_account"]).
            platform: Source auction platform (default "ebay").

        Returns the inserted SellerTrustSignal.

        Raises:
            Exception: any DB error is logged, the transaction rolled back,
                and the exception re-raised.
        """
        flags = flags or []
        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO seller_trust_signals
                        (platform, platform_seller_id, confirmed_scam,
                         signal_source, flags, source_product)
                    VALUES (%s, %s, %s, %s, %s::jsonb, %s)
                    RETURNING id, recorded_at
                    """,
                    (
                        platform,
                        platform_seller_id,
                        confirmed_scam,
                        signal_source,
                        json.dumps(flags),  # serialized for the %s::jsonb cast
                        self._source_product,
                    ),
                )
                row = cur.fetchone()
                conn.commit()
                return SellerTrustSignal(
                    id=row[0],
                    platform=platform,
                    platform_seller_id=platform_seller_id,
                    confirmed_scam=confirmed_scam,
                    signal_source=signal_source,
                    flags=tuple(flags),
                    source_product=self._source_product,
                    recorded_at=row[1],  # server-side timestamp from RETURNING
                )
        except Exception:
            conn.rollback()
            log.warning(
                "Failed to publish seller signal for %s (%s)",
                platform_seller_id, signal_source, exc_info=True,
            )
            raise
        finally:
            self._db.putconn(conn)

    def list_signals_for_seller(
        self,
        platform_seller_id: str,
        platform: str = "ebay",
        limit: int = 50,
    ) -> list[SellerTrustSignal]:
        """Return recent trust signals for a specific seller, newest first."""
        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    SELECT id, platform, platform_seller_id, confirmed_scam,
                           signal_source, flags, source_product, recorded_at
                    FROM seller_trust_signals
                    WHERE platform = %s AND platform_seller_id = %s
                    ORDER BY recorded_at DESC
                    LIMIT %s
                    """,
                    (platform, platform_seller_id, limit),
                )
                rows = cur.fetchall()
                return [
                    SellerTrustSignal(
                        id=r[0], platform=r[1], platform_seller_id=r[2],
                        confirmed_scam=r[3], signal_source=r[4],
                        # flags may arrive as a JSON string or an
                        # already-decoded list depending on JSONB adaptation;
                        # None is normalized to an empty tuple.
                        flags=tuple(json.loads(r[5]) if isinstance(r[5], str) else r[5] or []),
                        source_product=r[6], recorded_at=r[7],
                    )
                    for r in rows
                ]
        finally:
            self._db.putconn(conn)

    def scam_signal_count(self, platform_seller_id: str, platform: str = "ebay") -> int:
        """Return the number of confirmed_scam=True signals for a seller.

        Used to determine if a seller has crossed the community consensus threshold
        for appearing in the shared blocklist.
        """
        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    SELECT COUNT(*) FROM seller_trust_signals
                    WHERE platform = %s AND platform_seller_id = %s AND confirmed_scam = TRUE
                    """,
                    (platform, platform_seller_id),
                )
                return cur.fetchone()[0]
        finally:
            self._db.putconn(conn)

    def publish_categories(
        self,
        categories: list[tuple[str, str, str]],
        platform: str = "ebay",
    ) -> int:
        """Upsert a batch of eBay leaf categories into the shared community table.

        Args:
            categories: List of (category_id, name, full_path) tuples.
            platform: Source auction platform (default "ebay").

        Returns:
            Number of rows upserted.
        """
        if not categories:
            return 0  # nothing to do — avoid borrowing a connection
        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                # Upsert keyed on the UNIQUE (platform, category_id) constraint.
                cur.executemany(
                    """
                    INSERT INTO community_categories
                        (platform, category_id, name, full_path, source_product)
                    VALUES (%s, %s, %s, %s, %s)
                    ON CONFLICT (platform, category_id)
                    DO UPDATE SET
                        name = EXCLUDED.name,
                        full_path = EXCLUDED.full_path,
                        source_product = EXCLUDED.source_product,
                        published_at = NOW()
                    """,
                    [
                        (platform, cid, name, path, self._source_product)
                        for cid, name, path in categories
                    ],
                )
                conn.commit()
                return len(categories)
        except Exception:
            conn.rollback()
            log.warning(
                "Failed to publish %d categories to community store",
                len(categories), exc_info=True,
            )
            raise
        finally:
            self._db.putconn(conn)

    def fetch_categories(
        self,
        platform: str = "ebay",
        limit: int = 500,
    ) -> list[tuple[str, str, str]]:
        """Fetch community-contributed eBay categories.

        Args:
            platform: Source auction platform (default "ebay").
            limit: Maximum rows to return.

        Returns:
            List of (category_id, name, full_path) tuples ordered by name.
        """
        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    SELECT category_id, name, full_path
                    FROM community_categories
                    WHERE platform = %s
                    ORDER BY name
                    LIMIT %s
                    """,
                    (platform, limit),
                )
                return [(row[0], row[1], row[2]) for row in cur.fetchall()]
        finally:
            self._db.putconn(conn)
||||||
209
circuitforge_core/community/store.py
Normal file
209
circuitforge_core/community/store.py
Normal file
|
|
@ -0,0 +1,209 @@
|
||||||
|
# circuitforge_core/community/store.py
|
||||||
|
# MIT License
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
|
from .models import CommunityPost
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from .db import CommunityDB
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _row_to_post(row: dict) -> CommunityPost:
    """Map a column-name-keyed row dict onto a CommunityPost.

    JSONB columns (slots, dietary_tags, allergen_flags, flavor_molecules)
    arrive from psycopg2 already deserialized into Python lists, so no
    json.loads() is required here.
    """
    def score(col: str) -> float:
        # Snapshot score columns are nullable; NULL is normalized to 0.0.
        return row[col] or 0.0

    return CommunityPost(
        slug=row["slug"],
        pseudonym=row["pseudonym"],
        post_type=row["post_type"],
        published=row["published"],
        title=row["title"],
        description=row.get("description"),
        photo_url=row.get("photo_url"),
        slots=row.get("slots") or [],
        recipe_id=row.get("recipe_id"),
        recipe_name=row.get("recipe_name"),
        level=row.get("level"),
        outcome_notes=row.get("outcome_notes"),
        seasoning_score=score("seasoning_score"),
        richness_score=score("richness_score"),
        brightness_score=score("brightness_score"),
        depth_score=score("depth_score"),
        aroma_score=score("aroma_score"),
        structure_score=score("structure_score"),
        texture_profile=row.get("texture_profile") or "",
        dietary_tags=row.get("dietary_tags") or [],
        allergen_flags=row.get("allergen_flags") or [],
        flavor_molecules=row.get("flavor_molecules") or [],
        fat_pct=row.get("fat_pct"),
        protein_pct=row.get("protein_pct"),
        moisture_pct=row.get("moisture_pct"),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _cursor_to_dict(cur, row) -> dict:
|
||||||
|
"""Convert a psycopg2 row tuple to a dict using cursor.description."""
|
||||||
|
if isinstance(row, dict):
|
||||||
|
return row
|
||||||
|
return {desc[0]: val for desc, val in zip(cur.description, row)}
|
||||||
|
|
||||||
|
|
||||||
|
class SharedStore:
    """Base class for product community stores.

    Subclass this in each product:

        class KiwiCommunityStore(SharedStore):
            def list_posts_for_week(self, week_start: str) -> list[CommunityPost]: ...

    All methods return new objects (immutable pattern). Never mutate rows in-place.

    Every method checks a connection out of the pool via ``CommunityDB.getconn()``
    and returns it in a ``finally`` block, so the pool is never leaked even when
    a query raises.  Writes commit on success and roll back on any exception.
    """

    def __init__(self, db: "CommunityDB", source_product: str = "kiwi") -> None:
        # Pool-backed handle providing getconn()/putconn().
        self._db = db
        # Stamped onto every row written by insert_post().
        self._source_product = source_product

    # ------------------------------------------------------------------
    # Reads
    # ------------------------------------------------------------------

    def get_post_by_slug(self, slug: str) -> CommunityPost | None:
        """Return the post with *slug*, or ``None`` when no such row exists."""
        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    "SELECT * FROM community_posts WHERE slug = %s LIMIT 1",
                    (slug,),
                )
                row = cur.fetchone()
                if row is None:
                    return None
                return _row_to_post(_cursor_to_dict(cur, row))
        finally:
            self._db.putconn(conn)

    def list_posts(
        self,
        limit: int = 20,
        offset: int = 0,
        post_type: str | None = None,
        dietary_tags: list[str] | None = None,
        allergen_exclude: list[str] | None = None,
        source_product: str | None = None,
    ) -> list[CommunityPost]:
        """Paginated post list (newest first) with optional filters.

        dietary_tags: JSONB containment — posts must include ALL listed tags.
        allergen_exclude: posts must NOT include any listed flag.
        """
        import json

        conn = self._db.getconn()
        try:
            conditions: list[str] = []
            params: list = []

            if post_type:
                conditions.append("post_type = %s")
                params.append(post_type)
            if dietary_tags:
                # jsonb @> jsonb: the row's tag array must be a superset
                # of the requested tags.
                conditions.append("dietary_tags @> %s::jsonb")
                params.append(json.dumps(dietary_tags))
            if allergen_exclude:
                # BUGFIX: the previous `allergen_flags && %s::jsonb` used the
                # *array* overlap operator, which is undefined for jsonb and
                # raised "operator does not exist: jsonb && jsonb" at runtime.
                # `jsonb ?| text[]` is the jsonb equivalent: true when any of
                # the given strings appears as a top-level element.  psycopg2
                # adapts the Python list to a text[] parameter.
                conditions.append("NOT (allergen_flags ?| %s)")
                params.append(allergen_exclude)
            if source_product:
                conditions.append("source_product = %s")
                params.append(source_product)

            where = ("WHERE " + " AND ".join(conditions)) if conditions else ""
            params.extend([limit, offset])

            with conn.cursor() as cur:
                # `where` is assembled only from the fixed fragments above;
                # every user-supplied value travels through %s parameters.
                cur.execute(
                    f"SELECT * FROM community_posts {where} "
                    "ORDER BY published DESC LIMIT %s OFFSET %s",
                    params,
                )
                rows = cur.fetchall()
                return [_row_to_post(_cursor_to_dict(cur, r)) for r in rows]
        finally:
            self._db.putconn(conn)

    # ------------------------------------------------------------------
    # Writes
    # ------------------------------------------------------------------

    def insert_post(self, post: CommunityPost) -> CommunityPost:
        """Insert a new community post. Returns the inserted post (unchanged — slug is the key)."""
        import json

        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    """
                    INSERT INTO community_posts (
                        slug, pseudonym, post_type, published, title, description, photo_url,
                        slots, recipe_id, recipe_name, level, outcome_notes,
                        seasoning_score, richness_score, brightness_score,
                        depth_score, aroma_score, structure_score, texture_profile,
                        dietary_tags, allergen_flags, flavor_molecules,
                        fat_pct, protein_pct, moisture_pct, source_product
                    ) VALUES (
                        %s, %s, %s, %s, %s, %s, %s,
                        %s::jsonb, %s, %s, %s, %s,
                        %s, %s, %s, %s, %s, %s, %s,
                        %s::jsonb, %s::jsonb, %s::jsonb,
                        %s, %s, %s, %s
                    )
                    """,
                    (
                        post.slug, post.pseudonym, post.post_type,
                        post.published, post.title, post.description, post.photo_url,
                        # Tuple-valued fields are serialized to JSON for the
                        # jsonb columns; list() unwraps the immutable tuples.
                        json.dumps(list(post.slots)),
                        post.recipe_id, post.recipe_name, post.level, post.outcome_notes,
                        post.seasoning_score, post.richness_score, post.brightness_score,
                        post.depth_score, post.aroma_score, post.structure_score,
                        post.texture_profile,
                        json.dumps(list(post.dietary_tags)),
                        json.dumps(list(post.allergen_flags)),
                        json.dumps(list(post.flavor_molecules)),
                        post.fat_pct, post.protein_pct, post.moisture_pct,
                        # Store-level origin tag, not taken from the post itself.
                        self._source_product,
                    ),
                )
            conn.commit()
            return post
        except Exception:
            conn.rollback()
            raise
        finally:
            self._db.putconn(conn)

    def delete_post(self, slug: str, pseudonym: str) -> bool:
        """Hard-delete a post. Only succeeds if pseudonym matches the author.

        Returns True if a row was deleted, False if no matching row found.
        """
        conn = self._db.getconn()
        try:
            with conn.cursor() as cur:
                cur.execute(
                    "DELETE FROM community_posts WHERE slug = %s AND pseudonym = %s",
                    (slug, pseudonym),
                )
                conn.commit()
                # rowcount is 0 when the slug does not exist OR the
                # pseudonym does not match — callers cannot distinguish.
                return cur.rowcount > 0
        except Exception:
            conn.rollback()
            raise
        finally:
            self._db.putconn(conn)
|
||||||
1
docs/plausible.js
Normal file
1
docs/plausible.js
Normal file
|
|
@ -0,0 +1 @@
|
||||||
|
// Inject the self-hosted Plausible analytics snippet: build a deferred <script>
// element pointing at analytics.circuitforge.tech and append it to <head>.
// data-domain lists both sites so events from either are attributed; data-api
// routes events through the same self-hosted instance.
(function(){var s=document.createElement("script");s.defer=true;s.dataset.domain="docs.circuitforge.tech,circuitforge.tech";s.dataset.api="https://analytics.circuitforge.tech/api/event";s.src="https://analytics.circuitforge.tech/js/script.js";document.head.appendChild(s);})();
|
||||||
82
mkdocs.yml
Normal file
82
mkdocs.yml
Normal file
|
|
@ -0,0 +1,82 @@
|
||||||
|
site_name: circuitforge-core
|
||||||
|
site_description: Shared scaffold for CircuitForge products — modules, conventions, and developer reference.
|
||||||
|
site_author: Circuit Forge LLC
|
||||||
|
site_url: https://docs.circuitforge.tech/cf-core
|
||||||
|
repo_url: https://git.opensourcesolarpunk.com/Circuit-Forge/circuitforge-core
|
||||||
|
repo_name: Circuit-Forge/circuitforge-core
|
||||||
|
|
||||||
|
theme:
|
||||||
|
name: material
|
||||||
|
palette:
|
||||||
|
- scheme: default
|
||||||
|
primary: deep purple
|
||||||
|
accent: purple
|
||||||
|
toggle:
|
||||||
|
icon: material/brightness-7
|
||||||
|
name: Switch to dark mode
|
||||||
|
- scheme: slate
|
||||||
|
primary: deep purple
|
||||||
|
accent: purple
|
||||||
|
toggle:
|
||||||
|
icon: material/brightness-4
|
||||||
|
name: Switch to light mode
|
||||||
|
features:
|
||||||
|
- navigation.top
|
||||||
|
- navigation.sections
|
||||||
|
- search.suggest
|
||||||
|
- search.highlight
|
||||||
|
- content.code.copy
|
||||||
|
- content.code.annotate
|
||||||
|
|
||||||
|
markdown_extensions:
|
||||||
|
- admonition
|
||||||
|
- attr_list
|
||||||
|
- md_in_html
|
||||||
|
- pymdownx.details
|
||||||
|
- pymdownx.superfences:
|
||||||
|
custom_fences:
|
||||||
|
- name: mermaid
|
||||||
|
class: mermaid
|
||||||
|
format: !!python/name:pymdownx.superfences.fence_code_format
|
||||||
|
- pymdownx.emoji:
|
||||||
|
emoji_index: !!python/name:material.extensions.emoji.twemoji
|
||||||
|
emoji_generator: !!python/name:material.extensions.emoji.to_svg
|
||||||
|
- pymdownx.highlight:
|
||||||
|
anchor_linenums: true
|
||||||
|
- pymdownx.inlinehilite
|
||||||
|
- pymdownx.tabbed:
|
||||||
|
alternate_style: true
|
||||||
|
- toc:
|
||||||
|
permalink: true
|
||||||
|
|
||||||
|
nav:
|
||||||
|
- Home: index.md
|
||||||
|
- Getting Started:
|
||||||
|
- Installation: getting-started/installation.md
|
||||||
|
- Using in a Product: getting-started/using-in-product.md
|
||||||
|
- Module Reference:
|
||||||
|
- Overview: modules/index.md
|
||||||
|
- db: modules/db.md
|
||||||
|
- llm: modules/llm.md
|
||||||
|
- tiers: modules/tiers.md
|
||||||
|
- config: modules/config.md
|
||||||
|
- hardware: modules/hardware.md
|
||||||
|
- documents: modules/documents.md
|
||||||
|
- affiliates: modules/affiliates.md
|
||||||
|
- preferences: modules/preferences.md
|
||||||
|
- tasks: modules/tasks.md
|
||||||
|
- manage: modules/manage.md
|
||||||
|
- resources: modules/resources.md
|
||||||
|
- text: modules/text.md
|
||||||
|
- stt: modules/stt.md
|
||||||
|
- tts: modules/tts.md
|
||||||
|
- pipeline: modules/pipeline.md
|
||||||
|
- vision: modules/vision.md
|
||||||
|
- wizard: modules/wizard.md
|
||||||
|
- Developer Guide:
|
||||||
|
- Adding a Module: developer/adding-module.md
|
||||||
|
- Editable Install Pattern: developer/editable-install.md
|
||||||
|
- BSL vs MIT Boundaries: developer/licensing.md
|
||||||
|
|
||||||
|
extra_javascript:
|
||||||
|
- plausible.js
|
||||||
|
|
@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "circuitforge-core"
|
name = "circuitforge-core"
|
||||||
version = "0.9.0"
|
version = "0.10.0"
|
||||||
description = "Shared scaffold for CircuitForge products (MIT)"
|
description = "Shared scaffold for CircuitForge products (MIT)"
|
||||||
requires-python = ">=3.11"
|
requires-python = ">=3.11"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
|
@ -14,6 +14,9 @@ dependencies = [
|
||||||
]
|
]
|
||||||
|
|
||||||
[project.optional-dependencies]
|
[project.optional-dependencies]
|
||||||
|
community = [
|
||||||
|
"psycopg2>=2.9",
|
||||||
|
]
|
||||||
manage = [
|
manage = [
|
||||||
"platformdirs>=4.0",
|
"platformdirs>=4.0",
|
||||||
"typer[all]>=0.12",
|
"typer[all]>=0.12",
|
||||||
|
|
@ -83,6 +86,9 @@ cf-manage = "circuitforge_core.manage.cli:app"
|
||||||
where = ["."]
|
where = ["."]
|
||||||
include = ["circuitforge_core*"]
|
include = ["circuitforge_core*"]
|
||||||
|
|
||||||
|
[tool.setuptools.package-data]
|
||||||
|
"circuitforge_core.community.migrations" = ["*.sql"]
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
[tool.pytest.ini_options]
|
||||||
testpaths = ["tests"]
|
testpaths = ["tests"]
|
||||||
asyncio_mode = "auto"
|
asyncio_mode = "auto"
|
||||||
|
|
|
||||||
0
tests/community/__init__.py
Normal file
0
tests/community/__init__.py
Normal file
63
tests/community/test_db.py
Normal file
63
tests/community/test_db.py
Normal file
|
|
@ -0,0 +1,63 @@
|
||||||
|
# tests/community/test_db.py
#
# Unit tests for CommunityDB. All database interaction is mocked at the
# ThreadedConnectionPool boundary, so no PostgreSQL instance is required.
import os
import pytest
from unittest.mock import MagicMock, patch, call
from circuitforge_core.community.db import CommunityDB


@pytest.fixture
def mock_pool():
    """Patch psycopg2.pool.ThreadedConnectionPool to avoid needing a real PG instance."""
    # Yields (mock class, mock pool instance) so tests can assert on either
    # construction or instance-level calls.
    with patch("circuitforge_core.community.db.ThreadedConnectionPool") as mock_cls:
        mock_instance = MagicMock()
        mock_cls.return_value = mock_instance
        yield mock_cls, mock_instance


def test_community_db_requires_url():
    # A missing DSN must fail fast with a message naming the env var.
    with pytest.raises(ValueError, match="COMMUNITY_DB_URL"):
        CommunityDB(dsn=None)


def test_community_db_init_creates_pool(mock_pool):
    # Constructing with a DSN must create exactly one connection pool.
    mock_cls, _ = mock_pool
    CommunityDB(dsn="postgresql://user:pass@localhost/cf_community")
    mock_cls.assert_called_once()


def test_community_db_close_puts_pool(mock_pool):
    # close() must shut down the whole pool via closeall().
    _, mock_instance = mock_pool
    db = CommunityDB(dsn="postgresql://user:pass@localhost/cf_community")
    db.close()
    mock_instance.closeall.assert_called_once()


def test_community_db_migration_files_discovered():
    """Migration runner must find at least 001 and 002 SQL files."""
    # __new__ bypasses __init__ so no DSN/pool is needed for discovery.
    db = CommunityDB.__new__(CommunityDB)
    files = db._discover_migrations()
    names = [f.name for f in files]
    assert any("001" in n for n in names)
    assert any("002" in n for n in names)
    # Must be sorted numerically
    assert files == sorted(files, key=lambda p: p.name)


def test_community_db_run_migrations_executes_sql(mock_pool):
    # Wire pool -> connection -> context-managed cursor so run_migrations()
    # can execute SQL against the mocks.
    _, mock_instance = mock_pool
    mock_conn = MagicMock()
    mock_cur = MagicMock()
    mock_instance.getconn.return_value = mock_conn
    mock_conn.cursor.return_value.__enter__.return_value = mock_cur

    db = CommunityDB(dsn="postgresql://user:pass@localhost/cf_community")
    db.run_migrations()

    # At least one execute call must have happened
    assert mock_cur.execute.called


def test_community_db_from_env(monkeypatch, mock_pool):
    # from_env() must read the DSN from COMMUNITY_DB_URL.
    monkeypatch.setenv("COMMUNITY_DB_URL", "postgresql://u:p@host/db")
    db = CommunityDB.from_env()
    assert db is not None
|
||||||
94
tests/community/test_models.py
Normal file
94
tests/community/test_models.py
Normal file
|
|
@ -0,0 +1,94 @@
|
||||||
|
# tests/community/test_models.py
#
# Unit tests for the CommunityPost dataclass: immutability, field validation,
# and score-range enforcement. Pure in-memory — no database involved.
import pytest
from datetime import datetime, timezone
from circuitforge_core.community.models import CommunityPost


def make_post(**kwargs) -> CommunityPost:
    """Build a fully-populated valid CommunityPost; kwargs override defaults."""
    defaults = dict(
        slug="kiwi-plan-test-2026-04-12-pasta-week",
        pseudonym="PastaWitch",
        post_type="plan",
        published=datetime(2026, 4, 12, 12, 0, 0, tzinfo=timezone.utc),
        title="Pasta Week",
        description="Seven days of carbs",
        photo_url=None,
        slots=[{"day": 0, "meal_type": "dinner", "recipe_id": 1, "recipe_name": "Spaghetti"}],
        recipe_id=None,
        recipe_name=None,
        level=None,
        outcome_notes=None,
        seasoning_score=0.7,
        richness_score=0.6,
        brightness_score=0.3,
        depth_score=0.5,
        aroma_score=0.4,
        structure_score=0.8,
        texture_profile="chewy",
        dietary_tags=["vegetarian"],
        allergen_flags=["gluten"],
        flavor_molecules=[1234, 5678],
        fat_pct=12.5,
        protein_pct=10.0,
        moisture_pct=55.0,
    )
    defaults.update(kwargs)
    return CommunityPost(**defaults)


def test_community_post_immutable():
    # Frozen dataclass: attribute assignment must raise.
    # (FrozenInstanceError subclasses AttributeError.)
    post = make_post()
    with pytest.raises((AttributeError, TypeError)):
        post.title = "changed"  # type: ignore


def test_community_post_slug_uri_compatible():
    # Slugs are used in URLs: no spaces, lowercase only.
    post = make_post(slug="kiwi-plan-test-2026-04-12-pasta-week")
    assert " " not in post.slug
    assert post.slug == post.slug.lower()


def test_community_post_type_valid():
    # All three supported post types must construct without error.
    make_post(post_type="plan")
    make_post(post_type="recipe_success")
    make_post(post_type="recipe_blooper")


def test_community_post_type_invalid():
    # An unrecognized post type must be rejected at construction.
    with pytest.raises(ValueError):
        make_post(post_type="garbage")


def test_community_post_scores_range():
    # Boundary values 0.0 and 1.0 are both accepted.
    post = make_post(seasoning_score=1.0, richness_score=0.0)
    assert 0.0 <= post.seasoning_score <= 1.0
    assert 0.0 <= post.richness_score <= 1.0


def test_community_post_scores_out_of_range():
    # Scores outside [0.0, 1.0] must be rejected in either direction.
    with pytest.raises(ValueError):
        make_post(seasoning_score=1.5)
    with pytest.raises(ValueError):
        make_post(richness_score=-0.1)


def test_community_post_dietary_tags_immutable():
    # List inputs are coerced to tuples to keep the instance immutable.
    post = make_post(dietary_tags=["vegan"])
    assert isinstance(post.dietary_tags, tuple)


def test_community_post_allergen_flags_immutable():
    # Same tuple coercion for allergen flags.
    post = make_post(allergen_flags=["nuts", "dairy"])
    assert isinstance(post.allergen_flags, tuple)


def test_community_post_flavor_molecules_immutable():
    # Same tuple coercion for flavor molecule ids.
    post = make_post(flavor_molecules=[1, 2, 3])
    assert isinstance(post.flavor_molecules, tuple)


def test_community_post_optional_fields_none():
    # Optional fields must accept and preserve None.
    post = make_post(photo_url=None, recipe_id=None, fat_pct=None)
    assert post.photo_url is None
    assert post.recipe_id is None
    assert post.fat_pct is None
|
||||||
115
tests/community/test_store.py
Normal file
115
tests/community/test_store.py
Normal file
|
|
@ -0,0 +1,115 @@
|
||||||
|
# tests/community/test_store.py
#
# Unit tests for SharedStore. The CommunityDB and its connections/cursors are
# MagicMocks, so these verify row mapping, return values, and connection
# hygiene — not SQL correctness.
import pytest
from unittest.mock import MagicMock, patch
from datetime import datetime, timezone
from circuitforge_core.community.store import SharedStore
from circuitforge_core.community.models import CommunityPost


def make_post_row() -> dict:
    """Return one complete community_posts row as the DB driver would yield it."""
    return {
        "id": 1,
        "slug": "kiwi-plan-test-pasta-week",
        "pseudonym": "PastaWitch",
        "post_type": "plan",
        "published": datetime(2026, 4, 12, 12, 0, 0, tzinfo=timezone.utc),
        "title": "Pasta Week",
        "description": None,
        "photo_url": None,
        "slots": [{"day": 0, "meal_type": "dinner", "recipe_id": 1, "recipe_name": "Spaghetti"}],
        "recipe_id": None,
        "recipe_name": None,
        "level": None,
        "outcome_notes": None,
        "seasoning_score": 0.7,
        "richness_score": 0.6,
        "brightness_score": 0.3,
        "depth_score": 0.5,
        "aroma_score": 0.4,
        "structure_score": 0.8,
        "texture_profile": "chewy",
        "dietary_tags": ["vegetarian"],
        "allergen_flags": ["gluten"],
        "flavor_molecules": [1234],
        "fat_pct": 12.5,
        "protein_pct": 10.0,
        "moisture_pct": 55.0,
        "source_product": "kiwi",
    }


@pytest.fixture
def mock_db():
    """Yield (db, conn, cur) mocks wired like CommunityDB -> connection -> cursor."""
    db = MagicMock()
    conn = MagicMock()
    cur = MagicMock()
    db.getconn.return_value = conn
    # Cursor is used as a context manager in the store.
    conn.cursor.return_value.__enter__.return_value = cur
    return db, conn, cur


def test_shared_store_get_post_by_slug(mock_db):
    # A found row must come back mapped to a CommunityPost.
    db, conn, cur = mock_db
    cur.fetchone.return_value = make_post_row()
    # cursor.description's first element per column is the column name.
    cur.description = [(col,) for col in make_post_row().keys()]

    store = SharedStore(db)
    post = store.get_post_by_slug("kiwi-plan-test-pasta-week")

    assert post is not None
    assert isinstance(post, CommunityPost)
    assert post.slug == "kiwi-plan-test-pasta-week"
    assert post.pseudonym == "PastaWitch"


def test_shared_store_get_post_by_slug_not_found(mock_db):
    # No row -> None, not an exception.
    db, conn, cur = mock_db
    cur.fetchone.return_value = None

    store = SharedStore(db)
    post = store.get_post_by_slug("does-not-exist")
    assert post is None


def test_shared_store_list_posts_returns_list(mock_db):
    # list_posts maps every fetched row to a CommunityPost.
    db, conn, cur = mock_db
    row = make_post_row()
    cur.fetchall.return_value = [row]
    cur.description = [(col,) for col in row.keys()]

    store = SharedStore(db)
    posts = store.list_posts(limit=10, offset=0)

    assert isinstance(posts, list)
    assert len(posts) == 1
    assert posts[0].slug == "kiwi-plan-test-pasta-week"


def test_shared_store_delete_post(mock_db):
    # rowcount 1 (author matched) -> True.
    db, conn, cur = mock_db
    cur.rowcount = 1

    store = SharedStore(db)
    deleted = store.delete_post(slug="kiwi-plan-test-pasta-week", pseudonym="PastaWitch")
    assert deleted is True


def test_shared_store_delete_post_wrong_owner(mock_db):
    # rowcount 0 (pseudonym mismatch) -> False.
    db, conn, cur = mock_db
    cur.rowcount = 0

    store = SharedStore(db)
    deleted = store.delete_post(slug="kiwi-plan-test-pasta-week", pseudonym="WrongUser")
    assert deleted is False


def test_shared_store_returns_connection_on_error(mock_db):
    # The finally block must hand the connection back even when the query raises.
    db, conn, cur = mock_db
    cur.fetchone.side_effect = Exception("DB error")

    store = SharedStore(db)
    with pytest.raises(Exception, match="DB error"):
        store.get_post_by_slug("any-slug")

    # Connection must be returned to pool even on error
    db.putconn.assert_called_once_with(conn)
|
||||||
Loading…
Reference in a new issue