diff --git a/app/services/ocr/vl_model.py b/app/services/ocr/vl_model.py
index 2f3a621..737a415 100644
--- a/app/services/ocr/vl_model.py
+++ b/app/services/ocr/vl_model.py
@@ -39,7 +39,7 @@ def _try_docuvision(image_path: str | Path) -> str | None:
         client = CFOrchClient(cf_orch_url)
         with client.allocate(
             service="cf-docuvision",
-            model_candidates=[],  # cf-docuvision has no model selection
+            model_candidates=["cf-docuvision"],
             ttl_s=60.0,
             caller="kiwi-ocr",
         ) as alloc:
@@ -48,8 +48,9 @@ def _try_docuvision(image_path: str | Path) -> str | None:
             doc_client = DocuvisionClient(alloc.url)
             result = doc_client.extract_text(image_path)
             return result.text if result.text else None
-    except Exception:
-        return None  # graceful degradation
+    except Exception as exc:
+        logger.debug("cf-docuvision fast-path failed, falling back: %s", exc)
+        return None


 class VisionLanguageOCR:
@@ -141,15 +142,11 @@ class VisionLanguageOCR:
         # Try docuvision fast path first (skips heavy local VLM if available)
         docuvision_text = _try_docuvision(image_path)
         if docuvision_text is not None:
-            return {
-                "raw_text": docuvision_text,
-                "merchant": {},
-                "transaction": {},
-                "items": [],
-                "totals": {},
-                "confidence": {"overall": None},
-                "warnings": [],
-            }
+            parsed = self._parse_json_from_text(docuvision_text)
+            if parsed is not None:
+                parsed["raw_text"] = docuvision_text
+                return self._validate_result(parsed)
+            # If parsing fails, fall through to local VLM

         self._load_model()

diff --git a/app/services/recipe/llm_recipe.py b/app/services/recipe/llm_recipe.py
index 5f8ff33..a4bb62f 100644
--- a/app/services/recipe/llm_recipe.py
+++ b/app/services/recipe/llm_recipe.py
@@ -238,12 +238,16 @@ class LLMRecipeGenerator:
         raw_notes = parsed.get("notes", "")
         notes_str: str = raw_notes if isinstance(raw_notes, str) else ""

+        all_ingredients: list[str] = list(parsed.get("ingredients", []))
+        pantry_set = {item.lower() for item in (req.pantry_items or [])}
+        missing = [i for i in all_ingredients if i.lower() not in pantry_set]
+
         suggestion = RecipeSuggestion(
             id=0,
             title=parsed.get("title") or "LLM Recipe",
             match_count=len(req.pantry_items),
             element_coverage={},
-            missing_ingredients=list(parsed.get("ingredients", [])),
+            missing_ingredients=missing,
             directions=directions_list,
             notes=notes_str,
             level=req.level,
diff --git a/tests/services/recipe/test_llm_recipe.py b/tests/services/recipe/test_llm_recipe.py
index 06744e5..4e8d5a3 100644
--- a/tests/services/recipe/test_llm_recipe.py
+++ b/tests/services/recipe/test_llm_recipe.py
@@ -140,7 +140,8 @@ def test_generate_returns_result_when_llm_responds(monkeypatch):
     assert len(result.suggestions) == 1
     suggestion = result.suggestions[0]
     assert suggestion.title == "Mushroom Butter Pasta"
-    assert "butter" in suggestion.missing_ingredients
+    # All LLM ingredients (butter, mushrooms, pasta) are in the pantry, so none are missing
+    assert suggestion.missing_ingredients == []
     assert len(suggestion.directions) > 0
     assert "parmesan" in suggestion.notes.lower()
     assert result.element_gaps == ["Brightness"]
@@ -218,8 +219,11 @@ def test_recipe_gen_falls_back_without_cf_orch(monkeypatch):
     fake_router = MagicMock()
     fake_router.complete.side_effect = _fake_complete

-    # Patch where LLMRouter is imported inside _call_llm
-    with patch("circuitforge_core.llm.router.LLMRouter", return_value=fake_router):
+    # LLMRouter is imported locally inside _call_llm, so patch it at its source module.
+    # new_callable=MagicMock makes the class itself a MagicMock; set return_value so
+    # that LLMRouter() (instantiation) yields fake_router rather than a new MagicMock.
+    with patch("circuitforge_core.llm.router.LLMRouter", new_callable=MagicMock) as mock_router_cls:
+        mock_router_cls.return_value = fake_router
         gen._call_llm("direct path prompt")

     assert router_called.get("prompt") == "direct path prompt"