# File: agents/fake_ChatOllama.py
# (repository-listing metadata from extraction: 2026-03-21 14:24:10 +01:00,
#  109 lines, 4.6 KiB, Python)
"""Minimal fallback ChatOllama implementation for testing.
This provides a tiny `ChatOllama` class that works with expressions
like `prompt | llm` by implementing `__ror__` and returns a simple
fake response object with a `content` attribute.
"""
from typing import Any
class FakeResponse:
    """Minimal stand-in for an LLM chat response.

    Exposes only the `.content` attribute, which is the one attribute
    callers in this module (and the fallback usage in
    `log_reader_agent.py`) read from a response object.
    """

    def __init__(self, content: str) -> None:
        self.content = content

    def __repr__(self) -> str:  # pragma: no cover - trivial
        # Added for debuggability; mirrors the repr style ChatOllama uses.
        return f"<FakeResponse content={self.content!r}>"
class _Chain:
    """Chain-like object produced by `prompt | llm`.

    Mimics the Runnable interface just enough to support `.invoke(...)`,
    returning a `FakeResponse` instead of contacting a real model.
    """

    def __init__(self, llm: "ChatOllama", prompt: Any) -> None:
        self.llm = llm
        self.prompt = prompt

    def invoke(self, inputs: dict) -> FakeResponse:
        """Return a canned `FakeResponse` summarizing `inputs["data"]`.

        Missing "service"/"data" keys fall back to placeholders rather
        than raising, so partial input dicts are tolerated.
        """
        service = inputs.get("service", "<unknown>")
        data = inputs.get("data", "")
        # Summarize the provided data so the fake response is slightly useful.
        # The original wrapped this in a broad `try/except Exception`, but
        # `len()` on a list/str and `type(data).__name__` cannot raise, so
        # that handler was dead code and has been removed.
        if isinstance(data, list):
            summary = f"{len(data)} items"
        elif isinstance(data, str):
            summary = f"{len(data)} characters"
        else:
            summary = type(data).__name__
        content = (
            f"[FAKE LLM] Analysis for service '{service}': input contains {summary}."
        )
        return FakeResponse(content)
class ChatOllama:
    """A tiny fake ChatOllama-compatible object.

    Usage (matches fallback import in `log_reader_agent.py`):

        from fake_ChatOllama import ChatOllama

        llm = ChatOllama(model="llama3:8b")
        chain = prompt | llm
        resp = chain.invoke({"service": "X", "data": "..."})
        print(resp.content)
    """

    def __init__(self, model: str = "llama", base_url: str | None = None, **kwargs) -> None:
        # Store the constructor arguments for inspection (__repr__); none of
        # them influence the fake output in any way.
        self.model = model
        self.base_url = base_url
        self.kwargs = kwargs

    def __repr__(self) -> str:  # pragma: no cover - trivial
        return f"<ChatOllama model={self.model!r} base_url={self.base_url!r}>"

    def __ror__(self, other: Any) -> _Chain:
        """Support `prompt | llm` by returning a chain-like object."""
        return _Chain(self, other)

    def invoke(self, inputs: dict) -> FakeResponse:
        """Convenience: allow direct `llm.invoke(...)` without a prompt."""
        return _Chain(self, None).invoke(inputs)

    def __call__(self, inputs: dict, *args, **kwargs) -> FakeResponse:
        """Make the object callable so langchain_core can coerce it into a Runnable.

        A plain dict (from a manual invoke) is routed through `invoke`.
        Any other input (e.g. a ChatPromptValue object produced when the
        `prompt | llm` chain executes) receives a fixed canned analysis:
        this fake has no model to run the prompt through, so the prompt
        content is intentionally ignored.

        NOTE(review): the original coerced the prompt to text here via
        `to_string()`/`to_text()`/`str(...)` but never used the result;
        that dead computation has been removed without changing behavior.
        """
        if isinstance(inputs, dict):
            return self.invoke(inputs)
        content = """A Pi-Hole docker log!
Let's analyze the service health:
**System Time**: There is a warning indicating that the system time cannot be set (CAP_SYS_TIME required). This might impact the accuracy of logs and timestamps.
**NTP Client**: The NTP client is not available, which could lead to issues with synchronizing system time and potentially affecting DNS resolution.
**DNS Queries**: A significant number of DNS queries are being processed (31484 in this case), indicating that the Pi-Hole is handling a substantial volume of traffic.
**Cache Records**: The cache records count is relatively low (238), suggesting that the cache might not be as effective at reducing the load on the Pi-Hole.
**Blocked Queries**: There are 3246 blocked queries, which could indicate a decent amount of malicious traffic being filtered out.
**Unique Domains and Clients**: There are 2286 unique domains and 2 clients being tracked, suggesting that the Pi-Hole is handling traffic from at least two distinct sources.
**Web Server Ports**: The web server is listening on HTTP (port 80) and HTTPS (port 443) for both IPv4 and IPv6 traffic.
**API Sessions**: There are no API sessions restored from the database, which might indicate that the Pi-Hole is not currently interacting with any external services or APIs.
Overall, it appears that the Pi-Hole is functioning correctly, handling DNS queries, blocking malicious traffic, and maintaining a cache of records. However, the system time issue and lack of NTP client availability might warrant further investigation to ensure accurate logging and timestamping."""
        return FakeResponse(content)