adding files
This commit is contained in:
113
log_reader_agent.py
Normal file
113
log_reader_agent.py
Normal file
@@ -0,0 +1,113 @@
|
|||||||
|
# python3 -m pip install --user virtualenv
|
||||||
|
# python3 -m venv myenv
|
||||||
|
# source myenv/bin/activate
|
||||||
|
# pip install langgraph langchain langchain-community langchain-ollama
|
||||||
|
|
||||||
|
from typing import TypedDict, List
|
||||||
|
#from asyncio import tools
|
||||||
|
from langgraph.graph import StateGraph, END
|
||||||
|
from langchain_core.prompts import ChatPromptTemplate
|
||||||
|
from langchain_text_splitters import RecursiveCharacterTextSplitter
|
||||||
|
import subprocess
|
||||||
|
import shutil
|
||||||
|
|
||||||
|
try:
|
||||||
|
from langchain_ollama import ChatOllama
|
||||||
|
except ImportError:
|
||||||
|
from fake_ChatOllama import ChatOllama # type: ignore
|
||||||
|
|
||||||
|
# ----- Shared State -----
class AgentState(TypedDict):
    # State dictionary threaded through the LangGraph workflow
    # (Reader -> Analyzer). Each node returns only the keys it updates.
    service: str  # docker container name supplied on the command line
    log: List[str] # A list of findings
    # NOTE(review): reader_node appends the whole chunk *list* as a single
    # entry, so entries can be List[str] rather than str — confirm the
    # intended element type before tightening this annotation.
    response: str # The final output
|
||||||
|
|
||||||
|
def reader_node(state: AgentState):
    """Fetch the docker log for ``state['service']`` and append its chunks to the state.

    Falls back to a local dummy log file (``docker_log_pihole.txt``) when the
    ``docker`` binary is not on PATH, and degrades to an error string when the
    log cannot be read or split, so the graph never crashes here.

    Returns:
        Partial state update ``{"log": previous findings + [chunk list]}``.
    """
    service = state["service"]
    print(f"Reader is looking up: {service} logs...")

    # Prefer the real container log; use a sample file on dev machines.
    if shutil.which('docker'):
        cmd = ['docker', 'logs', service]
    else:
        print('docker not installed, using dummy log')
        cmd = ['cat', 'docker_log_pihole.txt']

    try:
        # encoding='utf-8' makes subprocess decode stdout for us, replacing
        # the manual .stdout.decode('utf-8') while keeping the same encoding.
        result = subprocess.run(
            cmd, capture_output=True, shell=False, check=True, encoding='utf-8'
        ).stdout
    except Exception as e:
        # Best-effort: feed the error text downstream instead of raising.
        print("Error during log retrieval:", e)
        result = f"Could not read log for service: {service}:\n {e}"

    # Split the (potentially huge) log into overlapping chunks for the LLM.
    text_splitter = RecursiveCharacterTextSplitter(
        chunk_size=500,
        chunk_overlap=80)

    try:
        results = text_splitter.split_text(result)
    except Exception as e:
        print("Error during log splitting:", e)
        results = f"Error during log splitting for service: {service}:\n {e}"

    # Only return the keys you want to update
    return {"log": state.get("log", []) + [results]}
|
||||||
|
|
||||||
|
def analyzer_node(state: AgentState):
    """Summarize the most recently collected log with a local Ollama model.

    Reads only the newest entry of ``state["log"]`` (one entry is appended per
    reader run), prompts the model for a service-health summary, and returns a
    partial state update ``{"response": <summary text>}``.
    """
    print("start analyzing log...")

    service = state["service"]
    # reader_node appends one entry per run; analyze only the latest one.
    data = state["log"][-1] if state["log"] else ""

    # assumes an Ollama server is reachable on localhost:11434 — TODO confirm
    llm = ChatOllama(model="qwen3:8b", base_url="http://localhost:11434")

    # Grammar fixed in the instruction line ("does not contain anything") so
    # the model receives a clean prompt.
    prompt = ChatPromptTemplate.from_template(
        """You are a helpful AI assistant.
Provide a concise summary of the service health from {service} docker log, including any errors, warnings, or notable events.
You must answer based *only* on the supplied log. If the log is empty or does not contain anything indicating service-health, say 'I could not find
any information in the supplied information.'
Log:
{data}
"""
    )

    chain = prompt | llm

    response = chain.invoke({"service": service, "data": data})

    print("Analyzing complete.")
    return {"response": response.content}
|
||||||
|
|
||||||
|
|
||||||
|
# ----- Build the LangGraph -----
workflow = StateGraph(AgentState)

workflow.add_node("Reader", reader_node)
workflow.add_node("Analyzer", analyzer_node)

# Flow: Start -> Reader -> Analyzer -> END
workflow.set_entry_point("Reader")
workflow.add_edge("Reader", "Analyzer")
workflow.add_edge("Analyzer", END)

# Compiled runnable; invoked by the __main__ block below.
app = workflow.compile()
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    import sys
    import my_tools
    #print("Starting the Multi-Agent System...\n")

    # The container/service name is a required command-line argument.
    if len(sys.argv) > 1:
        service = sys.argv[1]
    else:
        # sys.exit is the documented way to abort with a message; the bare
        # `exit` builtin is installed by the site module and may be absent
        # (e.g. when run with python -S or when frozen).
        sys.exit("Please provide the service name as a command-line argument.")

    inputs: AgentState = {
        "service": service,
        "log": [],
        "response": "",
    }

    result = app.invoke(inputs)
    #print(result["response"])

    # Deliver the analysis by mail instead of printing it.
    my_tools.tool_send_mail('johan.p1sson@gmail.com', service + ' weekly log analysis result', result["response"])
|
||||||
17
log_reader_agent.sh
Normal file
17
log_reader_agent.sh
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
#!/bin/bash
# Run log_reader_agent.py against every running docker container, starting
# the local Ollama stack first and stopping it again afterwards.

#docker ps --format '{{.Names}}'
#export CONTAINER_ROWS=$(ls ${source}/generated| grep -v -e"davinci" -e"VSI" -e"VehicleFacadeVES" -e"MUM" -e"TempDummy")

export APPDATA_PATH=/srv/dev-disk-by-uuid-70242ecf-0e46-49c7-b401-ff06f80ff8b0/appdata
export DATA_PATH=/srv/dev-disk-by-uuid-ef2d59bd-1c0f-4a2e-ba28-810c0a972010/data

# Quote every path expansion so the script survives spaces/globs in the paths.
docker compose -f "$APPDATA_PATH/ollama/ollama.yml" -f "$APPDATA_PATH/ollama/compose.override.yml" --env-file "$APPDATA_PATH/global.env" start

export CONTAINER_ROWS=$(docker ps --format '{{.Names}}')

# $CONTAINER_ROWS is intentionally unquoted: word splitting yields one
# container name per iteration.
for comp in $CONTAINER_ROWS; do
    python3 log_reader_agent.py "$comp"
done

docker compose -f "$APPDATA_PATH/ollama/ollama.yml" -f "$APPDATA_PATH/ollama/compose.override.yml" --env-file "$APPDATA_PATH/global.env" stop
|
||||||
44
my_tools.py
Normal file
44
my_tools.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
import email
import os
import smtplib
import ssl

from email import encoders
from email.message import EmailMessage
from email.mime.base import MIMEBase
#from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
|
||||||
|
|
||||||
|
# SECURITY NOTE: these credentials were committed in plain text. The
# environment-variable overrides below let deployments keep the secret out of
# the repository (the literals remain as backward-compatible defaults) —
# rotate this password and eventually drop the fallbacks entirely.
USER_ID = os.environ.get('SMTP_USER_ID', 'mc536304')
PASSWORD = os.environ.get('SMTP_PASSWORD', 'Traceur22Traceur')
SMTP_SERVER = os.environ.get('SMTP_SERVER', 'mailout.privat.bahnhof.se')
PORT = 465  # For SSL
SENDER_EMAIL = os.environ.get('SMTP_SENDER_EMAIL', 'johan@rydson.st')
|
||||||
|
|
||||||
|
def _send_mail(smtp_server: str, port: int, sender_email: str, receiver_email: str, user_id: str, password: str, subject: str, body: str):
    """Send a plain-text email over SMTP-over-SSL.

    Args:
        smtp_server: Hostname of the SMTP server.
        port: SSL port (typically 465).
        sender_email: Value for the ``From`` header.
        receiver_email: Value for the ``To`` header (single recipient).
        user_id: Login name for the SMTP server.
        password: Password for the SMTP server.
        subject: Subject line.
        body: Plain-text message body.

    Failures are printed, not raised, so callers remain best-effort.
    """
    message = EmailMessage()
    message["From"] = sender_email
    message["To"] = receiver_email
    message["Subject"] = subject
    message.set_content(body)

    # Log in to server using secure context and send email
    try:
        context = ssl.create_default_context()
        with smtplib.SMTP_SSL(smtp_server, port, context=context) as server:
            server.login(user_id, password)
            # send_message() derives the envelope addresses from the headers
            # and folds/encodes headers correctly — the modern replacement for
            # the legacy sendmail(sender, receiver, message.as_string()) form.
            server.send_message(message)
    except Exception as e:
        print(f"Error sending email: {e}")
|
||||||
|
|
||||||
|
def tool_send_mail(receiver_email: str, subject:str, body:str):
    """Send an email using predefined SMTP server and credentials.

    Thin wrapper around _send_mail that supplies the module-level
    SMTP_SERVER, PORT, SENDER_EMAIL, USER_ID and PASSWORD constants.
    Delivery errors are printed by _send_mail rather than raised.
    """
    _send_mail(SMTP_SERVER, PORT, SENDER_EMAIL, receiver_email, USER_ID, PASSWORD, subject, body)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Manual smoke test: with exactly three arguments send a custom mail,
    # otherwise fall back to a canned test message.
    import sys

    cli_args = sys.argv[1:]
    if len(cli_args) == 3:
        recipient, subject, body = cli_args
        tool_send_mail(recipient, subject, body)
    else:
        tool_send_mail('johan.p1sson@gmail.com', 'Test Subject3', 'This is a test email body\n 67.')
|
||||||
Reference in New Issue
Block a user