prompt work

This commit is contained in:
2026-04-19 08:00:08 +02:00
parent 6cda9ceddb
commit 369ff69c8c

View File

@@ -5,6 +5,7 @@
# source venv/bin/activate # source venv/bin/activate
from datetime import timedelta
from typing import TypedDict, List from typing import TypedDict, List
from urllib import response from urllib import response
#from asyncio import tools #from asyncio import tools
@@ -60,6 +61,7 @@ class HTMLCrawler(HTMLParser):
# Configuration # Configuration
LLM_BASE_URL = "http://192.168.50.215:11434" LLM_BASE_URL = "http://192.168.50.215:11434"
MODEL_NAME = "qwen3:8b" MODEL_NAME = "qwen3:8b"
#MODEL_NAME = "gpt-oss:20b"
LINKDING_API_URL = "https://linkding.hal.se/api/bookmarks/" LINKDING_API_URL = "https://linkding.hal.se/api/bookmarks/"
LINKDING_API_TOKEN = "fa54dee2ccbcad80a0c6259bdbbed896581e1423" LINKDING_API_TOKEN = "fa54dee2ccbcad80a0c6259bdbbed896581e1423"
@@ -72,6 +74,14 @@ def todays_date() -> str:
from datetime import datetime from datetime import datetime
return datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") return datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
@tool
def calculate_date(dat: str, days: int) -> str:
    """Use this tool whenever you need to calculate the date in ISO 8601 format given a date and the number of days to deduce from that date.

    Args:
        dat: Start timestamp in "%Y-%m-%dT%H:%M:%SZ" form.
        days: Number of days to subtract from ``dat``.

    Returns:
        The earlier timestamp, formatted the same way as the input.
    """
    from datetime import datetime

    # Single format string for both parsing and rendering keeps the
    # round-trip symmetric.
    iso_fmt = "%Y-%m-%dT%H:%M:%SZ"
    earlier = datetime.strptime(dat, iso_fmt) - timedelta(days=days)
    return earlier.strftime(iso_fmt)
@tool @tool
def crawl_homepage(url:str) -> str: def crawl_homepage(url:str) -> str:
"""Use this tool whenever you need to read the content of a home page on the internet. """Use this tool whenever you need to read the content of a home page on the internet.
@@ -80,7 +90,7 @@ def crawl_homepage(url:str) -> str:
url: home page url to crawl url: home page url to crawl
Returns: Returns:
Formatted test from homepage Formatted text from homepage
""" """
htmlParser = HTMLCrawler() htmlParser = HTMLCrawler()
try: try:
@@ -156,7 +166,23 @@ def add_tag_to_bookmark(bookmark_id: int, tag: str) -> str:
except requests.exceptions.RequestException as e: except requests.exceptions.RequestException as e:
return f"Error occurred while adding tag: {e}" return f"Error occurred while adding tag: {e}"
@tool
def write_to_file(filename: str, content: str) -> str:
    """Use this tool whenever you need to write content to a file.

    Opens the file in append mode, so repeated calls accumulate content
    rather than overwriting it.

    Args:
        filename: The name of the file to which the content should be written.
        content: The content to write to the file.

    Returns:
        A message indicating whether the content was successfully written or
        if an error occurred.
    """
    try:
        # Append mode: each bookmark summary is added after the previous one.
        # Explicit UTF-8 avoids platform-dependent default encodings.
        with open(filename, 'a', encoding='utf-8') as f:
            f.write(content)
        # Bug fix: the success message previously contained a broken
        # placeholder ("(unknown)") instead of interpolating the filename.
        return f"Content successfully written to {filename}."
    except Exception as e:
        # Best-effort tool: report the failure as text so the agent can
        # react, mirroring the error style of the other bookmark tools.
        return f"Error occurred while writing to file: {e}"
# ----- Shared State ----- # ----- Shared State -----
class AgentState(TypedDict): class AgentState(TypedDict):
@@ -165,20 +191,41 @@ class AgentState(TypedDict):
# ----- Agent Nodes ----- # ----- Agent Nodes -----
def agent_node(state: AgentState): def agent_node(state: AgentState):
"""This is the main agent node that processes messages and decides when to call tools.""" """This is the main agent node that processes messages and decides when to call tools."""
llm_with_tools = llm.bind_tools([add_tag_to_bookmark, fetch_bookmarks, crawl_homepage, todays_date]) llm_with_tools = llm.bind_tools([add_tag_to_bookmark, fetch_bookmarks, crawl_homepage, todays_date, calculate_date, write_to_file])
system_prompt = SystemMessage(f""" system_prompt = SystemMessage(f"""
You are a research agent with web search capabilities. You are a bookmark processing agent. You have these tools:
INSTRUCTIONS: 1. **todays_date**: Get today's date
1. **MUST use web_search tool** first to gather information 2. **calculate_date**: Calculate a past date
2. Provide comprehensive research based on search results 3. **fetch_bookmarks**: Get bookmarks added since a date
4. **crawl_homepage**: Read website content
5. **write_to_file**: Write content to ~/bookmark_summaries.md
6. **add_tag_to_bookmark**: Add tags to bookmarks
Always call **web_search** before responding. YOUR TASK - FOLLOW THIS EXACTLY:
PHASE 1: Get bookmarks
- Call todays_date to get current date
- Call calculate_date to get the date 28 days ago
- Call fetch_bookmarks with that date to get all bookmarks
If No bookmarks found, stop here. Otherwise, move to PHASE 2.
PHASE 2: Process EACH bookmark (do NOT skip any):
For each bookmark from fetch_bookmarks:
Step A: Call crawl_homepage with the bookmark URL
Step B: IMMEDIATELY call write_to_file to write: [URL] | [DESCRIPTION] | [CRAWLED CONTENT SUMMARY OF MAX 100 WORDS]
Step C: IMMEDIATELY call add_tag_to_bookmark with the bookmark ID and 1-2 relevant tags
Step D: ONLY THEN move to the next bookmark
CRITICAL RULES:
- NEVER respond with text - ONLY call tools
- Process ALL bookmarks before finishing
- For each bookmark, MUST call: crawl_homepage, write_to_file, add_tag_to_bookmark (IN THAT ORDER)
- Do not stop until all bookmarks have all three tools called
""") """)
human_prompt = HumanMessage("What are the latest bookmarks added to my Linkding?")
messages = [system_prompt, human_prompt] messages = [system_prompt] + state['messages']
response = llm_with_tools.invoke(messages) response = llm_with_tools.invoke(messages)
@@ -207,7 +254,7 @@ def create_agent():
builder = StateGraph(AgentState) builder = StateGraph(AgentState)
builder.add_node("agent", agent_node) builder.add_node("agent", agent_node)
builder.add_node("tools", ToolNode([add_tag_to_bookmark, fetch_bookmarks, crawl_homepage, todays_date])) builder.add_node("tools", ToolNode([add_tag_to_bookmark, fetch_bookmarks, crawl_homepage, todays_date, calculate_date, write_to_file]))
builder.set_entry_point("agent") builder.set_entry_point("agent")
builder.add_conditional_edges("agent", should_continue, ["tools", END]) builder.add_conditional_edges("agent", should_continue, ["tools", END])
@@ -219,23 +266,38 @@ def create_agent():
return graph return graph
agent = create_agent() agent = create_agent()
result = agent.invoke({}) human_prompt = HumanMessage("Process all bookmarks from the last 14 days: fetch them, summarize their content, write summaries to a file, and add relevant tags.")
result = agent.invoke({'messages': [human_prompt]})
print(result['messages'])
print(result['messages'][-1].content)
#print(result['messages'])
#print(result['messages'][-1].content)
"""
result = fetch_bookmarks.invoke({}) result = fetch_bookmarks.invoke({})
print(result) print(result)
result = fetch_bookmarks.invoke({'date_added': "2026-04-09T20:26:31Z"})
print(result)
result = crawl_homepage.invoke({'url': "http://example.com"}) result = crawl_homepage.invoke({'url': "http://example.com"})
print(result) print(result)
result = todays_date.invoke({}) result = todays_date.invoke({})
print(result) print(result)
result = add_tag_to_bookmark.invoke({'bookmark_id': 123, 'tag': 'newtag'}) #result = add_tag_to_bookmark.invoke({'bookmark_id': 123, 'tag': 'newtag'})
#print(result)
#result = add_tag_to_bookmark.invoke({'bookmark_id': 4, 'tag': 'another_tag'})
#print(result)
result = calculate_date.invoke({'dat': "2026-03-01T00:00:00Z", 'days': 3})
print(result) print(result)
result = add_tag_to_bookmark.invoke({'bookmark_id': 4, 'tag': 'another_tag'}) result = write_to_file.invoke({'filename': 'test.txt', 'content': 'This is a test.'})
print(result) print(result)
"""