from typing import Any
from langchain_ollama import ChatOllama
from langchain.agents import create_agent
from langchain_community.tools.file_management import ListDirectoryTool
from langchain.agents.middleware import after_model, AgentState
# Ollama model tags. GEMMA3 is an available alternative; LLAMA3 is the one
# actually wired into the chat model below.
GEMMA3 = "gemma3"
LLAMA3 = "llama3.1:8b"

# Chat model backed by a locally running Ollama server.
model = ChatOllama(
    model=LLAMA3,
    validate_model_on_init=True,  # fail fast at startup if the model tag isn't pulled
    temperature=0.8,
    num_predict=256,  # cap each response at 256 generated tokens
)
@after_model
def log_response(state: AgentState, runtime) -> dict[str, Any] | None:
    """Middleware hook run after each model call: print the newest message.

    Returns None so the agent state is left untouched.
    """
    latest = state["messages"][-1]
    print(f"Model returned: {latest.content}")
    return None
# Build a tool-calling agent: the LLM may list directory contents, and the
# after_model middleware logs every raw model response.
agent = create_agent(
    model,
    tools=[
        ListDirectoryTool(),
    ],
    # Fixed typo in the prompt: "assistent" -> "assistant".
    system_prompt="You are a helpful assistant.",
    middleware=[
        log_response,
    ],
)
# Run the agent once with a single user turn.
user_message = {
    "role": "user",
    "content": "What files are in .?",
}
result = agent.invoke({"messages": [user_message]})

# Dump the full conversation transcript (user, tool, and AI messages).
for msg in result["messages"]:
    print(msg.content)