"""Run a local Ollama-backed LangChain agent that can search the filesystem.

Requires a running Ollama server with the selected model already pulled
(`validate_model_on_init=True` makes construction fail fast otherwise).
"""

from langchain_ollama import ChatOllama
from langchain.agents import create_agent
from langchain_community.tools.file_management import FileSearchTool

# Local model identifiers available on the Ollama server; LLAMA3 is in use.
GEMMA3 = "gemma3"
LLAMA3 = "llama3.1:8b"


def main() -> None:
    """Build the agent, ask it which *.py files exist in '.', and print the transcript."""
    model = ChatOllama(
        model=LLAMA3,
        validate_model_on_init=True,  # raise immediately if the model is not pulled
        temperature=0.8,
        num_predict=256,  # cap generated response length (tokens)
    )

    agent = create_agent(
        model,
        tools=[
            FileSearchTool(),
        ],
        # Fixed typo in the prompt string: "assistent" -> "assistant".
        system_prompt="You are a helpful assistant.",
    )

    result = agent.invoke(
        {
            "messages": [
                {
                    "role": "user",
                    "content": "What files match *.py in .?",
                }
            ]
        }
    )

    # `result["messages"]` holds the full conversation (user, tool, assistant);
    # print each message's text content in order.
    for message in result["messages"]:
        print(message.content)


if __name__ == "__main__":
    main()