from langgraph.prebuilt import create_react_agent
from langchain_ollama import ChatOllama
from langchain_core.messages import HumanMessage, SystemMessage
import logging

import prompt_toolkit
import prompt_toolkit.history
import prompt_toolkit.auto_suggest

logger = logging.getLogger(__name__)

from . import tools

# Persist prompt history across sessions so up-arrow recall works between runs.
cli_history = prompt_toolkit.history.FileHistory('output/cli_history.txt')

#MODEL = "gemma3:27b"
MODEL = "qwen3:latest"


def create_model():
    available_tools = tools.get_tools()
    logger.info("Available tools:")
    for tool in available_tools:
        logger.info("- %s", tool.name)

    llm = ChatOllama(model=MODEL)
    # create_react_agent binds the tools to the model itself, so no separate
    # bind_tools() call is needed (bind_tools returns a new runnable rather
    # than mutating llm in place).
    return create_react_agent(llm, tools=available_tools)


def main():
    logging.basicConfig(level='INFO')
    messages = [SystemMessage("You are a useful assistant with access to built-in system tools.")]
    llm = create_model()

    # Index of the first message that has not been printed yet.
    prev_idx = 0
    while True:
        user_input = prompt_toolkit.prompt(
            "Human: ",
            history=cli_history,
            auto_suggest=prompt_toolkit.auto_suggest.AutoSuggestFromHistory(),
        )
        messages.append(HumanMessage(user_input))

        # The agent returns the full conversation, including tool calls and
        # tool results, under the 'messages' key.
        result = llm.invoke({
            'messages': messages,
        })
        messages = result['messages']

        # Print only the messages added since the previous turn.
        for msg in messages[prev_idx:]:
            print(f'{msg.type}: {msg.content}')
        prev_idx = len(messages)


if __name__ == '__main__':
    main()