PocketFlow
Pocket Flow is a 100-line minimalist LLM framework.
Lightweight: Just 100 lines. Zero bloat, zero dependencies, zero vendor lock-in.
Expressive: Everything you love—(Multi-)Agents, Workflow, RAG, and more.
Agentic Coding: Let AI Agents (e.g., Cursor AI) build Agents—10x productivity boost!
!pip install pocketflow --quiet
!pip install litellm --quiet
# Mount Google Drive and change into the notebook's project folder.
from google.colab import drive
drive.mount('/content/drive') # Colab prompts for authorization; Drive appears at /content/drive
import os
os.chdir('drive/My Drive')  # relative to the Colab working dir (/content) after mounting
os.chdir('Books_Writings/NLPBook/')  # project folder; keys.ipynb (run below) lives here
Mounted at /content/drive
%run keys.ipynb
import os
from litellm import completion
def call_llm(messages):
    """Send an OpenAI-format chat history to GPT-4.1 via litellm.

    Args:
        messages: list of ``{"role": ..., "content": ...}`` dicts forming the
            conversation so far.

    Returns:
        The assistant's reply text from the first completion choice.
    """
    result = completion(model="openai/gpt-4.1", messages=messages)
    return result.choices[0].message.content
# Sanity-check the wrapper: send one prompt and echo both sides of the exchange.
prompt_text = "In a few words, what's the meaning of life?"
messages = [{"role": "user", "content": prompt_text}]
response = call_llm(messages)
print(f"Prompt: {prompt_text}")
print(f"Response: {response}")
Prompt: In a few words, what's the meaning of life?
Response: To seek connection, understanding, and fulfillment—making meaning in your own way.
from pocketflow import Node, Flow
# from utils import call_llm
class ChatNode(Node):
    """One chat turn: read user input, query the LLM, print the assistant reply.

    The node self-loops on the "continue" action; returning None from
    prep/post ends the flow (the user typed 'exit').
    """

    def prep(self, shared):
        """Collect user input and return the full message history, or None to stop."""
        # First turn: create the shared history and greet the user.
        if "messages" not in shared:
            shared["messages"] = []
            print("Welcome to the chat! Type 'exit' to end the conversation.")

        user_input = input("\nYou: ")

        # 'exit' (case-insensitive) terminates the conversation.
        if user_input.lower() == 'exit':
            return None

        shared["messages"].append({"role": "user", "content": user_input})
        return shared["messages"]

    def exec(self, messages):
        """Ask the LLM for a reply given the whole history; pass None through on exit."""
        if messages is None:
            return None
        return call_llm(messages)

    def post(self, shared, prep_res, exec_res):
        """Record the assistant's reply and decide whether to loop."""
        if prep_res is None or exec_res is None:
            print("\nGoodbye!")
            return None  # no successor registered for None, so the flow ends

        print(f"\nAssistant: {exec_res}")
        shared["messages"].append({"role": "assistant", "content": exec_res})
        return "continue"  # follow the self-loop edge and take another turn
# Build a single-node flow where the "continue" action loops the node back
# onto itself, then run it against an empty shared store.
chat_node = ChatNode()
chat_node - "continue" >> chat_node  # self-loop: each turn re-enters the node
flow = Flow(start=chat_node)

shared = {}
flow.run(shared)
Welcome to the chat! Type 'exit' to end the conversation.
You: What is 1+2?
Assistant: 1 + 2 = **3**
You: exit
Goodbye!
/usr/local/lib/python3.12/dist-packages/pocketflow/__init__.py:44: UserWarning: Flow ends: 'None' not found in ['continue']
if not nxt and curr.successors: warnings.warn(f"Flow ends: '{action}' not found in {list(curr.successors)}")