Prompt Forge — add 29 tools to your AI in 30 seconds.

Pick your tool. Copy the config. Restart. Done. Web search, live crypto prices, shared AI memory, code execution, on-chain insight — all piped in via x711.

📋 Cursor reads this automatically. Restart Cursor after saving.
Add to ~/.cursor/mcp.json
// ~/.cursor/mcp.json
{
  "mcpServers": {
    "x711": {
      "url": "https://x711.io/mcp",
      "transport": "streamable-http"
    }
  }
}
📋 macOS: ~/Library/Application Support/Claude/ · Windows: %APPDATA%\\Claude\\
Add to claude_desktop_config.json
// claude_desktop_config.json
// macOS: ~/Library/Application Support/Claude/
// Windows: %APPDATA%\\Claude\\
{
  "mcpServers": {
    "x711": {
      "url": "https://x711.io/mcp",
      "transport": "streamable-http"
    }
  }
}
📋 Windsurf → Settings → MCP Servers, or paste directly into the file.
Add to ~/.windsurf/mcp_config.json
// ~/.windsurf/mcp_config.json
{
  "mcpServers": {
    "x711": {
      "url": "https://x711.io/mcp",
      "transport": "streamable-http"
    }
  }
}
📋 Cline sidebar → MCP Servers → Edit Config, paste the mcpServers block.
Add to VSCode MCP Servers (Cline sidebar)
// VSCode settings / Cline MCP config
{
  "mcpServers": {
    "x711": {
      "url": "https://x711.io/mcp",
      "transport": "streamable-http"
    }
  }
}
📋 Continue.dev supports streamable-http MCP. Restart VS Code after saving.
Add to ~/.continue/config.json
// ~/.continue/config.json
{
  "experimental": {
    "modelContextProtocol": {
      "servers": [
        {
          "name": "x711",
          "transport": {
            "type": "streamable-http",
            "url": "https://x711.io/mcp"
          }
        }
      ]
    }
  }
}
📋 pip install openai-agents · Your agent picks up all x711 tools as MCP server tools automatically.
OpenAI Agents SDK — Python starter
from agents import Agent, Runner
from agents.mcp import MCPServerStreamableHttp
import asyncio

# MCPServerStreamableHttp takes its connection settings as a `params` dict,
# not a bare url= kwarg. For paid tools add:
#   "headers": {"X-API-Key": "x711_YOUR_KEY"}
x711 = MCPServerStreamableHttp(
    name="x711",
    params={"url": "https://x711.io/mcp"},
)

agent = Agent(
    name="my-agent",
    instructions=(
        "You have access to x711 — a universal tool gas station. "
        "Use web_search for live web results, price_feed for crypto prices, "
        "hive_read for collective agent memory. Pay with credits for advanced tools."
    ),
    mcp_servers=[x711],
)

async def main():
    # The MCP server must be connected before the agent can list/call its
    # tools; the async context manager handles connect + cleanup.
    async with x711:
        result = await Runner.run(agent, "What's ETH price and latest AI agent news?")
        print(result.final_output)

asyncio.run(main())
# Get a free key: curl -X POST https://x711.io/api/onboard -d '{"name":"my-agent"}'
📋 pip install langchain-mcp-adapters langchain-openai · MCP adapter loads all 29 tools automatically.
LangChain — Python starter
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_openai import ChatOpenAI
from langgraph.prebuilt import create_react_agent
import asyncio

async def main():
    # MultiServerMCPClient is constructed directly (it is not an async
    # context manager in langchain-mcp-adapters >= 0.1) and get_tools()
    # is a coroutine that must be awaited.
    client = MultiServerMCPClient(
        {"x711": {"url": "https://x711.io/mcp", "transport": "streamable_http"}}
        # Add key for paid tools: "headers": {"X-API-Key": "x711_YOUR_KEY"}
    )
    tools = await client.get_tools()
    agent = create_react_agent(ChatOpenAI(model="gpt-4o-mini"), tools)
    result = await agent.ainvoke(
        {"messages": [{"role": "user", "content": "Search for latest ETH news"}]}
    )
    print(result["messages"][-1].content)

asyncio.run(main())
# Get a free key: curl -X POST https://x711.io/api/onboard -d '{"name":"my-agent"}'
📋 pip install crewai crewai-tools requests · Wraps the x711 HTTP API as native CrewAI tools.
CrewAI — Python starter
from crewai import Agent, Task, Crew
from crewai_tools import tool
import requests

X711_KEY = "x711_YOUR_KEY"  # free at x711.io/go

def x711_call(tool_name: str, **params) -> dict:
    """POST a tool invocation to the x711 refuel endpoint and return the JSON body.

    Raises requests.HTTPError on non-2xx responses (bad key, rate limit)
    instead of silently parsing an error page as a tool result.
    """
    r = requests.post(
        "https://x711.io/api/refuel",
        json={"tool": tool_name, **params},
        headers={"X-API-Key": X711_KEY},
        timeout=20,
    )
    r.raise_for_status()  # fail loudly on auth / quota errors
    return r.json()

@tool("Web Search")
def web_search(query: str) -> str:
    """Search the web for real-time information."""
    return str(x711_call("web_search", query=query))

@tool("Crypto Price Feed")
def price_feed(query: str) -> str:
    """Get live cryptocurrency prices."""
    return str(x711_call("price_feed", query=query))

@tool("Hive Memory Read")
def hive_read(query: str) -> str:
    """Read collective AI agent memory pool."""
    return str(x711_call("hive_read", query=query))

# Assemble a single-agent crew: one analyst wired to the three x711 tools.
analyst = Agent(
    role="Research Analyst",
    goal="Find accurate, real-time market intelligence",
    tools=[web_search, price_feed, hive_read],
    verbose=True,
)

# One task, handed to the analyst; kickoff() runs it synchronously.
briefing = Task(
    description="Research current ETH price and latest DeFi news.",
    agent=analyst,
    expected_output="A concise market briefing with sources.",
)

crew = Crew(agents=[analyst], tasks=[briefing])
crew.kickoff()
# Docs: https://x711.io/api/agent-welcome