1 n8n workflow diagram

[Screenshots: the n8n canvas, with a "When chat message received" trigger feeding an AI Agent; DeepSeek Chat Model, Simple Memory, MCP Client, and 交底书撰写 are attached to the agent.]

The exported workflow JSON:
{
  "name": "n8n-mcp-xx书",
  "nodes": [
    {
      "parameters": {
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.chatTrigger",
      "typeVersion": 1.1,
      "position": [0, 0],
      "id": "de641539-794b-45d6-b5f8-f2ed38033b63",
      "name": "When chat message received",
      "webhookId": "efb120d9-332f-40f3-a074-7873dc9fcc94"
    },
    {
      "parameters": {
        "promptType": "define",
        "text": "=你是交底书生成专家,根据用户问题内容{{$json.chatInput }}自动选择使用交底书mcp生成交底书",
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.agent",
      "typeVersion": 1.9,
      "position": [260, 0],
      "id": "1c2b31e7-014e-4730-a786-e487dc69d4fc",
      "name": "AI Agent"
    },
    {
      "parameters": {
        "connectionType": "sse",
        "operation": "executeTool",
        "toolName": "={{$fromAI(\"tool\")}}",
        "toolParameters": "={{ $fromAI('Tool_Parameters', ``, 'json') }}"
      },
      "type": "n8n-nodes-mcp.mcpClientTool",
      "typeVersion": 1,
      "position": [440, 220],
      "id": "5828c51c-2cd3-45f1-94a8-c643ed0d5fc2",
      "name": "MCP Client",
      "credentials": {
        "mcpClientSseApi": {
          "id": "zGmxUyCqAcRxk9P6",
          "name": "MCP Client -魔塔mcp"
        }
      }
    },
    {
      "parameters": {
        "options": {}
      },
      "type": "@n8n/n8n-nodes-langchain.lmChatDeepSeek",
      "typeVersion": 1,
      "position": [160, 220],
      "id": "4372c908-4a5b-41d9-881f-513927d32016",
      "name": "DeepSeek Chat Model",
      "credentials": {
        "deepSeekApi": {
          "id": "jMMXQvLh3LN8OMZu",
          "name": "DeepSeek account 2"
        }
      }
    },
    {
      "parameters": {},
      "type": "@n8n/n8n-nodes-langchain.memoryBufferWindow",
      "typeVersion": 1.3,
      "position": [300, 220],
      "id": "31342492-241f-4af2-ab41-a84e35c69398",
      "name": "Simple Memory"
    },
    {
      "parameters": {
        "sseEndpoint": "=http://122.145.14.238:8000/sse"
      },
      "type": "@n8n/n8n-nodes-langchain.mcpClientTool",
      "typeVersion": 1,
      "position": [640, 220],
      "id": "4c4b651c-52c1-4b50-9db2-16a6ab9da838",
      "name": "交底书撰写"
    }
  ],
  "pinData": {},
  "connections": {
    "When chat message received": {
      "main": [
        [
          {
            "node": "AI Agent",
            "type": "main",
            "index": 0
          }
        ]
      ]
    },
    "MCP Client": {
      "ai_tool": [
        [
          {
            "node": "AI Agent",
            "type": "ai_tool",
            "index": 0
          }
        ]
      ]
    },
    "DeepSeek Chat Model": {
      "ai_languageModel": [
        [
          {
            "node": "AI Agent",
            "type": "ai_languageModel",
            "index": 0
          }
        ]
      ]
    },
    "Simple Memory": {
      "ai_memory": [
        [
          {
            "node": "AI Agent",
            "type": "ai_memory",
            "index": 0
          }
        ]
      ]
    },
    "交底书撰写": {
      "ai_tool": [
        [
          {
            "node": "AI Agent",
            "type": "ai_tool",
            "index": 0
          }
        ]
      ]
    }
  },
  "active": false,
  "settings": {
    "executionOrder": "v1"
  },
  "versionId": "0c6fb444-986d-4236-893a-0db4a6fefd97",
  "meta": {
    "templateCredsSetupCompleted": true,
    "instanceId": "cc70799ffa116d6456cbbb8995bdbd0d368f3995d3046992ffaaab7036cf839d"
  },
  "id": "S2wHsf0OnFBL2xCo",
  "tags": []
}
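
Before importing the workflow, it helps to confirm that the SSE endpoint referenced by the MCP Client / 交底书撰写 nodes is reachable and actually exposes the expected tool, since the agent picks the tool name dynamically via $fromAI("tool"). A minimal sketch using the official mcp Python SDK; the sse_client/ClientSession calls are an assumption based on that SDK and are not part of the original workflow:

import asyncio

from mcp import ClientSession
from mcp.client.sse import sse_client


async def main() -> None:
    # Endpoint taken from the workflow's sseEndpoint value above.
    async with sse_client("http://122.145.14.238:8000/sse") as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()
            tools = await session.list_tools()
            for tool in tools.tools:
                # The agent's $fromAI("tool") expression must resolve to one of these names.
                print(tool.name, "-", tool.description)


asyncio.run(main())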
2 Local MCP server code
The code is deployed on the server and serves the SSE endpoint (port 8000, path /sse) that the workflow above points at.
# from mcp.server.fastmcp import FastMCP
from fastmcp import FastMCP
import requests

# MCP server exposed over SSE; the n8n workflow's sseEndpoint points at http://<server-ip>:8000/sse
mcp = FastMCP(
    name="generatetechdisclosure",
    host="0.0.0.0",
    port=8000,
    description="xx撰写MCP服务",
    sse_path='/sse'
)


@mcp.tool()
def generate_tech_disclosure() -> str:
    """生成xx"""
    # Call an Azure OpenAI chat-completions deployment to draft the document
    # (endpoint, api-version, and credentials below are redacted placeholders).
    url = "https://ai-xxx.openai.azure.com/openai/deployments/gpt-4.1/chat/completions"
    params = {
        "api-version": "2025-xxx"
    }
    headers = {
        "api-key": "5Oxxxx",
        "Content-Type": "application/json",
        "Authorization": "Bearer xxxx"
    }
    data = {
        "messages": [
            {
                "role": "system",
                "content": [
                    {
                        "type": "text",
                        "text": "你是一个帮助用户查找信息的 AI 助手。请帮我生成一份不少于4000字的 xxx"
                    }
                ]
            }
        ],
        "temperature": 1,
        "top_p": 1,
        "max_tokens": 4000
    }
    response = requests.post(url, params=params, headers=headers, json=data)
    if response.status_code == 200:
        return response.json()['choices'][0]['message']['content']
    else:
        raise Exception(f"API请求失败,状态码:{response.status_code}")


if __name__ == "__main__":
    try:
        print("Starting server...")
        # mcp.run(transport='stdio')
        mcp.run(transport="sse", host="0.0.0.0", port=8000)
    except Exception as e:
        print(f"Error: {e}")