feat: 统一 apps/mcp/services 为 entries/ Skill 格式

将 20 个条目从三个分散目录(apps/8、mcp/7、services/5)迁移到统一的
entries/ 目录。每个条目包含:
- manifest.json:极简结构化元数据
- install.md:自然语言安装说明(如适用)
- usage.md:自然语言使用说明和连接配置

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Johnson
2026-03-08 09:16:55 +08:00
parent 4083ee3f94
commit f6042a0f56
69 changed files with 931 additions and 547 deletions

View File

@@ -1,33 +0,0 @@
{
"id": "anythingllm",
"name": "AnythingLLM",
"icon": "linear-gradient(135deg, #10B981, #059669)",
"iconLetter": "A",
"shortDesc": "全能型 AI 桌面应用,支持 RAG、Agent 和文档对话",
"fullDesc": "AnythingLLM 是一个全能型 AI 应用,可以将任何文档、资源或内容转化为上下文,供任何 LLM 在聊天中使用。支持多用户管理、权限控制和嵌入式对话。",
"category": "tools",
"tags": [
"RAG",
"Desktop",
"文档对话"
],
"version": "1.6.2",
"author": "Mintplex Labs",
"githubUrl": "https://github.com/Mintplex-Labs/anything-llm",
"stars": 30200,
"install": {
"method": "docker",
"requirements": {
"docker": true,
"minMemory": "2GB",
"minDisk": "5GB",
"ports": [
3001
]
},
"configNeeded": [
"Docker 运行环境",
"LLM API Key"
]
}
}

View File

@@ -1,33 +0,0 @@
{
"id": "coze",
"name": "Coze",
"icon": "linear-gradient(135deg, #3B82F6, #1D4ED8)",
"iconLetter": "C",
"shortDesc": "字节跳动 AI Bot 开发平台(开源社区版)",
"fullDesc": "Coze 是一个 AI 聊天机器人和应用开发平台,提供 LLM、知识库、插件和工作流等能力。支持快速构建、测试和部署 AI Bot,无需编程经验。",
"category": "ai-platform",
"tags": [
"Bot",
"LLM",
"插件"
],
"version": "0.5.0",
"author": "Coze",
"githubUrl": "https://github.com/coze-dev/coze",
"stars": 12300,
"install": {
"method": "docker",
"requirements": {
"docker": true,
"minMemory": "4GB",
"minDisk": "8GB",
"ports": [
8800
]
},
"configNeeded": [
"Docker 运行环境",
"API Key 配置"
]
}
}

View File

@@ -1,35 +0,0 @@
{
"id": "dify",
"name": "Dify",
"icon": "linear-gradient(135deg, #1C64F2, #7C3AED)",
"iconLetter": "D",
"shortDesc": "开源 AI 应用开发平台,支持 RAG、Agent、工作流编排",
"fullDesc": "Dify 是一个开源的 LLM 应用开发平台,提供从 Agent 构建到 AI Workflow 编排、RAG 检索、模型管理等能力,轻松构建和运营生成式 AI 原生应用。支持数百种模型接入。",
"category": "ai-platform",
"tags": [
"LLM",
"RAG",
"Agent",
"Workflow"
],
"version": "0.8.3",
"author": "Dify.AI",
"githubUrl": "https://github.com/langgenius/dify",
"stars": 56200,
"install": {
"method": "docker-compose",
"requirements": {
"docker": true,
"minMemory": "4GB",
"minDisk": "10GB",
"ports": [
3000,
5001
]
},
"configNeeded": [
"Docker 运行环境",
"OpenAI API Key(可选)"
]
}
}

View File

@@ -1,33 +0,0 @@
{
"id": "lobechat",
"name": "LobeChat",
"icon": "linear-gradient(135deg, #000000, #333333)",
"iconLetter": "L",
"shortDesc": "开源高性能聊天机器人框架,支持多模型和插件",
"fullDesc": "LobeChat 是一个开源的现代设计 ChatGPT/LLM UI 框架,支持多模型服务提供商(OpenAI / Claude / Gemini / Ollama 等),多模态和可扩展的插件系统。一键免费部署私有 ChatGPT/Claude 应用。",
"category": "chat",
"tags": [
"Chat",
"多模型",
"插件"
],
"version": "1.20.0",
"author": "LobeHub",
"githubUrl": "https://github.com/lobehub/lobe-chat",
"stars": 48900,
"install": {
"method": "docker",
"requirements": {
"docker": true,
"minMemory": "1GB",
"minDisk": "3GB",
"ports": [
3210
]
},
"configNeeded": [
"Docker 运行环境",
"OpenAI API Key(可选)"
]
}
}

View File

@@ -1,33 +0,0 @@
{
"id": "n8n",
"name": "n8n",
"icon": "linear-gradient(135deg, #EA4B71, #D93668)",
"iconLetter": "n",
"shortDesc": "可视化工作流自动化平台,支持 400+ 集成",
"fullDesc": "n8n 是一个可扩展的工作流自动化工具。使用公平代码许可,拥有原生 AI 能力,可以连接任何东西。支持自托管,提供丰富的第三方服务集成节点。",
"category": "workflow",
"tags": [
"Automation",
"Workflow",
"Integration"
],
"version": "1.64.0",
"author": "n8n-io",
"githubUrl": "https://github.com/n8n-io/n8n",
"stars": 50600,
"install": {
"method": "docker",
"requirements": {
"docker": true,
"minMemory": "2GB",
"minDisk": "5GB",
"ports": [
5678
]
},
"configNeeded": [
"Docker 运行环境",
"数据库(SQLite / PostgreSQL)"
]
}
}

View File

@@ -1,33 +0,0 @@
{
"id": "open-webui",
"name": "Open WebUI",
"icon": "linear-gradient(135deg, #6366F1, #8B5CF6)",
"iconLetter": "O",
"shortDesc": "自托管的 AI 对话界面,支持 Ollama 和 OpenAI 兼容 API",
"fullDesc": "Open WebUI 是一个可扩展的自托管 AI 界面,支持完全离线操作。支持多种 LLM 运行器,包括 Ollama 和 OpenAI 兼容 API,内置 RAG 集成、网页浏览、代码执行等功能。",
"category": "chat",
"tags": [
"Chat",
"Ollama",
"WebUI"
],
"version": "0.4.8",
"author": "Open WebUI",
"githubUrl": "https://github.com/open-webui/open-webui",
"stars": 52800,
"install": {
"method": "docker",
"requirements": {
"docker": true,
"minMemory": "2GB",
"minDisk": "5GB",
"ports": [
8080
]
},
"configNeeded": [
"Docker 运行环境",
"Ollama 或 OpenAI API Key"
]
}
}

View File

@@ -1,36 +0,0 @@
{
"id": "openclaw",
"name": "OpenClaw",
"icon": "linear-gradient(135deg, #F59E0B, #DC2626)",
"iconLetter": "O",
"shortDesc": "开源 Agent 运行时平台,支持多 Agent 编排和工具策略",
"fullDesc": "OpenClaw 是一个开源的 Agent 运行时平台,提供嵌入式运行时、会话管理、工具策略控制、多 Agent 编排、Sandbox 隔离执行等能力。支持流式输出、对话压缩、队列管理和 hooks 扩展,适合构建企业级 AI 应用。",
"category": "ai-platform",
"tags": [
"Agent",
"Runtime",
"Multi-Agent",
"Sandbox"
],
"version": "0.9.0",
"author": "OpenClaw",
"githubUrl": "https://github.com/openclaw/openclaw",
"stars": 8500,
"install": {
"method": "docker-compose",
"requirements": {
"docker": true,
"minMemory": "4GB",
"minDisk": "10GB",
"ports": [
8080,
3000
]
},
"configNeeded": [
"Docker 运行环境",
"Node.js 18+",
"API Key 配置"
]
}
}

View File

@@ -1,35 +0,0 @@
{
"id": "ragflow",
"name": "RagFlow",
"icon": "linear-gradient(135deg, #F97316, #EF4444)",
"iconLetter": "R",
"shortDesc": "基于深度文档理解的开源 RAG 引擎",
"fullDesc": "RagFlow 是一款基于深度文档理解构建的开源 RAG 引擎。可以为各种规模的企业及个人提供流畅的 RAG 工作流,结合大语言模型(LLM),针对用户各类不同的复杂格式数据提供可靠的问答以及有理有据的引用。",
"category": "rag",
"tags": [
"RAG",
"文档理解",
"知识库"
],
"version": "0.14.0",
"author": "InfiniFlow",
"githubUrl": "https://github.com/infiniflow/ragflow",
"stars": 28400,
"install": {
"method": "docker-compose",
"requirements": {
"docker": true,
"minMemory": "8GB",
"minDisk": "20GB",
"ports": [
9380,
443,
80
]
},
"configNeeded": [
"Docker 运行环境",
"Elasticsearch / Infinity 数据库"
]
}
}

View File

@@ -0,0 +1,25 @@
# 安装 AnythingLLM
AnythingLLM 是一个全能型 AI 应用,可以将任何文档、资源或内容转化为上下文,供任何 LLM 在聊天中使用。支持多用户管理、权限控制和嵌入式对话。
## 环境要求
- Docker 已安装并运行
- 内存:≥ 2GB
- 磁盘空间:≥ 5GB
- 端口:3001
## 安装方式
使用 `docker` 部署。
### 步骤
1. 拉取 Docker 镜像并启动容器:
```bash
docker run -d -p 3001:3001 --name anythingllm anythingllm/anythingllm:latest
```
## 配置项
- Docker 运行环境
- LLM API Key
## 验证
访问 `http://localhost:3001` 确认服务已启动。

View File

@@ -0,0 +1,20 @@
{
"id": "anythingllm",
"name": "AnythingLLM",
"type": "docker-app",
"version": "1.6.2",
"author": "Mintplex Labs",
"description": "全能型 AI 桌面应用,支持 RAG、Agent 和文档对话",
"tags": [
"RAG",
"Desktop",
"文档对话"
],
"icon": "linear-gradient(135deg, #10B981, #059669)",
"iconLetter": "A",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,8 @@
# 使用 AnythingLLM
## 服务描述
全能型 AI 桌面应用,支持 RAG、Agent 和文档对话
## 访问方式
- **Web 界面**:`http://localhost:3001`
- **GitHub**:https://github.com/Mintplex-Labs/anything-llm

View File

@@ -1,19 +1,19 @@
{
"id": "baidu-map",
"name": "百度地图 API",
"description": "地理位置服务,支持地理编码、路线规划和 POI 搜索",
"endpoint": "https://api.map.baidu.com/v3",
"capabilities": [
"geocoding",
"route_plan",
"poi_search",
"distance_calc"
],
"type": "http-api",
"version": "3.0",
"author": "百度",
"description": "地理位置服务,支持地理编码、路线规划和 POI 搜索",
"tags": [
"地图",
"地理位置",
"POI"
],
"icon": "globe",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,14 @@
# 使用 百度地图 API
## 服务描述
地理位置服务,支持地理编码、路线规划和 POI 搜索
## 连接方式
- **类型**:HTTP REST API
- **端点**:`https://api.map.baidu.com/v3`
## 可用能力
- `geocoding`
- `route_plan`
- `poi_search`
- `distance_calc`

25
entries/coze/install.md Normal file
View File

@@ -0,0 +1,25 @@
# 安装 Coze
Coze 是一个 AI 聊天机器人和应用开发平台,提供 LLM、知识库、插件和工作流等能力。支持快速构建、测试和部署 AI Bot,无需编程经验。
## 环境要求
- Docker 已安装并运行
- 内存:≥ 4GB
- 磁盘空间:≥ 8GB
- 端口:8800
## 安装方式
使用 `docker` 部署。
### 步骤
1. 拉取 Docker 镜像并启动容器:
```bash
docker run -d -p 8800:8800 --name coze coze/coze:latest
```
## 配置项
- Docker 运行环境
- API Key 配置
## 验证
访问 `http://localhost:8800` 确认服务已启动。

View File

@@ -0,0 +1,20 @@
{
"id": "coze",
"name": "Coze",
"type": "docker-app",
"version": "0.5.0",
"author": "Coze",
"description": "字节跳动 AI Bot 开发平台(开源社区版)",
"tags": [
"Bot",
"LLM",
"插件"
],
"icon": "linear-gradient(135deg, #3B82F6, #1D4ED8)",
"iconLetter": "C",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

8
entries/coze/usage.md Normal file
View File

@@ -0,0 +1,8 @@
# 使用 Coze
## 服务描述
字节跳动 AI Bot 开发平台(开源社区版)
## 访问方式
- **Web 界面**:`http://localhost:8800`
- **GitHub**:https://github.com/coze-dev/coze

11
entries/db-mcp/install.md Normal file
View File

@@ -0,0 +1,11 @@
# 安装 Database MCP
## 环境要求
- Node.js >= 18
## 安装步骤
## 验证
运行以下命令确认可正常启动:
```bash
npx -y @modelcontextprotocol/server-postgres postgresql://localhost/mydb --help
```

View File

@@ -0,0 +1,19 @@
{
"id": "db-mcp",
"name": "Database MCP",
"type": "mcp",
"version": "0.5.2",
"author": "Community",
"description": "PostgreSQL 数据库查询与管理,支持 SQL 执行和 Schema 浏览",
"tags": [
"数据库",
"PostgreSQL",
"SQL"
],
"icon": "terminal",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

28
entries/db-mcp/usage.md Normal file
View File

@@ -0,0 +1,28 @@
# 使用 Database MCP
## 服务描述
PostgreSQL 数据库查询与管理,支持 SQL 执行和 Schema 浏览
## 连接方式
- **传输协议**:stdio
- **启动命令**:`npx -y @modelcontextprotocol/server-postgres postgresql://localhost/mydb`
### 连接配置
```json
{
"transport": "stdio",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-postgres",
"postgresql://localhost/mydb"
]
}
```
## 可用能力
- `sql_query`
- `schema_browse`
- `data_export`
共提供 **8** 个工具。

View File

@@ -1,18 +1,19 @@
{
"id": "deepl-translate",
"name": "DeepL 翻译",
"description": "高质量多语言翻译服务,支持 30+ 语言互译和文档翻译",
"endpoint": "https://api-free.deepl.com/v2",
"capabilities": [
"text_translate",
"doc_translate",
"language_detect"
],
"type": "http-api",
"version": "2.0",
"author": "DeepL",
"description": "高质量多语言翻译服务,支持 30+ 语言互译和文档翻译",
"tags": [
"翻译",
"多语言",
"NLP"
],
"icon": "globe",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,13 @@
# 使用 DeepL 翻译
## 服务描述
高质量多语言翻译服务,支持 30+ 语言互译和文档翻译
## 连接方式
- **类型**:HTTP REST API
- **端点**:`https://api-free.deepl.com/v2`
## 可用能力
- `text_translate`
- `doc_translate`
- `language_detect`

View File

@@ -1,23 +1,19 @@
{
"id": "dify-mcp",
"name": "Dify MCP Bridge",
"description": "Dify 平台 Agent 工具集桥接,支持调用 Dify 内置工具和自定义工具",
"sourceAppId": "dify",
"sourceAppName": "Dify",
"capabilities": [
"tool_invoke",
"workflow_trigger",
"knowledge_query"
],
"type": "mcp",
"version": "0.8.3",
"author": "Dify.AI",
"description": "Dify 平台 Agent 工具集桥接,支持调用 Dify 内置工具和自定义工具",
"tags": [
"Agent",
"工具集",
"Workflow"
],
"connection": {
"transport": "streamable-http",
"url": "http://localhost:3000/mcp"
}
"icon": "terminal",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

21
entries/dify-mcp/usage.md Normal file
View File

@@ -0,0 +1,21 @@
# 使用 Dify MCP Bridge
## 服务描述
Dify 平台 Agent 工具集桥接,支持调用 Dify 内置工具和自定义工具
## 连接方式
- **传输协议**:streamable-http
- **服务地址**:`http://localhost:3000/mcp`
### 连接配置
```json
{
"transport": "streamable-http",
"url": "http://localhost:3000/mcp"
}
```
## 可用能力
- `tool_invoke`
- `workflow_trigger`
- `knowledge_query`

View File

@@ -1,20 +1,19 @@
{
"id": "dify-rag",
"name": "Dify RAG API",
"description": "Dify 知识库检索服务,支持语义搜索和多知识库联合检索",
"sourceAppId": "dify",
"sourceAppName": "Dify",
"endpoint": "http://localhost:3000/api/v1/datasets",
"capabilities": [
"semantic_search",
"multi_dataset",
"relevance_ranking"
],
"type": "http-api",
"version": "0.8.3",
"author": "Dify.AI",
"description": "Dify 知识库检索服务,支持语义搜索和多知识库联合检索",
"tags": [
"RAG",
"知识库",
"检索"
],
"icon": "globe",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

13
entries/dify-rag/usage.md Normal file
View File

@@ -0,0 +1,13 @@
# 使用 Dify RAG API
## 服务描述
Dify 知识库检索服务,支持语义搜索和多知识库联合检索
## 连接方式
- **类型**:HTTP REST API
- **端点**:`http://localhost:3000/api/v1/datasets`
## 可用能力
- `semantic_search`
- `multi_dataset`
- `relevance_ranking`

27
entries/dify/install.md Normal file
View File

@@ -0,0 +1,27 @@
# 安装 Dify
Dify 是一个开源的 LLM 应用开发平台,提供从 Agent 构建到 AI Workflow 编排、RAG 检索、模型管理等能力,轻松构建和运营生成式 AI 原生应用。支持数百种模型接入。
## 环境要求
- Docker 已安装并运行
- 内存:≥ 4GB
- 磁盘空间:≥ 10GB
- 端口:3000, 5001
## 安装方式
使用 `docker-compose` 部署。
### 步骤
1. 克隆项目仓库或下载 docker-compose.yml
2. 根据需要修改 `.env` 配置文件
3. 启动服务:
```bash
docker compose up -d
```
## 配置项
- Docker 运行环境
- OpenAI API Key(可选)
## 验证
访问 `http://localhost:3000` 确认服务已启动。

View File

@@ -0,0 +1,21 @@
{
"id": "dify",
"name": "Dify",
"type": "docker-app",
"version": "0.8.3",
"author": "Dify.AI",
"description": "开源 AI 应用开发平台,支持 RAG、Agent、工作流编排",
"tags": [
"LLM",
"RAG",
"Agent",
"Workflow"
],
"icon": "linear-gradient(135deg, #1C64F2, #7C3AED)",
"iconLetter": "D",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

8
entries/dify/usage.md Normal file
View File

@@ -0,0 +1,8 @@
# 使用 Dify
## 服务描述
开源 AI 应用开发平台,支持 RAG、Agent、工作流编排
## 访问方式
- **Web 界面**:`http://localhost:3000`
- **GitHub**:https://github.com/langgenius/dify

View File

@@ -0,0 +1,11 @@
# 安装 Fetch MCP
## 环境要求
- Python >= 3.10
## 安装步骤
## 验证
运行以下命令确认可正常启动:
```bash
uvx mcp-server-fetch --help
```

View File

@@ -0,0 +1,19 @@
{
"id": "fetch-mcp",
"name": "Fetch MCP",
"type": "mcp",
"version": "0.6.2",
"author": "Anthropic",
"description": "网页内容获取与转换,将网页转为 Markdown 供 LLM 使用",
"tags": [
"网页",
"抓取",
"Markdown"
],
"icon": "terminal",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,26 @@
# 使用 Fetch MCP
## 服务描述
网页内容获取与转换,将网页转为 Markdown 供 LLM 使用
## 连接方式
- **传输协议**:stdio
- **启动命令**:`uvx mcp-server-fetch`
### 连接配置
```json
{
"transport": "stdio",
"command": "uvx",
"args": [
"mcp-server-fetch"
]
}
```
## 可用能力
- `url_fetch`
- `html_to_markdown`
- `content_extract`
共提供 **2** 个工具。

11
entries/fs-mcp/install.md Normal file
View File

@@ -0,0 +1,11 @@
# 安装 Filesystem MCP
## 环境要求
- Node.js >= 18
## 安装步骤
## 验证
运行以下命令确认可正常启动:
```bash
npx -y @modelcontextprotocol/server-filesystem /path/to/allowed/dir --help
```

View File

@@ -0,0 +1,18 @@
{
"id": "fs-mcp",
"name": "Filesystem MCP",
"type": "mcp",
"version": "1.2.0",
"author": "Anthropic",
"description": "本地文件系统操作,支持读写、搜索、监控文件变化",
"tags": [
"文件系统",
"本地"
],
"icon": "terminal",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

29
entries/fs-mcp/usage.md Normal file
View File

@@ -0,0 +1,29 @@
# 使用 Filesystem MCP
## 服务描述
本地文件系统操作,支持读写、搜索、监控文件变化
## 连接方式
- **传输协议**:stdio
- **启动命令**:`npx -y @modelcontextprotocol/server-filesystem /path/to/allowed/dir`
### 连接配置
```json
{
"transport": "stdio",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-filesystem",
"/path/to/allowed/dir"
]
}
```
## 可用能力
- `file_read`
- `file_write`
- `file_search`
- `file_watch`
共提供 **11** 个工具。

View File

@@ -0,0 +1,17 @@
# 安装 GitHub MCP
## 环境要求
- Node.js >= 18
## 安装步骤
### 配置环境变量
```bash
export GITHUB_PERSONAL_ACCESS_TOKEN=<your-token>
```
## 验证
运行以下命令确认可正常启动:
```bash
npx -y @modelcontextprotocol/server-github --help
```

View File

@@ -0,0 +1,19 @@
{
"id": "github-mcp",
"name": "GitHub MCP",
"type": "mcp",
"version": "0.9.1",
"author": "GitHub",
"description": "代码仓库操作,支持 PR、Issue、代码搜索和仓库管理",
"tags": [
"代码",
"Git",
"PR"
],
"icon": "terminal",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,28 @@
# 使用 GitHub MCP
## 服务描述
代码仓库操作,支持 PR、Issue、代码搜索和仓库管理
## 连接方式
- **传输协议**:stdio
- **启动命令**:`npx -y @modelcontextprotocol/server-github`
### 连接配置
```json
{
"transport": "stdio",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-github"
]
}
```
## 可用能力
- `repo_read`
- `pr_manage`
- `issue_manage`
- `code_search`
共提供 **35** 个工具。

View File

@@ -0,0 +1,25 @@
# 安装 LobeChat
LobeChat 是一个开源的现代设计 ChatGPT/LLM UI 框架,支持多模型服务提供商(OpenAI / Claude / Gemini / Ollama 等),多模态和可扩展的插件系统。一键免费部署私有 ChatGPT/Claude 应用。
## 环境要求
- Docker 已安装并运行
- 内存:≥ 1GB
- 磁盘空间:≥ 3GB
- 端口:3210
## 安装方式
使用 `docker` 部署。
### 步骤
1. 拉取 Docker 镜像并启动容器:
```bash
docker run -d -p 3210:3210 --name lobechat lobechat/lobechat:latest
```
## 配置项
- Docker 运行环境
- OpenAI API Key(可选)
## 验证
访问 `http://localhost:3210` 确认服务已启动。

View File

@@ -0,0 +1,20 @@
{
"id": "lobechat",
"name": "LobeChat",
"type": "docker-app",
"version": "1.20.0",
"author": "LobeHub",
"description": "开源高性能聊天机器人框架,支持多模型和插件",
"tags": [
"Chat",
"多模型",
"插件"
],
"icon": "linear-gradient(135deg, #000000, #333333)",
"iconLetter": "L",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,8 @@
# 使用 LobeChat
## 服务描述
开源高性能聊天机器人框架,支持多模型和插件
## 访问方式
- **Web 界面**:`http://localhost:3210`
- **GitHub**:https://github.com/lobehub/lobe-chat

View File

@@ -0,0 +1,11 @@
# 安装 Memory MCP
## 环境要求
- Node.js >= 18
## 安装步骤
## 验证
运行以下命令确认可正常启动:
```bash
npx -y @modelcontextprotocol/server-memory --help
```

View File

@@ -0,0 +1,19 @@
{
"id": "memory-mcp",
"name": "Memory MCP",
"type": "mcp",
"version": "0.6.2",
"author": "Anthropic",
"description": "基于知识图谱的持久化记忆服务,支持实体和关系的存取",
"tags": [
"记忆",
"知识图谱",
"持久化"
],
"icon": "terminal",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,27 @@
# 使用 Memory MCP
## 服务描述
基于知识图谱的持久化记忆服务,支持实体和关系的存取
## 连接方式
- **传输协议**:stdio
- **启动命令**:`npx -y @modelcontextprotocol/server-memory`
### 连接配置
```json
{
"transport": "stdio",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-memory"
]
}
```
## 可用能力
- `entity_store`
- `relation_store`
- `graph_query`
共提供 **7** 个工具。

25
entries/n8n/install.md Normal file
View File

@@ -0,0 +1,25 @@
# 安装 n8n
n8n 是一个可扩展的工作流自动化工具。使用公平代码许可,拥有原生 AI 能力,可以连接任何东西。支持自托管,提供丰富的第三方服务集成节点。
## 环境要求
- Docker 已安装并运行
- 内存:≥ 2GB
- 磁盘空间:≥ 5GB
- 端口:5678
## 安装方式
使用 `docker` 部署。
### 步骤
1. 拉取 Docker 镜像并启动容器:
```bash
docker run -d -p 5678:5678 --name n8n n8n/n8n:latest
```
## 配置项
- Docker 运行环境
- 数据库(SQLite / PostgreSQL)
## 验证
访问 `http://localhost:5678` 确认服务已启动。

20
entries/n8n/manifest.json Normal file
View File

@@ -0,0 +1,20 @@
{
"id": "n8n",
"name": "n8n",
"type": "docker-app",
"version": "1.64.0",
"author": "n8n-io",
"description": "可视化工作流自动化平台,支持 400+ 集成",
"tags": [
"Automation",
"Workflow",
"Integration"
],
"icon": "linear-gradient(135deg, #EA4B71, #D93668)",
"iconLetter": "n",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

8
entries/n8n/usage.md Normal file
View File

@@ -0,0 +1,8 @@
# 使用 n8n
## 服务描述
可视化工作流自动化平台,支持 400+ 集成
## 访问方式
- **Web 界面**:`http://localhost:5678`
- **GitHub**:https://github.com/n8n-io/n8n

View File

@@ -0,0 +1,25 @@
# 安装 Open WebUI
Open WebUI 是一个可扩展的自托管 AI 界面,支持完全离线操作。支持多种 LLM 运行器,包括 Ollama 和 OpenAI 兼容 API,内置 RAG 集成、网页浏览、代码执行等功能。
## 环境要求
- Docker 已安装并运行
- 内存:≥ 2GB
- 磁盘空间:≥ 5GB
- 端口:8080
## 安装方式
使用 `docker` 部署。
### 步骤
1. 拉取 Docker 镜像并启动容器:
```bash
docker run -d -p 8080:8080 --name open-webui open-webui/open-webui:latest
```
## 配置项
- Docker 运行环境
- Ollama 或 OpenAI API Key
## 验证
访问 `http://localhost:8080` 确认服务已启动。

View File

@@ -0,0 +1,20 @@
{
"id": "open-webui",
"name": "Open WebUI",
"type": "docker-app",
"version": "0.4.8",
"author": "Open WebUI",
"description": "自托管的 AI 对话界面,支持 Ollama 和 OpenAI 兼容 API",
"tags": [
"Chat",
"Ollama",
"WebUI"
],
"icon": "linear-gradient(135deg, #6366F1, #8B5CF6)",
"iconLetter": "O",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,8 @@
# 使用 Open WebUI
## 服务描述
自托管的 AI 对话界面,支持 Ollama 和 OpenAI 兼容 API
## 访问方式
- **Web 界面**:`http://localhost:8080`
- **GitHub**:https://github.com/open-webui/open-webui

View File

@@ -0,0 +1,28 @@
# 安装 OpenClaw
OpenClaw 是一个开源的 Agent 运行时平台,提供嵌入式运行时、会话管理、工具策略控制、多 Agent 编排、Sandbox 隔离执行等能力。支持流式输出、对话压缩、队列管理和 hooks 扩展,适合构建企业级 AI 应用。
## 环境要求
- Docker 已安装并运行
- 内存:≥ 4GB
- 磁盘空间:≥ 10GB
- 端口:8080, 3000
## 安装方式
使用 `docker-compose` 部署。
### 步骤
1. 克隆项目仓库或下载 docker-compose.yml
2. 根据需要修改 `.env` 配置文件
3. 启动服务:
```bash
docker compose up -d
```
## 配置项
- Docker 运行环境
- Node.js 18+
- API Key 配置
## 验证
访问 `http://localhost:8080` 确认服务已启动。

View File

@@ -0,0 +1,21 @@
{
"id": "openclaw",
"name": "OpenClaw",
"type": "docker-app",
"version": "0.9.0",
"author": "OpenClaw",
"description": "开源 Agent 运行时平台,支持多 Agent 编排和工具策略",
"tags": [
"Agent",
"Runtime",
"Multi-Agent",
"Sandbox"
],
"icon": "linear-gradient(135deg, #F59E0B, #DC2626)",
"iconLetter": "O",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,8 @@
# 使用 OpenClaw
## 服务描述
开源 Agent 运行时平台,支持多 Agent 编排和工具策略
## 访问方式
- **Web 界面**:`http://localhost:8080`
- **GitHub**:https://github.com/openclaw/openclaw

View File

@@ -0,0 +1,16 @@
# 安装 Playwright MCP
## 环境要求
- Node.js >= 18
## 安装步骤
1. 运行安装命令:
```bash
npx playwright install chromium
```
## 验证
运行以下命令确认可正常启动:
```bash
npx @playwright/mcp@latest --help
```

View File

@@ -0,0 +1,19 @@
{
"id": "playwright-mcp",
"name": "Playwright MCP",
"type": "mcp",
"version": "0.0.68",
"author": "Microsoft",
"description": "浏览器自动化 — 网页导航、截图、表单填写、DOM 操作、PDF 生成",
"tags": [
"浏览器",
"自动化",
"Playwright"
],
"icon": "terminal",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,29 @@
# 使用 Playwright MCP
## 服务描述
浏览器自动化 — 网页导航、截图、表单填写、DOM 操作、PDF 生成
## 连接方式
- **传输协议**:stdio
- **启动命令**:`npx @playwright/mcp@latest`
### 连接配置
```json
{
"transport": "stdio",
"command": "npx",
"args": [
"@playwright/mcp@latest"
]
}
```
## 可用能力
- `page_navigate`
- `screenshot`
- `dom_query`
- `form_fill`
- `pdf_generate`
- `content_extract`
共提供 **33** 个工具。

View File

@@ -1,20 +1,19 @@
{
"id": "ragflow-pipeline",
"name": "RagFlow Pipeline",
"description": "深度文档理解 RAG 引擎,支持复杂格式文档解析和多级检索",
"sourceAppId": "ragflow",
"sourceAppName": "RagFlow",
"endpoint": "http://localhost:9380/api/v1",
"capabilities": [
"doc_parsing",
"deep_retrieval",
"citation"
],
"type": "http-api",
"version": "0.14.0",
"author": "InfiniFlow",
"description": "深度文档理解 RAG 引擎,支持复杂格式文档解析和多级检索",
"tags": [
"RAG",
"文档理解",
"Pipeline"
],
"icon": "globe",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,13 @@
# 使用 RagFlow Pipeline
## 服务描述
深度文档理解 RAG 引擎,支持复杂格式文档解析和多级检索
## 连接方式
- **类型**:HTTP REST API
- **端点**:`http://localhost:9380/api/v1`
## 可用能力
- `doc_parsing`
- `deep_retrieval`
- `citation`

View File

@@ -0,0 +1,27 @@
# 安装 RagFlow
RagFlow 是一款基于深度文档理解构建的开源 RAG 引擎。可以为各种规模的企业及个人提供流畅的 RAG 工作流,结合大语言模型(LLM),针对用户各类不同的复杂格式数据提供可靠的问答以及有理有据的引用。
## 环境要求
- Docker 已安装并运行
- 内存:≥ 8GB
- 磁盘空间:≥ 20GB
- 端口:9380, 443, 80
## 安装方式
使用 `docker-compose` 部署。
### 步骤
1. 克隆项目仓库或下载 docker-compose.yml
2. 根据需要修改 `.env` 配置文件
3. 启动服务:
```bash
docker compose up -d
```
## 配置项
- Docker 运行环境
- Elasticsearch / Infinity 数据库
## 验证
访问 `http://localhost:9380` 确认服务已启动。

View File

@@ -0,0 +1,20 @@
{
"id": "ragflow",
"name": "RagFlow",
"type": "docker-app",
"version": "0.14.0",
"author": "InfiniFlow",
"description": "基于深度文档理解的开源 RAG 引擎",
"tags": [
"RAG",
"文档理解",
"知识库"
],
"icon": "linear-gradient(135deg, #F97316, #EF4444)",
"iconLetter": "R",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

8
entries/ragflow/usage.md Normal file
View File

@@ -0,0 +1,8 @@
# 使用 RagFlow
## 服务描述
基于深度文档理解的开源 RAG 引擎
## 访问方式
- **Web 界面**:`http://localhost:9380`
- **GitHub**:https://github.com/infiniflow/ragflow

View File

@@ -1,18 +1,19 @@
{
"id": "wecom-webhook",
"name": "企业微信 Webhook",
"description": "企业微信机器人消息通知,支持文本、Markdown 和卡片消息",
"endpoint": "https://qyapi.weixin.qq.com/cgi-bin/webhook/send",
"capabilities": [
"text_message",
"markdown_message",
"card_message"
],
"type": "http-api",
"version": "1.0",
"author": "腾讯",
"description": "企业微信机器人消息通知,支持文本、Markdown 和卡片消息",
"tags": [
"通知",
"企业微信",
"Webhook"
],
"icon": "globe",
"platformSupport": [
"macos",
"windows",
"linux"
]
}

View File

@@ -0,0 +1,13 @@
# 使用 企业微信 Webhook
## 服务描述
企业微信机器人消息通知,支持文本、Markdown 和卡片消息
## 连接方式
- **类型**:HTTP REST API
- **端点**:`https://qyapi.weixin.qq.com/cgi-bin/webhook/send`
## 可用能力
- `text_message`
- `markdown_message`
- `card_message`

View File

@@ -1,25 +1,27 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"id": "desirecore-registry-manifest",
"version": "2.0.0",
"version": "3.0.0",
"name": "DesireCore Registry",
"description": "DesireCore 官方应用商店与服务注册表",
"maintainer": "DesireCore Team",
"repository": "https://github.com/desirecore/registry",
"lastUpdated": "2026-02-24",
"lastUpdated": "2026-03-08",
"stats": {
"apps": 8,
"mcpServices": 7,
"httpServices": 5,
"descriptors": 18,
"serviceCategories": 6,
"appCategories": 5
"appCategories": 5,
"entries": 20
},
"dataVersion": {
"apps": "2.0.0",
"mcp": "2.0.0",
"services": "2.0.0",
"descriptors": "1.1.0",
"categories": "1.1.0"
"categories": "1.1.0",
"entries": "1.0.0"
}
}

View File

@@ -1,37 +0,0 @@
{
"id": "db-mcp",
"name": "Database MCP",
"description": "PostgreSQL 数据库查询与管理,支持 SQL 执行和 Schema 浏览",
"capabilities": [
"sql_query",
"schema_browse",
"data_export"
],
"toolCount": 8,
"version": "0.5.2",
"author": "Community",
"tags": [
"数据库",
"PostgreSQL",
"SQL"
],
"install": {
"method": "npx",
"packageName": "@modelcontextprotocol/server-postgres",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-postgres",
"postgresql://localhost/mydb"
]
},
"connection": {
"transport": "stdio",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-postgres",
"postgresql://localhost/mydb"
]
}
}

View File

@@ -1,33 +0,0 @@
{
"id": "fetch-mcp",
"name": "Fetch MCP",
"description": "网页内容获取与转换,将网页转为 Markdown 供 LLM 使用",
"capabilities": [
"url_fetch",
"html_to_markdown",
"content_extract"
],
"toolCount": 2,
"version": "0.6.2",
"author": "Anthropic",
"tags": [
"网页",
"抓取",
"Markdown"
],
"install": {
"method": "uvx",
"packageName": "mcp-server-fetch",
"command": "uvx",
"args": [
"mcp-server-fetch"
]
},
"connection": {
"transport": "stdio",
"command": "uvx",
"args": [
"mcp-server-fetch"
]
}
}

View File

@@ -1,37 +0,0 @@
{
"id": "fs-mcp",
"name": "Filesystem MCP",
"description": "本地文件系统操作,支持读写、搜索、监控文件变化",
"capabilities": [
"file_read",
"file_write",
"file_search",
"file_watch"
],
"toolCount": 11,
"version": "1.2.0",
"author": "Anthropic",
"tags": [
"文件系统",
"本地"
],
"install": {
"method": "npx",
"packageName": "@modelcontextprotocol/server-filesystem",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-filesystem",
"/path/to/allowed/dir"
]
},
"connection": {
"transport": "stdio",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-filesystem",
"/path/to/allowed/dir"
]
}
}

View File

@@ -1,39 +0,0 @@
{
"id": "github-mcp",
"name": "GitHub MCP",
"description": "代码仓库操作,支持 PR、Issue、代码搜索和仓库管理",
"capabilities": [
"repo_read",
"pr_manage",
"issue_manage",
"code_search"
],
"toolCount": 35,
"version": "0.9.1",
"author": "GitHub",
"tags": [
"代码",
"Git",
"PR"
],
"install": {
"method": "npx",
"packageName": "@modelcontextprotocol/server-github",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-github"
],
"env": {
"GITHUB_PERSONAL_ACCESS_TOKEN": "<your-token>"
}
},
"connection": {
"transport": "stdio",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-github"
]
}
}

View File

@@ -1,35 +0,0 @@
{
"id": "memory-mcp",
"name": "Memory MCP",
"description": "基于知识图谱的持久化记忆服务,支持实体和关系的存取",
"capabilities": [
"entity_store",
"relation_store",
"graph_query"
],
"toolCount": 7,
"version": "0.6.2",
"author": "Anthropic",
"tags": [
"记忆",
"知识图谱",
"持久化"
],
"install": {
"method": "npx",
"packageName": "@modelcontextprotocol/server-memory",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-memory"
]
},
"connection": {
"transport": "stdio",
"command": "npx",
"args": [
"-y",
"@modelcontextprotocol/server-memory"
]
}
}

View File

@@ -1,39 +0,0 @@
{
"id": "playwright-mcp",
"name": "Playwright MCP",
"description": "浏览器自动化 — 网页导航、截图、表单填写、DOM 操作、PDF 生成",
"capabilities": [
"page_navigate",
"screenshot",
"dom_query",
"form_fill",
"pdf_generate",
"content_extract"
],
"toolCount": 33,
"version": "0.0.68",
"author": "Microsoft",
"tags": [
"浏览器",
"自动化",
"Playwright"
],
"install": {
"method": "npx",
"packageName": "@playwright/mcp",
"command": "npx",
"args": [
"@playwright/mcp@latest"
],
"postInstall": [
"npx playwright install chromium"
]
},
"connection": {
"transport": "stdio",
"command": "npx",
"args": [
"@playwright/mcp@latest"
]
}
}