# Directory Structure

```
├── .github
│   └── workflows
│       ├── pypi-publish.yaml
│       └── release.yml
├── .gitignore
├── .python-version
├── cliff.toml
├── LICENSE
├── Makefile
├── pyproject.toml
├── README.md
├── server.py
└── uv.lock
```

# Files

--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------

```
3.10

```

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------

```
.venv
__pycache__
*.egg-info
```

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------

```markdown
[![MseeP.ai Security Assessment Badge](https://mseep.net/pr/cr7258-higress-ai-search-mcp-server-badge.png)](https://mseep.ai/app/cr7258-higress-ai-search-mcp-server)

# Higress AI-Search MCP Server

## Overview

A Model Context Protocol (MCP) server that provides an AI search tool, enhancing AI model responses with real-time results from various search engines via the [Higress](https://higress.cn/) [ai-search](https://github.com/alibaba/higress/blob/main/plugins/wasm-go/extensions/ai-search/README.md) feature.

<a href="https://glama.ai/mcp/servers/gk0xde4wbp">
  <img width="380" height="200" src="https://glama.ai/mcp/servers/gk0xde4wbp/badge" alt="Higress AI-Search Server MCP server" />
</a>

## Demo

### Cline

https://github.com/user-attachments/assets/60a06d99-a46c-40fc-b156-793e395542bb

### Claude Desktop

https://github.com/user-attachments/assets/5c9e639f-c21c-4738-ad71-1a88cc0bcb46

## Features

- **Internet Search**: Google, Bing, Quark - for general web information
- **Academic Search**: Arxiv - for scientific papers and research
- **Internal Knowledge Search**: internal knowledge bases described via the `INTERNAL_KNOWLEDGE_BASES` environment variable (for example, employee handbooks, company policies, internal process documents)

## Prerequisites

- [uv](https://github.com/astral-sh/uv) for package installation.
- Higress configured with the [ai-search](https://github.com/alibaba/higress/blob/main/plugins/wasm-go/extensions/ai-search/README.md) plugin and the [ai-proxy](https://github.com/alibaba/higress/blob/main/plugins/wasm-go/extensions/ai-proxy/README.md) plugin.

## Configuration

The server can be configured using environment variables:

- `HIGRESS_URL` (optional): URL for the Higress service (default: `http://localhost:8080/v1/chat/completions`).
- `MODEL` (required): LLM model to use for generating responses.
- `INTERNAL_KNOWLEDGE_BASES` (optional): Description of internal knowledge bases.

### Option 1: Using uvx

Using uvx automatically installs the package from PyPI; there is no need to clone the repository locally.

```json
{
  "mcpServers": {
    "higress-ai-search-mcp-server": {
      "command": "uvx",
      "args": [
        "higress-ai-search-mcp-server"
      ],
      "env": {
        "HIGRESS_URL": "http://localhost:8080/v1/chat/completions",
        "MODEL": "qwen-turbo",
        "INTERNAL_KNOWLEDGE_BASES": "Employee handbook, company policies, internal process documents"
      }
    }
  }
}
```

### Option 2: Using uv with local development

Using uv requires cloning the repository locally and specifying the path to the source code.

```json
{
  "mcpServers": {
    "higress-ai-search-mcp-server": {
      "command": "uv",
      "args": [
        "--directory",
        "path/to/src/higress-ai-search-mcp-server",
        "run",
        "higress-ai-search-mcp-server"
      ],
      "env": {
        "HIGRESS_URL": "http://localhost:8080/v1/chat/completions",
        "MODEL": "qwen-turbo",
        "INTERNAL_KNOWLEDGE_BASES": "Employee handbook, company policies, internal process documents"
      }
    }
  }
}
```
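
Before wiring the server into an MCP client, you can check that Higress and the configured model are reachable by sending the same chat-completions request that `server.py` sends. The snippet below is a minimal sketch that assumes the default `HIGRESS_URL`, the `qwen-turbo` model from the examples above, and an OpenAI-compatible response from the ai-proxy plugin:

```python
import httpx

HIGRESS_URL = "http://localhost:8080/v1/chat/completions"  # default used by the server

payload = {
    "model": "qwen-turbo",  # must match the MODEL environment variable
    "messages": [{"role": "user", "content": "What is Higress?"}],
}

response = httpx.post(HIGRESS_URL, json=payload, timeout=30.0)
response.raise_for_status()
print(response.json()["choices"][0]["message"]["content"])
```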

## License

This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
```

--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------

```toml
[project]
name = "higress-ai-search-mcp-server"
version = "1.0.0"
description = "Higress ai-search MCP Server"
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
    "fastmcp>=0.4.1",
    "httpx>=0.24.0",
    "tomli>=2.2.1",
    "tomli-w>=1.2.0",
]

[project.license]
file = "LICENSE"

[project.scripts]
higress-ai-search-mcp-server = "server:main"

[tool.setuptools]
license-files = []

```

--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------

```yaml
name: Release

on:
  push:
    tags:
      - 'v*'

jobs:
  release:
    runs-on: ubuntu-latest
    permissions:
      contents: write
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.x'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install git-cliff

      - name: Get version from tag
        id: get_version
        run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV

      - name: Generate changelog
        run: |
          git-cliff --output CHANGELOG.md --latest

      - name: Create Release
        uses: softprops/action-gh-release@v1
        with:
          name: v${{ env.VERSION }}
          body_path: CHANGELOG.md
          draft: false
          prerelease: false
```

--------------------------------------------------------------------------------
/.github/workflows/pypi-publish.yaml:
--------------------------------------------------------------------------------

```yaml
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: PyPI Publish

on:
  workflow_run:
    workflows: ["Release"]
    types:
      - completed

env:
  UV_PUBLISH_TOKEN: '${{ secrets.PYPI_API_TOKEN }}'

jobs:
  deploy:
    runs-on: ubuntu-latest
    if: ${{ github.event.workflow_run.conclusion == 'success' }}
    steps:
    - uses: actions/checkout@v2

    - name: Set up Python
      uses: actions/setup-python@v2
      with:
        python-version: '3.10.x'

    - name: Install dependencies
      run: |
        python -m pip install uv
        uv sync

    - name: Build package
      run: uv build

    - name: Publish package
      run: uv publish
```

--------------------------------------------------------------------------------
/cliff.toml:
--------------------------------------------------------------------------------

```toml
# git-cliff ~ configuration file
# https://git-cliff.org/docs/configuration

[changelog]
# template for the changelog header
header = """
# Changelog\n
"""
# template for the changelog body
# https://keats.github.io/tera/docs/#introduction
body = """
{% if version %}\
    {% if previous.version %}\
        ## [{{ version | trim_start_matches(pat="v") }}]($REPO/compare/{{ previous.version }}..{{ version }}) - {{ timestamp | date(format="%Y-%m-%d") }}
    {% else %}\
        ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }}
    {% endif %}\
{% else %}\
    ## [unreleased]
{% endif %}\
{% for group, commits in commits | group_by(attribute="group") %}
    ### {{ group | striptags | trim | upper_first }}
    {% for commit in commits
    | filter(attribute="scope")
    | sort(attribute="scope") %}
        - **({{commit.scope}})**{% if commit.breaking %} [**breaking**]{% endif %} \
            {{ commit.message }} - ([{{ commit.id | truncate(length=7, end="") }}]($REPO/commit/{{ commit.id }})) - @{{ commit.author.name }}
    {%- endfor -%}
    {% raw %}\n{% endraw %}\
    {%- for commit in commits %}
        {%- if commit.scope -%}
        {% else -%}
            - {% if commit.breaking %} [**breaking**]{% endif %}\
                {{ commit.message }} - ([{{ commit.id | truncate(length=7, end="") }}]($REPO/commit/{{ commit.id }})) - @{{ commit.author.name }}
        {% endif -%}
    {% endfor -%}
{% endfor %}\n
"""
# template for the changelog footer
footer = """
<!-- generated by git-cliff -->
"""
# remove the leading and trailing whitespace from the templates
trim = true
# postprocessors
postprocessors = [
    { pattern = '\$REPO', replace = "https://github.com/cr7258/higress-ai-search-mcp-server.git" }, # replace repository URL
]

[git]
# parse the commits based on https://www.conventionalcommits.org
conventional_commits = true
# filter out the commits that are not conventional
filter_unconventional = true
# process each line of a commit as an individual commit
split_commits = false
# regex for preprocessing the commit messages
commit_preprocessors = [
    # { pattern = '\((\w+\s)?#([0-9]+)\)', replace = "([#${2}](https://github.com/cr7258/higress-ai-search-mcp-server/issues/${2}))"}, # replace issue numbers
]
# regex for parsing and grouping commits
commit_parsers = [
  { message = "^feat", group = "<!-- 0 -->⛰️  Features" },
  { message = "^fix", group = "<!-- 1 -->🐛 Bug Fixes" },
  { message = "^doc", group = "<!-- 3 -->📚 Documentation" },
  { message = "^perf", group = "<!-- 4 -->⚡ Performance" },
  { message = "^refactor\\(clippy\\)", skip = true },
  { message = "^refactor", group = "<!-- 2 -->🚜 Refactor" },
  { message = "^style", group = "<!-- 5 -->🎨 Styling" },
  { message = "^test", group = "<!-- 6 -->🧪 Testing" },
  { message = "^chore\\(release\\): prepare for", skip = true },
  { message = "^chore\\(deps.*\\)", skip = true },
  { message = "^chore\\(pr\\)", skip = true },
  { message = "^chore\\(pull\\)", skip = true },
  { message = "^chore\\(npm\\).*yarn\\.lock", skip = true },
  { message = "^chore|^ci", group = "<!-- 7 -->⚙️ Miscellaneous Tasks" },
  { body = ".*security", group = "<!-- 8 -->🛡️ Security" },
  { message = "^revert", group = "<!-- 9 -->◀️ Revert" },
]

# filter out the commits that are not matched by commit parsers
filter_commits = false
# sort the tags topologically
topo_order = false
# sort the commits inside sections by oldest/newest order
sort_commits = "oldest"
# regex for matching git tags
tag_pattern = "^v[0-9]"
# regex for skipping tags
skip_tags = ""
# regex for ignoring tags
ignore_tags = ""
# use tag date instead of commit date
date_order = true
# path to git binary
git_path = "git"
# whether to use relaxed or strict semver parsing
relaxed_semver = true
# only show the changes for the current version
tag_range = true
```

--------------------------------------------------------------------------------
/server.py:
--------------------------------------------------------------------------------

```python
import os
import json
import httpx
from fastmcp import FastMCP
import logging
from typing import Dict, Any
from functools import wraps

# Create MCP Server
MCP_SERVER_NAME = "higress-ai-search-mcp-server"
mcp = FastMCP(MCP_SERVER_NAME)

# Configure logging
logging.basicConfig(
    level=logging.INFO, format="%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
logger = logging.getLogger(MCP_SERVER_NAME)

# Get Higress configuration from environment variables
HIGRESS_URL = os.getenv("HIGRESS_URL", "http://localhost:8080/v1/chat/completions")

# Get MODEL from environment variables (required)
MODEL = os.getenv("MODEL")
if not MODEL:
    raise ValueError("MODEL environment variable is required. Please set it to the LLM model you want to use.")

# Get knowledge base information from environment variables
INTERNAL_KNOWLEDGE_BASES = os.getenv("INTERNAL_KNOWLEDGE_BASES", "")
INTERNAL_KB_DESCRIPTION = f"👨‍💻 **Internal Knowledge Search**: {INTERNAL_KNOWLEDGE_BASES}" if INTERNAL_KNOWLEDGE_BASES else ""

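# Decorator that rewrites the wrapped tool's docstring at import time so the
# description of any configured internal knowledge bases (INTERNAL_KNOWLEDGE_BASES)
# is included in what MCP clients see when they list the ai_search tool.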
def dynamic_docstring(func):
    @wraps(func)
    async def wrapper(*args, **kwargs):
        return await func(*args, **kwargs)
    
    base_doc = """
    Enhance AI model responses with real-time search results from search engines.
    
    This tool sends a query to Higress, which integrates with various search engines to provide up-to-date information:
    
    🌐 **Internet Search**: Google, Bing, Quark - for general web information
    📖 **Academic Search**: Arxiv - for scientific papers and research
    {internal_knowledge}
    
    Args:
        query: The user's question or search query
        
    Returns:
        The enhanced AI response with search results incorporated
    """.format(internal_knowledge=INTERNAL_KB_DESCRIPTION)
    
    # Update the function's docstring
    wrapper.__doc__ = base_doc
    return wrapper

@mcp.tool()
@dynamic_docstring
async def ai_search(query: str) -> Dict[str, Any]:
    """Dynamic docstring will be set by the decorator"""
    logger.info(f"Sending query to Higress: {query}")
    
    payload = {
        "model": MODEL,
        "messages": [
            {
                "role": "user",
                "content": query
            }
        ]
    }
    
    try:
        async with httpx.AsyncClient() as client:
            response = await client.post(
                HIGRESS_URL,
                json=payload,
                headers={"Content-Type": "application/json"},
                timeout=30.0  # 30 seconds timeout
            )
            
            if response.status_code != 200:
                logger.error(f"Error from Higress: {response.status_code} - {response.text}")
                return {
                    "status": "error",
                    "code": response.status_code,
                    "message": f"Higress returned an error: {response.text}"
                }
                
            result = response.json()
            logger.info(f"Received response from Higress")
            return result
            
    except httpx.RequestError as e:
        logger.error(f"Request error: {str(e)}")
        return {
            "status": "error",
            "message": f"Failed to connect to Higress: {str(e)}"
        }
    except json.JSONDecodeError as e:
        logger.error(f"JSON decode error: {str(e)}")
        return {
            "status": "error",
            "message": f"Failed to parse Higress response: {str(e)}"
        }
    except Exception as e:
        logger.error(f"Unexpected error: {str(e)}")
        return {
            "status": "error",
            "message": f"An unexpected error occurred: {str(e)}"
        }

def main():
    """Entry point for the MCP server when run as a module."""
    logger.info(f"Starting {MCP_SERVER_NAME} with Higress at {HIGRESS_URL}")
    mcp.run()

if __name__ == "__main__":
    main()

```