# Directory Structure
```
├── .github
│ └── workflows
│ └── npm-publish.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── package-lock.json
├── package.json
├── README.md
├── smithery.yaml
├── src
│ └── index.ts
└── tsconfig.json
```
# Files
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
1 | node_modules/
2 | build/
3 | *.log
4 | .env*
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
1 | # mcp-google-server — an MCP Server for Google Custom Search and Webpage Reading
2 | [![smithery badge](https://smithery.ai/badge/@adenot/mcp-google-search)](https://smithery.ai/server/@adenot/mcp-google-search)
3 |
4 | A Model Context Protocol server that provides web search capabilities using Google Custom Search API and webpage content extraction functionality.
5 |
6 | ## Setup
7 |
8 | ### Getting Google API Key and Search Engine ID
9 |
10 | 1. Create a Google Cloud Project:
11 | - Go to [Google Cloud Console](https://console.cloud.google.com/)
12 | - Create a new project or select an existing one
13 | - Enable billing for your project
14 |
15 | 2. Enable Custom Search API:
16 | - Go to [API Library](https://console.cloud.google.com/apis/library)
17 | - Search for "Custom Search API"
18 | - Click "Enable"
19 |
20 | 3. Get API Key:
21 | - Go to [Credentials](https://console.cloud.google.com/apis/credentials)
22 | - Click "Create Credentials" > "API Key"
23 | - Copy your API key
24 | - (Optional) Restrict the API key to only Custom Search API
25 |
26 | 4. Create Custom Search Engine:
27 | - Go to [Programmable Search Engine](https://programmablesearchengine.google.com/create/new)
28 | - Enter the sites you want to search (use www.google.com for general web search)
29 | - Click "Create"
30 | - On the next page, click "Customize"
31 | - In the settings, enable "Search the entire web"
32 | - Copy your Search Engine ID (cx)
33 |
34 | ## Development
35 |
36 | Install dependencies:
37 | ```bash
38 | npm install
39 | ```
40 |
41 | Build the server:
42 | ```bash
43 | npm run build
44 | ```
45 |
46 | For development with auto-rebuild:
47 | ```bash
48 | npm run watch
49 | ```
50 |
51 | ## Features
52 |
53 | ### Search Tool
54 | Perform web searches using Google Custom Search API:
55 | - Search the entire web or specific sites
56 | - Control number of results (1-10)
57 | - Get structured results with title, link, and snippet
58 |
59 | ### Webpage Reader Tool
60 | Extract content from any webpage:
61 | - Fetch and parse webpage content
62 | - Extract page title and main text
63 | - Clean content by removing scripts and styles
64 | - Return structured data with title, text, and URL
65 |
66 | ## Installation
67 |
68 | ### Installing via Smithery
69 |
70 | To install Google Custom Search Server for Claude Desktop automatically via [Smithery](https://smithery.ai/server/@adenot/mcp-google-search):
71 |
72 | ```bash
73 | npx -y @smithery/cli install @adenot/mcp-google-search --client claude
74 | ```
75 |
76 | To use with Claude Desktop, add the server config with your Google API credentials:
77 |
78 | On MacOS: `~/Library/Application Support/Claude/claude_desktop_config.json`
79 | On Windows: `%APPDATA%/Claude/claude_desktop_config.json`
80 |
81 | ```json
82 | {
83 | "mcpServers": {
84 | "google-search": {
85 | "command": "npx",
86 | "args": [
87 | "-y",
88 | "@adenot/mcp-google-search"
89 | ],
90 | "env": {
91 | "GOOGLE_API_KEY": "your-api-key-here",
92 | "GOOGLE_SEARCH_ENGINE_ID": "your-search-engine-id-here"
93 | }
94 | }
95 | }
96 | }
97 | ```
98 |
99 | ## Usage
100 |
101 | ### Search Tool
102 | ```json
103 | {
104 | "name": "search",
105 | "arguments": {
106 | "query": "your search query",
107 | "num": 5 // optional, default is 5, max is 10
108 | }
109 | }
110 | ```
111 |
112 | ### Webpage Reader Tool
113 | ```json
114 | {
115 | "name": "read_webpage",
116 | "arguments": {
117 | "url": "https://example.com"
118 | }
119 | }
120 | ```
121 |
122 | Example response from webpage reader:
123 | ```json
124 | {
125 | "title": "Example Domain",
126 | "text": "Extracted and cleaned webpage content...",
127 | "url": "https://example.com"
128 | }
129 | ```
130 |
131 | ### Debugging
132 |
133 | Since MCP servers communicate over stdio, debugging can be challenging. We recommend using the [MCP Inspector](https://github.com/modelcontextprotocol/inspector), which is available as a package script:
134 |
135 | ```bash
136 | npm run inspector
137 | ```
138 |
139 | The Inspector will provide a URL to access debugging tools in your browser.
140 |
```
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "compilerOptions": {
3 | "target": "ES2022",
4 | "module": "Node16",
5 | "moduleResolution": "Node16",
6 | "outDir": "./build",
7 | "rootDir": "./src",
8 | "strict": true,
9 | "esModuleInterop": true,
10 | "skipLibCheck": true,
11 | "forceConsistentCasingInFileNames": true
12 | },
13 | "include": ["src/**/*"],
14 | "exclude": ["node_modules"]
15 | }
16 |
```
--------------------------------------------------------------------------------
/.github/workflows/npm-publish.yml:
--------------------------------------------------------------------------------
```yaml
1 | # This workflow will run tests using node and then publish a package to GitHub Packages when a release is created
2 | # For more information see: https://docs.github.com/en/actions/publishing-packages/publishing-nodejs-packages
3 |
4 | name: NPM Publish
5 |
6 | on:
7 | release:
8 | types: [created]
9 |
10 | jobs:
11 | publish-npm:
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v4
15 | - uses: actions/setup-node@v4
16 | with:
17 | node-version: 20
18 | registry-url: https://registry.npmjs.org/
19 | - run: npm ci
20 | - run: npm run build
21 | - run: npm publish --access public
22 | env:
23 | NODE_AUTH_TOKEN: ${{secrets.npm_token}}
24 |
```
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "name": "@adenot/mcp-google-search",
3 | "version": "0.3.1",
4 | "description": "A Model Context Protocol server for Google Search",
5 | "type": "module",
6 | "bin": {
7 | "mcp-google-search": "./build/index.js"
8 | },
9 | "files": [
10 | "build"
11 | ],
12 | "scripts": {
13 | "build": "npx tsc && node -e \"require('fs').chmodSync('build/index.js', '755')\"",
14 | "prepare": "npm run build",
15 | "watch": "npx tsc --watch",
16 | "inspector": "npx @modelcontextprotocol/inspector build/index.js",
17 | "prepublishOnly": "npm run build"
18 | },
19 | "dependencies": {
20 | "@modelcontextprotocol/sdk": "0.6.0",
21 | "axios": "^1.7.9",
22 | "cheerio": "^1.0.0"
23 | },
24 | "devDependencies": {
25 | "@types/node": "^20.11.24",
26 | "typescript": "^5.3.3"
27 | }
28 | }
29 |
```
--------------------------------------------------------------------------------
/smithery.yaml:
--------------------------------------------------------------------------------
```yaml
1 | # Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml
2 |
3 | startCommand:
4 | type: stdio
5 | configSchema:
6 | # JSON Schema defining the configuration options for the MCP.
7 | type: object
8 | required:
9 | - googleApiKey
10 | - googleSearchEngineId
11 | properties:
12 | googleApiKey:
13 | type: string
14 | description: The API key for Google Custom Search.
15 | googleSearchEngineId:
16 | type: string
17 | description: The Search Engine ID for Google Custom Search.
18 | commandFunction:
19 | # A function that produces the CLI command to start the MCP on stdio.
20 | |-
21 | (config) => ({command: 'node', args: ['build/index.js'], env: {GOOGLE_API_KEY: config.googleApiKey, GOOGLE_SEARCH_ENGINE_ID: config.googleSearchEngineId}})
22 |
```
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
```dockerfile
1 | # Generated by https://smithery.ai. See: https://smithery.ai/docs/config#dockerfile
2 | # Use an official Node.js image as a parent image
3 | FROM node:18-alpine AS builder
4 |
5 | # Set the working directory
6 | WORKDIR /app
7 |
8 | # Copy package.json and package-lock.json
9 | COPY package.json package-lock.json ./
10 |
11 | # Install dependencies
12 | RUN npm install --ignore-scripts
13 |
14 | # Copy the TypeScript source code
15 | COPY src ./src
16 | COPY tsconfig.json ./
17 |
18 | # Build the TypeScript code
19 | RUN npm run build
20 |
21 | # Use a lightweight image for the final build
22 | FROM node:18-alpine
23 |
24 | # Set the working directory
25 | WORKDIR /app
26 |
27 | # Copy the build files from the builder stage
28 | COPY --from=builder /app/build ./build
29 | COPY package.json package-lock.json ./
30 |
31 | # Install only production dependencies
32 | RUN npm ci --omit=dev --ignore-scripts
33 |
34 | # Expose the port on which the server will run (if required)
35 | # EXPOSE 8080
36 |
37 | # Define environment variables
38 | ENV GOOGLE_API_KEY=your-api-key-here
39 | ENV GOOGLE_SEARCH_ENGINE_ID=your-search-engine-id-here
40 |
41 | # Run the application
42 | ENTRYPOINT ["node", "build/index.js"]
43 |
```
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | #!/usr/bin/env node
2 | import { Server } from '@modelcontextprotocol/sdk/server/index.js';
3 | import { StdioServerTransport } from '@modelcontextprotocol/sdk/server/stdio.js';
4 | import {
5 | CallToolRequestSchema,
6 | ErrorCode,
7 | ListToolsRequestSchema,
8 | McpError,
9 | } from '@modelcontextprotocol/sdk/types.js';
10 | import axios, { AxiosProxyConfig } from 'axios';
11 | import * as cheerio from 'cheerio';
12 | import { URL } from 'url';
13 |
// Google Custom Search credentials, supplied via the environment.
// Both are validated below; startup aborts if either is missing.
const API_KEY = process.env.GOOGLE_API_KEY;
const SEARCH_ENGINE_ID = process.env.GOOGLE_SEARCH_ENGINE_ID;
16 |
17 | // Create proxy configuration from environment variables
18 | function createProxyConfig(): AxiosProxyConfig | false {
19 | const httpsProxy = process.env.HTTPS_PROXY || process.env.https_proxy;
20 | const httpProxy = process.env.HTTP_PROXY || process.env.http_proxy;
21 |
22 | const proxyUrl = httpsProxy || httpProxy;
23 |
24 | if (!proxyUrl) {
25 | return false;
26 | }
27 |
28 | try {
29 | const url = new URL(proxyUrl);
30 | return {
31 | protocol: url.protocol.replace(':', ''),
32 | host: url.hostname,
33 | port: parseInt(url.port) || (url.protocol === 'https:' ? 443 : 80),
34 | auth: url.username && url.password ? {
35 | username: url.username,
36 | password: url.password
37 | } : undefined
38 | };
39 | } catch (error) {
40 | console.warn(`Invalid proxy URL: ${proxyUrl}`);
41 | return false;
42 | }
43 | }
44 |
// Fail fast at startup: the server is useless without both credentials, and
// a missing key would otherwise only surface later as an opaque API error.
if (!API_KEY) {
  throw new Error('GOOGLE_API_KEY environment variable is required');
}

if (!SEARCH_ENGINE_ID) {
  throw new Error('GOOGLE_SEARCH_ENGINE_ID environment variable is required');
}
52 |
/**
 * One item returned by the `search` tool — a trimmed-down view of a
 * Google Custom Search API result entry.
 */
interface SearchResult {
  title: string;
  link: string;
  snippet: string;
}

/** Payload produced by the `read_webpage` tool. */
interface WebpageContent {
  title: string;
  text: string;
  url: string;
}
64 |
65 | const isValidSearchArgs = (
66 | args: any
67 | ): args is { query: string; num?: number } =>
68 | typeof args === 'object' &&
69 | args !== null &&
70 | typeof args.query === 'string' &&
71 | (args.num === undefined || typeof args.num === 'number');
72 |
73 | const isValidWebpageArgs = (
74 | args: any
75 | ): args is { url: string } =>
76 | typeof args === 'object' &&
77 | args !== null &&
78 | typeof args.url === 'string';
79 |
80 | class SearchServer {
81 | private server: Server;
82 | private axiosInstance;
83 |
84 | constructor() {
85 | this.server = new Server(
86 | {
87 | name: 'search-server',
88 | version: '0.1.0',
89 | },
90 | {
91 | capabilities: {
92 | tools: {},
93 | },
94 | }
95 | );
96 |
97 | const proxyConfig = createProxyConfig();
98 | this.axiosInstance = axios.create({
99 | baseURL: 'https://www.googleapis.com/customsearch/v1',
100 | params: {
101 | key: API_KEY,
102 | cx: SEARCH_ENGINE_ID,
103 | },
104 | proxy: proxyConfig,
105 | });
106 |
107 | this.setupToolHandlers();
108 |
109 | // Error handling
110 | this.server.onerror = (error) => console.error('[MCP Error]', error);
111 | process.on('SIGINT', async () => {
112 | await this.server.close();
113 | process.exit(0);
114 | });
115 | }
116 |
117 | private setupToolHandlers() {
118 | this.server.setRequestHandler(ListToolsRequestSchema, async () => ({
119 | tools: [
120 | {
121 | name: 'search',
122 | description: 'Perform a web search query',
123 | inputSchema: {
124 | type: 'object',
125 | properties: {
126 | query: {
127 | type: 'string',
128 | description: 'Search query',
129 | },
130 | num: {
131 | type: 'number',
132 | description: 'Number of results (1-10)',
133 | minimum: 1,
134 | maximum: 10,
135 | },
136 | },
137 | required: ['query'],
138 | },
139 | },
140 | {
141 | name: 'read_webpage',
142 | description: 'Fetch and extract text content from a webpage',
143 | inputSchema: {
144 | type: 'object',
145 | properties: {
146 | url: {
147 | type: 'string',
148 | description: 'URL of the webpage to read',
149 | },
150 | },
151 | required: ['url'],
152 | },
153 | },
154 | ],
155 | }));
156 |
157 | this.server.setRequestHandler(CallToolRequestSchema, async (request) => {
158 | if (request.params.name === 'search') {
159 | if (!isValidSearchArgs(request.params.arguments)) {
160 | throw new McpError(
161 | ErrorCode.InvalidParams,
162 | 'Invalid search arguments'
163 | );
164 | }
165 |
166 | const { query, num = 5 } = request.params.arguments;
167 |
168 | try {
169 | const response = await this.axiosInstance.get('', {
170 | params: {
171 | q: query,
172 | num: Math.min(num, 10),
173 | },
174 | });
175 |
176 | const results: SearchResult[] = response.data.items.map((item: any) => ({
177 | title: item.title,
178 | link: item.link,
179 | snippet: item.snippet,
180 | }));
181 |
182 | return {
183 | content: [
184 | {
185 | type: 'text',
186 | text: JSON.stringify(results, null, 2),
187 | },
188 | ],
189 | };
190 | } catch (error) {
191 | if (axios.isAxiosError(error)) {
192 | return {
193 | content: [
194 | {
195 | type: 'text',
196 | text: `Search API error: ${
197 | error.response?.data?.error?.message ?? error.message
198 | }`,
199 | },
200 | ],
201 | isError: true,
202 | };
203 | }
204 | throw error;
205 | }
206 | } else if (request.params.name === 'read_webpage') {
207 | if (!isValidWebpageArgs(request.params.arguments)) {
208 | throw new McpError(
209 | ErrorCode.InvalidParams,
210 | 'Invalid webpage arguments'
211 | );
212 | }
213 |
214 | const { url } = request.params.arguments;
215 |
216 | try {
217 | const proxyConfig = createProxyConfig();
218 | const response = await axios.get(url, {
219 | proxy: proxyConfig,
220 | });
221 | const $ = cheerio.load(response.data);
222 |
223 | // Remove script and style elements
224 | $('script, style').remove();
225 |
226 | const content: WebpageContent = {
227 | title: $('title').text().trim(),
228 | text: $('body').text().trim().replace(/\s+/g, ' '),
229 | url: url,
230 | };
231 |
232 | return {
233 | content: [
234 | {
235 | type: 'text',
236 | text: JSON.stringify(content, null, 2),
237 | },
238 | ],
239 | };
240 | } catch (error) {
241 | if (axios.isAxiosError(error)) {
242 | return {
243 | content: [
244 | {
245 | type: 'text',
246 | text: `Webpage fetch error: ${error.message}`,
247 | },
248 | ],
249 | isError: true,
250 | };
251 | }
252 | throw error;
253 | }
254 | }
255 |
256 | throw new McpError(
257 | ErrorCode.MethodNotFound,
258 | `Unknown tool: ${request.params.name}`
259 | );
260 | });
261 | }
262 |
263 | async run() {
264 | const transport = new StdioServerTransport();
265 | await this.server.connect(transport);
266 | console.log('Search MCP server running on stdio');
267 | }
268 | }
269 |
270 | const server = new SearchServer();
271 | server.run().catch(console.error);
272 |
```