# Directory Structure
```
├── .gitattributes
├── .gitignore
├── .npmrc
├── biome.json
├── bun.lockb
├── dev
│ └── graphql.ts
├── Dockerfile
├── package.json
├── README.md
├── smithery.yaml
├── src
│ ├── helpers
│ │ ├── deprecation.ts
│ │ ├── header.ts
│ │ ├── introspection.ts
│ │ └── package.ts
│ └── index.ts
└── tsconfig.json
```
# Files
--------------------------------------------------------------------------------
/.npmrc:
--------------------------------------------------------------------------------
```
1 | package-lock=false
2 |
```
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
```
1 | *.ts linguist-language=TypeScript
2 |
```
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
1 | # Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
2 |
3 | # Logs
4 |
5 | logs
6 | *.log
7 | npm-debug.log*
8 | yarn-debug.log*
9 | yarn-error.log*
10 | lerna-debug.log*
11 | .pnpm-debug.log*
12 |
13 | # Caches
14 |
15 | .cache
16 |
17 | # Diagnostic reports (https://nodejs.org/api/report.html)
18 |
19 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
20 |
21 | # Runtime data
22 |
23 | pids
24 | *.pid
25 | *.seed
26 | *.pid.lock
27 |
28 | # Directory for instrumented libs generated by jscoverage/JSCover
29 |
30 | lib-cov
31 |
32 | # Coverage directory used by tools like istanbul
33 |
34 | coverage
35 | *.lcov
36 |
37 | # nyc test coverage
38 |
39 | .nyc_output
40 |
41 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
42 |
43 | .grunt
44 |
45 | # Bower dependency directory (https://bower.io/)
46 |
47 | bower_components
48 |
49 | # node-waf configuration
50 |
51 | .lock-wscript
52 |
53 | # Compiled binary addons (https://nodejs.org/api/addons.html)
54 |
55 | build/Release
56 |
57 | # Dependency directories
58 |
59 | node_modules/
60 | jspm_packages/
61 |
62 | # Snowpack dependency directory (https://snowpack.dev/)
63 |
64 | web_modules/
65 |
66 | # TypeScript cache
67 |
68 | *.tsbuildinfo
69 |
70 | # Optional npm cache directory
71 |
72 | .npm
73 |
74 | # Optional eslint cache
75 |
76 | .eslintcache
77 |
78 | # Optional stylelint cache
79 |
80 | .stylelintcache
81 |
82 | # Microbundle cache
83 |
84 | .rpt2_cache/
85 | .rts2_cache_cjs/
86 | .rts2_cache_es/
87 | .rts2_cache_umd/
88 |
89 | # Optional REPL history
90 |
91 | .node_repl_history
92 |
93 | # Output of 'npm pack'
94 |
95 | *.tgz
96 |
97 | # Yarn Integrity file
98 |
99 | .yarn-integrity
100 |
101 | # dotenv environment variable files
102 |
103 | .env
104 | .env.development.local
105 | .env.test.local
106 | .env.production.local
107 | .env.local
108 |
109 | # parcel-bundler cache (https://parceljs.org/)
110 |
111 | .parcel-cache
112 |
113 | # Next.js build output
114 |
115 | .next
116 | out
117 |
118 | # Nuxt.js build / generate output
119 |
120 | .nuxt
121 | dist
122 |
123 | # Gatsby files
124 |
125 | # Comment in the public line in if your project uses Gatsby and not Next.js
126 |
127 | # https://nextjs.org/blog/next-9-1#public-directory-support
128 |
129 | # public
130 |
131 | # vuepress build output
132 |
133 | .vuepress/dist
134 |
135 | # vuepress v2.x temp and cache directory
136 |
137 | .temp
138 |
139 | # Docusaurus cache and generated files
140 |
141 | .docusaurus
142 |
143 | # Serverless directories
144 |
145 | .serverless/
146 |
147 | # FuseBox cache
148 |
149 | .fusebox/
150 |
151 | # DynamoDB Local files
152 |
153 | .dynamodb/
154 |
155 | # TernJS port file
156 |
157 | .tern-port
158 |
159 | # Stores VSCode versions used for testing VSCode extensions
160 |
161 | .vscode-test
162 |
163 | # yarn v2
164 |
165 | .yarn/cache
166 | .yarn/unplugged
167 | .yarn/build-state.yml
168 | .yarn/install-state.gz
169 | .pnp.*
170 |
171 | # IntelliJ based IDEs
172 | .idea
173 |
174 | # Finder (MacOS) folder config
175 | .DS_Store
176 |
177 | dist/
178 |
179 | # GraphQL schema for debugging
180 | /schema.graphql
181 |
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
1 | # mcp-graphql
2 |
3 | [![smithery badge](https://smithery.ai/badge/mcp-graphql)](https://smithery.ai/server/mcp-graphql)
4 |
5 | A Model Context Protocol server that enables LLMs to interact with GraphQL APIs. This implementation provides schema introspection and query execution capabilities, allowing models to discover and use GraphQL APIs dynamically.
6 |
7 | ## Usage
8 |
9 | Run `mcp-graphql` with the correct endpoint and it will automatically introspect the endpoint's GraphQL schema.
10 |
11 | ### Command Line Arguments
12 |
13 | > **Deprecated:** as of version 1.0.0, command line arguments have been replaced by environment variables (`NAME`, `ENDPOINT`, `ALLOW_MUTATIONS`, `HEADERS`, `SCHEMA`); the server prints a warning when the flags below are used.
12 |
13 | | Argument | Description | Default |
14 | | -------------------- | ------------------------------------------------ | ------------------------------- |
15 | | `--endpoint` | GraphQL endpoint URL | `http://localhost:4000/graphql` |
16 | | `--headers` | JSON string containing headers for requests | `{}` |
17 | | `--enable-mutations` | Enable mutation operations (disabled by default) | `false` |
18 | | `--name` | Name of the MCP server | `mcp-graphql` |
19 | | `--schema` | Path to a local GraphQL schema file (optional) | - |
20 |
21 | ### Examples
22 |
23 | ```bash
24 | # Basic usage with a local GraphQL server
25 | npx mcp-graphql --endpoint http://localhost:3000/graphql
26 |
27 | # Using with custom headers
28 | npx mcp-graphql --endpoint https://api.example.com/graphql --headers '{"Authorization":"Bearer token123"}'
29 |
30 | # Enable mutation operations
31 | npx mcp-graphql --endpoint http://localhost:3000/graphql --enable-mutations
32 |
33 | # Using a local schema file instead of introspection
34 | npx mcp-graphql --endpoint http://localhost:3000/graphql --schema ./schema.graphql
35 | ```
36 |
37 | ## Available Tools
38 |
39 | The server provides two main tools:
40 |
41 | 1. **introspect-schema**: This tool retrieves the GraphQL schema. Use this first if you don't have access to the schema as a resource.
42 | This uses either the local schema file or an introspection query.
43 |
44 | 2. **query-graphql**: Execute GraphQL queries against the endpoint. By default, mutations are disabled unless `--enable-mutations` is specified.
45 |
46 | ## Resources
47 |
48 | - **graphql-schema**: The server exposes the GraphQL schema as a resource that clients can access. This is either the local schema file or based on an introspection query.
49 |
50 | ## Installation
51 |
52 | ### Installing via Smithery
53 |
54 | To install GraphQL MCP Toolkit for Claude Desktop automatically via [Smithery](https://smithery.ai/server/mcp-graphql):
55 |
56 | ```bash
57 | npx -y @smithery/cli install mcp-graphql --client claude
58 | ```
59 |
60 | ### Installing Manually
61 |
62 | Alternatively, it can be installed manually by adding the following to your Claude Desktop configuration:
63 |
64 | ```json
65 | {
66 | "mcpServers": {
67 | "mcp-graphql": {
68 | "command": "npx",
69 | "args": ["mcp-graphql", "--endpoint", "http://localhost:3000/graphql"]
70 | }
71 | }
72 | }
73 | ```
74 |
75 | ## Security Considerations
76 |
77 | Mutations are disabled by default as a security measure to prevent an LLM from modifying your database or service data. Consider carefully before enabling mutations in production environments.
78 |
79 | ## Customize for your own server
80 |
81 | This is a very generic implementation that allows complete introspection and lets your users run any operation (including mutations). If you need a more specific implementation, I'd suggest creating your own MCP server and locking down tool calling so clients can only supply specific query fields and/or variables. You can use this implementation as a reference.
82 |
```
--------------------------------------------------------------------------------
/biome.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "formatter": {
3 | "enabled": true,
4 | "indentStyle": "tab",
5 | "indentWidth": 2
6 | }
7 | }
```
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "compilerOptions": {
3 | "target": "ES2022",
4 | "module": "Node16",
5 | "moduleResolution": "Node16",
6 | "outDir": "dist",
7 | "rootDir": "src",
8 | "strict": true,
9 | "esModuleInterop": true,
10 | "skipLibCheck": true,
11 | "forceConsistentCasingInFileNames": true,
12 | "declaration": true
13 | },
14 | "include": ["src/**/*"],
15 | "exclude": ["node_modules", "dist"]
16 | }
17 |
```
--------------------------------------------------------------------------------
/src/helpers/package.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { readFileSync } from "node:fs";
2 | import { dirname, join } from "node:path";
3 | import { fileURLToPath } from "node:url";
4 |
5 | const __filename = fileURLToPath(import.meta.url);
6 | const __dirname = dirname(__filename);
7 |
8 | // Current package version so I only need to update it in one place
9 | const { version } = JSON.parse(
10 | readFileSync(join(__dirname, "../../package.json"), "utf-8"),
11 | );
12 |
13 | export function getVersion() {
14 | return version;
15 | }
16 |
```
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
```dockerfile
1 | # Use a Node.js base image that supports bun installation
2 | FROM node:18-alpine as builder
3 |
4 | # Install bun
5 | RUN npm install -g bun
6 |
7 | # Set the working directory
8 | WORKDIR /app
9 |
10 | # Copy package and lock files
11 | COPY package.json bun.lockb ./
12 |
13 | # Install dependencies using bun
14 | RUN bun install
15 |
16 | # Copy the rest of the application
17 | COPY . .
18 |
19 | # Build the application
20 | RUN bun run build
21 |
22 | # Create a release image
23 | FROM node:18-alpine
24 |
25 | # Install bun
26 | RUN npm install -g bun
27 |
28 | # Set the working directory
29 | WORKDIR /app
30 |
31 | # Copy built files and package.json from builder
32 | COPY --from=builder /app/dist /app/dist
33 | COPY --from=builder /app/package.json /app/
34 |
35 | # Configure to use STDIO
36 | ENV NODE_ENV=production
37 | ENV MCP_TRANSPORT=stdio
38 |
39 | # Run the server with STDIO transport
40 | CMD ["node", "/app/dist/index.js"]
41 |
```
--------------------------------------------------------------------------------
/smithery.yaml:
--------------------------------------------------------------------------------
```yaml
1 | # Smithery configuration file: https://smithery.ai/docs/config#smitheryyaml
2 |
3 | startCommand:
4 | type: stdio
5 | configSchema:
6 | # JSON Schema defining the configuration options for the MCP.
7 | type: object
8 | required:
9 | - endpoint
10 | properties:
11 | headers:
12 | type: string
13 | description: Optional JSON string of headers to send with the GraphQL requests.
14 | endpoint:
15 | type: string
16 | description: The GraphQL server endpoint URL.
17 | # A JS function that produces the CLI command based on the given config to start the MCP on stdio.
18 | commandFunction: |-
20 | (config) => ({
21 | command: 'node',
22 | args: ['/app/dist/index.js', '--endpoint', config.endpoint].concat(config.headers ? ['--headers', config.headers] : []),
23 | env: { MCP_TRANSPORT: 'stdio', NODE_ENV: 'production' }
24 | })
25 |
```
--------------------------------------------------------------------------------
/src/helpers/header.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * Parse and merge headers from various sources
3 | * @param configHeaders - Default headers from configuration
4 | * @param inputHeaders - Headers provided by the user (string or object)
5 | * @returns Merged headers object
6 | */
7 | export function parseAndMergeHeaders(
8 | configHeaders: Record<string, string>,
9 | inputHeaders?: string | Record<string, string>
10 | ): Record<string, string> {
11 | // Parse headers if they're provided as a string
12 | let parsedHeaders: Record<string, string> = {};
13 |
14 | if (typeof inputHeaders === "string") {
15 | try {
16 | parsedHeaders = JSON.parse(inputHeaders);
17 | } catch (e) {
18 | throw new Error(`Invalid headers JSON: ${e}`);
19 | }
20 | } else if (inputHeaders) {
21 | parsedHeaders = inputHeaders;
22 | }
23 |
24 | // Merge with config headers (config headers are overridden by input headers)
25 | return { ...configHeaders, ...parsedHeaders };
26 | }
27 |
```
--------------------------------------------------------------------------------
/src/helpers/deprecation.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * Helper module for handling deprecation warnings
3 | */
4 |
5 | /**
6 | * Check for deprecated command line arguments and output warnings
7 | */
8 | export function checkDeprecatedArguments(): void {
9 | const deprecatedArgs = [
10 | "--endpoint",
11 | "--headers",
12 | "--enable-mutations",
13 | "--name",
14 | "--schema",
15 | ];
16 | const usedDeprecatedArgs = deprecatedArgs.filter((arg) =>
17 | process.argv.includes(arg),
18 | );
19 |
20 | if (usedDeprecatedArgs.length > 0) {
21 | console.error(
22 | `WARNING: Deprecated command line arguments detected: ${usedDeprecatedArgs.join(", ")}`,
23 | );
24 | console.error(
25 | "As of version 1.0.0, command line arguments have been replaced with environment variables.",
26 | );
27 | console.error("Please use environment variables instead. For example:");
28 | console.error(
29 | " Instead of: npx mcp-graphql --endpoint http://example.com/graphql",
30 | );
31 | console.error(" Use: ENDPOINT=http://example.com/graphql npx mcp-graphql");
32 | console.error("");
33 | }
34 | }
35 |
```
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
```json
1 | {
2 | "name": "@kailashg101/graphql-mcp-toolkit",
3 | "version": "1.0.2",
4 | "description": "MCP server for connecting to GraphQL servers",
5 | "module": "index.ts",
6 | "type": "module",
7 | "author": "Kailash G",
8 | "license": "MIT",
9 | "publishConfig": {
10 | "access": "public"
11 | },
12 | "bin": {
13 | "mcp-graphql": "./dist/index.js"
14 | },
15 | "files": [
16 | "dist"
17 | ],
18 | "devDependencies": {
19 | "@graphql-tools/schema": "^10.0.21",
20 | "@standard-schema/spec": "^1.0.0",
21 | "@types/bun": "latest",
22 | "@types/yargs": "17.0.33",
23 | "graphql-yoga": "^5.13.1",
24 | "typescript": "5.8.2"
25 | },
26 | "dependencies": {
27 | "@modelcontextprotocol/sdk": "1.6.1",
28 | "graphql": "^16.10.0",
29 | "yargs": "17.7.2",
30 | "zod": "3.24.2",
31 | "zod-to-json-schema": "3.24.3"
32 | },
33 | "scripts": {
34 | "dev": "bun --watch src/index.ts",
35 | "build": "bun build src/index.ts --outdir dist --target node && bun -e \"require('fs').chmodSync('dist/index.js', '755')\"",
36 | "start": "bun run dist/index.js"
37 | },
38 | "packageManager": "[email protected]"
39 | }
40 |
```
--------------------------------------------------------------------------------
/src/helpers/introspection.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { buildClientSchema, getIntrospectionQuery, printSchema } from "graphql";
2 | import { readFile } from "node:fs/promises";
3 | /**
4 | * Introspect a GraphQL endpoint and return the schema as the GraphQL SDL
5 | * @param endpoint - The endpoint to introspect
6 | * @returns The schema
7 | */
8 | export async function introspectEndpoint(
9 | endpoint: string,
10 | headers?: Record<string, string>,
11 | ) {
12 | const response = await fetch(endpoint, {
13 | method: "POST",
14 | headers: {
15 | "Content-Type": "application/json",
16 | ...headers,
17 | },
18 | body: JSON.stringify({
19 | query: getIntrospectionQuery(),
20 | }),
21 | });
22 |
23 | if (!response.ok) {
24 | throw new Error(`GraphQL request failed: ${response.statusText}`);
25 | }
26 |
27 | const responseJson = await response.json();
28 | // Transform to a schema object
29 | const schema = buildClientSchema(responseJson.data);
30 |
31 | // Print the schema SDL
32 | return printSchema(schema);
33 | }
34 |
35 | /**
36 | * Introspect a local GraphQL schema file and return the schema as the GraphQL SDL
37 | * @param path - The path to the local schema file
38 | * @returns The schema
39 | */
40 | export async function introspectLocalSchema(path: string) {
41 | const schema = await readFile(path, "utf8");
42 | return schema;
43 | }
44 |
```
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
```typescript
1 | #!/usr/bin/env node
2 |
3 | import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
4 | import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
5 | import { parse } from "graphql/language";
6 | import { z } from "zod";
7 | import { checkDeprecatedArguments } from "./helpers/deprecation.js";
8 | import { parseAndMergeHeaders } from "./helpers/header.js";
9 | import {
10 | introspectEndpoint,
11 | introspectLocalSchema,
12 | } from "./helpers/introspection.js";
13 | import { getVersion } from "./helpers/package.js";
14 |
15 | // Check for deprecated command line arguments
16 | checkDeprecatedArguments();
17 |
18 | const EnvSchema = z.object({
19 | NAME: z.string().default("mcp-graphql"),
20 | ENDPOINT: z.string().url().default("http://localhost:4000/graphql"),
21 | ALLOW_MUTATIONS: z.boolean().default(false),
22 | HEADERS: z
23 | .string()
24 | .default("{}")
25 | .transform((val) => {
26 | try {
27 | return JSON.parse(val);
28 | } catch (e) {
29 | throw new Error("HEADERS must be a valid JSON string");
30 | }
31 | }),
32 | SCHEMA: z.string().optional(),
33 | });
34 |
35 | const env = EnvSchema.parse(process.env);
36 |
37 | const server = new McpServer({
38 | name: env.NAME,
39 | version: getVersion(),
40 | description: `GraphQL MCP server for ${env.ENDPOINT}`,
41 | });
42 |
43 | server.resource("graphql-schema", new URL(env.ENDPOINT).href, async (uri) => {
44 | try {
45 | let schema: string;
46 | if (env.SCHEMA) {
47 | schema = await introspectLocalSchema(env.SCHEMA);
48 | } else {
49 | schema = await introspectEndpoint(env.ENDPOINT, env.HEADERS);
50 | }
51 |
52 | return {
53 | contents: [
54 | {
55 | uri: uri.href,
56 | text: schema,
57 | },
58 | ],
59 | };
60 | } catch (error) {
61 | throw new Error(`Failed to get GraphQL schema: ${error}`);
62 | }
63 | });
64 |
65 | server.tool(
66 | "introspect-schema",
67 | "Introspect the GraphQL schema, use this tool before doing a query to get the schema information if you do not have it available as a resource already.",
68 | {
69 | endpoint: z
70 | .string()
71 | .url()
72 | .optional()
73 | .describe(
74 | `Optional: Override the default endpoint, the already used endpoint is: ${env.ENDPOINT}`
75 | ),
76 | headers: z
77 | .union([z.record(z.string()), z.string()])
78 | .optional()
79 | .describe(
80 | `Optional: Add additional headers, the already used headers are: ${JSON.stringify(
81 | env.HEADERS
82 | )}`
83 | ),
84 | },
85 | async ({ endpoint, headers }) => {
86 | try {
87 | let schema: string;
88 | if (env.SCHEMA) {
89 | schema = await introspectLocalSchema(env.SCHEMA);
90 | } else {
91 | const useEndpoint = endpoint || env.ENDPOINT;
92 | const useHeaders = parseAndMergeHeaders(env.HEADERS, headers);
93 | schema = await introspectEndpoint(useEndpoint, useHeaders);
94 | }
95 |
96 | return {
97 | content: [
98 | {
99 | type: "text",
100 | text: schema,
101 | },
102 | ],
103 | };
104 | } catch (error) {
105 | throw new Error(`Failed to introspect schema: ${error}`);
106 | }
107 | }
108 | );
109 |
110 | server.tool(
111 | "query-graphql",
112 | "Query a GraphQL endpoint with the given query and variables",
113 | {
114 | query: z.string(),
115 | variables: z.string().optional(),
116 | endpoint: z
117 | .string()
118 | .url()
119 | .optional()
120 | .describe(
121 | `Optional: Override the default endpoint, the already used endpoint is: ${env.ENDPOINT}`
122 | ),
123 | headers: z
124 | .union([z.record(z.string()), z.string()])
125 | .optional()
126 | .describe(
127 | `Optional: Add additional headers, the already used headers are: ${JSON.stringify(
128 | env.HEADERS
129 | )}`
130 | ),
131 | },
132 | async ({ query, variables, endpoint, headers }) => {
133 | try {
134 | const parsedQuery = parse(query);
135 |
136 | // Check if the query is a mutation
137 | const isMutation = parsedQuery.definitions.some(
138 | (def) =>
139 | def.kind === "OperationDefinition" && def.operation === "mutation"
140 | );
141 |
142 | if (isMutation && !env.ALLOW_MUTATIONS) {
143 | return {
144 | isError: true,
145 | content: [
146 | {
147 | type: "text",
148 | text: "Mutations are not allowed unless you enable them in the configuration. Please use a query operation instead.",
149 | },
150 | ],
151 | };
152 | }
153 | } catch (error) {
154 | return {
155 | isError: true,
156 | content: [
157 | {
158 | type: "text",
159 | text: `Invalid GraphQL query: ${error}`,
160 | },
161 | ],
162 | };
163 | }
164 |
165 | try {
166 | const useEndpoint = endpoint || env.ENDPOINT;
167 | const useHeaders = parseAndMergeHeaders(env.HEADERS, headers);
168 |
169 | const response = await fetch(useEndpoint, {
170 | method: "POST",
171 | headers: {
172 | "Content-Type": "application/json",
173 | ...useHeaders,
174 | },
175 | body: JSON.stringify({
176 | query,
177 | variables,
178 | }),
179 | });
180 |
181 | if (!response.ok) {
182 | throw new Error(`GraphQL request failed: ${response.statusText}`);
183 | }
184 |
185 | const data = await response.json();
186 |
187 | if (data.errors && data.errors.length > 0) {
188 | // Contains GraphQL errors
189 | return {
190 | isError: true,
191 | content: [
192 | {
193 | type: "text",
194 | text: `The GraphQL response has errors, please fix the query: ${JSON.stringify(
195 | data,
196 | null,
197 | 2
198 | )}`,
199 | },
200 | ],
201 | };
202 | }
203 |
204 | return {
205 | content: [
206 | {
207 | type: "text",
208 | text: JSON.stringify(data, null, 2),
209 | },
210 | ],
211 | };
212 | } catch (error) {
213 | throw new Error(`Failed to execute GraphQL query: ${error}`);
214 | }
215 | }
216 | );
217 |
218 | async function main() {
219 | const transport = new StdioServerTransport();
220 | await server.connect(transport);
221 |
222 | console.error(
223 | `Started graphql mcp server ${env.NAME} for endpoint: ${env.ENDPOINT}`
224 | );
225 | }
226 |
227 | main().catch((error) => {
228 | console.error(`Fatal error in main(): ${error}`);
229 | process.exit(1);
230 | });
231 |
```
--------------------------------------------------------------------------------
/dev/graphql.ts:
--------------------------------------------------------------------------------
```typescript
1 | import { makeExecutableSchema } from "@graphql-tools/schema";
2 | import { createYoga } from "graphql-yoga";
3 | import fs from "node:fs";
4 |
5 | /**
6 | * Simple GraphQL server implementation for testing purposes
7 | *
8 | * This is a simple GraphQL server implementation for testing purposes.
9 | * It is not intended to be used in production.
10 | *
11 | * It is used to test the GraphQL schema and resolvers.
12 | *
13 | */
14 |
15 | // Define types
16 | interface User {
17 | id: string;
18 | name: string;
19 | email: string;
20 | createdAt: string;
21 | updatedAt: string | null;
22 | }
23 |
24 | interface Post {
25 | id: string;
26 | title: string;
27 | content: string;
28 | published: boolean;
29 | authorId: string;
30 | createdAt: string;
31 | updatedAt: string | null;
32 | }
33 |
34 | interface Comment {
35 | id: string;
36 | text: string;
37 | postId: string;
38 | authorId: string;
39 | createdAt: string;
40 | }
41 |
42 | interface CreateUserInput {
43 | name: string;
44 | email: string;
45 | }
46 |
47 | interface UpdateUserInput {
48 | name?: string;
49 | email?: string;
50 | }
51 |
52 | interface CreatePostInput {
53 | title: string;
54 | content: string;
55 | published?: boolean;
56 | authorId: string;
57 | }
58 |
59 | interface AddCommentInput {
60 | text: string;
61 | postId: string;
62 | authorId: string;
63 | }
64 |
65 | // Define resolver context type
66 | type ResolverContext = Record<string, never>;
67 |
68 | // Read schema from file
69 | const typeDefs = fs.readFileSync("./schema-simple.graphql", "utf-8");
70 |
71 | // Create mock data
72 | const users: User[] = [
73 | {
74 | id: "1",
75 | name: "John Doe",
76 | email: "[email protected]",
77 | createdAt: new Date().toISOString(),
78 | updatedAt: null,
79 | },
80 | {
81 | id: "2",
82 | name: "Jane Smith",
83 | email: "[email protected]",
84 | createdAt: new Date().toISOString(),
85 | updatedAt: null,
86 | },
87 | {
88 | id: "3",
89 | name: "Bob Johnson",
90 | email: "[email protected]",
91 | createdAt: new Date().toISOString(),
92 | updatedAt: null,
93 | },
94 | ];
95 |
96 | const posts: Post[] = [
97 | {
98 | id: "1",
99 | title: "First Post",
100 | content: "This is my first post",
101 | published: true,
102 | authorId: "1",
103 | createdAt: new Date().toISOString(),
104 | updatedAt: null,
105 | },
106 | {
107 | id: "2",
108 | title: "GraphQL is Awesome",
109 | content: "Here is why GraphQL is better than REST",
110 | published: true,
111 | authorId: "1",
112 | createdAt: new Date().toISOString(),
113 | updatedAt: null,
114 | },
115 | {
116 | id: "3",
117 | title: "Yoga Tutorial",
118 | content: "Learn how to use GraphQL Yoga",
119 | published: false,
120 | authorId: "2",
121 | createdAt: new Date().toISOString(),
122 | updatedAt: null,
123 | },
124 | ];
125 |
126 | const comments: Comment[] = [
127 | {
128 | id: "1",
129 | text: "Great post!",
130 | postId: "1",
131 | authorId: "2",
132 | createdAt: new Date().toISOString(),
133 | },
134 | {
135 | id: "2",
136 | text: "I learned a lot",
137 | postId: "1",
138 | authorId: "3",
139 | createdAt: new Date().toISOString(),
140 | },
141 | {
142 | id: "3",
143 | text: "Looking forward to more content",
144 | postId: "2",
145 | authorId: "2",
146 | createdAt: new Date().toISOString(),
147 | },
148 | ];
149 |
150 | // Define resolvers
151 | const resolvers = {
152 | Query: {
153 | user: (
154 | _parent: unknown,
155 | { id }: { id: string },
156 | _context: ResolverContext,
157 | ) => users.find((user) => user.id === id),
158 | users: () => users,
159 | post: (
160 | _parent: unknown,
161 | { id }: { id: string },
162 | _context: ResolverContext,
163 | ) => posts.find((post) => post.id === id),
164 | posts: () => posts,
165 | commentsByPost: (
166 | _parent: unknown,
167 | { postId }: { postId: string },
168 | _context: ResolverContext,
169 | ) => comments.filter((comment) => comment.postId === postId),
170 | },
171 | Mutation: {
172 | createUser: (
173 | _parent: unknown,
174 | { input }: { input: CreateUserInput },
175 | _context: ResolverContext,
176 | ) => {
177 | const newUser: User = {
178 | id: String(users.length + 1),
179 | name: input.name,
180 | email: input.email,
181 | createdAt: new Date().toISOString(),
182 | updatedAt: null,
183 | };
184 | users.push(newUser);
185 | return newUser;
186 | },
187 | updateUser: (
188 | _parent: unknown,
189 | { id, input }: { id: string; input: UpdateUserInput },
190 | _context: ResolverContext,
191 | ) => {
192 | const userIndex = users.findIndex((user) => user.id === id);
193 | if (userIndex === -1) throw new Error(`User with ID ${id} not found`);
194 |
195 | users[userIndex] = {
196 | ...users[userIndex],
197 | ...input,
198 | updatedAt: new Date().toISOString(),
199 | };
200 |
201 | return users[userIndex];
202 | },
203 | deleteUser: (
204 | _parent: unknown,
205 | { id }: { id: string },
206 | _context: ResolverContext,
207 | ) => {
208 | const userIndex = users.findIndex((user) => user.id === id);
209 | if (userIndex === -1) return false;
210 |
211 | users.splice(userIndex, 1);
212 | return true;
213 | },
214 | createPost: (
215 | _parent: unknown,
216 | { input }: { input: CreatePostInput },
217 | _context: ResolverContext,
218 | ) => {
219 | const newPost: Post = {
220 | id: String(posts.length + 1),
221 | title: input.title,
222 | content: input.content,
223 | published: input.published ?? false,
224 | authorId: input.authorId,
225 | createdAt: new Date().toISOString(),
226 | updatedAt: null,
227 | };
228 | posts.push(newPost);
229 | return newPost;
230 | },
231 | addComment: (
232 | _parent: unknown,
233 | { input }: { input: AddCommentInput },
234 | _context: ResolverContext,
235 | ) => {
236 | const newComment: Comment = {
237 | id: String(comments.length + 1),
238 | text: input.text,
239 | postId: input.postId,
240 | authorId: input.authorId,
241 | createdAt: new Date().toISOString(),
242 | };
243 | comments.push(newComment);
244 | return newComment;
245 | },
246 | },
247 | User: {
248 | posts: (parent: User) =>
249 | posts.filter((post) => post.authorId === parent.id),
250 | comments: (parent: User) =>
251 | comments.filter((comment) => comment.authorId === parent.id),
252 | },
253 | Post: {
254 | author: (parent: Post) => users.find((user) => user.id === parent.authorId),
255 | comments: (parent: Post) =>
256 | comments.filter((comment) => comment.postId === parent.id),
257 | },
258 | Comment: {
259 | post: (parent: Comment) => posts.find((post) => post.id === parent.postId),
260 | author: (parent: Comment) =>
261 | users.find((user) => user.id === parent.authorId),
262 | },
263 | };
264 |
265 | // Create executable schema
266 | const schema = makeExecutableSchema({
267 | typeDefs,
268 | resolvers,
269 | });
270 |
271 | // Create Yoga instance
272 | const yoga = createYoga({ schema });
273 |
274 | // Start server with proper request handler
275 | const server = Bun.serve({
276 | port: 4000,
277 | fetch: (request) => {
278 | // Add dev logger for incoming requests
279 | console.log(
280 | `[${new Date().toISOString()}] Incoming request: ${request.method} ${
281 | request.url
282 | }`,
283 | );
284 | return yoga.fetch(request);
285 | },
286 | });
287 |
288 | console.info(
289 | `GraphQL server is running on ${new URL(
290 | yoga.graphqlEndpoint,
291 | `http://${server.hostname}:${server.port}`,
292 | )}`,
293 | );
294 |
```