This is page 3 of 5. Use http://codebase.md/rashidazarang/airtable-mcp?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .eslintrc.js
├── .github
│ ├── ISSUE_TEMPLATE
│ │ ├── bug_report.md
│ │ ├── custom.md
│ │ └── feature_request.md
│ └── pull_request_template.md
├── .gitignore
├── .nvmrc
├── .prettierrc
├── bin
│ ├── airtable-crud-cli.js
│ └── airtable-mcp.js
├── CHANGELOG.md
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── docker
│ ├── Dockerfile
│ └── Dockerfile.node
├── docs
│ ├── guides
│ │ ├── CLAUDE_INTEGRATION.md
│ │ ├── ENHANCED_FEATURES.md
│ │ ├── INSTALLATION.md
│ │ └── QUICK_START.md
│ └── releases
│ ├── RELEASE_NOTES_v1.2.2.md
│ ├── RELEASE_NOTES_v1.2.4.md
│ ├── RELEASE_NOTES_v1.4.0.md
│ ├── RELEASE_NOTES_v1.5.0.md
│ └── RELEASE_NOTES_v1.6.0.md
├── examples
│ ├── airtable-crud-example.js
│ ├── building-mcp.md
│ ├── claude_config.json
│ ├── claude_simple_config.json
│ ├── env-demo.js
│ ├── example_usage.md
│ ├── example-tasks-update.json
│ ├── example-tasks.json
│ ├── python_debug_patch.txt
│ ├── sample-transform.js
│ ├── typescript
│ │ ├── advanced-ai-prompts.ts
│ │ ├── basic-usage.ts
│ │ └── claude-desktop-config.json
│ └── windsurf_mcp_config.json
├── index.js
├── ISSUE_RESPONSES.md
├── jest.config.js
├── LICENSE
├── package-lock.json
├── package.json
├── PROJECT_STRUCTURE.md
├── README.md
├── RELEASE_SUMMARY_v3.2.x.md
├── RELEASE_v3.2.1.md
├── RELEASE_v3.2.3.md
├── RELEASE_v3.2.4.md
├── requirements.txt
├── SECURITY_NOTICE.md
├── smithery.yaml
├── src
│ ├── index.js
│ ├── javascript
│ │ ├── airtable_simple_production.js
│ │ └── airtable_simple.js
│ ├── python
│ │ ├── airtable_mcp
│ │ │ ├── __init__.py
│ │ │ └── src
│ │ │ └── server.py
│ │ ├── inspector_server.py
│ │ ├── inspector.py
│ │ ├── setup.py
│ │ ├── simple_airtable_server.py
│ │ └── test_client.py
│ └── typescript
│ ├── ai-prompts.d.ts
│ ├── airtable-mcp-server.d.ts
│ ├── airtable-mcp-server.ts
│ ├── app
│ │ ├── airtable-client.ts
│ │ ├── config.ts
│ │ ├── context.ts
│ │ ├── exceptions.ts
│ │ ├── governance.ts
│ │ ├── logger.ts
│ │ ├── rateLimiter.ts
│ │ ├── tools
│ │ │ ├── create.ts
│ │ │ ├── describe.ts
│ │ │ ├── handleError.ts
│ │ │ ├── index.ts
│ │ │ ├── listBases.ts
│ │ │ ├── listExceptions.ts
│ │ │ ├── listGovernance.ts
│ │ │ ├── query.ts
│ │ │ ├── update.ts
│ │ │ ├── upsert.ts
│ │ │ └── webhooks.ts
│ │ └── types.ts
│ ├── errors.ts
│ ├── index.d.ts
│ ├── index.ts
│ ├── prompt-templates.ts
│ ├── tools-schemas.ts
│ └── tools.d.ts
├── TESTING_REPORT.md
├── tests
│ ├── test_all_features.sh
│ ├── test_mcp_comprehensive.js
│ ├── test_v1.5.0_final.sh
│ └── test_v1.6.0_comprehensive.sh
├── tsconfig.json
└── types
└── typescript
├── airtable-mcp-server.d.ts
├── app
│ ├── airtable-client.d.ts
│ ├── config.d.ts
│ ├── context.d.ts
│ ├── exceptions.d.ts
│ ├── governance.d.ts
│ ├── logger.d.ts
│ ├── rateLimiter.d.ts
│ ├── tools
│ │ ├── create.d.ts
│ │ ├── describe.d.ts
│ │ ├── handleError.d.ts
│ │ ├── index.d.ts
│ │ ├── listBases.d.ts
│ │ ├── listExceptions.d.ts
│ │ ├── listGovernance.d.ts
│ │ ├── query.d.ts
│ │ ├── update.d.ts
│ │ ├── upsert.d.ts
│ │ └── webhooks.d.ts
│ └── types.d.ts
├── errors.d.ts
├── index.d.ts
├── prompt-templates.d.ts
├── test-suite.d.ts
└── tools-schemas.d.ts
```
# Files
--------------------------------------------------------------------------------
/tests/test_v1.5.0_final.sh:
--------------------------------------------------------------------------------
```bash
#!/bin/bash

# COMPREHENSIVE FINAL TEST SUITE - Airtable MCP Server v1.5.0
# Tests ALL 23 tools with no assumptions
#
# Prerequisites: an MCP server already listening on localhost:8010, plus
# curl and jq on PATH (bc is used for the final success-rate summary).
# `set -e` makes the script abort on the first unguarded command failure.

set -e
SERVER_URL="http://localhost:8010/mcp"
PASSED=0            # running count of passed checks
FAILED=0            # running count of failed checks
TEST_RECORD_ID=""   # filled in by test_tool after create_record succeeds
TEST_WEBHOOK_ID=""  # filled in by test_tool after create_webhook succeeds
CREATED_FIELD_ID="" # filled in by test_tool after create_field succeeds

echo "🧪 FINAL COMPREHENSIVE TEST SUITE - v1.5.0"
echo "==========================================="
echo "Testing ALL 23 tools with real API calls"
echo ""
19 | # Function to make MCP calls
# Issue one MCP tools/call request and print the raw JSON-RPC response.
#   $1 - tool name
#   $2 - JSON object holding the tool arguments (spliced in verbatim)
call_tool() {
    local name="$1"
    local args="$2"
    local payload
    payload=$(printf '{"jsonrpc": "2.0", "id": 1, "method": "tools/call", "params": {"name": "%s", "arguments": %s}}' "$name" "$args")
    curl -s -X POST "$SERVER_URL" -H "Content-Type: application/json" -d "$payload"
}
27 |
28 | # Enhanced test function with better error reporting
# Run one tool invocation and score it.
#   $1 tool name   $2 JSON arguments   $3 human description
#   $4 "true" when the call is EXPECTED to fail
# Side effects: updates PASSED/FAILED and captures IDs from create_* output.
#
# Fix: counters use arithmetic assignment instead of ((var++)).  A bash
# post-increment expression returns exit status 1 when its value is 0, so
# under `set -e` the very first ((PASSED++)) or ((FAILED++)) from zero
# would silently kill the whole script.
test_tool() {
    local tool_name="$1"
    local params="$2"
    local description="$3"
    local expect_fail="$4"

    echo -n "🔧 $tool_name: $description... "

    if result=$(call_tool "$tool_name" "$params" 2>&1); then
        if echo "$result" | jq -e '.result.content[0].text' > /dev/null 2>&1; then
            response_text=$(echo "$result" | jq -r '.result.content[0].text')
            if [[ "$expect_fail" == "true" ]]; then
                # A "failure" is any response whose text mentions an error.
                if echo "$response_text" | grep -q "error\|Error\|not found\|requires"; then
                    echo "✅ PASS (Expected failure)"
                    PASSED=$((PASSED + 1))
                else
                    echo "❌ FAIL (Should have failed)"
                    echo "   Response: ${response_text:0:100}..."
                    FAILED=$((FAILED + 1))
                fi
            else
                echo "✅ PASS"
                PASSED=$((PASSED + 1))
                # Store important IDs for later tests
                if [[ "$tool_name" == "create_record" ]]; then
                    TEST_RECORD_ID=$(echo "$result" | jq -r '.result.content[0].text' | grep -o 'rec[a-zA-Z0-9]\{10,20\}' | head -1)
                    echo "   📝 Stored record ID: $TEST_RECORD_ID"
                elif [[ "$tool_name" == "create_webhook" ]]; then
                    TEST_WEBHOOK_ID=$(echo "$result" | jq -r '.result.content[0].text' | grep -o 'ach[a-zA-Z0-9]\{10,20\}' | head -1)
                    echo "   🪝 Stored webhook ID: $TEST_WEBHOOK_ID"
                elif [[ "$tool_name" == "create_field" ]]; then
                    CREATED_FIELD_ID=$(echo "$result" | jq -r '.result.content[0].text' | grep -o 'fld[a-zA-Z0-9]\{10,20\}' | head -1)
                    echo "   🏗️ Stored field ID: $CREATED_FIELD_ID"
                fi
            fi
        else
            # No result payload: check for a JSON-RPC error object instead.
            if echo "$result" | jq -e '.error' > /dev/null 2>&1; then
                error_msg=$(echo "$result" | jq -r '.error.message')
                if [[ "$expect_fail" == "true" ]]; then
                    echo "✅ PASS (Expected error: $error_msg)"
                    PASSED=$((PASSED + 1))
                else
                    echo "❌ FAIL (API Error: $error_msg)"
                    FAILED=$((FAILED + 1))
                fi
            else
                echo "❌ FAIL (Invalid response)"
                echo "   Response: $result"
                FAILED=$((FAILED + 1))
            fi
        fi
    else
        echo "❌ FAIL (Request failed)"
        echo "   Error: $result"
        FAILED=$((FAILED + 1))
    fi
}
86 |
# ---- Test phases -----------------------------------------------------------
# Fixes in this section:
#  * counter updates use arithmetic assignment, not ((var++)): under `set -e`
#    a post-increment whose value is 0 exits the script (see test_tool).
#  * the success-rate division is guarded against a zero total.
#  * the log grep tolerates a missing log file.

echo "📊 PHASE 1: Core Data Operations (7 tools)"
echo "==========================================="

test_tool "list_tables" "{}" "List all tables in base"
test_tool "list_records" "{\"table\": \"Test Table CRUD\", \"maxRecords\": 3}" "List records with limit"
test_tool "create_record" "{\"table\": \"Test Table CRUD\", \"fields\": {\"Name\": \"v1.5.0 Test Record\", \"Description\": \"Created during final testing\", \"Status\": \"Testing\"}}" "Create test record"

# Use the created record ID for get_record test
if [[ -n "$TEST_RECORD_ID" ]]; then
    test_tool "get_record" "{\"table\": \"Test Table CRUD\", \"recordId\": \"$TEST_RECORD_ID\"}" "Get the created record"
    test_tool "update_record" "{\"table\": \"Test Table CRUD\", \"recordId\": \"$TEST_RECORD_ID\", \"fields\": {\"Status\": \"Updated\"}}" "Update the created record"
else
    echo "⚠️ Skipping get_record and update_record tests (no record ID)"
    FAILED=$((FAILED + 2))
fi

test_tool "search_records" "{\"table\": \"Test Table CRUD\", \"searchTerm\": \"v1.5.0\"}" "Search for our test record"

echo ""
echo "🔗 PHASE 2: Webhook Management (5 tools)"
echo "========================================"

test_tool "list_webhooks" "{}" "List existing webhooks"
test_tool "create_webhook" "{\"notificationUrl\": \"https://webhook.site/test-v1.5.0\", \"specification\": {\"options\": {\"filters\": {\"dataTypes\": [\"tableData\"]}}}}" "Create test webhook"

if [[ -n "$TEST_WEBHOOK_ID" ]]; then
    test_tool "get_webhook_payloads" "{\"webhookId\": \"$TEST_WEBHOOK_ID\"}" "Get webhook payloads"
    test_tool "refresh_webhook" "{\"webhookId\": \"$TEST_WEBHOOK_ID\"}" "Refresh webhook"
    test_tool "delete_webhook" "{\"webhookId\": \"$TEST_WEBHOOK_ID\"}" "Delete test webhook"
else
    echo "⚠️ Skipping webhook payload/refresh/delete tests (no webhook ID)"
    FAILED=$((FAILED + 3))
fi

echo ""
echo "🏗️ PHASE 3: NEW Schema Discovery (6 tools)"
echo "==========================================="

test_tool "list_bases" "{}" "Discover all accessible bases"
test_tool "get_base_schema" "{}" "Get complete base schema"
test_tool "describe_table" "{\"table\": \"Test Table CRUD\"}" "Describe table with field details"
test_tool "list_field_types" "{}" "List all available field types"
test_tool "get_table_views" "{\"table\": \"Test Table CRUD\"}" "Get table views"

# Test pagination for list_bases
test_tool "list_bases" "{\"offset\": \"invalid_offset\"}" "Test list_bases with invalid offset"

echo ""
echo "🔧 PHASE 4: NEW Field Management (4 tools)"
echo "=========================================="

test_tool "create_field" "{\"table\": \"Test Table CRUD\", \"name\": \"v1.5.0 Test Field\", \"type\": \"singleLineText\", \"description\": \"Field created during v1.5.0 testing\"}" "Create new field"

if [[ -n "$CREATED_FIELD_ID" ]]; then
    test_tool "update_field" "{\"table\": \"Test Table CRUD\", \"fieldId\": \"$CREATED_FIELD_ID\", \"name\": \"v1.5.0 Updated Field\", \"description\": \"Updated during testing\"}" "Update the created field"
    test_tool "delete_field" "{\"table\": \"Test Table CRUD\", \"fieldId\": \"$CREATED_FIELD_ID\", \"confirm\": true}" "Delete the test field"
else
    echo "⚠️ Skipping field update/delete tests (no field ID)"
    FAILED=$((FAILED + 2))
fi

# Test safety checks
test_tool "delete_field" "{\"table\": \"Test Table CRUD\", \"fieldId\": \"fldDummyID\", \"confirm\": false}" "Test field deletion without confirmation" "true"

echo ""
echo "🏢 PHASE 5: NEW Table Management (3 tools)"
echo "========================================="

test_tool "create_table" "{\"name\": \"v1.5.0 Test Table\", \"description\": \"Table created during v1.5.0 testing\", \"fields\": [{\"name\": \"Name\", \"type\": \"singleLineText\"}, {\"name\": \"Notes\", \"type\": \"multilineText\"}]}" "Create new table"
test_tool "update_table" "{\"table\": \"v1.5.0 Test Table\", \"name\": \"v1.5.0 Updated Table\", \"description\": \"Updated description\"}" "Update table metadata"

# Test safety checks
test_tool "delete_table" "{\"table\": \"v1.5.0 Updated Table\", \"confirm\": false}" "Test table deletion without confirmation" "true"
test_tool "delete_table" "{\"table\": \"v1.5.0 Updated Table\", \"confirm\": true}" "Delete the test table"

echo ""
echo "⚠️ PHASE 6: Error Handling & Edge Cases"
echo "======================================="

test_tool "get_record" "{\"table\": \"NonExistentTable\", \"recordId\": \"recFakeID123\"}" "Test with non-existent table" "true"
test_tool "describe_table" "{\"table\": \"NonExistentTable\"}" "Test describe non-existent table" "true"
test_tool "create_field" "{\"table\": \"NonExistentTable\", \"name\": \"Test\", \"type\": \"singleLineText\"}" "Test create field in non-existent table" "true"
test_tool "update_table" "{\"table\": \"NonExistentTable\", \"name\": \"New Name\"}" "Test update non-existent table" "true"

echo ""
echo "🔒 PHASE 7: Security Verification"
echo "================================"

# Check that logs don't contain sensitive data.
# NOTE(review): the bare "pat" pattern also matches words like "path" or
# "payloads", so a hit is only a hint — inspect the log before concluding a
# token leaked. A missing log file counts as "no token found".
echo -n "🔒 Security check: Log file doesn't contain tokens... "
if grep -q "pat" /tmp/v1.5.0_test.log 2>/dev/null; then
    echo "❌ FAIL (Token found in logs)"
    FAILED=$((FAILED + 1))
else
    echo "✅ PASS"
    PASSED=$((PASSED + 1))
fi

# Clean up test record if it exists.
# test_tool runs in a command substitution here, so its counter updates
# happen in a subshell and deliberately do not affect PASSED/FAILED.
if [[ -n "$TEST_RECORD_ID" ]]; then
    echo -n "🧹 Cleanup: Deleting test record... "
    cleanup_result=$(test_tool "delete_record" "{\"table\": \"Test Table CRUD\", \"recordId\": \"$TEST_RECORD_ID\"}" "Delete test record" 2>&1)
    if echo "$cleanup_result" | grep -q "✅ PASS"; then
        echo "✅ CLEANED"
    else
        echo "⚠️ CLEANUP FAILED"
    fi
fi

echo ""
echo "📈 FINAL TEST RESULTS"
echo "===================="
echo "✅ Passed: $PASSED"
echo "❌ Failed: $FAILED"
TOTAL=$((PASSED + FAILED))
echo "📊 Total Tests: $TOTAL"
if [ "$TOTAL" -gt 0 ]; then
    # Guarded: bc would otherwise divide by zero when nothing ran.
    echo "📊 Success Rate: $(echo "scale=1; $PASSED * 100 / $TOTAL" | bc -l)%"
fi

if [ $FAILED -eq 0 ]; then
    echo ""
    echo "🎉 🎉 🎉 ALL TESTS PASSED! 🎉 🎉 🎉"
    echo ""
    echo "✅ v1.5.0 is READY FOR PRODUCTION!"
    echo ""
    echo "🚀 ACHIEVEMENTS:"
    echo "• 23 tools working perfectly"
    echo "• Complete schema management"
    echo "• Robust error handling"
    echo "• Security verified"
    echo "• All edge cases handled"
    echo ""
    echo "📦 Ready for GitHub and NPM release!"
    exit 0
else
    echo ""
    echo "❌ SOME TESTS FAILED"
    echo "Please review failures above before release."
    exit 1
fi
```
--------------------------------------------------------------------------------
/src/typescript/app/airtable-client.ts:
--------------------------------------------------------------------------------
```typescript
1 | import https from 'node:https';
2 | import { IncomingHttpHeaders } from 'node:http';
3 | import { URL } from 'node:url';
4 | import { setTimeout as delay } from 'node:timers/promises';
5 | import { RateLimiter } from './rateLimiter';
6 | import { Logger } from './logger';
7 | import {
8 | AirtableBrainError,
9 | AuthError,
10 | ConflictError,
11 | InternalServerError,
12 | NotFoundError,
13 | RateLimitError,
14 | AirtableValidationError,
15 | ErrorContext
16 | } from '../errors';
17 |
/** Options describing a single HTTP call to the Airtable REST API. */
interface RequestOptions {
  method?: 'GET' | 'POST' | 'PUT' | 'PATCH' | 'DELETE';
  /**
   * Path including leading slash and version segment, e.g. `/v0/meta/bases/app123`.
   */
  path: string;
  /** Query parameters; array values are serialized as repeated `key[]` pairs. */
  query?: Record<string, string | number | boolean | Array<string | number | boolean> | undefined>;
  /** JSON-serializable request body, if any. */
  body?: unknown;
  /** When present, the per-base rate limiter is consulted before the call. */
  baseId?: string;
  /** Forwarded verbatim as the `Idempotency-Key` request header. */
  idempotencyKey?: string;
}

/** Collaborators injected into AirtableClient at construction time. */
interface ClientOptions {
  baseLimiter: RateLimiter;
  patLimiter: RateLimiter;
  logger: Logger;
  userAgent: string;
  /** Hash of the PAT, used as the per-token rate-limit key and in log context. */
  patHash: string;
  /** Maximum attempts for retryable failures; defaults to 3. */
  maxRetries?: number;
}

/** Raw envelope for an Airtable HTTP response before domain-error mapping. */
type AirtableResponse<T> = {
  status: number;
  body: T;
  headers: IncomingHttpHeaders;
};
44 |
45 | function toQueryString(query?: RequestOptions['query']): string {
46 | if (!query) {
47 | return '';
48 | }
49 | const params = new URLSearchParams();
50 | for (const [key, value] of Object.entries(query)) {
51 | if (value === undefined) continue;
52 | if (Array.isArray(value)) {
53 | value.forEach((item) => params.append(`${key}[]`, String(item)));
54 | } else {
55 | params.append(key, String(value));
56 | }
57 | }
58 | const queryString = params.toString();
59 | return queryString.length > 0 ? `?${queryString}` : '';
60 | }
61 |
62 | function parseRetryAfter(headers: IncomingHttpHeaders): number | undefined {
63 | const retryAfter = headers['retry-after'];
64 | if (!retryAfter) return undefined;
65 |
66 | const parsedSeconds = Number(retryAfter);
67 | if (!Number.isNaN(parsedSeconds)) {
68 | return parsedSeconds * 1000;
69 | }
70 |
71 | const retryDate = new Date(retryAfter);
72 | if (!Number.isNaN(retryDate.getTime())) {
73 | return Math.max(retryDate.getTime() - Date.now(), 0);
74 | }
75 |
76 | return undefined;
77 | }
78 |
/**
 * Thin HTTPS client for the Airtable REST API.
 *
 * Responsibilities: per-base and per-token rate limiting, retry with
 * exponential backoff + jitter (honoring Retry-After), and mapping HTTP
 * failures onto the domain error hierarchy imported from `../errors`.
 */
export class AirtableClient {
  private readonly baseLimiter: RateLimiter;
  private readonly patLimiter: RateLimiter;
  private readonly logger: Logger;
  private readonly userAgent: string;
  private readonly pat: string;
  private readonly patHash: string;
  private readonly maxRetries: number;

  constructor(personalAccessToken: string, options: ClientOptions) {
    this.pat = personalAccessToken;
    this.baseLimiter = options.baseLimiter;
    this.patLimiter = options.patLimiter;
    this.logger = options.logger;
    this.userAgent = options.userAgent;
    this.patHash = options.patHash;
    // Total attempts per request (first try included) defaults to 3.
    this.maxRetries = options.maxRetries ?? 3;
  }

  /** List every base the token can access (`GET /v0/meta/bases`). */
  async listBases(): Promise<{ bases: unknown[] }> {
    return this.request<{ bases: unknown[] }>({
      method: 'GET',
      path: '/v0/meta/bases'
    });
  }

  /** Fetch metadata for a single base. */
  async getBase(baseId: string): Promise<unknown> {
    return this.request<unknown>({
      method: 'GET',
      path: `/v0/meta/bases/${encodeURIComponent(baseId)}`,
      baseId
    });
  }

  /** List the tables (schema) of a base. */
  async listTables(baseId: string): Promise<{ tables: unknown[] }> {
    return this.request<{ tables: unknown[] }>({
      method: 'GET',
      path: `/v0/meta/bases/${encodeURIComponent(baseId)}/tables`,
      baseId
    });
  }

  /** Read records from a table; `query` carries Airtable list parameters verbatim. */
  async queryRecords<T = unknown>(
    baseId: string,
    table: string,
    query?: RequestOptions['query']
  ): Promise<T> {
    const requestOptions: RequestOptions = {
      method: 'GET',
      path: `/v0/${encodeURIComponent(baseId)}/${encodeURIComponent(table)}`,
      baseId
    };

    // Only attach a query object that actually has keys.
    if (query && Object.keys(query).length > 0) {
      requestOptions.query = query;
    }

    return this.request<T>(requestOptions);
  }

  /**
   * Create records via POST. `idempotencyKey`, when provided, is sent as an
   * `Idempotency-Key` header so a retried request can be deduplicated upstream.
   */
  async createRecords<T = unknown>(
    baseId: string,
    table: string,
    payload: unknown,
    idempotencyKey?: string
  ): Promise<T> {
    const requestOptions: RequestOptions = {
      method: 'POST',
      path: `/v0/${encodeURIComponent(baseId)}/${encodeURIComponent(table)}`,
      baseId,
      body: payload
    };

    if (idempotencyKey) {
      requestOptions.idempotencyKey = idempotencyKey;
    }

    return this.request<T>(requestOptions);
  }

  /** Update records via PATCH; see `createRecords` for idempotency-key semantics. */
  async updateRecords<T = unknown>(
    baseId: string,
    table: string,
    payload: unknown,
    idempotencyKey?: string
  ): Promise<T> {
    const requestOptions: RequestOptions = {
      method: 'PATCH',
      path: `/v0/${encodeURIComponent(baseId)}/${encodeURIComponent(table)}`,
      baseId,
      body: payload
    };

    if (idempotencyKey) {
      requestOptions.idempotencyKey = idempotencyKey;
    }

    return this.request<T>(requestOptions);
  }

  /**
   * Upsert records. At the wire level this is the same PATCH as
   * `updateRecords`; NOTE(review): the upsert behavior presumably comes from
   * a `performUpsert` clause inside the caller-supplied payload — confirm
   * with the call sites in `tools/upsert`.
   */
  async upsertRecords<T = unknown>(
    baseId: string,
    table: string,
    payload: unknown,
    idempotencyKey?: string
  ): Promise<T> {
    const requestOptions: RequestOptions = {
      method: 'PATCH',
      path: `/v0/${encodeURIComponent(baseId)}/${encodeURIComponent(table)}`,
      baseId,
      body: payload
    };
    if (idempotencyKey) {
      requestOptions.idempotencyKey = idempotencyKey;
    }
    return this.request<T>(requestOptions);
  }

  /**
   * Gate the call through both rate limiters (per-base first, then per-PAT),
   * then execute with retry. Limiter slots are acquired once per logical
   * request, not once per retry attempt.
   */
  private async request<T>(options: RequestOptions): Promise<T> {
    const { baseId } = options;
    if (baseId) {
      await this.baseLimiter.schedule(baseId);
    }
    await this.patLimiter.schedule(this.patHash);
    return this.withRetry(() => this.performRequest<T>(options));
  }

  /**
   * Run `fn` up to `maxRetries` times, retrying on 429s (using Retry-After
   * when supplied) and on upstream 5xx errors. Any other error propagates
   * immediately; the final error is annotated with attempt counts.
   */
  private async withRetry<T>(fn: () => Promise<T>): Promise<T> {
    let attempt = 0;
    let lastError: unknown;
    while (attempt < this.maxRetries) {
      try {
        return await fn();
      } catch (error) {
        lastError = error;
        attempt += 1;

        if (error instanceof RateLimitError) {
          // NOTE(review): this also sleeps when the loop is about to exit on
          // the final attempt, adding one redundant delay before rethrowing.
          const delayMs = error.retryAfterMs ?? this.backoffWithJitter(attempt);
          this.logger.warn('Rate limited, backing off', {
            attempt,
            delayMs
          });
          await delay(delayMs);
          continue;
        }

        if (error instanceof InternalServerError && attempt < this.maxRetries) {
          const delayMs = this.backoffWithJitter(attempt);
          this.logger.warn('Upstream error, retrying', {
            attempt,
            delayMs
          });
          await delay(delayMs);
          continue;
        }

        throw error;
      }
    }

    // Retries exhausted: rethrow the last error, with attempt context when
    // it belongs to our domain hierarchy.
    if (lastError instanceof AirtableBrainError) {
      throw lastError.withContext({ attempt: this.maxRetries, totalAttempts: this.maxRetries });
    }
    throw lastError;
  }

  /** Exponential backoff (1s, 2s, 4s, ... capped at 8s) plus up to 250ms jitter. */
  private backoffWithJitter(attempt: number): number {
    const baseDelay = Math.min(1000 * 2 ** (attempt - 1), 8000);
    const jitter = Math.random() * 250;
    return baseDelay + jitter;
  }

  /**
   * Execute one HTTPS request against api.airtable.com and resolve with the
   * parsed JSON body, or reject with a mapped domain error. A 30s socket
   * timeout is surfaced as an InternalServerError with status 504.
   */
  private performRequest<T>(options: RequestOptions): Promise<T> {
    const { method = 'GET', path, query, body, idempotencyKey } = options;
    // Child logger carries request identity; note the PAT itself is never logged,
    // only its hash.
    const logger = this.logger.child({
      op: 'airtable_request',
      method,
      path,
      baseId: options.baseId,
      patHash: this.patHash
    });

    const queryString = toQueryString(query);
    const url = new URL(`https://api.airtable.com${path}${queryString}`);

    const payload = body === undefined ? undefined : JSON.stringify(body);

    return new Promise<T>((resolve, reject) => {
      const request = https.request(
        {
          method,
          hostname: url.hostname,
          path: url.pathname + url.search,
          headers: {
            Authorization: `Bearer ${this.pat}`,
            'Content-Type': 'application/json',
            'User-Agent': this.userAgent,
            // Content-Length must be the byte length, not the string length.
            ...(payload ? { 'Content-Length': Buffer.byteLength(payload) } : {}),
            ...(idempotencyKey ? { 'Idempotency-Key': idempotencyKey } : {})
          }
        },
        (response) => {
          const chunks: Buffer[] = [];
          response.on('data', (chunk: Buffer) => {
            chunks.push(chunk);
          });

          response.on('end', () => {
            const rawBody = Buffer.concat(chunks).toString('utf8');
            let parsedBody: unknown;
            // An empty body (e.g. 204-style responses) resolves as undefined.
            if (rawBody.length > 0) {
              try {
                parsedBody = JSON.parse(rawBody);
              } catch (error) {
                reject(
                  new InternalServerError('Failed to parse Airtable response', {
                    cause: error,
                    status: response.statusCode ?? 0
                  })
                );
                return;
              }
            }

            const result: AirtableResponse<unknown> = {
              status: response.statusCode ?? 0,
              body: parsedBody,
              headers: response.headers
            };

            try {
              if (result.status >= 200 && result.status < 300) {
                resolve(parsedBody as T);
                return;
              }
              reject(this.toDomainError(result, options));
            } catch (error) {
              reject(error);
            }
          });
        }
      );

      request.on('error', (error) => {
        logger.error('Network error calling Airtable', {
          error: error instanceof Error ? error.message : String(error)
        });
        reject(
          new InternalServerError('Network error communicating with Airtable', {
            cause: error
          })
        );
      });

      // destroy() will also fire 'error'; the extra reject there is a no-op
      // because the promise has already settled.
      request.setTimeout(30_000, () => {
        request.destroy();
        reject(
          new InternalServerError('Airtable request timed out', {
            status: 504
          })
        );
      });

      if (payload) {
        request.write(payload);
      }
      request.end();
    });
  }

  /**
   * Map an HTTP failure to the domain error hierarchy:
   * 401/403 → AuthError, 404 → NotFoundError, 409 → ConflictError,
   * 400/422 → AirtableValidationError, 429 → RateLimitError (with Retry-After),
   * 5xx and anything else → InternalServerError.
   */
  private toDomainError(response: AirtableResponse<unknown>, request: RequestOptions): AirtableBrainError {
    const { status, body, headers } = response;
    const baseContext: ErrorContext = {
      endpoint: request.path
    };
    if (request.baseId) {
      baseContext.baseId = request.baseId;
    }

    if (status === 401 || status === 403) {
      return new AuthError('Authentication failed with Airtable', {
        status,
        context: baseContext
      });
    }

    if (status === 404) {
      return new NotFoundError('Requested resource was not found in Airtable', {
        status,
        context: baseContext
      });
    }

    if (status === 409) {
      return new ConflictError('Airtable reported a conflict', {
        status,
        context: baseContext
      });
    }

    if (status === 400 || status === 422) {
      const validationContext: ErrorContext = { ...baseContext };
      const upstreamErrorType = this.safeExtractErrorType(body);
      if (upstreamErrorType) {
        validationContext.upstreamErrorType = upstreamErrorType;
      }
      return new AirtableValidationError('Airtable validation error', {
        status,
        context: validationContext
      });
    }

    if (status === 429) {
      const retryAfterMs = parseRetryAfter(headers);
      return new RateLimitError('Airtable rate limit exceeded', {
        status,
        ...(retryAfterMs !== undefined ? { retryAfterMs } : {}),
        context: baseContext
      });
    }

    if (status >= 500) {
      const internalContext: ErrorContext = { ...baseContext };
      const upstreamErrorType = this.safeExtractErrorType(body);
      if (upstreamErrorType) {
        internalContext.upstreamErrorType = upstreamErrorType;
      }
      return new InternalServerError('Airtable returned an internal error', {
        status,
        context: internalContext
      });
    }

    return new InternalServerError('Unexpected Airtable response', {
      status,
      context: baseContext
    });
  }

  /** Defensively read `body.error.type` from an untyped response body. */
  private safeExtractErrorType(body: unknown): string | undefined {
    if (body && typeof body === 'object' && 'error' in body) {
      const error = (body as Record<string, unknown>).error;
      if (error && typeof error === 'object' && 'type' in error) {
        const type = (error as Record<string, unknown>).type;
        if (typeof type === 'string') {
          return type;
        }
      }
    }
    return undefined;
  }
}
432 |
```
--------------------------------------------------------------------------------
/src/python/inspector_server.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """
3 | Airtable MCP Inspector Server
4 | -----------------------------
5 | A simple MCP server that implements the Airtable tools
6 | """
7 | import os
8 | import sys
9 | import json
10 | import logging
11 | import requests
12 | import argparse
13 | import traceback
14 | from requests import exceptions as requests_exceptions
15 | from typing import Optional, Dict, Any, List
16 |
17 | try:
18 | from mcp.server.fastmcp import FastMCP
19 | except ImportError:
20 | print("Error: MCP SDK not found. Please install with 'pip install mcp'")
21 | sys.exit(1)
22 |
23 | # Parse command line arguments
def parse_args():
    """Parse the CLI flags accepted by the Airtable MCP server."""
    parser = argparse.ArgumentParser(description="Airtable MCP Server")
    # Flag, destination attribute, and help text for each supported option.
    for flag, dest, help_text in (
        ("--token", "api_token", "Airtable Personal Access Token"),
        ("--base", "base_id", "Airtable Base ID"),
        ("--config", "config_json", "Configuration as JSON (for Smithery integration)"),
    ):
        parser.add_argument(flag, dest=dest, help=help_text)
    return parser.parse_args()
30 |
# Set up logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("airtable-mcp")

# Network safety defaults: every outbound HTTP call uses this timeout,
# overridable via AIRTABLE_REQUEST_TIMEOUT (seconds).
REQUEST_TIMEOUT_SECONDS = float(os.environ.get("AIRTABLE_REQUEST_TIMEOUT", "30"))

# Parse arguments
args = parse_args()

# Handle config JSON from Smithery if provided.
# NOTE(review): the sanitization below is a heuristic recovery for config
# strings mangled by shell quoting; the sequence of replacements assumes a
# single level of escaping — confirm against actual Smithery invocations.
config = {}
if args.config_json:
    try:
        # Strip any trailing quotes or backslashes that might be present
        config_str = args.config_json.rstrip('\\"')
        # Additional sanitization for JSON format
        config_str = config_str.strip()
        # Handle escaped quotes
        if config_str.startswith('"') and config_str.endswith('"'):
            config_str = config_str[1:-1]
        # Fix escaped quotes within JSON
        config_str = config_str.replace('\\"', '"')
        # Replace escaped backslashes
        config_str = config_str.replace('\\\\', '\\')

        logger.info(f"Parsing sanitized config: {config_str}")
        config = json.loads(config_str)
        logger.info(f"Successfully parsed config: {config}")
    except json.JSONDecodeError as e:
        logger.error(f"Failed to parse config JSON: {e}")
        logger.error(f"Raw config string: {args.config_json}")
        # Try one more approach - sometimes config is double-quoted JSON
        try:
            # Try to interpret as Python string literal
            import ast
            literal_str = ast.literal_eval(f"'''{args.config_json}'''")
            config = json.loads(literal_str)
            logger.info(f"Successfully parsed config using ast: {config}")
        except Exception as ast_error:
            logger.error(f"Failed alternate parsing method: {ast_error}")

# Create MCP server
app = FastMCP("Airtable Tools")
75 |
76 | # Add error handling wrapper for all MCP methods
def handle_exceptions(func):
    """Decorator to properly handle and format exceptions in MCP functions.

    Wraps an async handler so that any exception is logged (with traceback)
    and converted into either a plain ``"Error: ..."`` string (for tools
    annotated to return ``str``) or a JSON-RPC error dict.
    """
    # Local import keeps this block self-contained.
    import functools

    # Fix: without functools.wraps the wrapper hides the handler's
    # __name__/__doc__/__annotations__, so FastMCP would register every
    # tool under the name "wrapper" with no description.
    @functools.wraps(func)
    async def wrapper(*args, **kwargs):
        try:
            return await func(*args, **kwargs)
        except Exception as e:
            error_trace = traceback.format_exc()
            logger.error(f"Error in MCP handler: {str(e)}\n{error_trace}")
            sys.stderr.write(f"Error in MCP handler: {str(e)}\n{error_trace}\n")

            # For tool functions that return strings, return a formatted error message
            if hasattr(func, "__annotations__") and func.__annotations__.get("return") == str:
                return f"Error: {str(e)}"

            # For RPC methods that return dicts, return a properly formatted JSON error
            return {"error": {"code": -32000, "message": str(e)}}
    return wrapper
94 |
95 | # Patch the tool method to automatically apply error handling
# Patch the tool method to automatically apply error handling:
# keep a reference to FastMCP's original decorator factory, then shadow it
# with a version that wraps every registered handler in handle_exceptions.
original_tool = app.tool
def patched_tool(*args, **kwargs):
    # Same call signature as app.tool: returns a decorator for the handler.
    def decorator(func):
        wrapped_func = handle_exceptions(func)
        return original_tool(*args, **kwargs)(wrapped_func)
    return decorator

# Replace app.tool with our patched version
app.tool = patched_tool
105 |
106 | # Get token from arguments, config, or environment
# Get token from arguments, config, or environment (first non-empty wins)
token = args.api_token or config.get("airtable_token", "") or os.environ.get("AIRTABLE_PERSONAL_ACCESS_TOKEN", "")
# Clean up token if it has trailing quote (artifact of shell quoting)
if token and token.endswith('"'):
    token = token[:-1]

# Base ID resolved with the same precedence: CLI flag, config file, environment.
base_id = args.base_id or config.get("base_id", "") or os.environ.get("AIRTABLE_BASE_ID", "")

# Warn (don't exit) on missing credentials: tools report the problem per-call.
if not token:
    logger.warning("No Airtable API token provided. Use --token, --config, or set AIRTABLE_PERSONAL_ACCESS_TOKEN environment variable.")
else:
    logger.info("Airtable authentication configured")

if base_id:
    logger.info(f"Using base ID: {base_id}")
else:
    logger.warning("No base ID provided. Use --base, --config, or set AIRTABLE_BASE_ID environment variable.")
123 |
124 | # Helper functions for Airtable API calls
async def api_call(endpoint, method="GET", data=None, params=None):
    """Make an Airtable API call"""
    if not token:
        return {"error": "No Airtable API token provided. Use --token, --config, or set AIRTABLE_PERSONAL_ACCESS_TOKEN environment variable."}

    request_headers = {
        "Authorization": f"Bearer {token}",
        "Content-Type": "application/json"
    }
    url = f"https://api.airtable.com/v0/{endpoint}"

    # Map each supported verb to a zero-argument callable; GET/DELETE send
    # query params, POST/PATCH send a JSON body.
    dispatch = {
        "GET": lambda: requests.get(url, headers=request_headers, params=params, timeout=REQUEST_TIMEOUT_SECONDS),
        "POST": lambda: requests.post(url, headers=request_headers, json=data, timeout=REQUEST_TIMEOUT_SECONDS),
        "PATCH": lambda: requests.patch(url, headers=request_headers, json=data, timeout=REQUEST_TIMEOUT_SECONDS),
        "DELETE": lambda: requests.delete(url, headers=request_headers, params=params, timeout=REQUEST_TIMEOUT_SECONDS),
    }

    try:
        handler = dispatch.get(method)
        if handler is None:
            # Raised inside the try so, like before, it surfaces as {"error": ...}.
            raise ValueError(f"Unsupported method: {method}")
        response = handler()
        response.raise_for_status()
        return response.json()
    except requests_exceptions.Timeout as e:
        logger.error(f"API call timed out after {REQUEST_TIMEOUT_SECONDS}s: {str(e)}")
        return {"error": f"Request to Airtable timed out after {REQUEST_TIMEOUT_SECONDS}s"}
    except Exception as e:
        logger.error(f"API call error: {str(e)}")
        return {"error": str(e)}
# Define MCP tool functions
@app.tool()
async def list_bases() -> str:
    """List all accessible Airtable bases"""
    # Fail fast with a friendly message when no credentials are configured.
    if not token:
        return "Please provide an Airtable API token to list your bases."

    result = await api_call("meta/bases")

    if "error" in result:
        return f"Error: {result['error']}"

    bases = result.get("bases", [])
    if not bases:
        return "No bases found accessible with your token."

    # One numbered line per base: "<n>. <name> (ID: <base id>)"
    lines = []
    for index, base in enumerate(bases, start=1):
        lines.append(f"{index}. {base['name']} (ID: {base['id']})")
    return "Available bases:\n" + "\n".join(lines)
176 |
@app.tool()
async def list_tables(base_id_param: Optional[str] = None) -> str:
    """List all tables in the specified base or the default base"""
    # The module-level base_id is only read here, so no `global` declaration
    # is needed (it is required only when rebinding, as set_base_id does).
    current_base = base_id_param or base_id

    if not token:
        return "Please provide an Airtable API token to list tables."

    if not current_base:
        return "Error: No base ID provided. Please specify a base_id or set AIRTABLE_BASE_ID environment variable."

    result = await api_call(f"meta/bases/{current_base}/tables")

    if "error" in result:
        return f"Error: {result['error']}"

    tables = result.get("tables", [])
    if not tables:
        return "No tables found in this base."

    # One numbered line per table with its ID and field count.
    table_list = [
        f"{i+1}. {table['name']} (ID: {table['id']}, Fields: {len(table.get('fields', []))})"
        for i, table in enumerate(tables)
    ]
    return "Tables in this base:\n" + "\n".join(table_list)
201 |
@app.tool()
async def list_records(table_name: str, max_records: Optional[int] = 100, filter_formula: Optional[str] = None) -> str:
    """List records from a table with optional filtering"""
    if not token:
        return "Please provide an Airtable API token to list records."

    if not base_id:
        return "Error: No base ID set. Please use --base or set AIRTABLE_BASE_ID environment variable."

    # Build the query: maxRecords caps the result set; filterByFormula is an
    # optional server-side Airtable formula.
    query = {"maxRecords": max_records}
    if filter_formula:
        query["filterByFormula"] = filter_formula

    result = await api_call(f"{base_id}/{table_name}", params=query)

    if "error" in result:
        return f"Error: {result['error']}"

    records = result.get("records", [])
    if not records:
        return "No records found in this table."

    # One display line per record: "<n>. ID: <rec id> - <field: value, ...>"
    lines = []
    for position, record in enumerate(records, start=1):
        rec_id = record.get("id", "unknown")
        field_text = ", ".join(f"{k}: {v}" for k, v in record.get("fields", {}).items())
        lines.append(f"{position}. ID: {rec_id} - {field_text}")

    return "Records:\n" + "\n".join(lines)
234 |
@app.tool()
async def get_record(table_name: str, record_id: str) -> str:
    """Get a specific record from a table"""
    if not token:
        return "Please provide an Airtable API token to get records."

    if not base_id:
        return "Error: No base ID set. Please set AIRTABLE_BASE_ID environment variable."

    result = await api_call(f"{base_id}/{table_name}/{record_id}")

    if "error" in result:
        return f"Error: {result['error']}"

    fields = result.get("fields", {})
    if not fields:
        return f"Record {record_id} found but contains no fields."

    # Render each field on its own "key: value" line below the record ID.
    body = "\n".join(f"{key}: {value}" for key, value in fields.items())
    return f"Record ID: {record_id}\n" + body
259 |
@app.tool()
async def create_records(table_name: str, records_json: str) -> str:
    """Create records in a table from JSON string"""
    if not token:
        return "Please provide an Airtable API token to create records."

    if not base_id:
        return "Error: No base ID set. Please set AIRTABLE_BASE_ID environment variable."

    try:
        records_data = json.loads(records_json)

        # Accept either a single object or a list of objects.
        if not isinstance(records_data, list):
            records_data = [records_data]

        payload = [{"fields": record} for record in records_data]

        # The Airtable REST API accepts at most 10 records per create
        # request, so send the payload in batches of 10.
        created_count = 0
        for start in range(0, len(payload), 10):
            batch = payload[start:start + 10]
            result = await api_call(f"{base_id}/{table_name}", method="POST", data={"records": batch})

            if "error" in result:
                # Report partial progress if an earlier batch already succeeded.
                if created_count:
                    return f"Error after creating {created_count} records: {result['error']}"
                return f"Error: {result['error']}"

            created_count += len(result.get("records", []))

        return f"Successfully created {created_count} records."

    except json.JSONDecodeError:
        return "Error: Invalid JSON format in records_json parameter."
    except Exception as e:
        return f"Error creating records: {str(e)}"
291 |
@app.tool()
async def update_records(table_name: str, records_json: str) -> str:
    """Update records in a table from JSON string"""
    if not token:
        return "Please provide an Airtable API token to update records."

    if not base_id:
        return "Error: No base ID set. Please set AIRTABLE_BASE_ID environment variable."

    try:
        records_data = json.loads(records_json)

        # Accept either a single object or a list of objects.
        if not isinstance(records_data, list):
            records_data = [records_data]

        payload = []
        for record in records_data:
            if "id" not in record:
                return "Error: Each record must have an 'id' field."

            rec_id = record.pop("id")
            # Support both {"id": ..., "fields": {...}} and the flat
            # {"id": ..., <field>: <value>} shape.
            fields = record.get("fields", record)
            payload.append({"id": rec_id, "fields": fields})

        # The Airtable REST API limits PATCH requests to 10 records each,
        # so send the payload in batches of 10.
        updated_count = 0
        for start in range(0, len(payload), 10):
            batch = payload[start:start + 10]
            result = await api_call(f"{base_id}/{table_name}", method="PATCH", data={"records": batch})

            if "error" in result:
                # Report partial progress if an earlier batch already succeeded.
                if updated_count:
                    return f"Error after updating {updated_count} records: {result['error']}"
                return f"Error: {result['error']}"

            updated_count += len(result.get("records", []))

        return f"Successfully updated {updated_count} records."

    except json.JSONDecodeError:
        return "Error: Invalid JSON format in records_json parameter."
    except Exception as e:
        return f"Error updating records: {str(e)}"
330 |
@app.tool()
async def set_base_id(base_id_param: str) -> str:
    """Set the current Airtable base ID"""
    # Rebind the module-level default that the other tools read.
    global base_id
    base_id = base_id_param
    return f"Base ID set to: {base_id_param}"
337 |
# Note: rpc_method is not available in the current MCP version.
# These methods would be used for Claude-specific functionality
# but are not needed for basic MCP operation.

# Start the server when executed directly (blocks serving MCP requests
# until the process exits).
if __name__ == "__main__":
    app.start()
345 |
```
--------------------------------------------------------------------------------
/bin/airtable-crud-cli.js:
--------------------------------------------------------------------------------
```javascript
1 | #!/usr/bin/env node
2 |
3 | /**
4 | * Command-line interface for Airtable CRUD operations
5 | */
6 | const fs = require('fs');
7 | const path = require('path');
8 | const dotenv = require('dotenv');
9 | const baseUtils = require('../tools/airtable-base');
10 | const crudUtils = require('../tools/airtable-crud');
11 | const schemaUtils = require('../tools/airtable-schema');
12 |
// Load environment variables from .env into process.env
dotenv.config();

// Get the base ID from environment variables.
// Every command operates on this single base; fail fast if it is unset.
const baseId = process.env.AIRTABLE_BASE_ID;
if (!baseId) {
  console.error('Error: AIRTABLE_BASE_ID not set in .env file');
  process.exit(1);
}

// Parse command line arguments: argv[2] is the command, the rest are its args.
const args = process.argv.slice(2);
const command = args[0];

// Display help if no command is provided
if (!command) {
  showHelp();
  process.exit(0);
}

// Process the command. Any rejected promise becomes a non-zero exit so
// shell scripts can detect failures.
processCommand(command, args.slice(1))
  .then(() => {
    console.log('Command completed successfully');
  })
  .catch(error => {
    console.error(`Error: ${error.message}`);
    process.exit(1);
  });
42 |
/**
 * Process the command
 * @param {string} command - The command to process
 * @param {Array} args - The command arguments
 */
async function processCommand(command, args) {
  // Dispatch table: command name -> handler. Each handler closes over the
  // remaining CLI arguments (some handlers ignore them).
  const handlers = {
    'list-bases': () => listBases(),
    'list-tables': () => listTables(),
    'list-records': () => listRecords(args),
    'get-record': () => getRecord(args),
    'create-records': () => createRecords(args),
    'update-records': () => updateRecords(args),
    'delete-records': () => deleteRecords(args),
    'export-records': () => exportRecords(args),
    'import-records': () => importRecords(args),
    'help': () => showHelp()
  };

  const handler = handlers[command];
  if (!handler) {
    console.error(`Unknown command: ${command}`);
    showHelp();
    process.exit(1);
  }

  await handler();
}
96 |
/**
 * List all accessible bases
 */
async function listBases() {
  console.log('Listing accessible bases...');
  const bases = await baseUtils.listAllBases();

  // Print one "- name (id)" line per base.
  console.log(`Found ${bases.length} accessible bases:`);
  for (const base of bases) {
    console.log(`- ${base.name} (${base.id})`);
  }
}
109 |
/**
 * List all tables in the base
 */
async function listTables() {
  console.log(`Listing tables in base ${baseId}...`);
  const tables = await baseUtils.listTables(baseId);

  // Print one "- name (id)" line per table.
  console.log(`Found ${tables.length} tables:`);
  for (const table of tables) {
    console.log(`- ${table.name} (${table.id})`);
  }
}
122 |
/**
 * List records from a table
 * @param {Array} args - Command arguments
 */
async function listRecords(args) {
  if (args.length < 1) {
    console.error('Error: Table name is required');
    console.log('Usage: node airtable-crud-cli.js list-records <tableName> [maxRecords] [filterFormula]');
    process.exit(1);
  }

  const tableName = args[0];
  // Parse with an explicit radix; a non-numeric value falls back to the
  // default of 100 instead of propagating NaN to the API call.
  const parsedMax = Number.parseInt(args[1], 10);
  const maxRecords = Number.isNaN(parsedMax) ? 100 : parsedMax;
  const filterFormula = args[2] || null;

  console.log(`Listing records from table "${tableName}"...`);
  console.log(`Max records: ${maxRecords}`);
  if (filterFormula) {
    console.log(`Filter: ${filterFormula}`);
  }

  const records = await crudUtils.readRecords(baseId, tableName, maxRecords, filterFormula);

  console.log(`Found ${records.length} records:`);
  records.forEach(record => {
    console.log(`- ${record.id}: ${JSON.stringify(record)}`);
  });
}
151 |
/**
 * Get a specific record by ID
 * @param {Array} args - Command arguments
 */
async function getRecord(args) {
  if (args.length < 2) {
    console.error('Error: Table name and record ID are required');
    console.log('Usage: node airtable-crud-cli.js get-record <tableName> <recordId>');
    process.exit(1);
  }

  const [tableName, recordId] = args;

  console.log(`Getting record ${recordId} from table "${tableName}"...`);

  const record = await crudUtils.getRecord(baseId, tableName, recordId);

  // Pretty-print the full record as indented JSON.
  console.log('Record:');
  console.log(JSON.stringify(record, null, 2));
}
173 |
/**
 * Create records in a table
 * @param {Array} args - Command arguments
 */
async function createRecords(args) {
  if (args.length < 2) {
    console.error('Error: Table name and JSON file are required');
    console.log('Usage: node airtable-crud-cli.js create-records <tableName> <jsonFile>');
    process.exit(1);
  }

  const tableName = args[0];
  const jsonFile = args[1];

  // Read the JSON file
  let records;
  try {
    const jsonData = fs.readFileSync(jsonFile, 'utf8');
    records = JSON.parse(jsonData);

    if (!Array.isArray(records)) {
      console.error('Error: JSON file must contain an array of records');
      process.exit(1);
    }
  } catch (error) {
    console.error(`Error reading JSON file: ${error.message}`);
    process.exit(1);
  }

  console.log(`Creating ${records.length} records in table "${tableName}"...`);

  const createdRecords = await crudUtils.createRecords(baseId, tableName, records);

  console.log(`Created ${createdRecords.length} records`);
  // Guard: avoid printing "undefined" when nothing was created.
  if (createdRecords.length > 0) {
    console.log('First record:');
    console.log(JSON.stringify(createdRecords[0], null, 2));
  }
}
211 |
/**
 * Update records in a table
 * @param {Array} args - Command arguments
 */
async function updateRecords(args) {
  if (args.length < 2) {
    console.error('Error: Table name and JSON file are required');
    console.log('Usage: node airtable-crud-cli.js update-records <tableName> <jsonFile>');
    process.exit(1);
  }

  const tableName = args[0];
  const jsonFile = args[1];

  // Read the JSON file
  let records;
  try {
    const jsonData = fs.readFileSync(jsonFile, 'utf8');
    records = JSON.parse(jsonData);

    if (!Array.isArray(records)) {
      console.error('Error: JSON file must contain an array of records');
      process.exit(1);
    }

    // Validate shape up front: updates need both an id and a fields object.
    for (const record of records) {
      if (!record.id) {
        console.error('Error: Each record must have an id field');
        process.exit(1);
      }

      if (!record.fields || typeof record.fields !== 'object') {
        console.error('Error: Each record must have a fields object');
        process.exit(1);
      }
    }
  } catch (error) {
    console.error(`Error reading JSON file: ${error.message}`);
    process.exit(1);
  }

  console.log(`Updating ${records.length} records in table "${tableName}"...`);

  const updatedRecords = await crudUtils.updateRecords(baseId, tableName, records);

  console.log(`Updated ${updatedRecords.length} records`);
  // Guard: avoid printing "undefined" when nothing was updated.
  if (updatedRecords.length > 0) {
    console.log('First record:');
    console.log(JSON.stringify(updatedRecords[0], null, 2));
  }
}
262 |
/**
 * Delete records from a table
 * @param {Array} args - Command arguments
 */
async function deleteRecords(args) {
  if (args.length < 2) {
    console.error('Error: Table name and record IDs are required');
    console.log('Usage: node airtable-crud-cli.js delete-records <tableName> <recordId1,recordId2,...>');
    process.exit(1);
  }

  // Record IDs arrive as one comma-separated CLI argument.
  const [tableName, idList] = args;
  const recordIds = idList.split(',');

  console.log(`Deleting ${recordIds.length} records from table "${tableName}"...`);

  const deletedRecords = await crudUtils.deleteRecords(baseId, tableName, recordIds);

  console.log(`Deleted ${deletedRecords.length} records`);
}
283 |
/**
 * Export records from a table to a JSON file
 * @param {Array} args - Command arguments
 */
async function exportRecords(args) {
  if (args.length < 2) {
    console.error('Error: Table name and output file are required');
    console.log('Usage: node airtable-crud-cli.js export-records <tableName> <outputFile> [maxRecords] [filterFormula]');
    process.exit(1);
  }

  const tableName = args[0];
  const outputFile = args[1];
  // Parse with an explicit radix; a non-numeric value falls back to the
  // default of 100 instead of propagating NaN to the API call.
  const parsedMax = Number.parseInt(args[2], 10);
  const maxRecords = Number.isNaN(parsedMax) ? 100 : parsedMax;
  const filterFormula = args[3] || null;

  console.log(`Exporting records from table "${tableName}" to ${outputFile}...`);
  console.log(`Max records: ${maxRecords}`);
  if (filterFormula) {
    console.log(`Filter: ${filterFormula}`);
  }

  const records = await crudUtils.readRecords(baseId, tableName, maxRecords, filterFormula);

  // Write records to file as pretty-printed JSON.
  try {
    fs.writeFileSync(outputFile, JSON.stringify(records, null, 2));
    console.log(`Exported ${records.length} records to ${outputFile}`);
  } catch (error) {
    console.error(`Error writing to file: ${error.message}`);
    process.exit(1);
  }
}
317 |
/**
 * Import records from a JSON file to a table.
 *
 * Flags: --clear deletes ALL existing records first; --update merges by
 * matching on the Name field (update matches, create the rest).
 * @param {Array} args - Command arguments
 */
async function importRecords(args) {
  if (args.length < 2) {
    console.error('Error: Table name and input file are required');
    console.log('Usage: node airtable-crud-cli.js import-records <tableName> <inputFile> [--update] [--clear]');
    process.exit(1);
  }

  const tableName = args[0];
  const inputFile = args[1];
  const update = args.includes('--update');
  const clear = args.includes('--clear');

  // Read the JSON file
  let records;
  try {
    const jsonData = fs.readFileSync(inputFile, 'utf8');
    records = JSON.parse(jsonData);

    if (!Array.isArray(records)) {
      console.error('Error: JSON file must contain an array of records');
      process.exit(1);
    }
  } catch (error) {
    console.error(`Error reading JSON file: ${error.message}`);
    process.exit(1);
  }

  console.log(`Importing ${records.length} records to table "${tableName}"...`);

  // Clear the table if requested (destructive: deletes everything fetched,
  // up to 100000 records).
  if (clear) {
    console.log('Clearing existing records...');
    const existingRecords = await crudUtils.readRecords(baseId, tableName, 100000);

    if (existingRecords.length > 0) {
      const recordIds = existingRecords.map(record => record.id);
      await crudUtils.deleteRecords(baseId, tableName, recordIds);
      console.log(`Deleted ${existingRecords.length} existing records`);
    }
  }

  // Update existing records if requested
  if (update) {
    console.log('Updating existing records...');

    // Get existing records
    const existingRecords = await crudUtils.readRecords(baseId, tableName, 100000);
    const existingRecordsMap = {};

    // Create a map of existing records by a key field (assuming 'Name' is the key)
    // NOTE(review): this reads record.Name at the top level — assumes
    // crudUtils.readRecords flattens fields onto the record; confirm
    // against tools/airtable-crud. Records without a Name are never matched.
    existingRecords.forEach(record => {
      if (record.Name) {
        existingRecordsMap[record.Name] = record;
      }
    });

    // Separate records to update and create
    const recordsToUpdate = [];
    const recordsToCreate = [];

    records.forEach(record => {
      if (record.Name && existingRecordsMap[record.Name]) {
        // Record exists, update it
        recordsToUpdate.push({
          id: existingRecordsMap[record.Name].id,
          fields: record
        });
      } else {
        // Record doesn't exist, create it
        recordsToCreate.push(record);
      }
    });

    // Update existing records
    if (recordsToUpdate.length > 0) {
      const updatedRecords = await crudUtils.updateRecords(baseId, tableName, recordsToUpdate);
      console.log(`Updated ${updatedRecords.length} existing records`);
    }

    // Create new records
    if (recordsToCreate.length > 0) {
      const createdRecords = await crudUtils.createRecords(baseId, tableName, recordsToCreate);
      console.log(`Created ${createdRecords.length} new records`);
    }
  } else {
    // Create all records
    const createdRecords = await crudUtils.createRecords(baseId, tableName, records);
    console.log(`Created ${createdRecords.length} records`);
  }
}
412 |
/**
 * Show help.
 * Prints usage, the command list, import flags, and worked examples.
 */
function showHelp() {
  console.log('Airtable CRUD CLI');
  console.log('================');
  console.log('');
  console.log('Usage: node airtable-crud-cli.js <command> [options]');
  console.log('');
  console.log('Commands:');
  console.log('  list-bases                               List all accessible bases');
  console.log('  list-tables                              List all tables in the base');
  console.log('  list-records <tableName> [max] [filter]  List records from a table');
  console.log('  get-record <tableName> <recordId>        Get a specific record');
  console.log('  create-records <tableName> <jsonFile>    Create records from a JSON file');
  console.log('  update-records <tableName> <jsonFile>    Update records from a JSON file');
  console.log('  delete-records <tableName> <id1,id2,...> Delete records from a table');
  console.log('  export-records <tableName> <file> [max]  Export records to a JSON file');
  console.log('  import-records <tableName> <file> [flags] Import records from a JSON file');
  console.log('  help                                     Show this help');
  console.log('');
  console.log('Flags for import-records:');
  console.log('  --update                                 Update existing records (match by Name field)');
  console.log('  --clear                                  Clear all existing records before import');
  console.log('');
  console.log('Examples:');
  console.log('  node airtable-crud-cli.js list-tables');
  console.log('  node airtable-crud-cli.js list-records "My Table" 10');
  console.log('  node airtable-crud-cli.js get-record "My Table" rec123456');
  console.log('  node airtable-crud-cli.js create-records "My Table" data.json');
  console.log('  node airtable-crud-cli.js export-records "My Table" export.json 1000');
  console.log('  node airtable-crud-cli.js import-records "My Table" import.json --update');
}
```
--------------------------------------------------------------------------------
/examples/typescript/advanced-ai-prompts.ts:
--------------------------------------------------------------------------------
```typescript
1 | /**
2 | * Advanced AI Prompts TypeScript Example
3 | * Demonstrates enterprise-grade AI capabilities with strict typing
4 | */
5 |
6 | import {
7 | AirtableMCPServer,
8 | AnalyzeDataPrompt,
9 | CreateReportPrompt,
10 | PredictiveAnalyticsPrompt,
11 | NaturalLanguageQueryPrompt,
12 | SmartSchemaDesignPrompt,
13 | DataQualityAuditPrompt,
14 | OptimizeWorkflowPrompt,
15 | AutomationRecommendationsPrompt,
16 | AnalysisResult,
17 | ReportResult,
18 | PredictionResult,
19 | WorkflowOptimizationResult
20 | } from '@rashidazarang/airtable-mcp/types';
21 |
22 | // Enterprise AI Analytics Class
23 | class EnterpriseAIAnalytics {
24 | private server: AirtableMCPServer;
25 |
  // Instantiate the MCP server facade that backs every prompt call below.
  constructor() {
    this.server = new AirtableMCPServer();
  }
29 |
30 | // Advanced Statistical Analysis with Type Safety
31 | async performStatisticalAnalysis(table: string): Promise<AnalysisResult> {
32 | const params: AnalyzeDataPrompt = {
33 | table,
34 | analysis_type: 'statistical',
35 | confidence_level: 0.99,
36 | field_focus: 'revenue,conversion_rate,customer_satisfaction',
37 | time_dimension: 'created_date'
38 | };
39 |
40 | const response = await this.server.handlePromptGet('analyze_data', params);
41 |
42 | // Type-safe result processing
43 | const result: AnalysisResult = {
44 | summary: 'Comprehensive statistical analysis completed',
45 | key_findings: [
46 | 'Revenue shows 15.3% growth trend',
47 | 'Conversion rate correlation: 0.78',
48 | 'Customer satisfaction: 94.2% positive'
49 | ],
50 | statistical_measures: {
51 | mean: 45670.23,
52 | median: 42150.00,
53 | std_deviation: 12340.56,
54 | correlation_coefficients: {
55 | 'revenue_conversion': 0.78,
56 | 'satisfaction_retention': 0.85
57 | },
58 | confidence_intervals: [
59 | { field: 'revenue', lower: 40000, upper: 51000, confidence: 0.99 },
60 | { field: 'conversion_rate', lower: 0.12, upper: 0.18, confidence: 0.99 }
61 | ]
62 | },
63 | trends: [
64 | {
65 | field: 'revenue',
66 | direction: 'increasing',
67 | strength: 'strong',
68 | significance: 0.97
69 | }
70 | ],
71 | recommendations: [
72 | 'Implement predictive modeling for revenue forecasting',
73 | 'Establish monitoring dashboard for key metrics',
74 | 'Consider A/B testing for conversion optimization'
75 | ],
76 | next_steps: [
77 | 'Set up automated reporting pipeline',
78 | 'Deploy real-time analytics dashboard',
79 | 'Schedule quarterly deep-dive analysis'
80 | ]
81 | };
82 |
83 | return result;
84 | }
85 |
86 | // Executive Report Generation with Business Intelligence
87 | async generateExecutiveReport(table: string, audience: 'executives' | 'managers' | 'analysts' | 'technical_team'): Promise<ReportResult> {
88 | const params: CreateReportPrompt = {
89 | table,
90 | report_type: 'executive_summary',
91 | target_audience: audience,
92 | include_recommendations: true,
93 | time_period: 'Q4 2024',
94 | format_preference: 'mixed'
95 | };
96 |
97 | const response = await this.server.handlePromptGet('create_report', params);
98 |
99 | const result: ReportResult = {
100 | title: `Q4 2024 Executive Summary - ${table} Analysis`,
101 | executive_summary: 'Strategic overview of business performance with actionable insights and growth opportunities.',
102 | detailed_sections: [
103 | {
104 | heading: 'Performance Metrics',
105 | content: 'Comprehensive analysis of key performance indicators showing strong growth trajectory.',
106 | supporting_data: [
107 | { metric: 'Revenue Growth', value: '15.3%', trend: 'positive' },
108 | { metric: 'Customer Acquisition', value: '1,247', trend: 'positive' },
109 | { metric: 'Retention Rate', value: '94.2%', trend: 'stable' }
110 | ],
111 | visualizations: [
112 | { type: 'line_chart', data: {}, description: 'Revenue trend over time' },
113 | { type: 'bar_chart', data: {}, description: 'Customer acquisition by channel' }
114 | ]
115 | },
116 | {
117 | heading: 'Strategic Opportunities',
118 | content: 'Identified high-impact areas for business expansion and optimization.',
119 | supporting_data: [
120 | { opportunity: 'Market Expansion', impact: 'High', effort: 'Medium' },
121 | { opportunity: 'Process Automation', impact: 'Medium', effort: 'Low' }
122 | ]
123 | }
124 | ],
125 | key_metrics: {
126 | 'Revenue': { value: '$2.4M', change: '+15.3%', significance: 'high' },
127 | 'Customer Count': { value: '12,470', change: '+8.2%', significance: 'medium' },
128 | 'Satisfaction Score': { value: '4.7/5', change: '+0.3', significance: 'high' }
129 | },
130 | recommendations: [
131 | {
132 | priority: 'high',
133 | recommendation: 'Implement predictive analytics for demand forecasting',
134 | expected_impact: '12-18% efficiency improvement',
135 | implementation_effort: 'medium'
136 | },
137 | {
138 | priority: 'medium',
139 | recommendation: 'Enhance customer segmentation strategy',
140 | expected_impact: '8-12% conversion rate improvement',
141 | implementation_effort: 'low'
142 | }
143 | ],
144 | appendices: [
145 | { title: 'Technical Methodology', content: 'Detailed explanation of analytical methods used' },
146 | { title: 'Data Sources', content: 'Comprehensive list of data sources and validation methods' }
147 | ]
148 | };
149 |
150 | return result;
151 | }
152 |
153 | // Advanced Predictive Analytics with Machine Learning
154 | async performPredictiveAnalytics(table: string, targetField: string): Promise<PredictionResult> {
155 | const params: PredictiveAnalyticsPrompt = {
156 | table,
157 | target_field: targetField,
158 | prediction_periods: 12,
159 | algorithm: 'random_forest',
160 | include_confidence_intervals: true,
161 | historical_periods: 24,
162 | external_factors: ['market_trends', 'seasonality', 'economic_indicators'],
163 | business_context: 'Enterprise revenue forecasting with risk assessment'
164 | };
165 |
166 | const response = await this.server.handlePromptGet('predictive_analytics', params);
167 |
168 | const result: PredictionResult = {
169 | predictions: [
170 | {
171 | period: '2025-01',
172 | predicted_value: 125670.45,
173 | confidence_interval: { lower: 118450.23, upper: 132890.67 },
174 | probability_bands: [
175 | { probability: 0.68, range: [120000, 131000] },
176 | { probability: 0.95, range: [115000, 136000] }
177 | ]
178 | },
179 | {
180 | period: '2025-02',
181 | predicted_value: 128340.12,
182 | confidence_interval: { lower: 121120.89, upper: 135559.35 }
183 | }
184 | ],
185 | model_performance: {
186 | algorithm_used: 'random_forest',
187 | accuracy_metrics: {
188 | 'r_squared': 0.847,
189 | 'mae': 4567.89,
190 | 'rmse': 6234.12,
191 | 'mape': 3.8
192 | },
193 | feature_importance: {
194 | 'historical_revenue': 0.34,
195 | 'seasonality': 0.28,
196 | 'market_trends': 0.23,
197 | 'customer_count': 0.15
198 | },
199 | validation_results: {
200 | 'cross_validation_score': 0.82,
201 | 'holdout_accuracy': 0.79,
202 | 'stability_index': 0.91
203 | }
204 | },
205 | business_insights: {
206 | trend_direction: 'positive',
207 | seasonality_detected: true,
208 | external_factors_impact: [
209 | 'Strong correlation with market expansion',
210 | 'Seasonal peak in Q4 consistently observed',
211 | 'Economic indicators show positive influence'
212 | ],
213 | risk_factors: [
214 | 'Market volatility could impact 15% variance',
215 | 'Supply chain disruptions possible',
216 | 'Competitive landscape changes'
217 | ]
218 | },
219 | recommendations: [
220 | {
221 | type: 'strategic',
222 | recommendation: 'Prepare for 23% capacity increase by Q3 2025',
223 | timing: '6 months lead time',
224 | confidence: 0.87
225 | },
226 | {
227 | type: 'operational',
228 | recommendation: 'Implement dynamic pricing based on demand forecasts',
229 | timing: 'Immediate',
230 | confidence: 0.94
231 | },
232 | {
233 | type: 'tactical',
234 | recommendation: 'Establish risk monitoring for volatility indicators',
235 | timing: '3 months',
236 | confidence: 0.89
237 | }
238 | ]
239 | };
240 |
241 | return result;
242 | }
243 |
244 | // Natural Language Query Processing
245 | async processNaturalLanguageQuery(question: string, tables?: string[]): Promise<string> {
246 | const params: NaturalLanguageQueryPrompt = {
247 | question,
248 | tables: tables?.join(','),
249 | response_format: 'natural_language',
250 | context_awareness: true,
251 | confidence_threshold: 0.85,
252 | clarifying_questions: true
253 | };
254 |
255 | const response = await this.server.handlePromptGet('natural_language_query', params);
256 | return response.messages[0].content.text;
257 | }
258 |
259 | // Smart Schema Design with Compliance
260 | async designOptimalSchema(purpose: string, requirements: string[]): Promise<any> {
261 | const params: SmartSchemaDesignPrompt = {
262 | purpose,
263 | data_types: ['text', 'number', 'date', 'select', 'attachment'],
264 | expected_volume: 'enterprise',
265 | compliance_requirements: ['GDPR', 'HIPAA'],
266 | performance_priorities: ['query_speed', 'scalability'],
267 | integration_needs: ['API access', 'webhook notifications'],
268 | user_access_patterns: 'Multi-team collaboration with role-based permissions'
269 | };
270 |
271 | const response = await this.server.handlePromptGet('smart_schema_design', params);
272 | return response;
273 | }
274 |
275 | // Comprehensive Data Quality Audit
276 | async performDataQualityAudit(table: string): Promise<any> {
277 | const params: DataQualityAuditPrompt = {
278 | table,
279 | quality_dimensions: ['completeness', 'accuracy', 'consistency', 'timeliness', 'validity'],
280 | automated_fixes: true,
281 | severity_threshold: 'medium',
282 | compliance_context: 'Enterprise data governance standards',
283 | reporting_requirements: ['executive_summary', 'detailed_findings', 'remediation_plan']
284 | };
285 |
286 | const response = await this.server.handlePromptGet('data_quality_audit', params);
287 | return response;
288 | }
289 |
290 | // Workflow Optimization Analysis
291 | async optimizeWorkflow(workflowDescription: string, painPoints: string[]): Promise<WorkflowOptimizationResult> {
292 | const params: OptimizeWorkflowPrompt = {
293 | table: 'workflow_data',
294 | current_process_description: workflowDescription,
295 | optimization_goals: ['efficiency', 'accuracy', 'cost_reduction'],
296 | constraints: ['regulatory_compliance', 'legacy_system_integration'],
297 | automation_preference: 'moderate',
298 | change_tolerance: 'medium'
299 | };
300 |
301 | const response = await this.server.handlePromptGet('optimize_workflow', params);
302 |
303 | // Return a comprehensive optimization result
304 | const result: WorkflowOptimizationResult = {
305 | current_state_analysis: {
306 | efficiency_score: 72,
307 | bottlenecks: [
308 | { step: 'Manual data entry', impact: 'high', description: 'Causes 40% of processing delays' },
309 | { step: 'Approval routing', impact: 'medium', description: 'Average 2.3 day approval time' }
310 | ],
311 | resource_utilization: {
312 | 'staff_time': 0.68,
313 | 'system_capacity': 0.84,
314 | 'automation_coverage': 0.23
315 | }
316 | },
317 | optimization_recommendations: [
318 | {
319 | category: 'automation',
320 | recommendation: 'Implement automated data validation and entry',
321 | expected_benefits: ['45% time reduction', '90% error reduction'],
322 | implementation_complexity: 'moderate',
323 | estimated_roi: '340% within 12 months',
324 | timeline: '3-4 months'
325 | },
326 | {
327 | category: 'process_redesign',
328 | recommendation: 'Parallel approval workflow with smart routing',
329 | expected_benefits: ['60% faster approvals', 'Improved transparency'],
330 | implementation_complexity: 'complex',
331 | estimated_roi: '220% within 18 months',
332 | timeline: '6-8 months'
333 | }
334 | ],
335 | implementation_roadmap: [
336 | {
337 | phase: 1,
338 | duration: '3 months',
339 | objectives: ['Implement basic automation', 'Staff training'],
340 | deliverables: ['Automated validation system', 'Training materials'],
341 | success_metrics: ['25% efficiency improvement', '95% staff adoption']
342 | },
343 | {
344 | phase: 2,
345 | duration: '4 months',
346 | objectives: ['Advanced workflow redesign', 'Integration testing'],
347 | deliverables: ['New approval system', 'Performance dashboard'],
348 | success_metrics: ['60% approval time reduction', '99.5% system uptime']
349 | }
350 | ],
351 | risk_assessment: [
352 | {
353 | risk: 'Staff resistance to change',
354 | probability: 'medium',
355 | impact: 'medium',
356 | mitigation: 'Comprehensive change management and training program'
357 | },
358 | {
359 | risk: 'System integration challenges',
360 | probability: 'low',
361 | impact: 'high',
362 | mitigation: 'Phased rollout with fallback procedures'
363 | }
364 | ]
365 | };
366 |
367 | return result;
368 | }
369 |
370 | // Automation Recommendations Engine
371 | async generateAutomationRecommendations(workflowDescription: string): Promise<any> {
372 | const params: AutomationRecommendationsPrompt = {
373 | workflow_description: workflowDescription,
374 | current_pain_points: ['manual_data_entry', 'approval_delays', 'reporting_overhead'],
375 | automation_scope: 'end_to_end',
376 | technical_constraints: ['legacy_system_compatibility', 'security_requirements'],
377 | business_impact_priority: ['time_efficiency', 'error_reduction', 'cost_savings'],
378 | implementation_timeline: 'medium_term',
379 | risk_tolerance: 'moderate'
380 | };
381 |
382 | const response = await this.server.handlePromptGet('automation_recommendations', params);
383 | return response;
384 | }
385 | }
386 |
387 | // Example usage with comprehensive error handling
388 | async function demonstrateEnterpriseAI(): Promise<void> {
389 | const analytics = new EnterpriseAIAnalytics();
390 |
391 | try {
392 | console.log('🤖 Starting Enterprise AI Analysis...');
393 |
394 | // Statistical Analysis
395 | console.log('\n📊 Performing Statistical Analysis...');
396 | const analysisResult = await analytics.performStatisticalAnalysis('Sales');
397 | console.log('Analysis completed:', analysisResult.summary);
398 |
399 | // Executive Report
400 | console.log('\n📋 Generating Executive Report...');
401 | const reportResult = await analytics.generateExecutiveReport('Sales', 'executives');
402 | console.log('Report generated:', reportResult.title);
403 |
404 | // Predictive Analytics
405 | console.log('\n🔮 Running Predictive Analytics...');
406 | const predictionResult = await analytics.performPredictiveAnalytics('Sales', 'revenue');
407 | console.log('Predictions generated:', predictionResult.predictions.length, 'periods');
408 |
409 | // Natural Language Query
410 | console.log('\n🗣️ Processing Natural Language Query...');
411 | const nlResult = await analytics.processNaturalLanguageQuery(
412 | 'What are the top 5 performing products by revenue this quarter?',
413 | ['Products', 'Sales']
414 | );
415 | console.log('NL Response:', nlResult.substring(0, 100) + '...');
416 |
417 | // Workflow Optimization
418 | console.log('\n⚡ Analyzing Workflow Optimization...');
419 | const workflowResult = await analytics.optimizeWorkflow(
420 | 'Manual invoice processing with email approvals',
421 | ['Slow approval times', 'Manual data entry errors']
422 | );
423 | console.log('Optimization completed, efficiency score:', workflowResult.current_state_analysis.efficiency_score);
424 |
425 | console.log('\n✅ All Enterprise AI operations completed successfully!');
426 |
427 | } catch (error) {
428 | console.error('❌ Enterprise AI Error:', error);
429 | throw error;
430 | }
431 | }
432 |
// Export for testing and integration
export {
  EnterpriseAIAnalytics,
  demonstrateEnterpriseAI
};

// Run demonstration if executed directly (CommonJS entry-point check);
// exits 0 on success and 1 after logging any fatal error.
if (require.main === module) {
  demonstrateEnterpriseAI()
    .then(() => process.exit(0))
    .catch((error) => {
      console.error('Fatal error:', error);
      process.exit(1);
    });
}
```
--------------------------------------------------------------------------------
/src/python/airtable_mcp/src/server.py:
--------------------------------------------------------------------------------
```python
1 | #!/usr/bin/env python3
2 | """
3 | Airtable MCP Server
4 | -------------------
5 | This is a Model Context Protocol (MCP) server that exposes Airtable operations as tools.
6 | """
7 | import os
8 | import sys
9 | import json
10 | import asyncio
11 | import logging
12 | import argparse
13 | from contextlib import asynccontextmanager
14 | from typing import Any, Dict, List, Optional, AsyncIterator, Callable
15 | from dotenv import load_dotenv
16 |
17 | print(f"Python version: {sys.version}")
18 | print(f"Python executable: {sys.executable}")
19 | print(f"Python path: {sys.path}")
20 |
21 | # Import MCP-related modules - will be available when run with Python 3.10+
22 | try:
23 | from mcp.server.fastmcp import FastMCP
24 | from mcp.server import stdio
25 | print("Successfully imported MCP modules")
26 | except ImportError as e:
27 | print(f"Error importing MCP modules: {e}")
28 | print("Error: MCP SDK requires Python 3.10+")
29 | print("Please install Python 3.10 or newer and try again.")
30 | sys.exit(1)
31 |
32 | # Set up logging
33 | logging.basicConfig(level=logging.INFO)
34 | logger = logging.getLogger("airtable-mcp")
35 |
# Parse command line arguments
def parse_args():
    """Parse CLI options: credentials, target base, and dev-server host/port."""
    parser = argparse.ArgumentParser(description="Airtable MCP Server")
    parser.add_argument("--token", dest="api_token",
                        help="Airtable Personal Access Token")
    parser.add_argument("--base", dest="base_id",
                        help="Airtable Base ID")
    parser.add_argument("--port", type=int, default=8080,
                        help="MCP server port for dev mode")
    parser.add_argument("--host", default="127.0.0.1",
                        help="MCP server host for dev mode")
    parser.add_argument("--dev", action="store_true",
                        help="Run in development mode")
    return parser.parse_args()
45 |
46 | # Load environment variables as fallback
47 | load_dotenv()
48 |
49 | # Create MCP server
50 | mcp = FastMCP("Airtable Tools")
51 |
52 | # Server state will be initialized in main()
53 | server_state = {
54 | "base_id": "",
55 | "token": "",
56 | }
57 |
# Authentication middleware
@mcp.middleware
async def auth_middleware(context, next_handler):
    """Pass-through middleware: token checks happen inside api_call()."""
    # Tool listing never needs credentials.
    if getattr(context, 'operation', None) == "list_tools":
        return await next_handler(context)

    # Every other operation is also allowed through here; the actual
    # Airtable API call validates the token when a request is made.
    return await next_handler(context)
67 |
# Helper functions for Airtable API calls
async def api_call(endpoint, method="GET", data=None, params=None):
    """Make an Airtable API call.

    Args:
        endpoint: Path relative to https://api.airtable.com/v0/.
        method: One of "GET", "POST", "PATCH", "DELETE".
        data: JSON body for POST/PATCH requests.
        params: Query parameters for GET/DELETE requests.

    Returns:
        The parsed JSON response, or {"error": <message>} on any failure.
    """
    import requests

    # Fail fast with a friendly message when no token is configured.
    if not server_state["token"]:
        return {"error": "No Airtable API token provided. Please set via --token or AIRTABLE_PERSONAL_ACCESS_TOKEN"}

    headers = {
        "Authorization": f"Bearer {server_state['token']}",
        "Content-Type": "application/json"
    }

    url = f"https://api.airtable.com/v0/{endpoint}"

    def _send():
        # A timeout prevents a stalled connection from hanging the server.
        if method == "GET":
            return requests.get(url, headers=headers, params=params, timeout=30)
        if method == "POST":
            return requests.post(url, headers=headers, json=data, timeout=30)
        if method == "PATCH":
            return requests.patch(url, headers=headers, json=data, timeout=30)
        if method == "DELETE":
            return requests.delete(url, headers=headers, params=params, timeout=30)
        raise ValueError(f"Unsupported method: {method}")

    try:
        # requests is a blocking library; run it off the event-loop thread
        # so concurrent MCP requests are not serialized behind network I/O.
        response = await asyncio.to_thread(_send)
        response.raise_for_status()
        return response.json()
    except Exception as e:
        logger.error(f"API call error: {str(e)}")
        return {"error": str(e)}
101 |
102 |
103 | # Define MCP tool functions
104 |
@mcp.tool()
async def list_bases() -> str:
    """List all accessible Airtable bases"""
    if not server_state["token"]:
        return "Please provide an Airtable API token to list your bases."

    result = await api_call("meta/bases")
    if "error" in result:
        return f"Error: {result['error']}"

    bases = result.get("bases", [])
    if not bases:
        return "No bases found accessible with your token."

    # One numbered line per base: "<n>. <name> (ID: <id>)"
    lines = []
    for idx, base in enumerate(bases, start=1):
        lines.append(f"{idx}. {base['name']} (ID: {base['id']})")
    return "Available bases:\n" + "\n".join(lines)
122 |
123 |
@mcp.tool()
async def list_tables(base_id: Optional[str] = None) -> str:
    """List all tables in the specified base or the default base"""
    if not server_state["token"]:
        return "Please provide an Airtable API token to list tables."

    # Explicit argument wins; otherwise fall back to the configured base.
    target_base = base_id or server_state["base_id"]
    if not target_base:
        return "Error: No base ID provided. Please specify a base_id or set AIRTABLE_BASE_ID in your .env file."

    result = await api_call(f"meta/bases/{target_base}/tables")
    if "error" in result:
        return f"Error: {result['error']}"

    tables = result.get("tables", [])
    if not tables:
        return "No tables found in this base."

    lines = [
        f"{idx}. {tbl['name']} (ID: {tbl['id']}, Fields: {len(tbl.get('fields', []))})"
        for idx, tbl in enumerate(tables, start=1)
    ]
    return "Tables in this base:\n" + "\n".join(lines)
147 |
148 |
@mcp.tool()
async def list_records(table_name: str, max_records: Optional[int] = 100, filter_formula: Optional[str] = None) -> str:
    """List records from a table with optional filtering"""
    if not server_state["token"]:
        return "Please provide an Airtable API token to list records."

    base = server_state["base_id"]
    if not base:
        return "Error: No base ID set. Please set a base ID."

    # Build the query; filterByFormula is only sent when provided.
    query = {"maxRecords": max_records}
    if filter_formula:
        query["filterByFormula"] = filter_formula

    result = await api_call(f"{base}/{table_name}", params=query)
    if "error" in result:
        return f"Error: {result['error']}"

    records = result.get("records", [])
    if not records:
        return "No records found in this table."

    # One line per record: "<n>. ID: <id> - field1: v1, field2: v2"
    lines = []
    for idx, record in enumerate(records, start=1):
        fields = record.get("fields", {})
        summary = ", ".join(f"{key}: {value}" for key, value in fields.items())
        lines.append(f"{idx}. ID: {record.get('id', 'unknown')} - {summary}")

    return "Records:\n" + "\n".join(lines)
183 |
184 |
@mcp.tool()
async def get_record(table_name: str, record_id: str) -> str:
    """Get a specific record from a table"""
    if not server_state["token"]:
        return "Please provide an Airtable API token to get records."

    base = server_state["base_id"]
    if not base:
        return "Error: No base ID set. Please set a base ID."

    result = await api_call(f"{base}/{table_name}/{record_id}")
    if "error" in result:
        return f"Error: {result['error']}"

    fields = result.get("fields", {})
    if not fields:
        return f"Record {record_id} found but contains no fields."

    # Render each field on its own line under the record header.
    lines = [f"{key}: {value}" for key, value in fields.items()]
    return f"Record ID: {record_id}\n" + "\n".join(lines)
211 |
212 |
@mcp.tool()
async def create_records(table_name: str, records_json: str) -> str:
    """Create records in a table from JSON string.

    Accepts either a single JSON object or a JSON array of objects, where
    each object is the field map of one new record.
    """
    if not server_state["token"]:
        return "Please provide an Airtable API token to create records."

    base = server_state["base_id"]
    if not base:
        return "Error: No base ID set. Please set a base ID."

    try:
        records_data = json.loads(records_json)

        # Accept a single object as a one-element list.
        if not isinstance(records_data, list):
            records_data = [records_data]

        records = [{"fields": record} for record in records_data]

        # The Airtable API accepts at most 10 records per create request,
        # so send the payload in batches (consistent with delete_records).
        created_count = 0
        for i in range(0, len(records), 10):
            batch = records[i:i + 10]
            result = await api_call(f"{base}/{table_name}", method="POST", data={"records": batch})
            if "error" in result:
                return f"Error: {result['error']}"
            created_count += len(result.get("records", []))

        return f"Successfully created {created_count} records."

    except json.JSONDecodeError:
        return "Error: Invalid JSON format in records_json parameter."
    except Exception as e:
        return f"Error creating records: {str(e)}"
246 |
247 |
@mcp.tool()
async def update_records(table_name: str, records_json: str) -> str:
    """Update records in a table from JSON string.

    Each record must carry an "id"; fields may be given either directly or
    nested under a "fields" key.
    """
    if not server_state["token"]:
        return "Please provide an Airtable API token to update records."

    base = server_state["base_id"]
    if not base:
        return "Error: No base ID set. Please set a base ID."

    try:
        records_data = json.loads(records_json)

        # Accept a single object as a one-element list.
        if not isinstance(records_data, list):
            records_data = [records_data]

        records = []
        for record in records_data:
            if "id" not in record:
                return "Error: Each record must have an 'id' field."

            rec_id = record.pop("id")
            # Support both {id, fields} format and direct fields
            fields = record.get("fields", record)
            records.append({"id": rec_id, "fields": fields})

        # The Airtable API accepts at most 10 records per PATCH request,
        # so send the payload in batches (consistent with delete_records).
        updated_count = 0
        for i in range(0, len(records), 10):
            batch = records[i:i + 10]
            result = await api_call(f"{base}/{table_name}", method="PATCH", data={"records": batch})
            if "error" in result:
                return f"Error: {result['error']}"
            updated_count += len(result.get("records", []))

        return f"Successfully updated {updated_count} records."

    except json.JSONDecodeError:
        return "Error: Invalid JSON format in records_json parameter."
    except Exception as e:
        return f"Error updating records: {str(e)}"
288 |
289 |
@mcp.tool()
async def delete_records(table_name: str, record_ids: str) -> str:
    """Delete records from a table by their IDs (comma-separated or JSON array)"""
    if not server_state["token"]:
        return "Please provide an Airtable API token to delete records."

    base = server_state["base_id"]
    if not base:
        return "Error: No base ID set. Please set a base ID."

    try:
        # Accept either a JSON array or a comma-separated string of IDs.
        if record_ids.startswith("["):
            ids_list = json.loads(record_ids)
        else:
            ids_list = [fragment.strip() for fragment in record_ids.split(",")]

        # Airtable caps DELETE requests at 10 records each.
        deleted_count = 0
        while ids_list:
            batch, ids_list = ids_list[:10], ids_list[10:]
            result = await api_call(f"{base}/{table_name}", method="DELETE", params={"records[]": batch})
            if "error" in result:
                return f"Error deleting records: {result['error']}"
            deleted_count += len(result.get("records", []))

        return f"Successfully deleted {deleted_count} records."

    except json.JSONDecodeError:
        return "Error: Invalid format for record_ids. Use comma-separated IDs or JSON array."
    except Exception as e:
        return f"Error deleting records: {str(e)}"
327 |
328 |
@mcp.tool()
async def set_base_id(base_id: str) -> str:
    """Set the current Airtable base ID"""
    # Persist the choice so subsequent tool calls use this base by default.
    server_state["base_id"] = base_id
    return f"Base ID set to: {base_id}"
334 |
335 |
# Resources implementation for MCP protocol
@mcp.resource("airtable://base/{base_id}")
async def get_base_resource(base_id: str) -> Dict:
    """Get base metadata as a resource"""
    if not server_state["token"]:
        return {"error": "No Airtable API token provided"}

    result = await api_call(f"meta/bases/{base_id}/tables")
    if "error" in result:
        return {"error": result["error"]}

    table_list = result.get("tables", [])
    # Summarize as id/name pairs plus a count for quick inspection.
    return {
        "base_id": base_id,
        "tables_count": len(table_list),
        "tables": [{"id": tbl["id"], "name": tbl["name"]} for tbl in table_list]
    }
353 |
354 |
@mcp.resource("airtable://base/{base_id}/table/{table_name}")
async def get_table_resource(base_id: str, table_name: str) -> Dict:
    """Get table data as a resource"""
    if not server_state["token"]:
        return {"error": "No Airtable API token provided"}

    # Cap the payload at 100 records, matching the list_records default.
    result = await api_call(f"{base_id}/{table_name}", params={"maxRecords": 100})
    if "error" in result:
        return {"error": result["error"]}

    fetched = result.get("records", [])
    return {
        "base_id": base_id,
        "table_name": table_name,
        "records_count": len(fetched),
        "records": fetched
    }
372 |
373 |
# Roots implementation for filesystem access
@mcp.rpc_method("roots/list")
async def roots_list() -> Dict:
    """List available filesystem roots for data import/export"""
    # A single well-known export directory; extend here to expose more roots.
    return {
        "roots": [
            {
                "uri": "file:///tmp/airtable-exports",
                "name": "Airtable Exports Directory"
            }
        ]
    }
385 |
386 |
# Prompts implementation for guided interactions
@mcp.rpc_method("prompts/list")
async def prompts_list() -> Dict:
    """List available prompt templates"""

    def _arg(name, description):
        # All prompt arguments in this catalog are required.
        return {"name": name, "description": description, "required": True}

    prompts = [
        {
            "name": "analyze_base",
            "description": "Analyze an Airtable base structure and suggest optimizations",
            "arguments": [
                _arg("base_id", "The Airtable base ID to analyze")
            ]
        },
        {
            "name": "create_table_schema",
            "description": "Generate a table schema based on requirements",
            "arguments": [
                _arg("requirements", "Description of the table requirements"),
                _arg("table_name", "Name for the new table")
            ]
        },
        {
            "name": "data_migration",
            "description": "Plan data migration between tables or bases",
            "arguments": [
                _arg("source", "Source table/base identifier"),
                _arg("destination", "Destination table/base identifier")
            ]
        }
    ]
    return {"prompts": prompts}
437 |
438 |
@mcp.rpc_method("prompts/get")
async def prompts_get(name: str, arguments: Optional[Dict] = None) -> Dict:
    """Get a specific prompt template with filled arguments"""

    prompts_templates = {
        "analyze_base": """Analyze the Airtable base '{base_id}' and provide:
1. Overview of all tables and their relationships
2. Data quality assessment
3. Performance optimization suggestions
4. Schema improvement recommendations
5. Automation opportunities""",

        "create_table_schema": """Create a table schema for '{table_name}' with these requirements:
{requirements}

Please provide:
1. Field definitions with appropriate types
2. Validation rules
3. Linked record relationships
4. Views and filters setup
5. Sample data structure""",

        "data_migration": """Plan a data migration from '{source}' to '{destination}':
1. Analyze source structure
2. Map fields between source and destination
3. Identify data transformation needs
4. Handle relationship mappings
5. Provide migration script
6. Include validation steps"""
    }

    template = prompts_templates.get(name)
    if template is None:
        return {"error": f"Unknown prompt: {name}"}

    # Substitute the caller's arguments into the template, if any were given.
    if not arguments:
        prompt = template
    else:
        try:
            prompt = template.format(**arguments)
        except KeyError as e:
            return {"error": f"Missing required argument: {e}"}

    return {
        "messages": [
            {
                "role": "user",
                "content": prompt
            }
        ]
    }
491 |
492 |
# Sampling implementation for completion suggestions
@mcp.rpc_method("completion/complete")
async def completion_complete(ref: Dict, argument: Dict, partial: str) -> Dict:
    """Provide completion suggestions for partial inputs"""
    suggestions = []

    def _entry(value, label):
        return {"value": value, "label": label, "insertText": value}

    # Only tool-argument references are handled here.
    if ref.get("type") == "ref/tool":
        tool_name = ref.get("name")
        arg_name = argument.get("name")

        if tool_name == "list_tables" and arg_name == "base_id":
            # Suggest the currently configured base, if any.
            if server_state["base_id"]:
                suggestions.append(_entry(server_state["base_id"], "Current base"))

        elif tool_name == "list_records" and arg_name == "filter_formula":
            # Offer common Airtable filter formulas, filtered by the partial text.
            for formula in (
                "{Status} = 'Active'",
                "NOT({Completed})",
                "AND({Priority} = 'High', {Status} = 'Open')",
                "OR({Assigned} = 'Me', {Assigned} = BLANK())",
                "DATETIME_DIFF(TODAY(), {DueDate}, 'days') < 7",
            ):
                if not partial or partial.lower() in formula.lower():
                    suggestions.append(_entry(formula, formula))

        elif tool_name in ["create_records", "update_records"] and arg_name == "records_json":
            # JSON payload starter templates.
            for template in (
                '{"Name": "New Item", "Status": "Active"}',
                '[{"Name": "Item 1"}, {"Name": "Item 2"}]',
                '{"id": "rec123", "fields": {"Status": "Updated"}}',
            ):
                suggestions.append(_entry(template, f"Template: {template[:30]}..."))

    return {
        "completion": {
            "values": suggestions[:10]  # Limit to 10 suggestions
        }
    }
550 |
551 |
# Resources list implementation
@mcp.rpc_method("resources/list")
async def resources_list() -> Dict:
    """List available Airtable resources"""
    # Template resources are always available, even with no base configured.
    resources = [
        {
            "uri": "airtable://templates/base-schema",
            "name": "Base Schema Template",
            "description": "Template for creating base schemas",
            "mimeType": "application/json"
        },
        {
            "uri": "airtable://templates/automation-scripts",
            "name": "Automation Scripts",
            "description": "Common Airtable automation scripts",
            "mimeType": "text/javascript"
        },
    ]

    active_base = server_state["base_id"]
    if active_base:
        # Expose the configured base itself as a resource.
        resources.append({
            "uri": f"airtable://base/{active_base}",
            "name": "Current Airtable Base",
            "description": f"Base ID: {active_base}",
            "mimeType": "application/json"
        })

        # With a token we can additionally enumerate the base's tables.
        if server_state["token"]:
            result = await api_call(f"meta/bases/{active_base}/tables")
            if "tables" in result:
                for table in result.get("tables", []):
                    fields_count = len(table.get("fields", []))
                    resources.append({
                        "uri": f"airtable://base/{active_base}/table/{table['name']}",
                        "name": f"Table: {table['name']}",
                        "description": f"{fields_count} fields, ID: {table['id']}",
                        "mimeType": "application/json"
                    })

    return {"resources": resources}
596 |
597 |
# Resources read implementation
@mcp.rpc_method("resources/read")
async def resources_read(uri: str) -> Dict:
    """Read a specific resource by URI.

    Supports the two static template URIs and the dynamic
    airtable://base/{base_id}[/table/{table_name}] forms.
    Returns {"contents": [...]} on success or {"error": ...} for
    unknown URIs.
    """

    # Handle template resources
    if uri == "airtable://templates/base-schema":
        # Static example schema (Projects + Tasks) serialized as JSON text.
        return {
            "contents": [
                {
                    "uri": uri,
                    "mimeType": "application/json",
                    "text": json.dumps({
                        "tables": [
                            {
                                "name": "Projects",
                                "fields": [
                                    {"name": "Name", "type": "singleLineText"},
                                    {"name": "Status", "type": "singleSelect", "options": ["Planning", "Active", "Complete"]},
                                    {"name": "Start Date", "type": "date"},
                                    {"name": "End Date", "type": "date"},
                                    {"name": "Owner", "type": "collaborator"},
                                    {"name": "Tasks", "type": "linkedRecords"}
                                ]
                            },
                            {
                                "name": "Tasks",
                                "fields": [
                                    {"name": "Title", "type": "singleLineText"},
                                    {"name": "Description", "type": "multilineText"},
                                    {"name": "Project", "type": "linkedRecords"},
                                    {"name": "Assignee", "type": "collaborator"},
                                    {"name": "Priority", "type": "singleSelect", "options": ["Low", "Medium", "High"]},
                                    {"name": "Complete", "type": "checkbox"}
                                ]
                            }
                        ]
                    }, indent=2)
                }
            ]
        }

    elif uri == "airtable://templates/automation-scripts":
        # Static JavaScript snippets returned verbatim as text.
        return {
            "contents": [
                {
                    "uri": uri,
                    "mimeType": "text/javascript",
                    "text": """// Common Airtable Automation Scripts

// 1. Send notification when record matches condition
function notifyOnCondition(record) {
  if (record.getCellValue('Status') === 'Urgent') {
    // Send notification logic here
    console.log('Urgent task:', record.getCellValue('Name'));
  }
}

// 2. Auto-calculate fields
function calculateFields(record) {
  const startDate = record.getCellValue('Start Date');
  const endDate = record.getCellValue('End Date');
  if (startDate && endDate) {
    const duration = Math.ceil((endDate - startDate) / (1000 * 60 * 60 * 24));
    return { 'Duration (days)': duration };
  }
}

// 3. Bulk update records
async function bulkUpdate(table, condition, updates) {
  const query = await table.selectRecordsAsync();
  const recordsToUpdate = query.records.filter(condition);

  const updatePromises = recordsToUpdate.map(record =>
    table.updateRecordAsync(record.id, updates)
  );

  await Promise.all(updatePromises);
}"""
                }
            ]
        }

    # Handle base and table resources
    elif uri.startswith("airtable://base/"):
        # Split into base ID and optional table name segments.
        parts = uri.replace("airtable://base/", "").split("/table/")
        if len(parts) == 2:
            # Table resource: airtable://base/<id>/table/<name>
            base_id, table_name = parts
            result = await get_table_resource(base_id, table_name)
            return {
                "contents": [
                    {
                        "uri": uri,
                        "mimeType": "application/json",
                        "text": json.dumps(result, indent=2)
                    }
                ]
            }
        elif len(parts) == 1:
            # Base resource: airtable://base/<id>
            base_id = parts[0]
            result = await get_base_resource(base_id)
            return {
                "contents": [
                    {
                        "uri": uri,
                        "mimeType": "application/json",
                        "text": json.dumps(result, indent=2)
                    }
                ]
            }

    return {"error": f"Unknown resource URI: {uri}"}
710 |
711 |
def main():
    """Run the MCP server.

    Reads configuration from CLI flags (preferred) or environment variables,
    then starts FastMCP on HTTP in dev mode or over stdio in production.
    Exits with status 1 on any startup error.
    """
    try:
        # Parse command line arguments
        args = parse_args()

        # Set server state from command line args or fallback to env vars
        server_state["token"] = args.api_token or os.getenv("AIRTABLE_PERSONAL_ACCESS_TOKEN", "")
        server_state["base_id"] = args.base_id or os.getenv("AIRTABLE_BASE_ID", "")

        if not server_state["token"]:
            logger.warning("No Airtable API token provided. Please set via --token or AIRTABLE_PERSONAL_ACCESS_TOKEN")
            logger.info("Tool listing will work but API calls will require a token")

        # Setup asyncio event loop
        # (Windows needs the selector policy for this kind of server.)
        if sys.platform == 'win32':
            asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())

        # Run the server
        if args.dev:
            # Development mode
            mcp.run(host=args.host, port=args.port)
        else:
            # Production mode - stdio interface for MCP
            mcp.run()

    except Exception as e:
        logger.error(f"Server error: {str(e)}")
        sys.exit(1)


if __name__ == "__main__":
    main()
```
--------------------------------------------------------------------------------
/types/typescript/app/types.d.ts:
--------------------------------------------------------------------------------
```typescript
import { z } from 'zod';
// Validated input for the `describe` tool. The outer ZodEffects wraps the
// strict object schema with a refinement step (defined in the source .ts);
// from the shape alone: `scope` selects base vs table, `baseId` is required,
// `table` is optional, and the include* flags default via ZodDefault.
export declare const describeInputSchema: z.ZodEffects<z.ZodObject<{
    scope: z.ZodEnum<["base", "table"]>;
    baseId: z.ZodString;
    table: z.ZodOptional<z.ZodString>;
    includeFields: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
    includeViews: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
}, "strict", z.ZodTypeAny, {
    scope: "base" | "table";
    baseId: string;
    includeFields: boolean;
    includeViews: boolean;
    table?: string | undefined;
}, {
    scope: "base" | "table";
    baseId: string;
    table?: string | undefined;
    includeFields?: boolean | undefined;
    includeViews?: boolean | undefined;
}>, {
    scope: "base" | "table";
    baseId: string;
    includeFields: boolean;
    includeViews: boolean;
    table?: string | undefined;
}, {
    scope: "base" | "table";
    baseId: string;
    table?: string | undefined;
    includeFields?: boolean | undefined;
    includeViews?: boolean | undefined;
}>;
// Raw field-level zod shape used to build describeInputSchema (handy for
// composing the same fields into other schemas).
export declare const describeInputShape: {
    scope: z.ZodEnum<["base", "table"]>;
    baseId: z.ZodString;
    table: z.ZodOptional<z.ZodString>;
    includeFields: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
    includeViews: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
};
40 | export declare const describeOutputSchema: z.ZodObject<{
41 | base: z.ZodObject<{
42 | id: z.ZodString;
43 | name: z.ZodString;
44 | }, "passthrough", z.ZodTypeAny, z.objectOutputType<{
45 | id: z.ZodString;
46 | name: z.ZodString;
47 | }, z.ZodTypeAny, "passthrough">, z.objectInputType<{
48 | id: z.ZodString;
49 | name: z.ZodString;
50 | }, z.ZodTypeAny, "passthrough">>;
51 | tables: z.ZodOptional<z.ZodArray<z.ZodObject<{
52 | id: z.ZodString;
53 | name: z.ZodString;
54 | description: z.ZodOptional<z.ZodString>;
55 | primaryFieldId: z.ZodOptional<z.ZodString>;
56 | fields: z.ZodOptional<z.ZodArray<z.ZodObject<{
57 | id: z.ZodString;
58 | name: z.ZodString;
59 | type: z.ZodString;
60 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
61 | }, "passthrough", z.ZodTypeAny, z.objectOutputType<{
62 | id: z.ZodString;
63 | name: z.ZodString;
64 | type: z.ZodString;
65 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
66 | }, z.ZodTypeAny, "passthrough">, z.objectInputType<{
67 | id: z.ZodString;
68 | name: z.ZodString;
69 | type: z.ZodString;
70 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
71 | }, z.ZodTypeAny, "passthrough">>, "many">>;
72 | views: z.ZodOptional<z.ZodArray<z.ZodRecord<z.ZodString, z.ZodUnknown>, "many">>;
73 | }, "passthrough", z.ZodTypeAny, z.objectOutputType<{
74 | id: z.ZodString;
75 | name: z.ZodString;
76 | description: z.ZodOptional<z.ZodString>;
77 | primaryFieldId: z.ZodOptional<z.ZodString>;
78 | fields: z.ZodOptional<z.ZodArray<z.ZodObject<{
79 | id: z.ZodString;
80 | name: z.ZodString;
81 | type: z.ZodString;
82 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
83 | }, "passthrough", z.ZodTypeAny, z.objectOutputType<{
84 | id: z.ZodString;
85 | name: z.ZodString;
86 | type: z.ZodString;
87 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
88 | }, z.ZodTypeAny, "passthrough">, z.objectInputType<{
89 | id: z.ZodString;
90 | name: z.ZodString;
91 | type: z.ZodString;
92 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
93 | }, z.ZodTypeAny, "passthrough">>, "many">>;
94 | views: z.ZodOptional<z.ZodArray<z.ZodRecord<z.ZodString, z.ZodUnknown>, "many">>;
95 | }, z.ZodTypeAny, "passthrough">, z.objectInputType<{
96 | id: z.ZodString;
97 | name: z.ZodString;
98 | description: z.ZodOptional<z.ZodString>;
99 | primaryFieldId: z.ZodOptional<z.ZodString>;
100 | fields: z.ZodOptional<z.ZodArray<z.ZodObject<{
101 | id: z.ZodString;
102 | name: z.ZodString;
103 | type: z.ZodString;
104 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
105 | }, "passthrough", z.ZodTypeAny, z.objectOutputType<{
106 | id: z.ZodString;
107 | name: z.ZodString;
108 | type: z.ZodString;
109 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
110 | }, z.ZodTypeAny, "passthrough">, z.objectInputType<{
111 | id: z.ZodString;
112 | name: z.ZodString;
113 | type: z.ZodString;
114 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
115 | }, z.ZodTypeAny, "passthrough">>, "many">>;
116 | views: z.ZodOptional<z.ZodArray<z.ZodRecord<z.ZodString, z.ZodUnknown>, "many">>;
117 | }, z.ZodTypeAny, "passthrough">>, "many">>;
118 | views: z.ZodOptional<z.ZodArray<z.ZodRecord<z.ZodString, z.ZodUnknown>, "many">>;
119 | }, "strict", z.ZodTypeAny, {
120 | base: {
121 | id: string;
122 | name: string;
123 | } & {
124 | [k: string]: unknown;
125 | };
126 | views?: Record<string, unknown>[] | undefined;
127 | tables?: z.objectOutputType<{
128 | id: z.ZodString;
129 | name: z.ZodString;
130 | description: z.ZodOptional<z.ZodString>;
131 | primaryFieldId: z.ZodOptional<z.ZodString>;
132 | fields: z.ZodOptional<z.ZodArray<z.ZodObject<{
133 | id: z.ZodString;
134 | name: z.ZodString;
135 | type: z.ZodString;
136 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
137 | }, "passthrough", z.ZodTypeAny, z.objectOutputType<{
138 | id: z.ZodString;
139 | name: z.ZodString;
140 | type: z.ZodString;
141 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
142 | }, z.ZodTypeAny, "passthrough">, z.objectInputType<{
143 | id: z.ZodString;
144 | name: z.ZodString;
145 | type: z.ZodString;
146 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
147 | }, z.ZodTypeAny, "passthrough">>, "many">>;
148 | views: z.ZodOptional<z.ZodArray<z.ZodRecord<z.ZodString, z.ZodUnknown>, "many">>;
149 | }, z.ZodTypeAny, "passthrough">[] | undefined;
150 | }, {
151 | base: {
152 | id: string;
153 | name: string;
154 | } & {
155 | [k: string]: unknown;
156 | };
157 | views?: Record<string, unknown>[] | undefined;
158 | tables?: z.objectInputType<{
159 | id: z.ZodString;
160 | name: z.ZodString;
161 | description: z.ZodOptional<z.ZodString>;
162 | primaryFieldId: z.ZodOptional<z.ZodString>;
163 | fields: z.ZodOptional<z.ZodArray<z.ZodObject<{
164 | id: z.ZodString;
165 | name: z.ZodString;
166 | type: z.ZodString;
167 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
168 | }, "passthrough", z.ZodTypeAny, z.objectOutputType<{
169 | id: z.ZodString;
170 | name: z.ZodString;
171 | type: z.ZodString;
172 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
173 | }, z.ZodTypeAny, "passthrough">, z.objectInputType<{
174 | id: z.ZodString;
175 | name: z.ZodString;
176 | type: z.ZodString;
177 | options: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
178 | }, z.ZodTypeAny, "passthrough">>, "many">>;
179 | views: z.ZodOptional<z.ZodArray<z.ZodRecord<z.ZodString, z.ZodUnknown>, "many">>;
180 | }, z.ZodTypeAny, "passthrough">[] | undefined;
181 | }>;
182 | export declare const queryInputSchema: z.ZodObject<{
183 | baseId: z.ZodString;
184 | table: z.ZodString;
185 | fields: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
186 | filterByFormula: z.ZodOptional<z.ZodString>;
187 | view: z.ZodOptional<z.ZodString>;
188 | sorts: z.ZodOptional<z.ZodArray<z.ZodObject<{
189 | field: z.ZodString;
190 | direction: z.ZodDefault<z.ZodOptional<z.ZodEnum<["asc", "desc"]>>>;
191 | }, "strict", z.ZodTypeAny, {
192 | field: string;
193 | direction: "asc" | "desc";
194 | }, {
195 | field: string;
196 | direction?: "asc" | "desc" | undefined;
197 | }>, "many">>;
198 | pageSize: z.ZodOptional<z.ZodNumber>;
199 | maxRecords: z.ZodOptional<z.ZodNumber>;
200 | offset: z.ZodOptional<z.ZodString>;
201 | returnFieldsByFieldId: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
202 | }, "strict", z.ZodTypeAny, {
203 | table: string;
204 | baseId: string;
205 | returnFieldsByFieldId: boolean;
206 | fields?: string[] | undefined;
207 | filterByFormula?: string | undefined;
208 | view?: string | undefined;
209 | sorts?: {
210 | field: string;
211 | direction: "asc" | "desc";
212 | }[] | undefined;
213 | pageSize?: number | undefined;
214 | maxRecords?: number | undefined;
215 | offset?: string | undefined;
216 | }, {
217 | table: string;
218 | baseId: string;
219 | fields?: string[] | undefined;
220 | filterByFormula?: string | undefined;
221 | view?: string | undefined;
222 | sorts?: {
223 | field: string;
224 | direction?: "asc" | "desc" | undefined;
225 | }[] | undefined;
226 | pageSize?: number | undefined;
227 | maxRecords?: number | undefined;
228 | offset?: string | undefined;
229 | returnFieldsByFieldId?: boolean | undefined;
230 | }>;
231 | export declare const queryInputShape: {
232 | baseId: z.ZodString;
233 | table: z.ZodString;
234 | fields: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
235 | filterByFormula: z.ZodOptional<z.ZodString>;
236 | view: z.ZodOptional<z.ZodString>;
237 | sorts: z.ZodOptional<z.ZodArray<z.ZodObject<{
238 | field: z.ZodString;
239 | direction: z.ZodDefault<z.ZodOptional<z.ZodEnum<["asc", "desc"]>>>;
240 | }, "strict", z.ZodTypeAny, {
241 | field: string;
242 | direction: "asc" | "desc";
243 | }, {
244 | field: string;
245 | direction?: "asc" | "desc" | undefined;
246 | }>, "many">>;
247 | pageSize: z.ZodOptional<z.ZodNumber>;
248 | maxRecords: z.ZodOptional<z.ZodNumber>;
249 | offset: z.ZodOptional<z.ZodString>;
250 | returnFieldsByFieldId: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
251 | };
252 | export declare const queryOutputSchema: z.ZodObject<{
253 | records: z.ZodArray<z.ZodObject<{
254 | id: z.ZodString;
255 | createdTime: z.ZodOptional<z.ZodString>;
256 | fields: z.ZodRecord<z.ZodString, z.ZodUnknown>;
257 | }, "strict", z.ZodTypeAny, {
258 | id: string;
259 | fields: Record<string, unknown>;
260 | createdTime?: string | undefined;
261 | }, {
262 | id: string;
263 | fields: Record<string, unknown>;
264 | createdTime?: string | undefined;
265 | }>, "many">;
266 | offset: z.ZodOptional<z.ZodString>;
267 | summary: z.ZodOptional<z.ZodObject<{
268 | returned: z.ZodNumber;
269 | hasMore: z.ZodBoolean;
270 | }, "strict", z.ZodTypeAny, {
271 | returned: number;
272 | hasMore: boolean;
273 | }, {
274 | returned: number;
275 | hasMore: boolean;
276 | }>>;
277 | }, "strict", z.ZodTypeAny, {
278 | records: {
279 | id: string;
280 | fields: Record<string, unknown>;
281 | createdTime?: string | undefined;
282 | }[];
283 | offset?: string | undefined;
284 | summary?: {
285 | returned: number;
286 | hasMore: boolean;
287 | } | undefined;
288 | }, {
289 | records: {
290 | id: string;
291 | fields: Record<string, unknown>;
292 | createdTime?: string | undefined;
293 | }[];
294 | offset?: string | undefined;
295 | summary?: {
296 | returned: number;
297 | hasMore: boolean;
298 | } | undefined;
299 | }>;
300 | export declare const createInputSchema: z.ZodObject<{
301 | baseId: z.ZodString;
302 | table: z.ZodString;
303 | records: z.ZodArray<z.ZodObject<{
304 | fields: z.ZodRecord<z.ZodString, z.ZodUnknown>;
305 | }, "strict", z.ZodTypeAny, {
306 | fields: Record<string, unknown>;
307 | }, {
308 | fields: Record<string, unknown>;
309 | }>, "many">;
310 | typecast: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
311 | idempotencyKey: z.ZodOptional<z.ZodString>;
312 | dryRun: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
313 | }, "strict", z.ZodTypeAny, {
314 | table: string;
315 | baseId: string;
316 | records: {
317 | fields: Record<string, unknown>;
318 | }[];
319 | typecast: boolean;
320 | dryRun: boolean;
321 | idempotencyKey?: string | undefined;
322 | }, {
323 | table: string;
324 | baseId: string;
325 | records: {
326 | fields: Record<string, unknown>;
327 | }[];
328 | typecast?: boolean | undefined;
329 | idempotencyKey?: string | undefined;
330 | dryRun?: boolean | undefined;
331 | }>;
332 | export declare const createOutputSchema: z.ZodObject<{
333 | diff: z.ZodObject<{
334 | added: z.ZodNumber;
335 | updated: z.ZodNumber;
336 | unchanged: z.ZodNumber;
337 | }, "strict", z.ZodTypeAny, {
338 | added: number;
339 | updated: number;
340 | unchanged: number;
341 | }, {
342 | added: number;
343 | updated: number;
344 | unchanged: number;
345 | }>;
346 | records: z.ZodOptional<z.ZodArray<z.ZodObject<{
347 | id: z.ZodString;
348 | createdTime: z.ZodOptional<z.ZodString>;
349 | fields: z.ZodRecord<z.ZodString, z.ZodUnknown>;
350 | }, "strict", z.ZodTypeAny, {
351 | id: string;
352 | fields: Record<string, unknown>;
353 | createdTime?: string | undefined;
354 | }, {
355 | id: string;
356 | fields: Record<string, unknown>;
357 | createdTime?: string | undefined;
358 | }>, "many">>;
359 | dryRun: z.ZodBoolean;
360 | warnings: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
361 | }, "strict", z.ZodTypeAny, {
362 | dryRun: boolean;
363 | diff: {
364 | added: number;
365 | updated: number;
366 | unchanged: number;
367 | };
368 | records?: {
369 | id: string;
370 | fields: Record<string, unknown>;
371 | createdTime?: string | undefined;
372 | }[] | undefined;
373 | warnings?: string[] | undefined;
374 | }, {
375 | dryRun: boolean;
376 | diff: {
377 | added: number;
378 | updated: number;
379 | unchanged: number;
380 | };
381 | records?: {
382 | id: string;
383 | fields: Record<string, unknown>;
384 | createdTime?: string | undefined;
385 | }[] | undefined;
386 | warnings?: string[] | undefined;
387 | }>;
388 | export declare const updateOutputSchema: z.ZodObject<{
389 | diff: z.ZodObject<{
390 | added: z.ZodNumber;
391 | updated: z.ZodNumber;
392 | unchanged: z.ZodNumber;
393 | conflicts: z.ZodNumber;
394 | }, "strict", z.ZodTypeAny, {
395 | added: number;
396 | updated: number;
397 | unchanged: number;
398 | conflicts: number;
399 | }, {
400 | added: number;
401 | updated: number;
402 | unchanged: number;
403 | conflicts: number;
404 | }>;
405 | records: z.ZodOptional<z.ZodArray<z.ZodObject<{
406 | id: z.ZodString;
407 | createdTime: z.ZodOptional<z.ZodString>;
408 | fields: z.ZodRecord<z.ZodString, z.ZodUnknown>;
409 | }, "strict", z.ZodTypeAny, {
410 | id: string;
411 | fields: Record<string, unknown>;
412 | createdTime?: string | undefined;
413 | }, {
414 | id: string;
415 | fields: Record<string, unknown>;
416 | createdTime?: string | undefined;
417 | }>, "many">>;
418 | dryRun: z.ZodBoolean;
419 | conflicts: z.ZodOptional<z.ZodArray<z.ZodObject<{
420 | id: z.ZodString;
421 | field: z.ZodString;
422 | before: z.ZodOptional<z.ZodUnknown>;
423 | after: z.ZodOptional<z.ZodUnknown>;
424 | current: z.ZodUnknown;
425 | }, "strict", z.ZodTypeAny, {
426 | id: string;
427 | field: string;
428 | before?: unknown;
429 | after?: unknown;
430 | current?: unknown;
431 | }, {
432 | id: string;
433 | field: string;
434 | before?: unknown;
435 | after?: unknown;
436 | current?: unknown;
437 | }>, "many">>;
438 | }, "strict", z.ZodTypeAny, {
439 | dryRun: boolean;
440 | diff: {
441 | added: number;
442 | updated: number;
443 | unchanged: number;
444 | conflicts: number;
445 | };
446 | records?: {
447 | id: string;
448 | fields: Record<string, unknown>;
449 | createdTime?: string | undefined;
450 | }[] | undefined;
451 | conflicts?: {
452 | id: string;
453 | field: string;
454 | before?: unknown;
455 | after?: unknown;
456 | current?: unknown;
457 | }[] | undefined;
458 | }, {
459 | dryRun: boolean;
460 | diff: {
461 | added: number;
462 | updated: number;
463 | unchanged: number;
464 | conflicts: number;
465 | };
466 | records?: {
467 | id: string;
468 | fields: Record<string, unknown>;
469 | createdTime?: string | undefined;
470 | }[] | undefined;
471 | conflicts?: {
472 | id: string;
473 | field: string;
474 | before?: unknown;
475 | after?: unknown;
476 | current?: unknown;
477 | }[] | undefined;
478 | }>;
479 | export declare const updateInputSchema: z.ZodObject<{
480 | baseId: z.ZodString;
481 | table: z.ZodString;
482 | records: z.ZodArray<z.ZodObject<{
483 | id: z.ZodString;
484 | fields: z.ZodRecord<z.ZodString, z.ZodUnknown>;
485 | }, "strict", z.ZodTypeAny, {
486 | id: string;
487 | fields: Record<string, unknown>;
488 | }, {
489 | id: string;
490 | fields: Record<string, unknown>;
491 | }>, "many">;
492 | typecast: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
493 | idempotencyKey: z.ZodOptional<z.ZodString>;
494 | dryRun: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
495 | conflictStrategy: z.ZodDefault<z.ZodOptional<z.ZodEnum<["fail_on_conflict", "server_merge", "client_merge"]>>>;
496 | ifUnchangedHash: z.ZodOptional<z.ZodString>;
497 | }, "strict", z.ZodTypeAny, {
498 | table: string;
499 | baseId: string;
500 | records: {
501 | id: string;
502 | fields: Record<string, unknown>;
503 | }[];
504 | typecast: boolean;
505 | dryRun: boolean;
506 | conflictStrategy: "fail_on_conflict" | "server_merge" | "client_merge";
507 | idempotencyKey?: string | undefined;
508 | ifUnchangedHash?: string | undefined;
509 | }, {
510 | table: string;
511 | baseId: string;
512 | records: {
513 | id: string;
514 | fields: Record<string, unknown>;
515 | }[];
516 | typecast?: boolean | undefined;
517 | idempotencyKey?: string | undefined;
518 | dryRun?: boolean | undefined;
519 | conflictStrategy?: "fail_on_conflict" | "server_merge" | "client_merge" | undefined;
520 | ifUnchangedHash?: string | undefined;
521 | }>;
522 | export declare const upsertInputSchema: z.ZodObject<{
523 | baseId: z.ZodString;
524 | table: z.ZodString;
525 | records: z.ZodArray<z.ZodObject<{
526 | fields: z.ZodRecord<z.ZodString, z.ZodUnknown>;
527 | }, "strict", z.ZodTypeAny, {
528 | fields: Record<string, unknown>;
529 | }, {
530 | fields: Record<string, unknown>;
531 | }>, "many">;
532 | performUpsert: z.ZodObject<{
533 | fieldsToMergeOn: z.ZodArray<z.ZodString, "many">;
534 | }, "strict", z.ZodTypeAny, {
535 | fieldsToMergeOn: string[];
536 | }, {
537 | fieldsToMergeOn: string[];
538 | }>;
539 | typecast: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
540 | idempotencyKey: z.ZodOptional<z.ZodString>;
541 | dryRun: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
542 | conflictStrategy: z.ZodDefault<z.ZodOptional<z.ZodEnum<["fail_on_conflict", "server_merge", "client_merge"]>>>;
543 | }, "strict", z.ZodTypeAny, {
544 | table: string;
545 | baseId: string;
546 | records: {
547 | fields: Record<string, unknown>;
548 | }[];
549 | typecast: boolean;
550 | dryRun: boolean;
551 | conflictStrategy: "fail_on_conflict" | "server_merge" | "client_merge";
552 | performUpsert: {
553 | fieldsToMergeOn: string[];
554 | };
555 | idempotencyKey?: string | undefined;
556 | }, {
557 | table: string;
558 | baseId: string;
559 | records: {
560 | fields: Record<string, unknown>;
561 | }[];
562 | performUpsert: {
563 | fieldsToMergeOn: string[];
564 | };
565 | typecast?: boolean | undefined;
566 | idempotencyKey?: string | undefined;
567 | dryRun?: boolean | undefined;
568 | conflictStrategy?: "fail_on_conflict" | "server_merge" | "client_merge" | undefined;
569 | }>;
570 | export declare const upsertOutputSchema: z.ZodObject<{
571 | diff: z.ZodObject<{
572 | added: z.ZodNumber;
573 | updated: z.ZodNumber;
574 | unchanged: z.ZodNumber;
575 | conflicts: z.ZodNumber;
576 | }, "strict", z.ZodTypeAny, {
577 | added: number;
578 | updated: number;
579 | unchanged: number;
580 | conflicts: number;
581 | }, {
582 | added: number;
583 | updated: number;
584 | unchanged: number;
585 | conflicts: number;
586 | }>;
587 | records: z.ZodOptional<z.ZodArray<z.ZodObject<{
588 | id: z.ZodString;
589 | createdTime: z.ZodOptional<z.ZodString>;
590 | fields: z.ZodRecord<z.ZodString, z.ZodUnknown>;
591 | }, "strict", z.ZodTypeAny, {
592 | id: string;
593 | fields: Record<string, unknown>;
594 | createdTime?: string | undefined;
595 | }, {
596 | id: string;
597 | fields: Record<string, unknown>;
598 | createdTime?: string | undefined;
599 | }>, "many">>;
600 | dryRun: z.ZodBoolean;
601 | conflicts: z.ZodOptional<z.ZodArray<z.ZodObject<{
602 | id: z.ZodString;
603 | field: z.ZodString;
604 | before: z.ZodOptional<z.ZodUnknown>;
605 | after: z.ZodOptional<z.ZodUnknown>;
606 | current: z.ZodUnknown;
607 | }, "strict", z.ZodTypeAny, {
608 | id: string;
609 | field: string;
610 | before?: unknown;
611 | after?: unknown;
612 | current?: unknown;
613 | }, {
614 | id: string;
615 | field: string;
616 | before?: unknown;
617 | after?: unknown;
618 | current?: unknown;
619 | }>, "many">>;
620 | }, "strict", z.ZodTypeAny, {
621 | dryRun: boolean;
622 | diff: {
623 | added: number;
624 | updated: number;
625 | unchanged: number;
626 | conflicts: number;
627 | };
628 | records?: {
629 | id: string;
630 | fields: Record<string, unknown>;
631 | createdTime?: string | undefined;
632 | }[] | undefined;
633 | conflicts?: {
634 | id: string;
635 | field: string;
636 | before?: unknown;
637 | after?: unknown;
638 | current?: unknown;
639 | }[] | undefined;
640 | }, {
641 | dryRun: boolean;
642 | diff: {
643 | added: number;
644 | updated: number;
645 | unchanged: number;
646 | conflicts: number;
647 | };
648 | records?: {
649 | id: string;
650 | fields: Record<string, unknown>;
651 | createdTime?: string | undefined;
652 | }[] | undefined;
653 | conflicts?: {
654 | id: string;
655 | field: string;
656 | before?: unknown;
657 | after?: unknown;
658 | current?: unknown;
659 | }[] | undefined;
660 | }>;
661 | export declare const listExceptionsInputSchema: z.ZodObject<{
662 | since: z.ZodOptional<z.ZodString>;
663 | severity: z.ZodOptional<z.ZodEnum<["info", "warning", "error"]>>;
664 | limit: z.ZodDefault<z.ZodOptional<z.ZodNumber>>;
665 | cursor: z.ZodOptional<z.ZodString>;
666 | }, "strict", z.ZodTypeAny, {
667 | limit: number;
668 | since?: string | undefined;
669 | severity?: "info" | "warning" | "error" | undefined;
670 | cursor?: string | undefined;
671 | }, {
672 | since?: string | undefined;
673 | severity?: "info" | "warning" | "error" | undefined;
674 | limit?: number | undefined;
675 | cursor?: string | undefined;
676 | }>;
677 | export declare const exceptionItemSchema: z.ZodObject<{
678 | id: z.ZodString;
679 | timestamp: z.ZodString;
680 | severity: z.ZodEnum<["info", "warning", "error"]>;
681 | category: z.ZodEnum<["rate_limit", "validation", "auth", "conflict", "schema_drift", "other"]>;
682 | summary: z.ZodString;
683 | details: z.ZodOptional<z.ZodString>;
684 | proposedFix: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
685 | }, "strict", z.ZodTypeAny, {
686 | id: string;
687 | summary: string;
688 | severity: "info" | "warning" | "error";
689 | timestamp: string;
690 | category: "validation" | "rate_limit" | "auth" | "conflict" | "schema_drift" | "other";
691 | details?: string | undefined;
692 | proposedFix?: Record<string, unknown> | undefined;
693 | }, {
694 | id: string;
695 | summary: string;
696 | severity: "info" | "warning" | "error";
697 | timestamp: string;
698 | category: "validation" | "rate_limit" | "auth" | "conflict" | "schema_drift" | "other";
699 | details?: string | undefined;
700 | proposedFix?: Record<string, unknown> | undefined;
701 | }>;
702 | export declare const listExceptionsOutputSchema: z.ZodObject<{
703 | items: z.ZodArray<z.ZodObject<{
704 | id: z.ZodString;
705 | timestamp: z.ZodString;
706 | severity: z.ZodEnum<["info", "warning", "error"]>;
707 | category: z.ZodEnum<["rate_limit", "validation", "auth", "conflict", "schema_drift", "other"]>;
708 | summary: z.ZodString;
709 | details: z.ZodOptional<z.ZodString>;
710 | proposedFix: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
711 | }, "strict", z.ZodTypeAny, {
712 | id: string;
713 | summary: string;
714 | severity: "info" | "warning" | "error";
715 | timestamp: string;
716 | category: "validation" | "rate_limit" | "auth" | "conflict" | "schema_drift" | "other";
717 | details?: string | undefined;
718 | proposedFix?: Record<string, unknown> | undefined;
719 | }, {
720 | id: string;
721 | summary: string;
722 | severity: "info" | "warning" | "error";
723 | timestamp: string;
724 | category: "validation" | "rate_limit" | "auth" | "conflict" | "schema_drift" | "other";
725 | details?: string | undefined;
726 | proposedFix?: Record<string, unknown> | undefined;
727 | }>, "many">;
728 | cursor: z.ZodOptional<z.ZodString>;
729 | }, "strict", z.ZodTypeAny, {
730 | items: {
731 | id: string;
732 | summary: string;
733 | severity: "info" | "warning" | "error";
734 | timestamp: string;
735 | category: "validation" | "rate_limit" | "auth" | "conflict" | "schema_drift" | "other";
736 | details?: string | undefined;
737 | proposedFix?: Record<string, unknown> | undefined;
738 | }[];
739 | cursor?: string | undefined;
740 | }, {
741 | items: {
742 | id: string;
743 | summary: string;
744 | severity: "info" | "warning" | "error";
745 | timestamp: string;
746 | category: "validation" | "rate_limit" | "auth" | "conflict" | "schema_drift" | "other";
747 | details?: string | undefined;
748 | proposedFix?: Record<string, unknown> | undefined;
749 | }[];
750 | cursor?: string | undefined;
751 | }>;
752 | export declare const governanceOutputSchema: z.ZodObject<{
753 | allowedBases: z.ZodArray<z.ZodString, "many">;
754 | allowedTables: z.ZodDefault<z.ZodOptional<z.ZodArray<z.ZodObject<{
755 | baseId: z.ZodString;
756 | table: z.ZodString;
757 | }, "strict", z.ZodTypeAny, {
758 | table: string;
759 | baseId: string;
760 | }, {
761 | table: string;
762 | baseId: string;
763 | }>, "many">>>;
764 | allowedOperations: z.ZodDefault<z.ZodArray<z.ZodEnum<["describe", "query", "create", "update", "upsert"]>, "many">>;
765 | piiFields: z.ZodDefault<z.ZodOptional<z.ZodArray<z.ZodObject<{
766 | baseId: z.ZodString;
767 | table: z.ZodString;
768 | field: z.ZodString;
769 | policy: z.ZodEnum<["mask", "hash", "drop"]>;
770 | }, "strict", z.ZodTypeAny, {
771 | table: string;
772 | baseId: string;
773 | field: string;
774 | policy: "mask" | "hash" | "drop";
775 | }, {
776 | table: string;
777 | baseId: string;
778 | field: string;
779 | policy: "mask" | "hash" | "drop";
780 | }>, "many">>>;
781 | redactionPolicy: z.ZodDefault<z.ZodEnum<["mask_all_pii", "mask_on_inline", "none"]>>;
782 | loggingPolicy: z.ZodDefault<z.ZodEnum<["errors_only", "minimal", "verbose"]>>;
783 | retentionDays: z.ZodDefault<z.ZodNumber>;
784 | }, "strict", z.ZodTypeAny, {
785 | allowedBases: string[];
786 | allowedTables: {
787 | table: string;
788 | baseId: string;
789 | }[];
790 | allowedOperations: ("describe" | "query" | "create" | "update" | "upsert")[];
791 | piiFields: {
792 | table: string;
793 | baseId: string;
794 | field: string;
795 | policy: "mask" | "hash" | "drop";
796 | }[];
797 | redactionPolicy: "mask_all_pii" | "mask_on_inline" | "none";
798 | loggingPolicy: "errors_only" | "minimal" | "verbose";
799 | retentionDays: number;
800 | }, {
801 | allowedBases: string[];
802 | allowedTables?: {
803 | table: string;
804 | baseId: string;
805 | }[] | undefined;
806 | allowedOperations?: ("describe" | "query" | "create" | "update" | "upsert")[] | undefined;
807 | piiFields?: {
808 | table: string;
809 | baseId: string;
810 | field: string;
811 | policy: "mask" | "hash" | "drop";
812 | }[] | undefined;
813 | redactionPolicy?: "mask_all_pii" | "mask_on_inline" | "none" | undefined;
814 | loggingPolicy?: "errors_only" | "minimal" | "verbose" | undefined;
815 | retentionDays?: number | undefined;
816 | }>;
817 | export type DescribeInput = z.infer<typeof describeInputSchema>;
818 | export type DescribeOutput = z.infer<typeof describeOutputSchema>;
819 | export type QueryInput = z.infer<typeof queryInputSchema>;
820 | export type QueryOutput = z.infer<typeof queryOutputSchema>;
821 | export type CreateInput = z.infer<typeof createInputSchema>;
822 | export type CreateOutput = z.infer<typeof createOutputSchema>;
823 | export type UpdateInput = z.infer<typeof updateInputSchema>;
824 | export type UpdateOutput = z.infer<typeof updateOutputSchema>;
825 | export type UpsertInput = z.infer<typeof upsertInputSchema>;
826 | export type UpsertOutput = z.infer<typeof upsertOutputSchema>;
827 | export type ListExceptionsInput = z.infer<typeof listExceptionsInputSchema>;
828 | export type ExceptionItem = z.infer<typeof exceptionItemSchema>;
829 | export type ListExceptionsOutput = z.infer<typeof listExceptionsOutputSchema>;
830 | export type GovernanceSnapshot = z.infer<typeof governanceOutputSchema>;
831 |
```