# Directory Structure
```
├── .gitignore
├── assets
│ └── MCPArchitecture.png
├── LICENSE
├── mcp-ssms-client.py
├── mcp-ssms-server.py
├── README.md
└── requirement.txt
```
# Files
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
```
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | *.py,cover
50 | .hypothesis/
51 | .pytest_cache/
52 | cover/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | .pybuilder/
76 | target/
77 |
78 | # Jupyter Notebook
79 | .ipynb_checkpoints
80 |
81 | # IPython
82 | profile_default/
83 | ipython_config.py
84 |
85 | # pyenv
86 | # For a library or package, you might want to ignore these files since the code is
87 | # intended to run in multiple environments; otherwise, check them in:
88 | # .python-version
89 |
90 | # pipenv
91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
94 | # install all needed dependencies.
95 | #Pipfile.lock
96 |
97 | # UV
98 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
99 | # This is especially recommended for binary packages to ensure reproducibility, and is more
100 | # commonly ignored for libraries.
101 | #uv.lock
102 |
103 | # poetry
104 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
105 | # This is especially recommended for binary packages to ensure reproducibility, and is more
106 | # commonly ignored for libraries.
107 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
108 | #poetry.lock
109 |
110 | # pdm
111 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
112 | #pdm.lock
113 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
114 | # in version control.
115 | # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
116 | .pdm.toml
117 | .pdm-python
118 | .pdm-build/
119 |
120 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
121 | __pypackages__/
122 |
123 | # Celery stuff
124 | celerybeat-schedule
125 | celerybeat.pid
126 |
127 | # SageMath parsed files
128 | *.sage.py
129 |
130 | # Environments
131 | .env
132 | .venv
133 | env/
134 | venv/
135 | ENV/
136 | env.bak/
137 | venv.bak/
138 |
139 | # Spyder project settings
140 | .spyderproject
141 | .spyproject
142 |
143 | # Rope project settings
144 | .ropeproject
145 |
146 | # mkdocs documentation
147 | /site
148 |
149 | # mypy
150 | .mypy_cache/
151 | .dmypy.json
152 | dmypy.json
153 |
154 | # Pyre type checker
155 | .pyre/
156 |
157 | # pytype static type analyzer
158 | .pytype/
159 |
160 | # Cython debug symbols
161 | cython_debug/
162 |
163 | # PyCharm
164 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
165 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
166 | # and can be added to the global gitignore or merged into this file. For a more nuclear
167 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
168 | #.idea/
169 |
170 | # Ruff stuff:
171 | .ruff_cache/
172 |
173 | # PyPI configuration file
174 | .pypirc
175 |
```
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
```markdown
1 | # SQL Server Agent - Model Context Protocol
2 | The SQL Server Agent lets you interact with a SQL Server database in natural language, leveraging the Model Context Protocol (MCP) as a layer between the LLM and the data source.
3 |
4 | ## Key Features:
5 |
6 | * **Talk to Your Database**: Chat with SQL Server using plain English.
7 | * **No-Code Database Operations**: Manage your database tasks entirely through natural conversations.
8 | * **Effortless Procedure Execution**: Run stored procedures with simple natural-language commands.
9 | * **MCP-Enhanced Accuracy**: Achieve precise database interactions through the Model Context Protocol (MCP), intelligently connecting your commands to data.
10 | * **Context-Aware Conversations**: Enjoy seamless interactions powered by the Model Context Protocol.
11 |
12 | ## What is MCP?
13 | MCP (Model Context Protocol) is an open protocol that defines how applications supply context to LLMs.
14 | MCP provides a standardized way to connect AI models to different data sources and tools.
15 |
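To make this concrete, here is a minimal sketch of an MCP server written with the `mcp` Python SDK's `FastMCP` class (the same class the server in this repository uses); the tool name and body are purely illustrative:

```python
from mcp.server.fastmcp import FastMCP

# Illustrative only: a tiny MCP server exposing a single tool over stdio.
mcp = FastMCP("HelloMCP")

@mcp.tool()
def say_hello(name: str) -> str:
    """Return a greeting for the given name."""
    return f"Hello, {name}!"

if __name__ == "__main__":
    mcp.run(transport="stdio")
```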
16 | ## Why MCP?
17 | MCP simplifies building complex, agentic workflows on top of LLMs in scenarios where the language model needs frequent integration with external data sources and tools.
18 |
19 | ## MCP Architecture:
20 | The MCP architecture follows a client-server model, allowing a single client to interact seamlessly with multiple servers.
21 |
22 | 
23 |
24 |
25 | **MCP-Client**: The AI application (LLM-powered client) that requests data and tools.
26 |
27 | **MCP-Protocol**: The standardized messages that connect the client to one or more servers.
28 |
29 | **MCP-Server**: Exposes tools and data sources to the client through MCP.
30 |
31 | **Local Database, Cloud Database, External APIs**: The underlying sources of data, whether local storage, cloud databases, or online APIs.
32 |
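The client-server model above can be sketched in a few lines with the `mcp` Python SDK used in this repository: a single client process can open sessions against several stdio servers and list the tools each one exposes (the second server script below is hypothetical):

```python
import asyncio
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

# One client, multiple servers; "another-mcp-server.py" is a hypothetical second server.
servers = [
    StdioServerParameters(command="python", args=["./mcp-ssms-server.py"]),
    StdioServerParameters(command="python", args=["./another-mcp-server.py"]),
]

async def list_all_tools():
    for params in servers:
        async with stdio_client(params) as (read, write):
            async with ClientSession(read, write) as session:
                await session.initialize()
                tools = await session.list_tools()
                print(params.args, "->", [tool.name for tool in tools.tools])

asyncio.run(list_all_tools())
```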
33 | ## Now, Let's Dive Into the Implementation
34 | With an understanding of MCP and its architecture, it's time to bring it all together with the **SQL Server Agent**.
35 |
36 | ### What is SQL Server Agent?
37 | The **SQL Server Agent** is a conversational AI query CLI that enables you to **interact with your SQL Server database using natural language**. Powered by the **Model Context Protocol**, it acts as a smart layer between your language model and the database, making it possible to:
38 |
39 | - Query your database without writing SQL
40 | - Execute stored procedures with conversational commands
41 | - Perform complex operations while maintaining context across multiple steps
42 |
43 | Whether you're a developer, analyst, or non-technical user, this agent makes your data accessible through intuitive, human-like interactions.
44 |
45 | Now, let’s walk through how to get it up and running 👇
46 |
47 | ## Prerequisites
48 | Before you get started, make sure you have the following:
49 |
50 | - **Python 3.12+** and the **Microsoft ODBC Driver for SQL Server** installed on your machine (the default configuration assumes ODBC Driver 17)
51 | - A valid **OpenAI API Key**
52 |
53 | ## Getting Started
54 | Follow these steps to get the project up and running:
55 | ### 1. Clone the Repository
56 |
57 | ```bash
58 | git clone https://github.com/Amanp17/mcp-sql-server-natural-lang.git
59 | cd mcp-sql-server-natural-lang
60 | ```
61 |
62 | ### 2. Install Dependencies
63 | ```bash
64 | pip install -r requirement.txt
65 | ```
66 | ### 3. Setup Environment Variables
67 |
68 | Create a `.env` file in the root of the project and add the following:
69 |
70 | ```dotenv
71 | OPENAI_API_KEY=your_openai_api_key
72 | MSSQL_SERVER=localhost
73 | MSSQL_DATABASE=your_database_name
74 | MSSQL_USERNAME=your_username
75 | MSSQL_PASSWORD=your_password
76 | MSSQL_DRIVER={ODBC Driver 17 for SQL Server}
77 | ```
78 |
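Before launching the agent, you can optionally verify these credentials with a small standalone script (a hypothetical helper, not part of this repo, that builds the same connection string `mcp-ssms-server.py` uses):

```python
# check_connection.py -- hypothetical sanity check for the .env settings.
import os
import pyodbc
from dotenv import load_dotenv

load_dotenv()

connection_string = (
    f"DRIVER={os.getenv('MSSQL_DRIVER')};"
    f"SERVER={os.getenv('MSSQL_SERVER')};"
    f"DATABASE={os.getenv('MSSQL_DATABASE')};"
    f"UID={os.getenv('MSSQL_USERNAME')};"
    f"PWD={os.getenv('MSSQL_PASSWORD')}"
)

try:
    conn = pyodbc.connect(connection_string, timeout=5)
    print("Connected to:", conn.getinfo(pyodbc.SQL_DBMS_NAME))
    conn.close()
except pyodbc.Error as exc:
    print("Connection failed:", exc)
```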
79 | ## Running the SQL Server Agent
80 | Once you've set up your environment and dependencies, you're ready to interact with the SQL Server Agent.
81 |
82 | ### Run the Client Script
83 | Execute the following command to start the agent:
84 |
85 | ```bash
86 | python mcp-ssms-client.py
87 | ```
88 |
89 | Once the script starts, it will prompt you like this:
90 |
91 | ```bash
92 | Enter your Query (Press ESC to Quit):
93 | ```
94 |
95 | Now, you can type your request in plain English. For example:
96 |
97 | ```text
98 | Create an Employee table with 10 rows of dummy data, including departments and salaries.
99 | ```
100 |
101 | The agent will process your input using the Model Context Protocol and return the relevant data from your SQL Server database.
102 |
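Under the hood, the language model never runs SQL itself. As instructed by the client's system prompt, it replies with a directive of the form `TOOL: query_data, ARGS: {"sql": "<YOUR_SQL_QUERY>"}`; the client parses that line, forwards the query to the MCP server's `query_data` tool via `session.call_tool`, and then summarizes the tool's output for you.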
103 | 🧠 Tip: You can ask follow-up questions or make requests like "show me the employees and their departments" or "how many employees have a salary under $40K?" — the context is preserved!
104 |
105 | ## Conclusion
106 |
107 | The **SQL Server Agent**, powered by the **Model Context Protocol (MCP)**, brings the power of conversational AI to your database operations. By bridging the gap between natural language and SQL, it allows users to interact with their data effortlessly, making database access more intuitive, efficient, and accessible to everyone, even those without technical expertise.
108 |
109 | Whether you're querying data, executing procedures, or building complex workflows, this agent serves as your intelligent interface to SQL Server.
110 |
111 | Feel free to contribute, open issues, or suggest enhancements — we're building the future of AI-driven data interaction together! 🚀
112 |
```
--------------------------------------------------------------------------------
/requirement.txt:
--------------------------------------------------------------------------------
```
1 | python-dotenv
2 | mcp
3 | pyodbc
4 | loguru
5 | openai
```
--------------------------------------------------------------------------------
/mcp-ssms-server.py:
--------------------------------------------------------------------------------
```python
1 | import os
2 | import pyodbc
3 | from loguru import logger
4 | from mcp.server.fastmcp import FastMCP
5 | from dotenv import load_dotenv
6 |
7 | load_dotenv()
8 |
9 | # Database configurations
10 | MSSQL_SERVER = os.getenv("MSSQL_SERVER", "localhost")
11 | MSSQL_DATABASE = os.getenv("MSSQL_DATABASE", "my_database")
12 | MSSQL_USERNAME = os.getenv("MSSQL_USERNAME", "sa")
13 | MSSQL_PASSWORD = os.getenv("MSSQL_PASSWORD", "your_password")
14 | MSSQL_DRIVER = os.getenv("MSSQL_DRIVER", "{ODBC Driver 17 for SQL Server}")
15 |
16 | # Building the connection string
17 | connection_string = (
18 | f"DRIVER={MSSQL_DRIVER};"
19 | f"SERVER={MSSQL_SERVER};"
20 | f"DATABASE={MSSQL_DATABASE};"
21 | f"UID={MSSQL_USERNAME};"
22 | f"PWD={MSSQL_PASSWORD}"
23 | )
24 |
25 | # Creating an MCP server instance
26 | mcp = FastMCP("Demo")
27 |
28 | @mcp.tool()
29 | def query_data(sql: str) -> str:
30 | """Execute SQL queries safely on MSSQL."""
31 |     logger.info("Processing query...")
32 |     conn = None
33 |     try:
34 |         conn = pyodbc.connect(connection_string)
35 |         cursor = conn.cursor()
36 |         cursor.execute(sql)
37 |         if cursor.description is not None:
38 |             result = cursor.fetchall()
39 |             output = "\n".join(str(row) for row in result)
40 |         else:
41 |             output = "SQL executed successfully, no results returned."
42 |         conn.commit()
43 |         return output
44 |     except Exception as e:
45 |         logger.error("Error executing query: " + str(e))
46 |         return f"Error: {str(e)}"
47 |     finally:
48 |         if conn is not None:
49 |             conn.close()
50 |
51 | @mcp.prompt()
52 | def example_prompt(code: str) -> str:
53 | return f"Please review this code:\n\n{code}"
54 |
55 | if __name__ == "__main__":
56 | print("Starting server...")
57 | mcp.run(transport="stdio")
58 |
```
--------------------------------------------------------------------------------
/mcp-ssms-client.py:
--------------------------------------------------------------------------------
```python
1 | import asyncio
2 | import os
3 | import re
4 | import json
5 | from dataclasses import dataclass, field
6 | from typing import cast
7 | import sys
8 | from dotenv import load_dotenv
9 | from mcp import ClientSession, StdioServerParameters
10 | from mcp.client.stdio import stdio_client
11 |
12 | load_dotenv()
13 |
14 | from openai import OpenAI
15 | client = OpenAI()
16 |
17 | # Create server parameters for stdio connection
18 | server_params = StdioServerParameters(
19 | command="python", # Executable
20 | args=["./mcp-ssms-server.py"], # Command line arguments to run the server script
21 | env=None, # Optional environment variables
22 | )
23 |
24 | if os.name == 'nt':
25 |     import msvcrt
26 |     def get_input(prompt: str) -> str:
27 |         sys.stdout.write(prompt)
28 |         sys.stdout.flush()
29 |         buf = []
30 |         while True:
31 |             ch = msvcrt.getwch()
32 |             if ch == '\r':
33 |                 sys.stdout.write('\n')
34 |                 return ''.join(buf)
35 |             elif ch == '\x1b':
36 |                 raise KeyboardInterrupt
37 |             elif ch == '\b':
38 |                 if buf:
39 |                     buf.pop()
40 |                     sys.stdout.write('\b \b')
41 |             else:
42 |                 buf.append(ch)
43 |                 sys.stdout.write(ch)
44 |             sys.stdout.flush()
45 | else:
46 |     get_input = input  # Fallback on non-Windows platforms (ESC-to-quit is Windows-only).
47 | @dataclass
48 | class Chat:
49 | messages: list[dict] = field(default_factory=list)
50 | system_prompt: str = (
51 | "You are a master MS SQL Server assistant. "
52 | "Your job is to use the tools at your disposal to execute SQL queries "
53 | "and provide the results to the user. "
54 | "When you need to execute a SQL query, respond with the following format exactly:\n"
55 | "TOOL: query_data, ARGS: {\"sql\": \"<YOUR_SQL_QUERY>\"}"
56 | )
57 |
58 | async def process_query(self, session: ClientSession, query: str) -> None:
59 | # 1) Gather available tools (for reference only)
60 | response = await session.list_tools()
61 | available_tools = [
62 | {
63 | "name": tool.name,
64 | "description": tool.description or "",
65 | "input_schema": tool.inputSchema,
66 | }
67 | for tool in response.tools
68 | ]
69 |
70 | # 2) Build the conversation for OpenAI
71 | openai_messages = [
72 | {"role": "system", "content": self.system_prompt},
73 | ]
74 | openai_messages.extend(self.messages)
75 | openai_messages.append({"role": "user", "content": query})
76 |
77 | # 3) Send to OpenAI
78 | completion = client.chat.completions.create(
79 | model="gpt-4",
80 | messages=openai_messages,
81 | max_tokens=2000,
82 | temperature=0.0,
83 | )
84 |
85 | assistant_reply = completion.choices[0].message.content
86 |
87 | self.messages.append({"role": "user", "content": query})
88 | self.messages.append({"role": "assistant", "content": assistant_reply})
89 |
90 | # 4) Look for a tool call in the assistant reply
91 | if "TOOL:" in assistant_reply:
92 | try:
93 | pattern = r"TOOL:\s*(\w+),\s*ARGS:\s*(\{.*\})"
94 | match = re.search(pattern, assistant_reply)
95 | if match:
96 | tool_name = match.group(1)
97 | tool_args_str = match.group(2)
98 | tool_args = json.loads(tool_args_str)
99 |
100 | # Now call the tool on the server
101 | result = await session.call_tool(tool_name, cast(dict, tool_args))
102 | tool_text = getattr(result.content[0], "text", "")
103 |
104 | tool_result_msg = f"Tool '{tool_name}' result:\n{tool_text}"
105 | self.messages.append({"role": "system", "content": tool_result_msg})
106 |
107 | completion_2 = client.chat.completions.create(
108 | model="gpt-4",
109 | messages=[{"role": "system", "content": self.system_prompt}] + self.messages,
110 | max_tokens=1000,
111 | temperature=0.0,
112 | )
113 | final_reply = completion_2.choices[0].message.content
114 | print("\nAssistant:", final_reply)
115 | self.messages.append({"role": "assistant", "content": final_reply})
116 | else:
117 | print("No valid tool command found in assistant response.")
118 | except Exception as e:
119 | print(f"Failed to parse tool usage: {e}")
120 |
121 | async def chat_loop(self, session: ClientSession):
122 | while True:
123 | try:
124 | query = get_input("Enter your Query (Press ESC to Quit): ").strip()
125 | except (KeyboardInterrupt, EOFError):
126 | print("\nExiting...")
127 | break
128 | if not query:
129 | break
130 | await self.process_query(session, query)
131 |
132 | async def run(self):
133 | async with stdio_client(server_params) as (read, write):
134 | async with ClientSession(read, write) as session:
135 | await session.initialize()
136 | await self.chat_loop(session)
137 |
138 | if __name__ == "__main__":
139 | chat = Chat()
140 | asyncio.run(chat.run())
141 |
```