This is page 11 of 16. To view other pages of the full context, open http://codebase.md/fujitsu-ai/mcp-server-for-mas-developments?lines=false&page={x}, replacing {x} with the desired page number. # Directory Structure ``` ├── .gitattributes ├── .gitignore ├── agents │ ├── __init__.py │ ├── AgentInterface │ │ ├── __init__.py │ │ ├── Python │ │ │ ├── __init__.py │ │ │ ├── agent.py │ │ │ ├── color.py │ │ │ ├── config.py │ │ │ ├── language.py │ │ │ ├── local_file_handler.py │ │ │ └── network.py │ │ └── requirements.txt │ ├── AgentMonitoring │ │ ├── ChatBot-Agent Dashboard Example - Grafana.json │ │ ├── images │ │ │ ├── Grafana.png │ │ │ └── Prometheus.png │ │ ├── IoT-Agent Dashboard Example - Grafana.json │ │ ├── OpenAI compatible API - Agent Dashboard Example - Grafana.json │ │ ├── prometheus Example.yml │ │ └── README.md │ ├── ChatBotAgent │ │ ├── __init__.py │ │ ├── config.json.example │ │ ├── html │ │ │ ├── favicon.ico │ │ │ ├── index_de.html │ │ │ ├── index.html │ │ │ ├── Logo_light.svg │ │ │ ├── start_http_server.ps1 │ │ │ └── start_http_server.sh │ │ ├── Python │ │ │ ├── __init__.py │ │ │ └── chatbot_agent.py │ │ ├── README.md │ │ └── requirements.txt │ ├── IoTAgent │ │ ├── config_example.json │ │ ├── Python │ │ │ ├── iot_mqtt_agent.py │ │ │ └── language.py │ │ ├── README.md │ │ └── requirements.txt │ ├── MCP-Client │ │ ├── __init__.py │ │ ├── .env.example │ │ ├── Python │ │ │ ├── __init__.py │ │ │ ├── chat_handler.py │ │ │ ├── config.py │ │ │ ├── environment.py │ │ │ ├── llm_client.py │ │ │ ├── mcp_client_sse.py │ │ │ ├── mcp_client.py │ │ │ ├── messages │ │ │ │ ├── __init__.py │ │ │ │ ├── message_types │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── incrementing_id_message.py │ │ │ │ │ ├── initialize_message.py │ │ │ │ │ ├── json_rpc_message.py │ │ │ │ │ ├── ping_message.py │ │ │ │ │ ├── prompts_messages.py │ │ │ │ │ ├── prompts_models.py │ │ │ │ │ ├── resources_messages.py │ │ │ │ │ └── tools_messages.py │ │ │ │ ├── send_call_tool.py │ │ │ │ ├── send_initialize_message.py │ │ │ │ ├── send_message.py │ │ │ │ ├── send_ping.py │ 
│ │ │ ├── send_prompts.py │ │ │ │ ├── send_resources.py │ │ │ │ └── send_tools_list.py │ │ │ ├── system_prompt_generator.py │ │ │ ├── tools_handler.py │ │ │ └── transport │ │ │ ├── __init__.py │ │ │ └── stdio │ │ │ ├── __init__.py │ │ │ ├── stdio_client.py │ │ │ ├── stdio_server_parameters.py │ │ │ └── stdio_server_shutdown.py │ │ ├── README.md │ │ ├── requirements.txt │ │ └── server_config.json │ ├── OpenAI_Compatible_API_Agent │ │ ├── __init__.py │ │ ├── docker-compose.yml │ │ ├── Dockerfile │ │ ├── pgpt_openai_api_mcp.json.example │ │ ├── pgpt_openai_api_proxy.json.example │ │ ├── Python │ │ │ ├── __init__.py │ │ │ ├── client_tests │ │ │ │ ├── __init__.py │ │ │ │ ├── openai_test_client_structured.py │ │ │ │ ├── openai_test_client_tools.py │ │ │ │ ├── openai_test_client.py │ │ │ │ ├── vllm_client_multimodal.py │ │ │ │ ├── vllm_client.py │ │ │ │ ├── vllm_structured.py │ │ │ │ └── vllm_structured2.py │ │ │ ├── generate_api_key.py │ │ │ ├── open_ai_helper.py │ │ │ ├── openai_compatible_api.py │ │ │ ├── openai_mcp_api.py │ │ │ ├── pgpt_api.py │ │ │ ├── privategpt_api.py │ │ │ └── vllmproxy.py │ │ ├── README.md │ │ └── requirements.txt │ └── SourceManagerAgent │ ├── __init__.py │ ├── config.json.example │ └── Python │ ├── __init__.py │ ├── file_tools │ │ └── loader_factory.py │ ├── file_upload_agent.py │ └── local_db.py ├── clients │ ├── __init__.py │ ├── C# .Net │ │ ├── 1.0 mcp_login │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_login.deps.json │ │ │ │ ├── mcp_login.dll │ │ │ │ ├── mcp_login.exe │ │ │ │ ├── mcp_login.pdb │ │ │ │ ├── mcp_login.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_login.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_login.AssemblyInfo.cs │ │ │ │ │ ├── mcp_login.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_login.assets.cache │ │ │ │ │ ├── mcp_login.csproj.AssemblyReference.cache │ │ │ │ │ 
├── mcp_login.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_login.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_login.csproj.Up2Date │ │ │ │ │ ├── mcp_login.dll │ │ │ │ │ ├── mcp_login.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_login.genruntimeconfig.cache │ │ │ │ │ ├── mcp_login.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_login.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_login.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_login.dll │ │ │ │ ├── mcp_login.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_login.csproj.nuget.g.props │ │ │ │ ├── mcp_login.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 1.1 mcp_logout │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_logout.deps.json │ │ │ │ ├── mcp_logout.dll │ │ │ │ ├── mcp_logout.exe │ │ │ │ ├── mcp_logout.pdb │ │ │ │ ├── mcp_logout.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_logout.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_logout.AssemblyInfo.cs │ │ │ │ │ ├── mcp_logout.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_logout.assets.cache │ │ │ │ │ ├── mcp_logout.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_logout.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_logout.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_logout.csproj.Up2Date │ │ │ │ │ ├── mcp_logout.dll │ │ │ │ │ ├── mcp_logout.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_logout.genruntimeconfig.cache │ │ │ │ │ ├── mcp_logout.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_logout.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_logout.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_logout.dll │ │ │ │ ├── mcp_logout.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_logout.csproj.nuget.g.props │ │ │ │ ├── mcp_logout.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 2.0 mcp_chat │ │ │ ├── 
bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_chat.deps.json │ │ │ │ ├── mcp_chat.dll │ │ │ │ ├── mcp_chat.exe │ │ │ │ ├── mcp_chat.pdb │ │ │ │ ├── mcp_chat.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_chat.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_chat.AssemblyInfo.cs │ │ │ │ │ ├── mcp_chat.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_chat.assets.cache │ │ │ │ │ ├── mcp_chat.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_chat.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_chat.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_chat.csproj.Up2Date │ │ │ │ │ ├── mcp_chat.dll │ │ │ │ │ ├── mcp_chat.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_chat.genruntimeconfig.cache │ │ │ │ │ ├── mcp_chat.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_chat.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_chat.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_chat.dll │ │ │ │ ├── mcp_chat.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_chat.csproj.nuget.g.props │ │ │ │ ├── mcp_chat.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 2.1 mcp_continue_chat │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_continue_chat.deps.json │ │ │ │ ├── mcp_continue_chat.dll │ │ │ │ ├── mcp_continue_chat.exe │ │ │ │ ├── mcp_continue_chat.pdb │ │ │ │ ├── mcp_continue_chat.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_continue_chat.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_cont.EF178231.Up2Date │ │ │ │ │ ├── mcp_continue_chat.AssemblyInfo.cs │ │ │ │ │ ├── mcp_continue_chat.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_continue_chat.assets.cache │ │ │ │ │ ├── mcp_continue_chat.csproj.AssemblyReference.cache │ │ │ │ │ ├── 
mcp_continue_chat.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_continue_chat.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_continue_chat.dll │ │ │ │ │ ├── mcp_continue_chat.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_continue_chat.genruntimeconfig.cache │ │ │ │ │ ├── mcp_continue_chat.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_continue_chat.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_continue_chat.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_continue_chat.dll │ │ │ │ ├── mcp_continue_chat.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_continue_chat.csproj.nuget.g.props │ │ │ │ ├── mcp_continue_chat.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 2.2 mcp_get_chat_info │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_get_chat_info.deps.json │ │ │ │ ├── mcp_get_chat_info.dll │ │ │ │ ├── mcp_get_chat_info.exe │ │ │ │ ├── mcp_get_chat_info.pdb │ │ │ │ ├── mcp_get_chat_info.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── Dokumente - Verknüpfung.lnk │ │ │ ├── mcp_get_chat_info.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_get_.DFF47B4E.Up2Date │ │ │ │ │ ├── mcp_get_chat_info.AssemblyInfo.cs │ │ │ │ │ ├── mcp_get_chat_info.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_get_chat_info.assets.cache │ │ │ │ │ ├── mcp_get_chat_info.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_get_chat_info.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_get_chat_info.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_get_chat_info.dll │ │ │ │ │ ├── mcp_get_chat_info.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_get_chat_info.genruntimeconfig.cache │ │ │ │ │ ├── mcp_get_chat_info.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_get_chat_info.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_get_chat_info.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_get_chat_info.dll │ │ │ │ ├── 
mcp_get_chat_info.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_get_chat_info.csproj.nuget.g.props │ │ │ │ ├── mcp_get_chat_info.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 3.0 mcp_create_source │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_create_source.deps.json │ │ │ │ ├── mcp_create_source.dll │ │ │ │ ├── mcp_create_source.exe │ │ │ │ ├── mcp_create_source.pdb │ │ │ │ ├── mcp_create_source.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_create_source.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_crea.CB4ED912.Up2Date │ │ │ │ │ ├── mcp_create_source.AssemblyInfo.cs │ │ │ │ │ ├── mcp_create_source.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_create_source.assets.cache │ │ │ │ │ ├── mcp_create_source.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_create_source.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_create_source.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_create_source.dll │ │ │ │ │ ├── mcp_create_source.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_create_source.genruntimeconfig.cache │ │ │ │ │ ├── mcp_create_source.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_create_source.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_create_source.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_create_source.dll │ │ │ │ ├── mcp_create_source.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_create_source.csproj.nuget.g.props │ │ │ │ ├── mcp_create_source.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 3.1 mcp_get_source │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_get_source.deps.json │ │ │ │ ├── mcp_get_source.dll │ │ │ │ ├── mcp_get_source.exe │ │ │ │ ├── mcp_get_source.pdb │ │ │ │ ├── mcp_get_source.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── 
mcp_get_source.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_get_.4E61956F.Up2Date │ │ │ │ │ ├── mcp_get_source.AssemblyInfo.cs │ │ │ │ │ ├── mcp_get_source.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_get_source.assets.cache │ │ │ │ │ ├── mcp_get_source.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_get_source.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_get_source.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_get_source.dll │ │ │ │ │ ├── mcp_get_source.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_get_source.genruntimeconfig.cache │ │ │ │ │ ├── mcp_get_source.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_get_source.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_get_source.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_get_source.dll │ │ │ │ ├── mcp_get_source.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_get_source.csproj.nuget.g.props │ │ │ │ ├── mcp_get_source.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 3.2 mcp_list_sources │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_list_sources.deps.json │ │ │ │ ├── mcp_list_sources.dll │ │ │ │ ├── mcp_list_sources.exe │ │ │ │ ├── mcp_list_sources.pdb │ │ │ │ ├── mcp_list_sources.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_list_sources.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_list_sources.AssemblyInfo.cs │ │ │ │ │ ├── mcp_list_sources.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_list_sources.assets.cache │ │ │ │ │ ├── mcp_list_sources.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_list_sources.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_list_sources.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_list_sources.dll │ │ │ │ │ ├── 
mcp_list_sources.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_list_sources.genruntimeconfig.cache │ │ │ │ │ ├── mcp_list_sources.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_list_sources.pdb │ │ │ │ │ ├── mcp_list.A720E197.Up2Date │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_list_sources.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_list_sources.dll │ │ │ │ ├── mcp_list_sources.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_list_sources.csproj.nuget.g.props │ │ │ │ ├── mcp_list_sources.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 3.3 mcp_edit_source │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_edit_source.deps.json │ │ │ │ ├── mcp_edit_source.dll │ │ │ │ ├── mcp_edit_source.exe │ │ │ │ ├── mcp_edit_source.pdb │ │ │ │ ├── mcp_edit_source.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_edit_source.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_edit_source.AssemblyInfo.cs │ │ │ │ │ ├── mcp_edit_source.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_edit_source.assets.cache │ │ │ │ │ ├── mcp_edit_source.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_edit_source.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_edit_source.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_edit_source.dll │ │ │ │ │ ├── mcp_edit_source.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_edit_source.genruntimeconfig.cache │ │ │ │ │ ├── mcp_edit_source.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_edit_source.pdb │ │ │ │ │ ├── mcp_edit.7303BE3B.Up2Date │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_edit_source.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_edit_source.dll │ │ │ │ ├── mcp_edit_source.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_edit_source.csproj.nuget.g.props │ │ │ │ ├── mcp_edit_source.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── 
Program.cs │ │ ├── 3.4 mcp_delete_source │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_delete_source.deps.json │ │ │ │ ├── mcp_delete_source.dll │ │ │ │ ├── mcp_delete_source.exe │ │ │ │ ├── mcp_delete_source.pdb │ │ │ │ ├── mcp_delete_source.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_delete_source.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_dele.67DD13F9.Up2Date │ │ │ │ │ ├── mcp_delete_source.AssemblyInfo.cs │ │ │ │ │ ├── mcp_delete_source.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_delete_source.assets.cache │ │ │ │ │ ├── mcp_delete_source.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_delete_source.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_delete_source.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_delete_source.dll │ │ │ │ │ ├── mcp_delete_source.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_delete_source.genruntimeconfig.cache │ │ │ │ │ ├── mcp_delete_source.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_delete_source.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_delete_source.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_delete_source.dll │ │ │ │ ├── mcp_delete_source.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_delete_source.csproj.nuget.g.props │ │ │ │ ├── mcp_delete_source.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 4.0 mcp_list_groups │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_list_groups.deps.json │ │ │ │ ├── mcp_list_groups.dll │ │ │ │ ├── mcp_list_groups.exe │ │ │ │ ├── mcp_list_groups.pdb │ │ │ │ ├── mcp_list_groups.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_list_groups.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_list_groups.AssemblyInfo.cs │ │ │ │ │ ├── 
mcp_list_groups.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_list_groups.assets.cache │ │ │ │ │ ├── mcp_list_groups.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_list_groups.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_list_groups.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_list_groups.dll │ │ │ │ │ ├── mcp_list_groups.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_list_groups.genruntimeconfig.cache │ │ │ │ │ ├── mcp_list_groups.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_list_groups.pdb │ │ │ │ │ ├── mcp_list.EBD5E0D2.Up2Date │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_list_groups.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_list_groups.dll │ │ │ │ ├── mcp_list_groups.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_list_groups.csproj.nuget.g.props │ │ │ │ ├── mcp_list_groups.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 4.1 mcp_store_group │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_store_group.deps.json │ │ │ │ ├── mcp_store_group.dll │ │ │ │ ├── mcp_store_group.exe │ │ │ │ ├── mcp_store_group.pdb │ │ │ │ ├── mcp_store_group.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_store_group.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_stor.AFB4AA35.Up2Date │ │ │ │ │ ├── mcp_store_group.AssemblyInfo.cs │ │ │ │ │ ├── mcp_store_group.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_store_group.assets.cache │ │ │ │ │ ├── mcp_store_group.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_store_group.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_store_group.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_store_group.dll │ │ │ │ │ ├── mcp_store_group.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_store_group.genruntimeconfig.cache │ │ │ │ │ ├── mcp_store_group.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_store_group.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── 
mcp_store_group.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_store_group.dll │ │ │ │ ├── mcp_store_group.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_store_group.csproj.nuget.g.props │ │ │ │ ├── mcp_store_group.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 4.2 mcp_delete_group │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_delete_group.deps.json │ │ │ │ ├── mcp_delete_group.dll │ │ │ │ ├── mcp_delete_group.exe │ │ │ │ ├── mcp_delete_group.pdb │ │ │ │ ├── mcp_delete_group.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_delete_group.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_dele.FE1C6298.Up2Date │ │ │ │ │ ├── mcp_delete_group.AssemblyInfo.cs │ │ │ │ │ ├── mcp_delete_group.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_delete_group.assets.cache │ │ │ │ │ ├── mcp_delete_group.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_delete_group.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_delete_group.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_delete_group.dll │ │ │ │ │ ├── mcp_delete_group.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_delete_group.genruntimeconfig.cache │ │ │ │ │ ├── mcp_delete_group.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_delete_group.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_delete_group.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_delete_group.dll │ │ │ │ ├── mcp_delete_group.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_delete_group.csproj.nuget.g.props │ │ │ │ ├── mcp_delete_group.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 5.0 mcp_store_user │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_store_user.deps.json │ │ │ │ ├── mcp_store_user.dll │ │ │ │ ├── mcp_store_user.exe │ │ │ │ ├── mcp_store_user.pdb │ │ │ │ ├── 
mcp_store_user.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_store_user.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_stor.6C0F0C8A.Up2Date │ │ │ │ │ ├── mcp_store_user.AssemblyInfo.cs │ │ │ │ │ ├── mcp_store_user.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_store_user.assets.cache │ │ │ │ │ ├── mcp_store_user.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_store_user.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_store_user.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_store_user.dll │ │ │ │ │ ├── mcp_store_user.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_store_user.genruntimeconfig.cache │ │ │ │ │ ├── mcp_store_user.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_store_user.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_store_user.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_store_user.dll │ │ │ │ ├── mcp_store_user.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_store_user.csproj.nuget.g.props │ │ │ │ ├── mcp_store_user.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 5.1 mcp_edit_user │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_edit_user.deps.json │ │ │ │ ├── mcp_edit_user.dll │ │ │ │ ├── mcp_edit_user.exe │ │ │ │ ├── mcp_edit_user.pdb │ │ │ │ ├── mcp_edit_user.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_edit_user.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_edit_user.AssemblyInfo.cs │ │ │ │ │ ├── mcp_edit_user.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_edit_user.assets.cache │ │ │ │ │ ├── mcp_edit_user.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_edit_user.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_edit_user.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_edit_user.dll │ │ │ │ │ ├── 
mcp_edit_user.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_edit_user.genruntimeconfig.cache │ │ │ │ │ ├── mcp_edit_user.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_edit_user.pdb │ │ │ │ │ ├── mcp_edit.94A30270.Up2Date │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_edit_user.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_edit_user.dll │ │ │ │ ├── mcp_edit_user.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_edit_user.csproj.nuget.g.props │ │ │ │ ├── mcp_edit_user.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── 5.2 mcp_delete_user │ │ │ ├── bin │ │ │ │ └── Debug │ │ │ │ └── net9.0 │ │ │ │ ├── mcp_delete_user.deps.json │ │ │ │ ├── mcp_delete_user.dll │ │ │ │ ├── mcp_delete_user.exe │ │ │ │ ├── mcp_delete_user.pdb │ │ │ │ ├── mcp_delete_user.runtimeconfig.json │ │ │ │ └── Newtonsoft.Json.dll │ │ │ ├── mcp_delete_user.csproj │ │ │ ├── obj │ │ │ │ ├── Debug │ │ │ │ │ └── net9.0 │ │ │ │ │ ├── .NETCoreApp,Version=v9.0.AssemblyAttributes.cs │ │ │ │ │ ├── apphost.exe │ │ │ │ │ ├── mcp_dele.CEB7E33D.Up2Date │ │ │ │ │ ├── mcp_delete_user.AssemblyInfo.cs │ │ │ │ │ ├── mcp_delete_user.AssemblyInfoInputs.cache │ │ │ │ │ ├── mcp_delete_user.assets.cache │ │ │ │ │ ├── mcp_delete_user.csproj.AssemblyReference.cache │ │ │ │ │ ├── mcp_delete_user.csproj.CoreCompileInputs.cache │ │ │ │ │ ├── mcp_delete_user.csproj.FileListAbsolute.txt │ │ │ │ │ ├── mcp_delete_user.dll │ │ │ │ │ ├── mcp_delete_user.GeneratedMSBuildEditorConfig.editorconfig │ │ │ │ │ ├── mcp_delete_user.genruntimeconfig.cache │ │ │ │ │ ├── mcp_delete_user.GlobalUsings.g.cs │ │ │ │ │ ├── mcp_delete_user.pdb │ │ │ │ │ ├── ref │ │ │ │ │ │ └── mcp_delete_user.dll │ │ │ │ │ └── refint │ │ │ │ │ └── mcp_delete_user.dll │ │ │ │ ├── mcp_delete_user.csproj.nuget.dgspec.json │ │ │ │ ├── mcp_delete_user.csproj.nuget.g.props │ │ │ │ ├── mcp_delete_user.csproj.nuget.g.targets │ │ │ │ ├── project.assets.json │ │ │ │ └── project.nuget.cache │ │ │ └── Program.cs │ │ ├── Code Archiv 
│ │ │ ├── mcp_chat.cs │ │ │ ├── mcp_continue_chat.cs │ │ │ ├── mcp_create_source.cs │ │ │ ├── mcp_delete_group.cs │ │ │ ├── mcp_delete_source.cs │ │ │ ├── mcp_delete_user.cs │ │ │ ├── mcp_edit_source.cs │ │ │ ├── mcp_edit_user.cs │ │ │ ├── mcp_get_chat_info.cs │ │ │ ├── mcp_get_source.cs │ │ │ ├── mcp_list_groups.cs │ │ │ ├── mcp_list_sources.cs │ │ │ ├── mcp_login.cs │ │ │ ├── mcp_logout.cs │ │ │ ├── mcp_store_group.cs │ │ │ └── mcp_store_user.cs │ │ └── README.md │ ├── C++ │ │ ├── .vscode │ │ │ └── launch.json │ │ ├── 1.0 mcp_login │ │ │ ├── MCPLoginClient.cpp │ │ │ └── Non-TLS version │ │ │ ├── MCPLoginClient.cpp │ │ │ └── MCPLoginClient.exe │ │ ├── 1.1 mcp_logout │ │ │ ├── MCPLogoutClient.cpp │ │ │ └── MCPLogoutClient.exe │ │ ├── 2.0 mcp_chat │ │ │ ├── MCPChatClient.cpp │ │ │ └── MCPChatClient.exe │ │ ├── 2.1 mcp_continue_chat │ │ │ ├── MCPChatContinuationClient.cpp │ │ │ └── MCPChatContinuationClient.exe │ │ ├── 2.2 mcp_get_chat_info │ │ │ ├── MCPGetChatInfoClient.cpp │ │ │ └── MCPGetChatInfoClient.exe │ │ ├── 3.0 mcp_create_source │ │ │ ├── MCPCreateSourceClient.cpp │ │ │ └── MCPCreateSourceClient.exe │ │ ├── 3.1 mcp_get_source │ │ │ ├── MCPGetSourceClient.cpp │ │ │ └── MCPGetSourceClient.exe │ │ ├── 3.2 mcp_list_sources │ │ │ ├── MCPListSourcesClient.cpp │ │ │ └── MCPListSourcesClient.exe │ │ ├── 3.3 mcp_edit_source │ │ │ ├── MCPEditSourceClient.cpp │ │ │ └── MCPEditSourceClient.exe │ │ ├── 3.4 mcp_delete_source │ │ │ ├── MCPDeleteSourceClient.cpp │ │ │ └── MCPDeleteSourceClient.exe │ │ ├── 4.0 mcp_list_groups │ │ │ ├── MCPListGroupsClient.cpp │ │ │ └── MCPListGroupsClient.exe │ │ ├── 4.1 mcp_store_group │ │ │ ├── MCPStoreGroupClient.cpp │ │ │ └── MCPStoreGroupClient.exe │ │ ├── 4.2 mcp_delete_group │ │ │ ├── MPCDeleteGroupClient.cpp │ │ │ └── MPCDeleteGroupClient.exe │ │ ├── 5.0 mcp_store_user │ │ │ ├── MCPStoreUserClient.cpp │ │ │ └── MCPStoreUserClient.exe │ │ ├── 5.1 mcp_edit_user │ │ │ ├── MCPEditUserClient.cpp │ │ │ └── MCPEditUserClient.exe │ │ ├── 
5.2 mcp_delete_user │ │ │ ├── MCPDeleteUserClient.cpp │ │ │ └── MCPDeleteUserClient.exe │ │ ├── 9.0 mcp_keygen │ │ │ ├── MCPKeygenClient.cpp │ │ │ └── MCPKeygenClient.exe │ │ └── README.md │ ├── Go │ │ ├── 1.0 mcp_login │ │ │ ├── go.mod │ │ │ ├── MCPLoginClient.exe │ │ │ └── MCPLoginClient.go │ │ ├── 1.1 mcp_logout │ │ │ ├── MCPLogoutClient.exe │ │ │ └── MCPLogoutClient.go │ │ ├── 2.0 mcp_chat │ │ │ ├── MCPChatClient.exe │ │ │ └── MCPChatClient.go │ │ ├── 2.1 mcp_continue_chat │ │ │ ├── MCPChatContinuationClient.exe │ │ │ └── MCPChatContinuationClient.go │ │ ├── 2.2 mcp_get_chat_info │ │ │ ├── MCPGetChatInfoClient.exe │ │ │ └── MCPGetChatInfoClient.go │ │ ├── 3.0 mcp_create_source │ │ │ ├── MCPCreateSourceClient.exe │ │ │ └── MCPCreateSourceClient.go │ │ ├── 3.1 mcp_get_source │ │ │ ├── MCPGetSourceClient.exe │ │ │ └── MCPGetSourceClient.go │ │ ├── 3.2 mcp_list_sources │ │ │ ├── MCPListSourcesClient.exe │ │ │ └── MCPListSourcesClient.go │ │ ├── 3.3 mcp_edit_source │ │ │ ├── MCPEditSourceClient.exe │ │ │ └── MCPEditSourceClient.go │ │ ├── 3.4 mcp_delete_source │ │ │ ├── MCPDeleteSourceClient.exe │ │ │ └── MCPDeleteSourceClient.go │ │ ├── 4.0 mcp_list_groups │ │ │ ├── MCPListGroupsClient.exe │ │ │ └── MCPListGroupsClient.go │ │ ├── 4.1 mcp_store_group │ │ │ ├── MCPStoreGroupClient.exe │ │ │ └── MCPStoreGroupClient.go │ │ ├── 4.2 mcp_delete_group │ │ │ ├── MCPDeleteGroupClient.exe │ │ │ └── MCPDeleteGroupClient.go │ │ ├── 5.0 mcp_store_user │ │ │ ├── MCPStoreUserClient.exe │ │ │ └── MCPStoreUserClient.go │ │ ├── 5.1 mcp_edit_user │ │ │ ├── MCPEditUserClient.exe │ │ │ └── MCPEditUserClient.go │ │ ├── 5.2 mcp_delete_user │ │ │ ├── MCPDeleteUserClient.exe │ │ │ └── MCPDeleteUserClient.go │ │ ├── 9.0 mcp_keygen │ │ │ ├── MCPKeygenClient.exe │ │ │ └── MCPKeygenClient.go │ │ └── README.md │ ├── Gradio │ │ ├── Api.py │ │ ├── config.json.example │ │ ├── config.py │ │ ├── favicon.ico │ │ ├── file_tools │ │ │ └── loader_factory.py │ │ ├── language.py │ │ ├── logos │ │ │ ├── 
fsas.png │ │ │ └── Logo_dark.svg │ │ ├── main.py │ │ ├── mcp_client.py │ │ ├── mcp_servers │ │ │ ├── arxiv │ │ │ │ ├── arxiv-stdio.js │ │ │ │ ├── package.json │ │ │ │ ├── README.md │ │ │ │ ├── requirements.txt │ │ │ │ └── server_config.example.json │ │ │ ├── demo-mcp-server │ │ │ │ ├── demo-tools-sse.js │ │ │ │ ├── demo-tools-stdio.js │ │ │ │ └── tools │ │ │ │ ├── assets.js │ │ │ │ ├── calculator.js │ │ │ │ └── weather.js │ │ │ ├── filesystem │ │ │ │ ├── Dockerfile │ │ │ │ ├── index.ts │ │ │ │ ├── package.json │ │ │ │ ├── README.md │ │ │ │ ├── test │ │ │ │ │ └── new.txt │ │ │ │ └── tsconfig.json │ │ │ ├── moondream │ │ │ │ └── server.py │ │ │ ├── pgpt │ │ │ │ ├── __init__.py │ │ │ │ ├── Api.py │ │ │ │ ├── config.json.example │ │ │ │ ├── config.py │ │ │ │ ├── language.py │ │ │ │ ├── pyproject.toml │ │ │ │ ├── README.md │ │ │ │ └── server.py │ │ │ ├── replicate_flux │ │ │ │ └── server.py │ │ │ └── sqlite │ │ │ ├── .python-version │ │ │ ├── Dockerfile │ │ │ ├── pyproject.toml │ │ │ ├── README.md │ │ │ └── src │ │ │ └── mcp_server_sqlite │ │ │ ├── __init__.py │ │ │ └── server.py │ │ ├── messages │ │ │ ├── __init__.py │ │ │ ├── message_types │ │ │ │ ├── __init__.py │ │ │ │ ├── incrementing_id_message.py │ │ │ │ ├── initialize_message.py │ │ │ │ ├── json_rpc_message.py │ │ │ │ ├── ping_message.py │ │ │ │ ├── prompts_messages.py │ │ │ │ ├── prompts_models.py │ │ │ │ ├── resources_messages.py │ │ │ │ └── tools_messages.py │ │ │ ├── send_call_tool.py │ │ │ ├── send_initialize_message.py │ │ │ ├── send_message.py │ │ │ ├── send_ping.py │ │ │ ├── send_prompts.py │ │ │ ├── send_resources.py │ │ │ └── send_tools_list.py │ │ ├── README.md │ │ ├── requirements.txt │ │ ├── server_config.json │ │ ├── SourceManagement.py │ │ ├── transport │ │ │ ├── __init__.py │ │ │ └── stdio │ │ │ ├── __init__.py │ │ │ ├── stdio_client.py │ │ │ ├── stdio_server_parameters.py │ │ │ └── stdio_server_shutdown.py │ │ ├── tsconfig.json │ │ └── UserManagement.py │ ├── Java │ │ ├── 1.0 mcp_login │ │ │ ├── 
json-20241224.jar │ │ │ ├── MCPLoginClient.class │ │ │ └── MCPLoginClient.java │ │ ├── 1.1 mcp_logout │ │ │ ├── json-20241224.jar │ │ │ ├── MCPLogoutClient.class │ │ │ └── MCPLogoutClient.java │ │ ├── 2.0 mcp_chat │ │ │ ├── json-20241224.jar │ │ │ ├── MCPChatClient.class │ │ │ └── MCPChatClient.java │ │ ├── 2.1 mcp_continue_chat │ │ │ ├── json-20241224.jar │ │ │ ├── MCPContinueChatClient.class │ │ │ └── MCPContinueChatClient.java │ │ ├── 2.2 mcp_get_chat_info │ │ │ ├── json-20241224.jar │ │ │ ├── MCPGetChatInfoClient.class │ │ │ └── MCPGetChatInfoClient.java │ │ ├── 3.0 mcp_create_source │ │ │ ├── json-20241224.jar │ │ │ ├── MCPCreateSourceClient.class │ │ │ └── MCPCreateSourceClient.java │ │ ├── 3.1 mcp_get_source │ │ │ ├── json-20241224.jar │ │ │ ├── MCPGetSourceClient.class │ │ │ └── MCPGetSourceClient.java │ │ ├── 3.2 mcp_list_sources │ │ │ ├── json-20241224.jar │ │ │ ├── MCPListSourcesClient.class │ │ │ └── MCPListSourcesClient.java │ │ ├── 3.3 mcp_edit_source │ │ │ ├── json-20241224.jar │ │ │ ├── MCPEditSourceClient.class │ │ │ └── MCPEditSourceClient.java │ │ ├── 3.4 mcp_delete_source │ │ │ ├── json-20241224.jar │ │ │ ├── MCPDeleteSourceClient.class │ │ │ └── MCPDeleteSourceClient.java │ │ ├── 4.0 mcp_list_groups │ │ │ ├── json-20241224.jar │ │ │ ├── MCPListGroupsClient.class │ │ │ └── MCPListGroupsClient.java │ │ ├── 4.1 mcp_store_group │ │ │ ├── json-20241224.jar │ │ │ ├── MCPStoreGroupClient.class │ │ │ └── MCPStoreGroupClient.java │ │ ├── 4.2 mcp_delete_group │ │ │ ├── json-20241224.jar │ │ │ ├── MCPDeleteGroupClient.class │ │ │ └── MCPDeleteGroupClient.java │ │ ├── 5.0 mcp_store_user │ │ │ ├── json-20241224.jar │ │ │ ├── MCPStoreUserClient.class │ │ │ └── MCPStoreUserClient.java │ │ ├── 5.1 mcp_edit_user │ │ │ ├── json-20241224.jar │ │ │ ├── MCPEditUserClient.class │ │ │ └── MCPEditUserClient.java │ │ ├── 5.2 mcp_delete_user │ │ │ ├── json-20241224.jar │ │ │ ├── MCPDeleteUserClient.class │ │ │ └── MCPDeleteUserClient.java │ │ └── README.md │ ├── 
JavaScript │ │ ├── 1.0 mcp_login │ │ │ └── MCPLoginClient.js │ │ ├── 1.1 mcp_logout │ │ │ └── MCPLogoutClient.js │ │ ├── 2.0 mcp_chat │ │ │ └── MCPChatClient.js │ │ ├── 2.1 mcp_continue_chat │ │ │ └── MCPContinueChatClient.js │ │ ├── 2.2 mcp_get_chat_info │ │ │ └── MCPGetChatInfoClient.js │ │ ├── 3.0 mcp_create_source │ │ │ └── MCPCreateSourceClient.js │ │ ├── 3.1 mcp_get_source │ │ │ └── MCPGetSourceClient.js │ │ ├── 3.2 mcp_list_sources │ │ │ └── MCPListSourcesClient.js │ │ ├── 3.3 mcp_edit_source │ │ │ └── MCPEditSourceClient.js │ │ ├── 3.4 mcp_delete_source │ │ │ └── MCPDeleteSourceClient.js │ │ ├── 4.0 mcp_list_groups │ │ │ └── MCPListGroupsClient.js │ │ ├── 4.1 mcp_store_group │ │ │ └── MCPStoreGroupClient.js │ │ ├── 4.2 mcp_delete_group │ │ │ └── MCPDeleteGroupClient.js │ │ ├── 5.0 mcp_store_user │ │ │ └── MCPStoreUserClient.js │ │ ├── 5.1 mcp_edit_user │ │ │ └── MCPEditUserClient.js │ │ ├── 5.2 mcp_delete_user │ │ │ └── MCPDeleteUserClient.js │ │ ├── 9.0 mcp_keygen │ │ │ └── MCPKeygenClient.js │ │ └── README.md │ ├── PHP │ │ ├── 1.0 mcp_login │ │ │ └── MCPLoginClient.php │ │ ├── 1.1 mcp_logout │ │ │ └── MCPLogoutClient.php │ │ ├── 2.0 mcp_chat │ │ │ └── MCPChatClient.php │ │ ├── 2.1 mcp_continue_chat │ │ │ └── MCPContinueChatClient.php │ │ ├── 2.2 mcp_get_chat_info │ │ │ └── MCPGetChatInfoClient.php │ │ ├── 3.0 mcp_create_source │ │ │ └── MCPCreateSourceClient.php │ │ ├── 3.1 mcp_get_source │ │ │ └── MCPGetSourceClient.php │ │ ├── 3.2 mcp_list_sources │ │ │ └── MCPListSourcesClient.php │ │ ├── 3.3 mcp_edit_source │ │ │ └── MCPEditSourceClient.php │ │ ├── 3.4 mcp_delete_source │ │ │ └── MCPDeleteSourceClient.php │ │ ├── 4.0 mcp_list_groups │ │ │ └── MCPListGroupsClient.php │ │ ├── 4.1 mcp_store_group │ │ │ └── MCPStoreGroupClient.php │ │ ├── 4.2 mcp_delete_group │ │ │ └── MCPDeleteGroupClient.php │ │ ├── 5.0 mcp_store_user │ │ │ └── MCPStoreUserClient.php │ │ ├── 5.1 mcp_edit_user │ │ │ └── MCPEditUserClient.php │ │ ├── 5.2 mcp_delete_user │ │ │ └── 
MCPDeleteUserClient.php │ │ ├── 9.0 mcp_keygen │ │ │ └── MCPKeygenClient.php │ │ └── README.md │ └── Python │ ├── __init__.py │ ├── 1.0 mcp_login │ │ └── MCPLoginClient.py │ ├── 1.1 mcp_logout │ │ └── MCPLogoutClient.py │ ├── 2.0 mcp_chat │ │ └── MCPChatClient.py │ ├── 2.1 mcp_continue_chat │ │ └── MCPContinueChatClient.py │ ├── 2.2 mcp_get_chat_info │ │ └── MCPGetChatInfoClient.py │ ├── 2.3 mcp_delete_all_chats │ │ └── MCPDeleteAllChatsClient.py │ ├── 2.4 mcp_delete_chat │ │ └── MCPDeleteChatClient.py │ ├── 3.0 mcp_create_source │ │ └── MCPCreateSourceClient.py │ ├── 3.1 mcp_get_source │ │ └── MCPGetSourceClient.py │ ├── 3.2 mcp_list_sources │ │ └── MCPListSourcesClient.py │ ├── 3.3 mcp_edit_source │ │ └── MCPEditSourceClient.py │ ├── 3.4 mcp_delete_source │ │ └── MCPDeleteSourceClient.py │ ├── 4.0 mcp_list_groups │ │ └── MCPListGroupsClient.py │ ├── 4.1 mcp_store_group │ │ └── MCPStoreGroupClient.py │ ├── 4.2 mcp_delete_group │ │ └── MCPDeleteGroupClient.py │ ├── 5.0 mcp_store_user │ │ └── MCPStoreUserClient.py │ ├── 5.1 mcp_edit_user │ │ └── MCPEditUserClient.py │ ├── 5.2 mcp_delete_user │ │ └── MCPDeleteUserClient.py │ ├── 9.0 mcp_keygen │ │ └── MCPKeygenClient.py │ ├── Gradio │ │ ├── __init__.py │ │ └── server_config.json │ └── README.md ├── examples │ ├── create_users_from_csv │ │ ├── config.json.example │ │ ├── config.py │ │ ├── create_users_from_csv.py │ │ └── language.py │ ├── dynamic_sources │ │ └── rss_reader │ │ ├── Api.py │ │ ├── config.json.example │ │ ├── config.py │ │ ├── demo_dynamic_sources.py │ │ └── rss_parser.py │ ├── example_users_to_add_no_tz.csv │ └── sftp_upload_with_id │ ├── Api.py │ ├── config_ftp.json.example │ ├── config.py │ ├── demo_upload.py │ ├── language.py │ └── requirements.txt ├── images │ ├── alternative mcp client.png │ ├── favicon │ │ ├── android-chrome-192x192.png │ │ ├── android-chrome-512x512.png │ │ ├── apple-touch-icon.png │ │ ├── favicon-16x16.png │ │ ├── favicon-32x32.png │ │ ├── favicon.ico │ │ └── site.webmanifest │ 
├── mcp-general-architecture.png │ ├── privateGPT-MCP.png │ └── privateGPT.png ├── InstallMPCServer.sh ├── jest.config.js ├── LICENSE ├── package.json ├── pgpt.env.json.example ├── README.md ├── security │ ├── generate_decrypted_password.js │ └── generate_encrypted_password.js ├── src │ ├── helper.js │ ├── index.js │ ├── logger.js │ ├── pgpt-messages.js │ ├── public │ │ ├── index.html │ │ └── pgpt-mcp-logo.png │ ├── services │ │ └── pgpt-service.ts │ └── types │ └── api.ts ├── start_chatbot_agent.ps1 ├── start_chatbot_agent.sh ├── start_iot_agent.ps1 ├── start_iot_agent.sh ├── start_openai_compatible_api_agent.ps1 ├── start_openai_compatible_api_agent.sh ├── tsconfig.json ├── ver │ ├── index_np.js │ └── index_proxy_np.js └── WORKLOG.md ``` # Files -------------------------------------------------------------------------------- /agents/OpenAI_Compatible_API_Agent/Python/openai_mcp_api.py: -------------------------------------------------------------------------------- ```python import asyncio import logging import time from pathlib import Path from typing import Optional, List from fastapi import FastAPI, Request, HTTPException, Header, Depends from fastapi.responses import StreamingResponse, Response from pydantic import BaseModel # Prometheus-Client importieren from prometheus_client import ( Counter, Histogram, Gauge, generate_latest, CONTENT_TYPE_LATEST ) # ------------------------------------------------------------------ # 1) Logging: Log-Level konfigurierbar, Minimalkonfiguration # ------------------------------------------------------------------ logging.basicConfig( level=logging.INFO, # Für Produktion ggf. 
WARNING oder ERROR format="%(asctime)s [%(levelname)s] %(name)s - %(message)s", ) logger = logging.getLogger(__name__) # ------------------------------------------------------------------ # 2) Konfiguration laden # ------------------------------------------------------------------ try: from ...AgentInterface.Python.config import Config, ConfigError config_file = Path(__file__).parent.parent / "pgpt_openai_api_mcp.json" config_file = Path.absolute(config_file) config = Config(config_file=config_file, required_fields=["email", "password", "mcp_server"]) logger.info(f"Configuration loaded: {config}") except ConfigError as e: logger.error(f"Configuration Error: {e}") exit(1) # ------------------------------------------------------------------ # 3) Globaler Agent (nur eine Instanz) # ------------------------------------------------------------------ try: from ...AgentInterface.Python.agent import PrivateGPTAgent GLOBAL_AGENT = PrivateGPTAgent(config) logger.info("Global PrivateGPTAgent instance initialized.") except Exception as e: logger.error(f"Error initializing global agent: {e}") exit(1) # ------------------------------------------------------------------ # 4) Benötigte Klassen/Modelle # ------------------------------------------------------------------ class Message(BaseModel): role: str content: str class ChatCompletionRequest(BaseModel): model: Optional[str] = "PGPT - Mistral NeMo 12B" messages: List[Message] max_tokens: Optional[int] = 2048 temperature: Optional[float] = 0.1 stream: Optional[bool] = False # (Optional) CompletionRequest, falls benötigt from agents.OpenAI_Compatible_API_Agent.Python.open_ai_helper import ( CompletionRequest, _resp_sync, _resp_async_generator, _resp_async_generator_completions, _resp_sync_completions, models ) # ------------------------------------------------------------------ # 5) Asynchroner Aufruf des Agenten via Thread-Pool # ------------------------------------------------------------------ from concurrent.futures import 
ThreadPoolExecutor executor = ThreadPoolExecutor(max_workers=4) async def async_respond(agent: PrivateGPTAgent, messages: List[Message]) -> dict: """ Führt den blockierenden respond_with_context-Aufruf in einem Threadpool aus, um den Haupt-Eventloop nicht zu blockieren. """ loop = asyncio.get_event_loop() return await loop.run_in_executor(executor, agent.respond_with_context, messages) # ------------------------------------------------------------------ # 6) FastAPI-App erstellen # ------------------------------------------------------------------ app = FastAPI(title="OpenAI-Compatible API for PrivateGPT using MCP") # ------------------------------------------------------------------ # 7) Prometheus-Metriken definieren # ------------------------------------------------------------------ # Anzahl eingehender Requests pro Method + Endpoint REQUEST_COUNT = Counter( "request_count", "Number of requests received", ["method", "endpoint"] ) # Latenz der Requests (Histogram) REQUEST_LATENCY = Histogram( "request_latency_seconds", "Request latency in seconds", ["method", "endpoint"] ) # Zähler, wie oft Chat-/Completion-Aufrufe erfolgreich waren CHAT_COMPLETION_COUNT = Counter( "chat_completion_count", "Number of successful ChatCompletion requests" ) COMPLETION_COUNT = Counter( "completion_count", "Number of successful Completions requests" ) # Ggf. 
ein Gauge für "laufende Threads" oder "Queue-Länge", falls relevant # (Beispiel: wir nehmen hier einen Dummy-Gauge für aktive Worker) ACTIVE_WORKER = Gauge( "active_worker", "Number of active threads in the ThreadPoolExecutor" ) # (Optional) Counter für Token, wenn du das aus dem Agent extrahieren kannst: TOKEN_USAGE = Counter( "token_usage", "Count of tokens used", ["model"] ) # ------------------------------------------------------------------ # 8) Middleware zum Messen und Zählen der Requests # ------------------------------------------------------------------ @app.middleware("http") async def prometheus_middleware(request: Request, call_next): start_time = time.time() # Zähle Request REQUEST_COUNT.labels(request.method, request.url.path).inc() # Schätze aktive Worker # (Im ThreadPool ist das nicht exakt; man könnte hier "max_workers - free" ermitteln.) ACTIVE_WORKER.set(executor._work_queue.qsize()) try: response = await call_next(request) except Exception as exc: raise exc finally: resp_time = time.time() - start_time # Latenz messen REQUEST_LATENCY.labels(request.method, request.url.path).observe(resp_time) return response # ------------------------------------------------------------------ # 9) Whitelist-Prüfung via Dependency # -> Gibt bei invalidem Key sofort HTTPException (401) zurück # ------------------------------------------------------------------ def verify_api_key(authorization: str = Header(None)) -> str: if not authorization: # Kein Authorization-Header raise HTTPException(status_code=401, detail="Missing Authorization header") try: scheme, token = authorization.split(" ") if scheme.lower() != "bearer": raise HTTPException(status_code=401, detail="Authorization scheme must be 'Bearer'") except ValueError: raise HTTPException(status_code=401, detail="Invalid Authorization header format") # Ggf. 
Whitelisting whitelist_keys = config.get("whitelist_keys", []) if len(whitelist_keys) > 0 and token not in whitelist_keys: # Key ist nicht in der Whitelist logger.warning(f"Invalid API key: {token}") raise HTTPException(status_code=401, detail="API Key not valid") return token # ------------------------------------------------------------------ # 10) Chat-Completions Endpoint # ------------------------------------------------------------------ @app.post("/chat/completions") async def chat_completions( request: ChatCompletionRequest, client_api_key: str = Depends(verify_api_key) ): """ Beispielhafter Endpoint für Chat Completion. Nutzt GLOBAL_AGENT und führt die Logik asynchron aus. """ logger.info(f"[/chat/completions] Request received with API key: {client_api_key}") # Kein messages-Array => Fehler/Leere Antwort if not request.messages: response = {"chatId": "0", "answer": "No input provided"} logger.warning("No messages provided.") return _resp_sync(response, request) # Asynchrone Agent-Antwort response = await async_respond(GLOBAL_AGENT, request.messages) if "answer" not in response: response["answer"] = "No Response received" # Metrik hochzählen CHAT_COMPLETION_COUNT.inc() # (Optional) Token-Usage-Tracking, falls du im response-Dict Token-Infos hast # Hier beispielhaft: response["usage"]["tokens"] (falls existiert) # if "usage" in response and "tokens" in response["usage"]: # TOKEN_USAGE.labels(request.model or "unknown_model").inc(response["usage"]["tokens"]) preview_len = 80 logger.info(f"💡 Response (preview): {response['answer'][:preview_len]}...") # Streaming? 
if request.stream: return StreamingResponse( _resp_async_generator(response, request), media_type="application/x-ndjson" ) else: return _resp_sync(response, request) # ------------------------------------------------------------------ # 11) Text-Completions Endpoint # ------------------------------------------------------------------ @app.post("/completions") async def completions( request: CompletionRequest, client_api_key: str = Depends(verify_api_key) ): logger.info(f"[/completions] Request received with API key: {client_api_key}") if not request.prompt: response = {"chatId": "0", "answer": "No input provided"} logger.warning("No prompt provided.") return _resp_sync(response, request) # Asynchrone Agent-Antwort response = await async_respond(GLOBAL_AGENT, [Message(role="user", content=request.prompt)]) if "answer" not in response: response["answer"] = "No Response received" # Completion-Metrik hochzählen COMPLETION_COUNT.inc() # (Optional) Token-Usage-Tracking # if "usage" in response and "tokens" in response["usage"]: # TOKEN_USAGE.labels("some_model").inc(response["usage"]["tokens"]) logger.info(f"💡 Response (preview): {response['answer'][:80]}...") if request.stream: return StreamingResponse( _resp_async_generator_completions(response, request), media_type="application/x-ndjson" ) else: return _resp_sync_completions(response, request) # ------------------------------------------------------------------ # 12) Modelle abfragen # ------------------------------------------------------------------ @app.get("/models") def return_models(): return {"object": "list", "data": models} @app.get("/models/{model_id}") async def get_model(model_id: str): filtered_entries = [m for m in models if m["id"] == model_id] if not filtered_entries: raise HTTPException(status_code=404, detail="Model not found") return filtered_entries[0] # ------------------------------------------------------------------ # 13) /metrics Endpoint für Prometheus # 
# ------------------------------------------------------------------
@app.get("/metrics")
def metrics():
    """
    Endpoint that returns the Prometheus metrics.
    Scraped by Prometheus at http://<host>:<port>/metrics.
    """
    return Response(generate_latest(), media_type=CONTENT_TYPE_LATEST)


# ------------------------------------------------------------------
# 14) App start via uvicorn.run()
# ------------------------------------------------------------------
if __name__ == "__main__":
    import uvicorn

    api_ip = config.get("api_ip", "0.0.0.0")
    api_port = config.get("api_port", 8002)
    logger.info(f"Starting API on http://{api_ip}:{api_port}")
    # workers=4 if multiple processes are desired (scaling)
    uvicorn.run(app, host=api_ip, port=int(api_port))
```

--------------------------------------------------------------------------------
/agents/AgentInterface/Python/agent.py:
--------------------------------------------------------------------------------

```python
# agent.py
import logging
import json
import atexit
from .network import NetworkClient, NetworkError
from .color import Color
from .language import languages


class GroupValidationError(Exception):
    """Exception raised for errors in the group validation process."""
    pass


class PrivateGPTAgent:
    """Client-side agent that logs into the MCP server, validates the
    configured groups against the user's personal groups, and answers
    questions either from a small local knowledge base or by querying
    PrivateGPT over the network."""

    def __init__(self, config):
        # Read the mcp_server data from the config object
        self.mcp_config = config.get("mcp_server")

        # Read host and port
        self.mcp_host = self.mcp_config.get("host")
        self.mcp_port = self.mcp_config.get("port")
        self.server_ip = self.mcp_host
        self.server_port = self.mcp_port

        self.email = config.get("email")
        self.password = config.get("password")
        self.chosen_groups = config.get("groups", [])
        self.language = config.get("language", "en")  # default is English
        if self.language not in languages:
            self.language = "en"
            logging.warning(f"Unsupported language '{config.get('language')}'. Falling back to English.")
        self.lang = languages[self.language]

        self.network_client = NetworkClient(self.server_ip, self.server_port, language=self.language)
        self.token = None
        # Ensure the server session is closed when the process exits.
        atexit.register(self.logout)

        # Initial login
        self.login()

        # Fetch the personal groups
        if self.token:
            self.allowed_groups = self.list_personal_groups()
            if not self.allowed_groups:
                logging.warning(self.lang["no_personal_groups"])
                print(self.lang["no_personal_groups"], flush=True)
                self.allowed_groups = []

            # Validate the configured groups against the allowed ones
            invalid = self.validate_groups(self.chosen_groups)
            if invalid:
                print(self.lang["invalid_group"].format(groups=invalid), flush=True)
                logging.error(self.lang["invalid_group_error"])
                raise GroupValidationError(self.lang["invalid_group"].format(groups=invalid))
        else:
            self.allowed_groups = []

        # Local knowledge base (example)
        self.knowledge_base = {
            "What is AI?": self.lang["knowledge_ai"],
            "Who created Python?": self.lang["knowledge_python"],
            "What is Machine Learning?": self.lang["knowledge_ml"]
        }

    def get_lang_message(self, key, **kwargs):
        """Look up a localized message by key and format it with kwargs;
        fall back to the raw message if a placeholder is missing."""
        message = self.lang.get(key, "Message not defined.")
        try:
            return message.format(**kwargs)
        except KeyError as e:
            logging.error(f"Missing placeholder in language file for key '{key}': {e}")
            return message

    def validate_groups(self, groups):
        """Return the subset of *groups* that is NOT in self.allowed_groups
        (empty list means everything is valid)."""
        if groups is None:
            return []
        invalid = [g for g in groups if g not in self.allowed_groups]
        if invalid:
            logging.error(self.get_lang_message("group_validation_error", error=invalid))
            return invalid
        return []

    def login(self):
        """Authenticate against the MCP server; stores the session token on
        success and returns True/False."""
        payload = {
            "command": "login",
            "arguments": {
                "email": self.email,
                "password": self.password
            }
        }
        logging.info(self.get_lang_message("login_attempt"))
        try:
            resp = self.network_client.send_request(payload)
            #logging.info(self.get_lang_message("received_response", response=resp))
            if resp.get("status") == 200 and resp.get("message") == "success":
                self.token = resp.get("token")
                logging.info(self.get_lang_message("login_success"))
                return True
            else:
                msg = resp.get("message", self.get_lang_message("no_server_message"))
                logging.error(self.get_lang_message("login_failed", message=msg))
                return False
        except NetworkError as e:
            logging.error(self.get_lang_message("login_failed", message=str(e)))
            return False

    def list_personal_groups(self):
        """Fetch the user's personal groups from the server; returns [] on
        any failure."""
        if not self.token:
            logging.error(self.get_lang_message("authentication_failed"))
            return []
        payload = {
            "command": "list_groups",
            "token": self.token
        }
        try:
            resp = self.network_client.send_request(payload)
            data_block = resp.get("data")
            if not data_block:
                logging.warning(self.lang["no_data_in_response"].format(response=resp))
                return []
            if data_block.get("status") == 200 and data_block.get("message") == "success":
                personal = data_block.get("personalGroups", [])
                logging.info(self.lang["personal_groups"].format(groups=personal))
                return personal
            else:
                logging.warning(self.lang["list_groups_failed"].format(
                    message=data_block.get("message", self.lang["no_server_message"])))
                return []
        except NetworkError as e:
            logging.error(self.lang["list_groups_failed"].format(message=str(e)))
            return []

    def query_private_gpt(self, prompt, use_public=False, language="en", groups=None, _retry_on_token_expired=True):
        """Send a chat request to PrivateGPT and return a JSON string with
        either an "answer" or an "error" key.

        On a 401/403 or an explicit token-expired/invalid message the method
        re-logs-in ONCE and retries (guarded by _retry_on_token_expired)."""
        if not self.token:
            error_msg = self.get_lang_message("authentication_failed")
            logging.error(error_msg)
            return json.dumps({"error": error_msg})

        if language not in languages:
            language = 'en'
            logging.warning(f"Unsupported language '{language}'. Falling back to English.")
        lang = languages[language]

        if groups is None:
            groups = self.chosen_groups
        else:
            groups = [g.strip() for g in groups if g.strip()]
        # Only forward groups the user is actually allowed to use.
        relevant_groups = [g for g in groups if g in self.allowed_groups]

        payload = {
            "command": "chat",
            "token": self.token,
            "arguments": {
                "question": prompt,
                "usePublic": use_public,
                "groups": relevant_groups,
                "language": language
            }
        }
        #logging.info(lang["sending_payload"].format(payload=json.dumps(payload)))

        try:
            resp = self.network_client.send_request(payload)
            #logging.info(lang["received_response"].format(response=resp))

            # ─────────────────────────────────────────────
            # Token expired/invalid => re-login and retry once
            # ─────────────────────────────────────────────
            if (
                (resp.get("status") in [401, 403])
                or (resp.get("message") in ["token expired", "token invalid"])
            ):
                if not _retry_on_token_expired:
                    return json.dumps({"error": "Token ungültig, Re-Login fehlgeschlagen."})

                # Extra log entry to make it visible that the refresh really
                # happens here:
                logging.warning("TOKEN REFRESH TRIGGERED! (401/403 or token expired/invalid recognized)")
                old_token = self.token
                self.token = None
                if self.login():
                    return self.query_private_gpt(
                        prompt,
                        use_public,
                        language,
                        groups,
                        _retry_on_token_expired=False
                    )
                else:
                    return json.dumps({"error": "Automatischer Re-Login ist fehlgeschlagen."})

            # Normal success case
            if resp.get("status") == 200 and resp.get("message") == "success":
                content = resp.get("content", {})
                answer = content.get("answer", lang["agent_error"].format(error=lang["no_answer_received"]))
                return json.dumps({"answer": answer})
            else:
                return json.dumps({"error": resp.get("message", lang["agent_error"].format(error=lang["unknown_error"]))})
        except NetworkError as e:
            error_msg = lang["agent_error"].format(error=str(e))
            logging.error(f"❌ {error_msg}")
            return json.dumps({"error": error_msg})

    def respond(self, user_input, groups=None):
        """Answer from the local knowledge base when possible, otherwise
        delegate to PrivateGPT."""
        response = self.knowledge_base.get(user_input, None)
        if response:
            #logging.info(self.get_lang_message("knowledge_response", input=user_input))
            return json.dumps({"answer": response})
        else:
            return self.query_private_gpt(user_input, groups=groups)

    def respond_with_context(self, messages):
        """Answer the last message of a conversation; optional context
        injection is currently disabled (add_context = False)."""
        user_input = f'{messages[-1].content}'
        add_context = False
        if add_context:
            messages.pop()
            user_input += "\nHere is some context about the previous conversation:\n"
            for message in messages:
                user_input += f"{message.role}: {message.content}\n"
        result = self.query_private_gpt(user_input)
        return json.loads(result)

    def logout(self):
        """Invalidate the session token on the server (no-op when there is
        no token)."""
        if not self.token:
            logging.info(self.get_lang_message("no_token_logout"))
            return
        payload = {
            "command": "logout",
            "token": self.token
        }
        logging.info(self.get_lang_message("logout_attempt"))
        try:
            resp = self.network_client.send_request(payload)
            logging.info(self.get_lang_message("received_response", response=resp))
            if resp.get("status") == 200 and resp.get("message") == "success":
                logging.info(self.get_lang_message("logout_success"))
                self.token = None
            else:
                msg = resp.get("message", self.get_lang_message("no_server_message"))
                logging.warning(self.get_lang_message("logout_failed", message=msg))
        except NetworkError as e:
            logging.error(self.get_lang_message("logout_failed", message=str(e)))

    def run(self):
        """Interactive REPL: read questions from stdin until "exit",
        Ctrl-C, or EOF."""
        if not self.token:
            logging.error(self.get_lang_message("authentication_failed"))
            print(self.get_lang_message("authentication_failed"), flush=True)
            return
        welcome_msg = f"{Color.OKGREEN}{self.get_lang_message('welcome')}{Color.ENDC}"
        print(welcome_msg, flush=True)
        logging.info(self.get_lang_message("user_interface_started"))
        while True:
            try:
                user_input = input(f"{Color.OKBLUE}{self.get_lang_message('user_question')}{Color.ENDC}")
                if user_input.strip().lower() == "exit":
                    goodbye_msg = f"{Color.OKGREEN}{self.get_lang_message('goodbye')}{Color.ENDC}"
                    print(goodbye_msg, flush=True)
                    logging.info(self.get_lang_message("session_ended"))
                    break
                elif not user_input.strip():
                    continue
                result = self.respond(user_input)
                parsed_result = json.loads(result)
                if "answer" in parsed_result:
                    answer = parsed_result["answer"]
                    print(f"{Color.OKGREEN}{self.get_lang_message('agent_answer', answer=answer)}{Color.ENDC}", flush=True)
                else:
                    error = parsed_result["error"]
                    print(f"{Color.FAIL}{self.get_lang_message('agent_error', error=error)}{Color.ENDC}", flush=True)
            except (KeyboardInterrupt, EOFError):
                goodbye_msg = f"{Color.OKGREEN}{self.get_lang_message('goodbye')}{Color.ENDC}"
                print(goodbye_msg, flush=True)
                logging.info(self.get_lang_message("session_interrupted"))
                break
```

--------------------------------------------------------------------------------
/clients/PHP/5.0 mcp_store_user/MCPStoreUserClient.php:
--------------------------------------------------------------------------------

```php
<?php
/**
 * MCPStoreUserClient.php
 *
 * A PHP script that acts as a Store User Client. It connects to a server via TCP,
 * sends a request to store a new user, and receives the server's response.
 *
 * Usage:
 *   php MCPStoreUserClient.php --server-ip <IP> --server-port <Port> --token <Token> --name <Name> --email <Email> --password <Password> --language <Language> --timezone <Timezone> [--roles <Role1> <Role2> ...] [--groups <Group1> <Group2> ...] [--usePublic] [--activateFtp] [--ftpPassword <FtpPassword>]
 */

/**
 * Function to parse command line arguments
 *
 * @param array $args The command line arguments
 * @return array An associative array with the parsed arguments
 */
function parseArguments($args) {
    $parsedArgs = [];
    $argc = count($args);
    for ($i = 1; $i < $argc; $i++) {
        switch ($args[$i]) {
            case '--server-ip':
                if (isset($args[$i + 1]) && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['serverIp'] = $args[++$i];
                } else {
                    fwrite(STDERR, "⚠️ Warning: No value provided for --server-ip.\n");
                }
                break;
            case '--server-port':
                if (isset($args[$i + 1]) && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['serverPort'] = intval($args[++$i]);
                } else {
                    fwrite(STDERR, "⚠️ Warning: No value provided for --server-port.\n");
                }
                break;
            case '--token':
                if (isset($args[$i + 1]) && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['token'] = $args[++$i];
                } else {
                    fwrite(STDERR, "⚠️ Warning: No value provided for --token.\n");
                }
                break;
            case '--name':
                if (isset($args[$i + 1]) && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['name'] = $args[++$i];
                } else {
                    fwrite(STDERR, "⚠️ Warning: No value provided for --name.\n");
                }
                break;
            case '--email':
                if (isset($args[$i + 1]) && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['email'] = $args[++$i];
                } else {
                    fwrite(STDERR, "⚠️ Warning: No value provided for --email.\n");
                }
                break;
            case '--password':
                if (isset($args[$i + 1]) && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['password'] = $args[++$i];
                } else {
                    fwrite(STDERR, "⚠️ Warning: No value provided for --password.\n");
                }
                break;
            case '--language':
                if (isset($args[$i + 1]) && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['language'] = $args[++$i];
                } else {
                    fwrite(STDERR, "⚠️ Warning: No value provided for --language.\n");
                }
                break;
            case '--timezone':
                if (isset($args[$i + 1]) && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['timezone'] = $args[++$i];
                } else {
                    fwrite(STDERR, "⚠️ Warning: No value provided for --timezone.\n");
                }
                break;
            case '--roles':
                // Collect every following value until the next --flag.
                $parsedArgs['roles'] = [];
                while ($i + 1 < $argc && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['roles'][] = $args[++$i];
                }
                break;
            case '--groups':
                // Collect every following value until the next --flag.
                $parsedArgs['groups'] = [];
                while ($i + 1 < $argc && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['groups'][] = $args[++$i];
                }
                break;
            case '--usePublic':
                $parsedArgs['usePublic'] = true;
                break;
            case '--activateFtp':
                $parsedArgs['activateFtp'] = true;
                break;
            case '--ftpPassword':
                if (isset($args[$i + 1]) && !startsWith($args[$i + 1], '--')) {
                    $parsedArgs['ftpPassword'] = $args[++$i];
                } else {
                    fwrite(STDERR, "⚠️ Warning: No value provided for --ftpPassword.\n");
                }
                break;
            default:
                fwrite(STDERR, "⚠️ Warning: Unknown argument: {$args[$i]}\n");
        }
    }
    return $parsedArgs;
}

/**
 * Helper function to check if a string starts with a specific prefix
 *
 * @param string $string The string to check
 * @param string $prefix The prefix
 * @return bool True if the string starts with the prefix, otherwise False
 */
function startsWith($string, $prefix) {
    return substr($string, 0, strlen($prefix)) === $prefix;
}

/**
 * Function to interactively prompt for a parameter (optional)
 *
 * @param string $prompt The input prompt
 * @return string The user input
 */
function askQuestionPrompt($prompt) {
    if (preg_match('/^win/i', PHP_OS)) {
        // Windows specific input prompt
        // NOTE(review): no DIRECTORY_SEPARATOR between the temp dir and the
        // file name — verify this path on Windows before relying on it.
        $vbscript = sys_get_temp_dir() . 'prompt_input.vbs';
        file_put_contents($vbscript, 'wscript.echo(InputBox("' . addslashes($prompt) . '", "", ""))');
        $response = shell_exec("cscript //nologo " . escapeshellarg($vbscript));
        unlink($vbscript);
        return trim($response);
    } else {
        // Unix/Linux input prompt
        echo $prompt;
        $handle = fopen("php://stdin", "r");
        $response = trim(fgets($handle));
        fclose($handle);
        return $response;
    }
}

/**
 * Function to send a Store User request over a TCP connection
 *
 * @param string $serverIp The server's IP address
 * @param int $serverPort The server's port
 * @param string $token The authentication token
 * @param array $args The arguments for the user to be processed
 * @return array The response received from the server as an associative array
 * @throws Exception On connection errors or JSON parsing errors
 */
function sendStoreUserRequest($serverIp, $serverPort, $token, $args) {
    $payload = [
        "command" => "store_user",
        "token" => $token,
        "arguments" => [
            "name" => $args['name'] ?? null,
            "email" => $args['email'] ?? null,
            "password" => $args['password'] ?? null,
            "language" => $args['language'] ?? null,
            "timezone" => $args['timezone'] ?? null,
            "roles" => $args['roles'] ?? [],
            "groups" => $args['groups'] ?? [],
            "usePublic" => $args['usePublic'] ?? false,
            "activateFtp" => $args['activateFtp'] ?? false,
            "ftpPassword" => $args['ftpPassword'] ?? null
        ]
    ];

    // Remove null or empty values from the arguments
    $payload['arguments'] = array_filter($payload['arguments'], function($value) {
        if (is_array($value)) {
            return !empty($value);
        }
        return $value !== null;
    });

    $jsonPayload = json_encode($payload);
    if ($jsonPayload === false) {
        throw new Exception("Error while encoding the JSON payload: " . json_last_error_msg());
    }

    $errno = 0;
    $errstr = '';
    $timeoutDuration = 10; // Seconds (10 seconds timeout)

    $client = @fsockopen($serverIp, $serverPort, $errno, $errstr, $timeoutDuration);
    if (!$client) {
        throw new Exception("Connection error: $errstr ($errno)");
    }

    echo "🔗 Connected to server ({$serverIp}:{$serverPort}).\n";
    echo "📤 Sending Payload: {$jsonPayload}\n";
    fwrite($client, $jsonPayload);

    $responseData = '';
    stream_set_timeout($client, $timeoutDuration);
    while (!feof($client)) {
        $data = fread($client, 1024);
        if ($data === false) {
            throw new Exception("Error reading data from server.");
        }
        if ($data === '') {
            break; // No more data
        }
        echo "📥 Received data: {$data}\n";
        $responseData .= $data;

        // Attempt to parse the received data as JSON
        $parsedData = json_decode($responseData, true);
        if ($parsedData !== null) {
            echo "✅ JSON response successfully parsed.\n";
            fclose($client);
            return $parsedData;
        }

        // Check if the stream has timed out
        $info = stream_get_meta_data($client);
        if ($info['timed_out']) {
            throw new Exception("Timeout while waiting for data from server.");
        }
    }
    fclose($client);
    throw new Exception("Connection to server was closed before a complete response was received.");
}

/**
 * Main function of the script
 */
function main($argv) {
    $parsedArgs = parseArguments($argv);

    $serverIp = $parsedArgs['serverIp'] ?? null;
    $serverPort = $parsedArgs['serverPort'] ?? null;
    $token = $parsedArgs['token'] ?? null;
    $name = $parsedArgs['name'] ?? null;
    $email = $parsedArgs['email'] ?? null;
    $password = $parsedArgs['password'] ?? null;
    $language = $parsedArgs['language'] ?? null;
    $timezone = $parsedArgs['timezone'] ?? null;
    $roles = $parsedArgs['roles'] ?? [];
    $groups = $parsedArgs['groups'] ?? [];
    $usePublic = $parsedArgs['usePublic'] ?? false;
    $activateFtp = $parsedArgs['activateFtp'] ?? false;
    $ftpPassword = $parsedArgs['ftpPassword'] ?? null;

    // Check if all required parameters are present, otherwise prompt interactively
    if (!$serverIp) {
        $serverIp = askQuestionPrompt('🔗 Please enter the server IP: ');
    }
    if (!$serverPort) {
        $portInput = askQuestionPrompt('🔗 Please enter the server port: ');
        $serverPort = intval($portInput);
        if ($serverPort <= 0) {
            fwrite(STDERR, "❌ ERROR: Invalid server port.\n");
            exit(1);
        }
    }
    if (!$token) {
        $token = askQuestionPrompt('🔒 Please enter your authentication token: ');
    }
    if (!$name) {
        $name = askQuestionPrompt('👤 Please enter the user\'s name: ');
    }
    if (!$email) {
        $email = askQuestionPrompt('📧 Please enter the user\'s email: ');
    }
    if (!$password) {
        $password = askQuestionPrompt('🔑 Please enter the user\'s password: ');
    }
    if (!$language) {
        $language = askQuestionPrompt('🌐 Please enter the user\'s preferred language (e.g., en, de): ');
    }
    if (!$timezone) {
        $timezone = askQuestionPrompt('🕰️ Please enter the user\'s timezone (e.g., Europe/Berlin): ');
    }

    // Roles and groups are optional and have already been handled by parseArguments
    // usePublic, activateFtp, and ftpPassword are also optional

    // Set default values for optional parameters if they are not present
    $roles = $roles ?: [];
    $groups = $groups ?: [];
    $usePublic = $usePublic ? true : false;
    $activateFtp = $activateFtp ? true : false;
    $ftpPassword = $ftpPassword ?: '';

    // Check if all required parameters are now present
    if (!$serverIp || !$serverPort || !$token || !$name || !$email || !$password || !$language || !$timezone) {
        fwrite(STDERR, "❌ ERROR: Missing required parameters.\n");
        fwrite(STDOUT, "Usage: php MCPStoreUserClient.php --server-ip <IP> --server-port <Port> --token <Token> --name <Name> --email <Email> --password <Password> --language <Language> --timezone <Timezone> [--roles <Role1> <Role2> ...] [--groups <Group1> <Group2> ...] [--usePublic] [--activateFtp] [--ftpPassword <FtpPassword>]\n");
        exit(1);
    }

    try {
        echo "🧑💻 Sending Store-User request...\n";
        $response = sendStoreUserRequest(
            $serverIp,
            $serverPort,
            $token,
            [
                'name' => $name,
                'email' => $email,
                'password' => $password,
                'language' => $language,
                'timezone' => $timezone,
                'roles' => $roles,
                'groups' => $groups,
                'usePublic' => $usePublic,
                'activateFtp' => $activateFtp,
                'ftpPassword' => $ftpPassword
            ]
        );
        echo "✔️ Server response:\n";
        echo json_encode($response, JSON_PRETTY_PRINT) . "\n";
    } catch (Exception $e) {
        fwrite(STDERR, "❌ Error: " . $e->getMessage() . "\n");
    }
}

// Check if PHP version is at least 7.1 (for better features)
if (version_compare(PHP_VERSION, '7.1.0') < 0) {
    fwrite(STDERR, "❌ ERROR: This script requires PHP version 7.1 or higher.\n");
    exit(1);
}

// Call the main function
main($argv);
?>
```

--------------------------------------------------------------------------------
/agents/OpenAI_Compatible_API_Agent/Python/pgpt_api.py:
--------------------------------------------------------------------------------

```python
import json
import re
from pathlib import Path
import requests
import urllib3
import base64

from ...AgentInterface.Python.config import Config

# The API is queried over self-signed TLS; certificate warnings are muted.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


def initialize_session(proxy_user, proxy_password, access_header):
    """Set up the session with proxy authentication.

    Prefers the custom access header when given; otherwise falls back to
    HTTP Basic auth built from proxy_user/proxy_password.
    """
    session = requests.Session()
    session.verify = False
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    }
    if access_header is not None:
        headers['X-Custom-Header'] = access_header
    elif proxy_user is not None and proxy_password is not None:
        auth = base64.b64encode(f"{proxy_user}:{proxy_password}".encode()).decode()
        headers['Authorization'] = f'Basic {auth}'
    session.headers.update(headers)
    return session


class PrivateGPTAPI:
    def __init__(self, config, client_api_key=None):
        """Initialize the chat client with proxy authentication."""
        self.token = None
self.chat_id = None self.base_url = config.get("base_url") self.proxy_user = config.get("proxy_user", None) if self.proxy_user == "": self.proxy_user = None self.proxy_password = config.get("proxy_password", None) if self.proxy_password == "": self.proxy_password = None self.access_header = config.get("access_header", None) if self.access_header == "": self.access_header = None self.chosen_groups = config.data["groups"] or [] self.language = config.get("language", "en") self.use_public = config.get("use_public", True) self.whitelist_keys = config.get("whitelist_keys", []) self.logged_in = False if client_api_key is not None: self.email, self.password = decrypt_api_key(client_api_key) if len(self.whitelist_keys) > 0: if client_api_key not in self.whitelist_keys: print("not authorized") self.session = initialize_session(self.proxy_user, self.proxy_password, self.access_header) if self.login(): self.logged_in = True def login(self): """Authenticate the user and retrieve the token.""" url = f"{self.base_url}/login" payload = {"email": self.email, "password": self.password} try: response = self.session.post(url, json=payload) print(response.content) response.raise_for_status() data = response.json() self.token = data['data']['token'] # Prüfen, ob der Header bereits existiert if 'Authorization' in self.session.headers: self.session.headers['Authorization'] += f', Bearer {self.token}' else: self.session.headers['Authorization'] = f'Bearer {self.token}' self.chat_id = None print("✅ Login successful.") return True except requests.exceptions.RequestException as e: print(f"❌ Login failed: {e}") return False def create_chat(self, user_input): """Start a new chat session. This method sends a POST request to the '/chats' endpoint with the provided parameters. It initializes a new chat session and stores the chat ID for future use. 
""" url = f"{self.base_url}/chats" payload = { "language": self.language, "question": user_input, # Initial question to start the chat "usePublic": self.use_public, "groups": self.chosen_groups } try: response = self.session.post(url, json=payload) response.raise_for_status() # Raise an exception if the response was not successful data = response.json() self.chat_id = data['data']['chatId'] # Store the chat ID for future use print("✅ Chat initialized.") resp = response.json() try: answer = resp.get('data', None).get('answer', "error") except: print(response.json()) resp = {"data": {"answer": "error"} } answer = "error" if answer.startswith("{\"role\":"): answerj = json.loads(answer) resp["data"]["answer"] = answerj["content"] resp["data"]["chatId"] = "0" print(f"💡 Response: {answer}") return resp except requests.exceptions.RequestException as e: # It seems we get disconnections from time to time.. # print(f"⚠️ Failed to get response on first try, trying again..: {e}") try: response = self.session.patch(url, json=payload) response.raise_for_status() data = response.json() answer = data.get('data', {}).get('answer', "No answer provided.") print(f"💡 Response: {answer}") return data except: print(f"❌ Failed to get response: {e}") return {"error": f"❌ Failed to get response: {e}"} def query_private_gpt(self, user_input) -> json: """Send a question to the chat and retrieve the response.""" if not self.chat_id: print("❌ Chat session not initialized.") return False url = f"{self.base_url}/chats/{self.chat_id}" payload = {"question": user_input} try: response = self.session.patch(url, json=payload) #response.raise_for_status() resp = response.json() try: answer = resp.get('data', None).get('answer', "error") except: print(response.json()) resp = {"data" : {"answer": "error"} } answer = "error" if answer.startswith("{\"role\":"): answerj = json.loads(answer) resp["data"]["answer"] = answerj["content"] resp["data"]["chatId"] = "0" print(f"💡 Response: {answer}") return resp 
except requests.exceptions.RequestException as e: # It seems we get disconnections from time to time.. #print(f"⚠️ Failed to get response on first try, trying again..: {e}") try: response = self.session.patch(url, json=payload) response.raise_for_status() data = response.json() answer = data.get('data', {}).get('answer', "No answer provided.") print(f"💡 Response: {answer}") return data except: print(f"❌ Failed to get response: {e}") return {"error": f"❌ Failed to get response: {e}"} def get_document_info(self, source_id): """Send a source id to retrieve details. Working with version 1.3.3 and newer""" url = f"{self.base_url}/sources/{source_id}" try: response = self.session.get(url) data = response.json() info = data.get('data', {}) print(f"💡 Response: {str(info)}") return data except requests.exceptions.RequestException as e: print(f"❌ Failed to get response: {e}") return {"error": f"❌ Failed to get response: {e}"} def respond_with_context(self, messages, response_format=None, request_tools=None): last_user_message = next((p for p in reversed(messages) if p.role == "user"), None) user_input = "" for message in messages: if message.role == "system": user_input = str(message) + "\n" if last_user_message is not None: user_input += last_user_message.content last_assistant_message = next((p for p in reversed(messages) if p.role == "assistant"), None) last_tool_message = next((p for p in reversed(messages) if p.role == "tool"), None) hastoolresult = False if last_tool_message is not None and last_assistant_message is not None and last_assistant_message.tool_calls is not None and len(last_assistant_message.tool_calls) > 0: user_input += "\nYou called the tool: " + str(last_assistant_message.tool_calls[0]) + ". The result was: " + last_tool_message.content hastoolresult = True print(f"💁 Request: " + user_input) # PGPT manages history and context itself so we don't need to forward the history. 
add_context = False if add_context: messages.pop() user_input += "\nHere is some context about the previous conversation:\n" for message in messages: user_input += f"{message.role}: {message.content}\n" if response_format is not None: print("Response format: " + str(response_format)) user_input += add_response_format(response_format) if request_tools is not None and not hastoolresult: user_input += add_tools(request_tools, last_tool_message) if not self.logged_in: self.login() else: if self.chat_id is None: result = self.create_chat(user_input) else: result = self.query_private_gpt(user_input) if 'data' in result: response_data = result.get("data") if request_tools is not None and not hastoolresult and is_json(clean_response(response_data.get("answer"))): response_data["tool_call"] = clean_response(response_data.get("answer", "")) return response_data elif 'error' in result: # Try to login again and send the query once more on error. if self.login(): if self.chat_id is None: result = self.create_chat(user_input) else: result = self.query_private_gpt(user_input) if 'data' in result: return result['data'] else: return result else: return result def is_json(myjson): try: json.loads(myjson) except ValueError as e: return False return True def add_response_format(response_format): #prompt = "\nPlease fill in the following template with realistic and appropriate information. Be creative. The field 'type' defines the output format. In your reply, only return the generated json\n" prompt = "\nPlease fill in the following json template with realistic and appropriate information. In your reply, only return the generated json. If you can't answer return an empty json.\n" prompt += json.dumps(response_format) return prompt def add_tools(response_tools, last_tool_message): prompt = "\nPlease select the fitting provided tool to create your answer. Only return the generated result of the tool. 
def decrypt_api_key(api_key):
    """Decode a base64 ``email:password`` API key into its two parts.

    PoC code only — replace with a more secure way to deal with credentials
    (e.g. in a db).

    :param api_key: base64-encoded ``email:password`` string.
    :return: ``(email, password)``; ``("invalid", "invalid")`` when the key
        cannot be decoded or contains no ``:`` separator.
    """
    try:
        decoded_key = base64.b64decode(api_key.encode("ascii")).decode("ascii")
    except Exception as e:
        print(e)
        decoded_key = "invalid:invalid"
    # BUG FIX: the old `split(":")[1]` raised IndexError when no separator was
    # present and silently truncated passwords that contain a ':'.  partition
    # keeps everything after the first ':' intact.
    email, sep, password = decoded_key.partition(":")
    if not sep:
        return "invalid", "invalid"
    return email, password
def signal_handler(sig, frame):
    """Handle the first Ctrl-C: print a goodbye banner and hard-kill the process.

    :param sig: signal number delivered (SIGINT).
    :param frame: current stack frame (unused).
    """
    # Ignore subsequent SIGINT signals while we shut down.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    # pretty exit
    print("\n[bold red]Goodbye![/bold red]")
    # Immediately and forcibly kill the process.  BUG FIX: signal.SIGKILL does
    # not exist on Windows (the rest of this file explicitly supports win32),
    # so fall back to os._exit there instead of raising AttributeError.
    if hasattr(signal, "SIGKILL"):
        os.kill(os.getpid(), signal.SIGKILL)
    else:
        os._exit(1)
async def handle_command(command: str, server_streams: List[tuple]) -> bool:
    """Dispatch one REPL command against every connected server.

    :param command: command word typed by the user (already lowercased).
    :param server_streams: one (read_stream, write_stream) pair per server.
    :return: False when the user asked to quit, True otherwise.
    """
    try:
        if command == "ping":
            print("[cyan]\nPinging Servers...[/cyan]")
            for server_num, (read_stream, write_stream) in enumerate(server_streams, start=1):
                alive = await send_ping(read_stream, write_stream)
                if alive:
                    ping_md = f"## Server {server_num} Ping Result\n\n✅ **Server is up and running**"
                    print(Panel(Markdown(ping_md), style="bold green"))
                else:
                    ping_md = f"## Server {server_num} Ping Result\n\n❌ **Server ping failed**"
                    print(Panel(Markdown(ping_md), style="bold red"))

        elif command == "list-tools":
            print("[cyan]\nFetching Tools List from all servers...[/cyan]")
            for server_num, (read_stream, write_stream) in enumerate(server_streams, start=1):
                response = await send_tools_list(read_stream, write_stream)
                tools_list = response.get("tools", [])
                if not tools_list:
                    tools_md = f"## Server {server_num} Tools List\n\nNo tools available."
                else:
                    entries = [
                        f"- **{t.get('name')}**: {t.get('description', 'No description')}"
                        for t in tools_list
                    ]
                    tools_md = f"## Server {server_num} Tools List\n\n" + "\n".join(entries)
                print(
                    Panel(
                        Markdown(tools_md),
                        title=f"Server {server_num} Tools",
                        style="bold cyan",
                    )
                )

        elif command == "call-tool":
            tool_name = await get_input("[bold magenta]Enter tool name[/bold magenta]")
            if not tool_name:
                print("[red]Tool name cannot be empty.[/red]")
                return True
            arguments_str = await get_input("[bold magenta]Enter tool arguments as JSON (e.g., {'key': 'value'})[/bold magenta]")
            try:
                arguments = json.loads(arguments_str)
            except json.JSONDecodeError as e:
                print(f"[red]Invalid JSON arguments format:[/red] {e}")
                return True
            print(f"[cyan]\nCalling tool '{tool_name}' with arguments:\n[/cyan]")
            print(
                Panel(
                    Markdown(f"```json\n{json.dumps(arguments, indent=2)}\n```"),
                    style="dim",
                )
            )
            for read_stream, write_stream in server_streams:
                result = await send_call_tool(tool_name, arguments, read_stream, write_stream)
                if result.get("isError"):
                    # This server could not run the tool; try the next one.
                    continue
                response_content = result.get("content", "No content")
                try:
                    if response_content[0]['text'].startswith('Error:'):
                        continue
                except:
                    pass
                print(
                    Panel(
                        Markdown(f"### Tool Response\n\n{response_content}"),
                        style="green",
                    )
                )

        elif command == "list-resources":
            print("[cyan]\nFetching Resources List from all servers...[/cyan]")
            for server_num, (read_stream, write_stream) in enumerate(server_streams, start=1):
                response = await send_resources_list(read_stream, write_stream)
                resources_list = response.get("resources", []) if response else None
                if not resources_list:
                    resources_md = f"## Server {server_num} Resources List\n\nNo resources available."
                else:
                    resources_md = f"## Server {server_num} Resources List\n"
                    for r in resources_list:
                        if isinstance(r, dict):
                            json_str = json.dumps(r, indent=2)
                            resources_md += f"\n```json\n{json_str}\n```"
                        else:
                            resources_md += f"\n- {r}"
                print(
                    Panel(
                        Markdown(resources_md),
                        title=f"Server {server_num} Resources",
                        style="bold cyan",
                    )
                )

        elif command == "list-prompts":
            print("[cyan]\nFetching Prompts List from all servers...[/cyan]")
            for server_num, (read_stream, write_stream) in enumerate(server_streams, start=1):
                response = await send_prompts_list(read_stream, write_stream)
                prompts_list = response.get("prompts", []) if response else None
                if not prompts_list:
                    prompts_md = f"## Server {server_num} Prompts List\n\nNo prompts available."
                else:
                    prompts_md = f"## Server {server_num} Prompts List\n\n" + "\n".join(
                        [f"- {p}" for p in prompts_list]
                    )
                print(
                    Panel(
                        Markdown(prompts_md),
                        title=f"Server {server_num} Prompts",
                        style="bold cyan",
                    )
                )

        elif command == "chat":
            provider = os.getenv("LLM_PROVIDER", "openai")
            model = os.getenv("LLM_MODEL", "gpt-4o-mini")
            # Clear the screen before entering chat mode.
            os.system("cls" if sys.platform == "win32" else "clear")
            chat_info_text = (
                "Welcome to the Chat!\n\n"
                f"**Provider:** {provider} | **Model:** {model}\n\n"
                "Type 'exit' to quit."
            )
            print(
                Panel(
                    Markdown(chat_info_text),
                    style="bold cyan",
                    title="Chat Mode",
                    title_align="center",
                )
            )
            await handle_chat_mode(server_streams, provider, model)

        elif command in ["quit", "exit"]:
            print("\n[bold red]Goodbye![/bold red]")
            return False

        elif command == "clear":
            os.system("cls" if sys.platform == "win32" else "clear")

        elif command == "help":
            print(Panel(print_help()))

        else:
            print(f"[red]\nUnknown command: {command}[/red]")
            print("[yellow]Type 'help' for available commands[/yellow]")
    except Exception as e:
        print(f"\n[red]Error executing command:[/red] {e}")
    return True
""" print(Panel(Markdown(welcome_text), style="bold cyan")) help_md = print_help() print(Panel(help_md, style="yellow")) while True: try: command = await get_input("[bold green]\n>[/bold green]") command = command.lower() if not command: continue should_continue = await handle_command(command, server_streams) if not should_continue: return except EOFError: break except Exception as e: print(f"\n[red]Error:[/red] {e}") class GracefulExit(Exception): """Custom exception for handling graceful exits.""" pass async def run(config_path: str, server_names: List[str], command: str = None) -> None: """Main function to manage server initialization, communication, and shutdown.""" # Clear screen before rendering anything if sys.platform == "win32": os.system("cls") else: os.system("clear") # Load server configurations and establish connections for all servers server_streams = [] context_managers = [] for server_name in server_names: server_params = await load_config(config_path, server_name) # Establish stdio communication for each server cm = stdio_client(server_params) (read_stream, write_stream) = await cm.__aenter__() context_managers.append(cm) server_streams.append((read_stream, write_stream)) init_result = await send_initialize(read_stream, write_stream) if not init_result: print(f"[red]Server initialization failed for {server_name}[/red]") return try: if command: # Single command mode await handle_command(command, server_streams) else: # Interactive mode await interactive_mode(server_streams) finally: # Clean up all streams for cm in context_managers: with anyio.move_on_after(1): # wait up to 1 second await cm.__aexit__() def cli_main(): # setup the parser parser = argparse.ArgumentParser(description="MCP Client") parser.add_argument( "--config-file", default=DEFAULT_CONFIG_FILE, help="Path to the JSON configuration file containing server details.", ) parser.add_argument( "--server", action="append", dest="servers", help="Server configuration(s) to use. 
def cli_main():
    """Parse CLI arguments, configure the LLM environment and launch the client."""
    parser = argparse.ArgumentParser(description="MCP Client")
    parser.add_argument(
        "--config-file",
        default=DEFAULT_CONFIG_FILE,
        help="Path to the JSON configuration file containing server details.",
    )
    parser.add_argument(
        "--server",
        action="append",
        dest="servers",
        help="Server configuration(s) to use. Can be specified multiple times.",
        default=[],
    )
    parser.add_argument(
        "--all",
        action="store_true",
        dest="all",
        default=False,
    )
    parser.add_argument(
        "command",
        nargs="?",
        choices=["ping", "list-tools", "list-resources", "list-prompts"],
        help="Command to execute (optional - if not provided, enters interactive mode).",
    )
    parser.add_argument(
        "--model",
        help=("Model to use. Defaults to 'pgpt-mistral-nemo-12b'"),
    )
    args = parser.parse_args()

    # Fall back to the default model when none was given on the command line.
    chosen_model = args.model or "pgpt-mistral-nemo-12b"
    os.environ["LLM_PROVIDER"] = "pgpt"
    os.environ["LLM_MODEL"] = chosen_model

    try:
        if args.all:
            # --all: connect to every server listed in the config file.
            with open(args.config_file, 'r') as config_handle:
                args.servers = list(json.load(config_handle)['mcpServers'].keys())
        exit_code = anyio.run(run, args.config_file, args.servers, args.command)
        sys.exit(exit_code)
    except Exception as err:
        print(f"[red]Error occurred:[/red] {err}")
        sys.exit(1)
client_api_key=None): """Initialize the chat client with proxy authentication.""" self.token = None self.chat_id = None self.base_url = config.get("base_url") self.proxy_user = config.get("proxy_user", None) if self.proxy_user == "": self.proxy_user = None self.proxy_password = config.get("proxy_password", None) if self.proxy_password == "": self.proxy_password = None self.access_header = config.get("access_header", None) if self.access_header == "": self.access_header = None self.chosen_groups = config.data["groups"] or [] self.language = config.get("language", "en") self.use_public = config.get("use_public", True) self.whitelist_keys = config.get("whitelist_keys", []) self.logged_in = False if client_api_key is not None: self.email, self.password = decrypt_api_key(client_api_key) if len(self.whitelist_keys) > 0: if client_api_key not in self.whitelist_keys: print("not authorized") self.session = initialize_session(self.proxy_user, self.proxy_password, self.access_header) if self.login(): self.logged_in = True def login(self): """Authenticate the user and retrieve the token.""" url = f"{self.base_url}/login" payload = {"email": self.email, "password": self.password} try: response = self.session.post(url, json=payload) print(response.content) response.raise_for_status() data = response.json() self.token = data['data']['token'] # Prüfen, ob der Header bereits existiert if 'Authorization' in self.session.headers: self.session.headers['Authorization'] += f', Bearer {self.token}' else: self.session.headers['Authorization'] = f'Bearer {self.token}' self.chat_id = None print("✅ Login successful.") return True except requests.exceptions.RequestException as e: print(f"❌ Login failed: {e}") return False def create_chat(self, user_input): """Start a new chat session. This method sends a POST request to the '/chats' endpoint with the provided parameters. It initializes a new chat session and stores the chat ID for future use. 
""" url = f"{self.base_url}/chats" payload = { "language": self.language, "question": user_input, # Initial question to start the chat "usePublic": self.use_public, "groups": self.chosen_groups } try: response = self.session.post(url, json=payload) response.raise_for_status() # Raise an exception if the response was not successful data = response.json() self.chat_id = data['data']['chatId'] # Store the chat ID for future use print("✅ Chat initialized.") resp = response.json() try: answer = resp.get('data', None).get('answer', "error") except: print(response.json()) resp = {"data": {"answer": "error"} } answer = "error" if answer.startswith("{\"role\":"): answerj = json.loads(answer) resp["data"]["answer"] = answerj["content"] resp["data"]["chatId"] = "0" print(f"💡 Response: {answer}") return resp except requests.exceptions.RequestException as e: # It seems we get disconnections from time to time.. # print(f"⚠️ Failed to get response on first try, trying again..: {e}") try: response = self.session.patch(url, json=payload) response.raise_for_status() data = response.json() answer = data.get('data', {}).get('answer', "No answer provided.") print(f"💡 Response: {answer}") return data except: print(f"❌ Failed to get response: {e}") return {"error": f"❌ Failed to get response: {e}"} def list_personal_groups(self): url = f"{self.base_url}/groups" try: resp = self.session.get(url) data_block = resp.content.get("data") if not data_block: return [] if data_block.get("status") == 200 and data_block.get("message") == "success": personal = data_block.get("personalGroups", []) return personal else: return [] except NetworkError as e: return [] def query_private_gpt(self, user_input) -> json: """Send a question to the chat and retrieve the response.""" if not self.chat_id: print("❌ Chat session not initialized.") return False url = f"{self.base_url}/chats/{self.chat_id}" payload = {"question": user_input} try: response = self.session.patch(url, json=payload) 
#response.raise_for_status() resp = response.json() try: answer = resp.get('data', None).get('answer', "error") except: print(response.json()) resp = {"data" : {"answer": "error"} } answer = "error" if answer.startswith("{\"role\":"): answerj = json.loads(answer) resp["data"]["answer"] = answerj["content"] resp["data"]["chatId"] = "0" print(f"💡 Response: {answer}") return resp except requests.exceptions.RequestException as e: # It seems we get disconnections from time to time.. #print(f"⚠️ Failed to get response on first try, trying again..: {e}") try: response = self.session.patch(url, json=payload) response.raise_for_status() data = response.json() answer = data.get('data', {}).get('answer', "No answer provided.") print(f"💡 Response: {answer}") return data except: print(f"❌ Failed to get response: {e}") return {"error": f"❌ Failed to get response: {e}"} def get_document_info(self, source_id): """Send a source id to retrieve details. Working with version 1.3.3 and newer""" url = f"{self.base_url}/sources/{source_id}" try: response = self.session.get(url) data = response.json() info = data.get('data', {}) print(f"💡 Response: {str(info)}") return data except requests.exceptions.RequestException as e: print(f"❌ Failed to get response: {e}") return {"error": f"❌ Failed to get response: {e}"} def respond_with_context(self, messages, response_format=None, request_tools=None): last_user_message = next((p for p in reversed(messages) if p.role == "user"), None) user_input = "" for message in messages: if message.role == "system": user_input = str(message) + "\n" if last_user_message is not None: user_input += last_user_message.content last_assistant_message = next((p for p in reversed(messages) if p.role == "assistant"), None) last_tool_message = next((p for p in reversed(messages) if p.role == "tool"), None) hastoolresult = False if last_tool_message is not None and last_assistant_message is not None and last_assistant_message.tool_calls is not None and 
def is_json(myjson):
    """Return True when *myjson* parses as JSON, False otherwise.

    :param myjson: candidate JSON text (may be any type).
    """
    try:
        json.loads(myjson)
    except (ValueError, TypeError):
        # BUG FIX: json.loads raises TypeError for non-string input (e.g.
        # None); the original only caught ValueError and let it propagate.
        return False
    return True
def add_tools(response_tools, last_tool_message):
    """Build a prompt fragment offering each tool definition to the model.

    :param response_tools: iterable of JSON-serializable tool definitions.
    :param last_tool_message: unused; kept for interface compatibility.
    :return: instruction text followed by one JSON tool definition per block.
    """
    prompt = ("\nPlease select the fitting provided tool to create your answer. "
              "Only return the generated result of the tool. "
              "Do not describe what you are doing, just return the json.\n")
    # Dead `index` counter from the original removed — it was incremented but
    # never read.
    for tool in response_tools:
        prompt += "\n" + json.dumps(tool) + "\n"
    return prompt
def main():
    """Run a minimal interactive console chat against the PrivateGPT API.

    Reads questions from stdin in a loop and forwards each one to the chat
    session; 'quit', Ctrl-C or any unexpected error ends the loop.
    """
    cfg_path = Path.absolute(Path(__file__).parent.parent / "pgpt_openai_api_proxy.json")
    settings = Config(config_file=cfg_path, required_fields=["base_url"])
    api = PrivateGPTAPI(settings)
    print("Type your questions below. Type 'quit' to exit.")
    while True:
        try:
            question = input("❓ Question: ").strip()
            if question.lower() == 'quit':
                break
            if question:
                api.query_private_gpt(question)
        except KeyboardInterrupt:
            print("\nExiting chat...")
            break
        except Exception as err:
            print(f"❌ Error: {str(err)}")
            break


if __name__ == "__main__":
    main()
1512.3 175.866C1494.65 164.102 1474.43 155.279 1451.64 149.397C1428.85 143.515 1404.95 140.574 1379.95 140.574C1341.72 140.574 1308.26 145.721 1279.59 156.014C1250.92 165.572 1227.02 180.645 1207.9 201.232C1189.52 221.084 1175.55 246.082 1165.99 276.228C1156.44 306.373 1151.66 341.297 1151.66 381.001V414.087C1151.66 469.231 1159.74 514.449 1175.92 549.741C1192.1 585.033 1216.36 611.502 1248.71 629.148C1281.06 646.058 1321.13 654.514 1368.92 654.514C1408.63 654.514 1443.55 648.632 1473.7 636.868C1504.58 624.369 1528.47 606.723 1545.38 583.93C1563.03 561.137 1571.85 533.198 1571.85 500.111V492.391H1345.76V366.663H1735.08V775.83H1627L1612.66 694.217C1590.6 715.539 1566.71 733.186 1540.97 747.155C1515.97 761.125 1488.03 771.419 1457.15 778.036C1426.27 785.388 1391.72 789.065 1353.48 789.065Z" fill="url(#paint2_linear_3311_10229)" fill-opacity="0.9"/> <path fill-rule="evenodd" clip-rule="evenodd" d="M6.02344 89.6842C6.02344 50.135 37.9352 18.074 77.3003 18.074H481.202C691.15 18.074 861.346 189.066 861.346 399.995C861.346 610.924 691.15 781.916 481.202 781.916H250.986L47.4726 1009.97C32.9221 1026.28 6.02344 1015.94 6.02344 994.04V89.6842ZM218.262 247.835C218.262 238.435 225.864 230.815 235.241 230.815H480.672C573.696 230.815 649.107 306.405 649.107 399.649C649.107 492.893 573.696 568.483 480.672 568.483H355.38L246.959 686.671C228.889 698.718 218.262 685.675 218.262 674.629V247.835Z" fill="url(#paint3_linear_3311_10229)"/> <path fill-rule="evenodd" clip-rule="evenodd" d="M1374.44 12.0468C1292.77 12.0468 1223.14 26.2657 1165.32 54.4498L1165.27 54.4763C1107.63 81.8548 1063.73 123.944 1033.45 180.899L1033.44 180.928C1003.17 237.137 987.837 309.228 987.837 397.544C987.837 530.507 1019.62 627.541 1082 689.935C1145.25 751.736 1235.45 783.042 1353.48 783.042C1391.34 783.042 1425.42 779.401 1455.76 772.177L1455.89 772.145C1486.24 765.641 1513.61 755.546 1538.03 741.898L1538.1 741.862C1563.33 728.165 1586.79 710.848 1608.47 689.887L1616.63 682.003L1632.05 
769.807H1729.05V372.687H1351.79V486.368H1577.88V500.112C1577.88 534.302 1568.74 563.575 1550.19 587.566C1532.51 611.369 1507.66 629.618 1475.96 642.452L1475.89 642.479C1444.9 654.57 1409.21 660.538 1368.92 660.538C1320.45 660.538 1279.36 651.965 1245.92 634.486L1245.82 634.437C1212.27 616.134 1187.13 588.657 1170.44 552.251C1153.79 515.913 1145.63 469.759 1145.63 414.087V381.001C1145.63 340.83 1150.47 305.269 1160.25 274.407C1170.03 243.559 1184.41 217.744 1203.48 197.14C1223.32 175.773 1248.09 160.176 1277.62 150.322C1307.07 139.759 1341.22 134.551 1379.95 134.551C1405.43 134.551 1429.83 137.548 1453.14 143.565C1476.52 149.597 1497.37 158.677 1515.64 170.854C1534.03 183.113 1548.31 198.51 1558.36 217.032C1568.29 234.088 1573.61 253.327 1574.45 274.616H1729C1728.22 230.974 1718.86 193.6 1701.16 162.287L1701.08 162.153C1683.18 128.504 1657.77 100.559 1624.73 78.2938L1624.68 78.2591C1592.25 55.9144 1554.68 39.2771 1511.92 28.4042C1469.07 17.5104 1423.25 12.0468 1374.44 12.0468ZM1160.07 43.6073C1219.9 14.4512 1291.42 0 1374.44 0C1424.15 0 1470.97 5.56508 1514.88 16.7288C1558.86 27.9096 1597.75 45.0862 1631.48 68.3154C1666.06 91.6183 1692.82 120.997 1711.68 156.424C1731.38 191.309 1741.1 232.793 1741.1 280.639V286.663H1562.52V280.639C1562.52 258.787 1557.6 239.638 1547.9 223.016L1547.85 222.934L1547.81 222.85C1538.74 206.116 1525.83 192.129 1508.96 180.878C1491.93 169.527 1472.34 160.961 1450.13 155.23C1427.86 149.482 1404.47 146.598 1379.95 146.598C1342.24 146.598 1309.5 151.676 1281.62 161.684L1281.5 161.73C1253.71 170.993 1230.7 185.537 1212.32 205.325C1194.64 224.425 1181.07 248.607 1171.74 278.048C1162.4 307.477 1157.68 341.765 1157.68 381.001V414.087C1157.68 468.703 1165.7 512.985 1181.4 547.231C1197.05 581.393 1220.42 606.846 1251.55 623.835C1282.81 640.162 1321.84 648.491 1368.92 648.491C1408.03 648.491 1442.17 642.699 1471.47 631.271C1501.49 619.115 1524.41 602.092 1540.55 580.341L1540.62 580.242C1557.33 558.657 1565.83 532.07 1565.83 
500.112V498.415H1339.74V360.64H1741.1V781.854H1621.94L1608.65 706.226C1588.53 724.502 1566.94 739.91 1543.88 752.433C1518.33 766.704 1489.85 777.184 1458.48 783.911C1427.08 791.382 1392.07 795.088 1353.48 795.088C1233.31 795.088 1139.7 763.171 1073.55 698.525L1073.5 698.477C1007.96 632.937 975.79 532.196 975.79 397.544C975.79 307.931 991.336 233.707 1022.83 175.218M0 89.6845C0 46.8352 34.582 12.0509 77.3003 12.0509H481.202C694.503 12.0509 867.369 185.766 867.369 399.995C867.369 614.225 694.503 787.94 481.202 787.94H253.684L51.9667 1013.98C33.6916 1034.46 0 1021.41 0 994.04V89.6845ZM77.3003 24.0977C41.2882 24.0977 12.0468 53.4354 12.0468 89.6845V994.04C12.0468 1010.47 32.1526 1018.1 42.9784 1005.96L248.288 775.893H481.202C687.797 775.893 855.322 607.625 855.322 399.995C855.322 192.366 687.797 24.0977 481.202 24.0977H77.3003ZM1875.37 13.2327H2275.12C2330.22 13.2327 2376.53 23.6501 2413.69 44.8695C2451.47 65.286 2479.91 94.4615 2498.84 132.299C2518.49 169.358 2528.18 213.362 2528.18 264.094C2528.18 314.894 2518.09 359.637 2497.71 398.144C2497.71 398.147 2497.71 398.151 2497.71 398.155L2492.38 395.337L2497.73 398.116C2497.72 398.125 2497.72 398.134 2497.71 398.144C2478.03 435.986 2448.86 465.505 2410.36 486.641C2371.76 507.831 2324.75 518.265 2269.6 518.265H2051.74V512.241H2045.72V518.265H2051.74V781.852H1875.37V13.2327ZM1887.41 25.2795V769.805H2039.69V506.218H2269.6C2323.27 506.218 2368.16 496.065 2404.56 476.081C2441.05 456.048 2468.49 428.227 2487.04 392.558L2487.06 392.518C2506.38 356.032 2516.14 313.284 2516.14 264.094C2516.14 214.862 2506.73 172.864 2488.16 137.881L2488.1 137.753C2470.28 102.117 2443.6 74.7103 2407.91 55.4366L2407.78 55.3679C2372.9 35.4384 2328.79 25.2795 2275.12 25.2795H1887.41ZM2562.65 13.2327H3226.5V163.139H2982.76V781.852H2805.29V163.139H2562.65V13.2327ZM2574.7 25.2795V151.092H2817.33V769.805H2970.72V151.092H3214.45V25.2795H2574.7ZM2039.69 147.784H2247.54C2272.46 147.784 2293.42 152.303 2310.03 161.781C2327.24 171.18 2340.24 184.565 2348.88 
201.845C2357.5 219.093 2361.65 240.304 2361.65 265.197C2361.65 301.566 2351.78 330.785 2331.33 352.045C2311.52 373.422 2283.26 383.714 2247.54 383.714H2039.69V147.784ZM2051.74 159.83V371.667H2247.54C2280.92 371.667 2305.59 362.12 2322.53 343.821L2322.61 343.735C2340.37 325.296 2349.6 299.387 2349.6 265.197C2349.6 241.564 2345.66 222.337 2338.11 207.233C2330.58 192.179 2319.33 180.575 2304.21 172.329L2304.11 172.271C2289.85 164.124 2271.12 159.83 2247.54 159.83H2051.74ZM212.239 247.835C212.239 235.122 222.524 224.792 235.241 224.792H480.672C577.036 224.792 655.13 303.092 655.13 399.649C655.13 496.207 577.036 574.507 480.672 574.507H358.029L250.906 691.279L250.3 691.683C239.904 698.614 230.088 699.168 222.595 694.389C215.629 689.945 212.239 681.805 212.239 674.629V247.835ZM235.241 236.839C229.204 236.839 224.286 241.749 224.286 247.835V674.629C224.286 678.501 226.208 682.404 229.074 684.232C231.351 685.685 235.713 686.721 243.031 682.043L352.732 562.46H480.672C570.356 562.46 643.083 489.58 643.083 399.649C643.083 309.718 570.356 236.839 480.672 236.839H235.241Z" fill="url(#paint4_linear_3311_10229)" fill-opacity="0.6"/> <path fill-rule="evenodd" clip-rule="evenodd" d="M1374.44 12.0468C1292.77 12.0468 1223.14 26.2657 1165.32 54.4498L1165.27 54.4763C1107.63 81.8548 1063.73 123.944 1033.45 180.899L1033.44 180.928C1003.17 237.137 987.837 309.228 987.837 397.544C987.837 530.507 1019.62 627.541 1082 689.935C1145.25 751.736 1235.45 783.042 1353.48 783.042C1391.34 783.042 1425.42 779.401 1455.76 772.177L1455.89 772.145C1486.24 765.641 1513.61 755.546 1538.03 741.898L1538.1 741.862C1563.33 728.165 1586.79 710.848 1608.47 689.887L1616.63 682.003L1632.05 769.807H1729.05V372.687H1351.79V486.368H1577.88V500.112C1577.88 534.302 1568.74 563.575 1550.19 587.566C1532.51 611.369 1507.66 629.618 1475.96 642.452L1475.89 642.479C1444.9 654.57 1409.21 660.538 1368.92 660.538C1320.45 660.538 1279.36 651.965 1245.92 634.486L1245.82 634.437C1212.27 616.134 1187.13 588.657 1170.44 
552.251C1153.79 515.913 1145.63 469.759 1145.63 414.087V381.001C1145.63 340.83 1150.47 305.269 1160.25 274.407C1170.03 243.559 1184.41 217.744 1203.48 197.14C1223.32 175.773 1248.09 160.176 1277.62 150.322C1307.07 139.759 1341.22 134.551 1379.95 134.551C1405.43 134.551 1429.83 137.548 1453.14 143.565C1476.52 149.597 1497.37 158.677 1515.64 170.854C1534.03 183.113 1548.31 198.51 1558.36 217.032C1568.29 234.088 1573.61 253.327 1574.45 274.616H1729C1728.22 230.974 1718.86 193.6 1701.16 162.287L1701.08 162.153C1683.18 128.504 1657.77 100.559 1624.73 78.2938L1624.68 78.2591C1592.25 55.9144 1554.68 39.2771 1511.92 28.4042C1469.07 17.5104 1423.25 12.0468 1374.44 12.0468ZM1160.07 43.6073C1219.9 14.4512 1291.42 0 1374.44 0C1424.15 0 1470.97 5.56508 1514.88 16.7288C1558.86 27.9096 1597.75 45.0862 1631.48 68.3154C1666.06 91.6183 1692.82 120.997 1711.68 156.424C1731.38 191.309 1741.1 232.793 1741.1 280.639V286.663H1562.52V280.639C1562.52 258.787 1557.6 239.638 1547.9 223.016L1547.85 222.934L1547.81 222.85C1538.74 206.116 1525.83 192.129 1508.96 180.878C1491.93 169.527 1472.34 160.961 1450.13 155.23C1427.86 149.482 1404.47 146.598 1379.95 146.598C1342.24 146.598 1309.5 151.676 1281.62 161.684L1281.5 161.73C1253.71 170.993 1230.7 185.537 1212.32 205.325C1194.64 224.425 1181.07 248.607 1171.74 278.048C1162.4 307.477 1157.68 341.765 1157.68 381.001V414.087C1157.68 468.703 1165.7 512.985 1181.4 547.231C1197.05 581.393 1220.42 606.846 1251.55 623.835C1282.81 640.162 1321.84 648.491 1368.92 648.491C1408.03 648.491 1442.17 642.699 1471.47 631.271C1501.49 619.115 1524.41 602.092 1540.55 580.341L1540.62 580.242C1557.33 558.657 1565.83 532.07 1565.83 500.112V498.415H1339.74V360.64H1741.1V781.854H1621.94L1608.65 706.226C1588.53 724.502 1566.94 739.91 1543.88 752.433C1518.33 766.704 1489.85 777.184 1458.48 783.911C1427.08 791.382 1392.07 795.088 1353.48 795.088C1233.31 795.088 1139.7 763.171 1073.55 698.525L1073.5 698.477C1007.96 632.937 975.79 532.196 975.79 397.544C975.79 307.931 991.336 
233.707 1022.83 175.218M0 89.6845C0 46.8352 34.582 12.0509 77.3003 12.0509H481.202C694.503 12.0509 867.369 185.766 867.369 399.995C867.369 614.225 694.503 787.94 481.202 787.94H253.684L51.9667 1013.98C33.6916 1034.46 0 1021.41 0 994.04V89.6845ZM77.3003 24.0977C41.2882 24.0977 12.0468 53.4354 12.0468 89.6845V994.04C12.0468 1010.47 32.1526 1018.1 42.9784 1005.96L248.288 775.893H481.202C687.797 775.893 855.322 607.625 855.322 399.995C855.322 192.366 687.797 24.0977 481.202 24.0977H77.3003ZM1875.37 13.2327H2275.12C2330.22 13.2327 2376.53 23.6501 2413.69 44.8695C2451.47 65.286 2479.91 94.4615 2498.84 132.299C2518.49 169.358 2528.18 213.362 2528.18 264.094C2528.18 314.894 2518.09 359.637 2497.71 398.144C2497.71 398.147 2497.71 398.151 2497.71 398.155L2492.38 395.337L2497.73 398.116C2497.72 398.125 2497.72 398.134 2497.71 398.144C2478.03 435.986 2448.86 465.505 2410.36 486.641C2371.76 507.831 2324.75 518.265 2269.6 518.265H2051.74V512.241H2045.72V518.265H2051.74V781.852H1875.37V13.2327ZM1887.41 25.2795V769.805H2039.69V506.218H2269.6C2323.27 506.218 2368.16 496.065 2404.56 476.081C2441.05 456.048 2468.49 428.227 2487.04 392.558L2487.06 392.518C2506.38 356.032 2516.14 313.284 2516.14 264.094C2516.14 214.862 2506.73 172.864 2488.16 137.881L2488.1 137.753C2470.28 102.117 2443.6 74.7103 2407.91 55.4366L2407.78 55.3679C2372.9 35.4384 2328.79 25.2795 2275.12 25.2795H1887.41ZM2562.65 13.2327H3226.5V163.139H2982.76V781.852H2805.29V163.139H2562.65V13.2327ZM2574.7 25.2795V151.092H2817.33V769.805H2970.72V151.092H3214.45V25.2795H2574.7ZM2039.69 147.784H2247.54C2272.46 147.784 2293.42 152.303 2310.03 161.781C2327.24 171.18 2340.24 184.565 2348.88 201.845C2357.5 219.093 2361.65 240.304 2361.65 265.197C2361.65 301.566 2351.78 330.785 2331.33 352.045C2311.52 373.422 2283.26 383.714 2247.54 383.714H2039.69V147.784ZM2051.74 159.83V371.667H2247.54C2280.92 371.667 2305.59 362.12 2322.53 343.821L2322.61 343.735C2340.37 325.296 2349.6 299.387 2349.6 265.197C2349.6 241.564 2345.66 222.337 
def initialize_session(proxy_user, proxy_password, access_header):
    """Create a requests.Session preconfigured for the PGPT proxy.

    Auth precedence: a custom access header wins over Basic proxy credentials.
    NOTE(review): TLS verification is disabled (session.verify = False);
    confirm this is intentional for the target deployment.
    """
    session = requests.Session()
    session.verify = False
    headers = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
    }
    if access_header is not None:
        headers['X-Custom-Header'] = access_header
    elif proxy_user is not None and proxy_password is not None:
        auth = base64.b64encode(f"{proxy_user}:{proxy_password}".encode()).decode()
        headers['Authorization'] = f'Basic {auth}'
    session.headers.update(headers)
    return session


class PrivateGPTAPI:
    """Thin client for the PrivateGPT REST API (login, chats, sources)."""

    def __init__(self, config, client_api_key=None):
        """Initialize the chat client.

        config: mapping with at least "base_url"; optional proxy/user settings.
        client_api_key: optional base64 "email:password" credential that
        overrides the email/password from the config.
        """
        self.token = None
        self.chat_id = None
        self.base_url = config.get("base_url")
        # Treat empty strings from the config file the same as "not set".
        self.proxy_user = config.get("proxy_user", None) or None
        self.proxy_password = config.get("proxy_password", None) or None
        self.access_header = config.get("access_header", None) or None
        self.chosen_groups = config.get("groups", [])
        self.language = config.get("language", "en")
        self.use_public = config.get("use_public", True)
        self.whitelist_keys = config.get("whitelist_keys", [])
        self.logged_in = False
        if client_api_key is not None:
            self.email, self.password = decrypt_api_key(client_api_key)
            if self.whitelist_keys and client_api_key not in self.whitelist_keys:
                print("not authorized")
        else:
            self.email = config.get("email", None)
            self.password = config.get("password", None)
        self.session = initialize_session(self.proxy_user, self.proxy_password,
                                          self.access_header)
        if self.login():
            self.logged_in = True

    def login(self):
        """Authenticate and store the bearer token on the session.

        Returns True on success, False on any request/response failure.
        """
        url = f"{self.base_url}/login"
        payload = {"email": self.email, "password": self.password}
        try:
            response = self.session.post(url, json=payload)
            # Do NOT print response.content here: it contains the auth token
            # (and failures may echo credentials) — previous code leaked it.
            response.raise_for_status()
            data = response.json()
            self.token = data['data']['token']
            # Rebuild (never append to) the Authorization header so repeated
            # logins do not accumulate stale ", Bearer ..." fragments.
            if self.access_header is None and self.proxy_user and self.proxy_password:
                basic = base64.b64encode(
                    f"{self.proxy_user}:{self.proxy_password}".encode()).decode()
                self.session.headers['Authorization'] = f'Basic {basic}, Bearer {self.token}'
            else:
                self.session.headers['Authorization'] = f'Bearer {self.token}'
            self.chat_id = None
            print("✅ Login successful.")
            return True
        except (requests.exceptions.RequestException, KeyError, TypeError, ValueError) as e:
            print(f"❌ Login failed: {e}")
            return False

    def create_chat(self, user_input):
        """Start a new chat session.

        POSTs to '/chats' with the initial question and stores the returned
        chat id for follow-up calls via query_private_gpt().
        """
        url = f"{self.base_url}/chats"
        payload = {
            "language": self.language,
            "question": user_input,
            "usePublic": self.use_public,
            "groups": self.chosen_groups,
        }
        try:
            response = self.session.post(url, json=payload)
            response.raise_for_status()
            resp = response.json()
            self.chat_id = resp['data']['chatId']
            print("✅ Chat initialized.")
            try:
                answer = resp.get('data', None).get('answer', "error")
            except AttributeError:
                print(resp)
                resp = {"data": {"answer": "error"}}
                answer = "error"
            # Some backends wrap the reply in a serialized chat message.
            if answer.startswith("{\"role\":"):
                answerj = json.loads(answer)
                resp["data"]["answer"] = answerj["content"]
                resp["data"]["chatId"] = "0"
            print(f"💡 Response: {answer}")
            return resp
        except requests.exceptions.RequestException as e:
            # Transient disconnects happen from time to time; retry once.
            try:
                response = self.session.patch(url, json=payload)
                response.raise_for_status()
                data = response.json()
                answer = data.get('data', {}).get('answer', "No answer provided.")
                print(f"💡 Response: {answer}")
                return data
            except (requests.exceptions.RequestException, ValueError):
                print(f"❌ Failed to get response: {e}")
                return {"error": f"❌ Failed to get response: {e}"}
def list_personal_groups(self):
    """Return the caller's personal groups, or [] on any failure."""
    url = f"{self.base_url}/groups"
    try:
        resp = self.session.get(url)
        data_block = resp.json().get("data")
        if not data_block:
            return []
        return data_block.get("personalGroups", [])
    # The original caught httpcore.NetworkError, which requests never
    # raises, so real network failures escaped uncaught.
    except (requests.exceptions.RequestException, ValueError, KeyError):
        return []


def get_document_info(self, id):
    """Fetch details for a single source id; returns [] on failure.

    NOTE(review): parameter name 'id' shadows the builtin but is kept for
    backward compatibility with keyword callers.
    """
    url = f"{self.base_url}/sources/{id}"
    try:
        resp = self.session.get(url)
        data_block = resp.json().get("data")
        return data_block if data_block else []
    except (requests.exceptions.RequestException, ValueError, KeyError):
        return []


def query_private_gpt(self, user_input):
    """Send a follow-up question to the active chat.

    Returns the decoded response dict, an {"error": ...} dict on failure,
    or False when no chat has been initialized yet.  (The original
    '-> json' annotation was meaningless — json is a module, not a type.)
    """
    if not self.chat_id:
        print("❌ Chat session not initialized.")
        return False
    url = f"{self.base_url}/chats/{self.chat_id}"
    payload = {"question": user_input}
    try:
        response = self.session.patch(url, json=payload)
        resp = response.json()
        try:
            answer = resp.get('data', None).get('answer', "error")
        except AttributeError:
            print(resp)
            resp = {"data": {"answer": "error"}}
            answer = "error"
        # Some backends wrap the reply in a serialized chat message.
        if answer.startswith("{\"role\":"):
            answerj = json.loads(answer)
            resp["data"]["answer"] = answerj["content"]
            resp["data"]["chatId"] = "0"
        print(f"💡 Response: {answer}")
        return resp
    except requests.exceptions.RequestException as e:
        # Transient disconnects happen from time to time; retry once.
        try:
            response = self.session.patch(url, json=payload)
            response.raise_for_status()
            data = response.json()
            answer = data.get('data', {}).get('answer', "No answer provided.")
            print(f"💡 Response: {answer}")
            return data
        except (requests.exceptions.RequestException, ValueError):
            print(f"❌ Failed to get response: {e}")
            return {"error": f"❌ Failed to get response: {e}"}
def add_source(self, markdown, groups, name):
    """Create a new source from markdown content (PGPT >= 1.3.3)."""
    url = f"{self.base_url}/sources"
    try:
        payload = {"name": name, "groups": groups, "content": markdown}
        resp = self.session.post(url, json=payload)
        data_block = resp.json().get("data")
        return data_block if data_block else []
    except requests.exceptions.RequestException as e:
        print(f"❌ Failed to get response: {e}")
        return {"error": f"❌ Failed to get response: {e}"}


def update_source(self, source_id, markdown=None, groups=None, name=None):
    """Edit an existing source; fields left as None keep their value."""
    url = f"{self.base_url}/sources/{source_id}"
    try:
        payload = {}
        if groups is None:
            # Keep the current groups.  get_document_info returns [] on
            # failure; the original indexed it unconditionally and crashed.
            info = self.get_document_info(source_id)
            payload["groups"] = info.get("groups", []) if isinstance(info, dict) else []
        else:
            payload["groups"] = groups
        if markdown is not None:
            payload["content"] = markdown
        if name is not None:
            payload["name"] = name
        resp = self.session.patch(url, json=payload)
        data_block = resp.json().get("data")
        return data_block if data_block else []
    except requests.exceptions.RequestException as e:
        print(f"❌ Failed to get response: {e}")
        return {"error": f"❌ Failed to get response: {e}"}


def delete_source(self, source_id):
    """Delete a source by id; returns the server message or 'failed'."""
    url = f"{self.base_url}/sources/{source_id}"
    try:
        resp = self.session.delete(url)
        # .get avoids the KeyError the original raised on message-less replies.
        message = resp.json().get("message")
        return message if message else "failed"
    except requests.exceptions.RequestException as e:
        print(f"❌ Failed to get response: {e}")
        return {"error": f"❌ Failed to get response: {e}"}


def get_sources_from_group(self, group):
    """Return full document info for every source in *group*."""
    url = f"{self.base_url}/sources/groups"
    try:
        resp = self.session.post(url, json={"groupName": group})
        data_block = resp.json().get("data")
        if not data_block:
            return []
        return [self.get_document_info(source) for source in data_block["sources"]]
    except requests.exceptions.RequestException as e:
        print(f"❌ Failed to get response: {e}")
        return {"error": f"❌ Failed to get response: {e}"}


def respond_with_context(self, messages, response_format=None, request_tools=None):
    """Build one prompt from an OpenAI-style message list and query PGPT.

    messages: list of {"role": ..., "content": ...} dicts.
    """
    last_user_message = next((p for p in reversed(messages) if p["role"] == "user"), None)
    user_input = ""
    for message in messages:
        if message["role"] == "system":
            user_input = str(message) + "\n"
    if last_user_message is not None:
        user_input += last_user_message["content"]
    last_assistant_message = next((p for p in reversed(messages) if p["role"] == "assistant"), None)
    last_tool_message = next((p for p in reversed(messages) if p["role"] == "tool"), None)
    hastoolresult = False
    # BUG FIX: messages are dicts, so use key access (.get) — the original
    # used attribute access (.tool_calls/.content) and raised AttributeError.
    if (last_tool_message is not None and last_assistant_message is not None
            and last_assistant_message.get("tool_calls")):
        user_input += "\nYou called the tool: " + str(
            last_assistant_message["tool_calls"][0]) + ". The result was: " + last_tool_message["content"]
        hastoolresult = True
    print(f"💁 Request: " + user_input)
    # PGPT manages history and context itself, so the full history is not
    # forwarded (the original had a disabled add_context block for this).
    if response_format is not None:
        print("Response format: " + str(response_format))
        user_input += add_response_format(response_format)
    if request_tools is not None and not hastoolresult:
        user_input += add_tools(request_tools, last_tool_message)
    if not self.logged_in:
        # BUG FIX: the original only logged in here and then read an
        # unbound 'result'; log in and fall through to the query instead.
        self.logged_in = self.login()
    if self.chat_id is None:
        result = self.create_chat(user_input)
    else:
        result = self.query_private_gpt(user_input)
    if 'data' in result:
        response_data = result.get("data")
        if request_tools is not None and not hastoolresult and is_json(
                clean_response(response_data.get("answer"))):
            response_data["tool_call"] = clean_response(response_data.get("answer", ""))
        return response_data
    elif 'error' in result:
        # Re-login once and retry the query on error.
        if self.login():
            if self.chat_id is None:
                result = self.create_chat(user_input)
            else:
                result = self.query_private_gpt(user_input)
            if 'data' in result:
                return result['data']
        return result
    else:
        return result
def is_json(myjson):
    """Return True if *myjson* parses as JSON, else False."""
    try:
        json.loads(myjson)
    except ValueError:
        return False
    return True


def add_response_format(response_format):
    """Append an instruction asking the model to fill a JSON template."""
    prompt = "\nPlease fill in the following json template with realistic and appropriate information. In your reply, only return the generated json. If you can't answer return an empty json.\n"
    prompt += json.dumps(response_format)
    return prompt


def add_tools(response_tools, last_tool_message):
    """Append tool-selection instructions plus one JSON blob per tool.

    last_tool_message is accepted for interface compatibility; it is unused.
    (The original also kept a dead 'index' counter — removed.)
    """
    prompt = "\nPlease select the fitting provided tool to create your answer. Only return the generated result of the tool. Do not describe what you are doing, just return the json.\n"
    for tool in response_tools:
        prompt += "\n" + json.dumps(tool) + "\n"
    return prompt


def clean_response(response):
    """Strip known model artefacts from a reply."""
    return response.replace("[TOOL_CALLS]", "")


def decrypt_api_key(api_key):
    """Decode a base64 'email:password' credential into (email, password).

    PoC code: replace with a proper secret store in production.  Any input
    that cannot be decoded, or that lacks a ':' separator, yields
    ("invalid", "invalid") instead of raising (the original raised
    IndexError on colon-less input).  Passwords containing ':' are now
    preserved intact via partition().
    """
    try:
        decoded_key = base64.b64decode(api_key.encode("ascii")).decode("ascii")
    except Exception as e:
        print(e)
        decoded_key = "invalid:invalid"
    if ":" not in decoded_key:
        decoded_key = "invalid:invalid"
    email, _, password = decoded_key.partition(":")
    return email, password


def main():
    """Interactive console chat loop against the configured PGPT instance."""
    config_file = Path.absolute(Path(__file__).parent.parent / "pgpt_openai_api_proxy.json")
    config = Config(config_file=config_file, required_fields=["base_url"])
    chat = PrivateGPTAPI(config)
    print("Type your questions below. Type 'quit' to exit.")
    while True:
        try:
            question = input("❓ Question: ").strip()
            if question.lower() == 'quit':
                break
            if question:
                chat.query_private_gpt(question)
        except KeyboardInterrupt:
            print("\nExiting chat...")
            break
        except Exception as e:
            print(f"❌ Error: {str(e)}")
            break


if __name__ == "__main__":
    main()
def __init__(self, config, client_api_key=None):
    """Initialize the chat client with proxy authentication.

    config: mapping with at least "base_url"; client_api_key optionally
    carries base64 "email:password" credentials overriding the config.
    """
    self.token = None
    self.chat_id = None
    self.base_url = config.get("base_url")
    # Treat empty strings from the config file the same as "not set".
    self.proxy_user = config.get("proxy_user", None) or None
    self.proxy_password = config.get("proxy_password", None) or None
    self.access_header = config.get("access_header", None) or None
    self.chosen_groups = config.get("groups", [])
    self.language = config.get("language", "en")
    self.use_public = config.get("use_public", True)
    self.whitelist_keys = config.get("whitelist_keys", [])
    self.logged_in = False
    if client_api_key is not None:
        self.email, self.password = decrypt_api_key(client_api_key)
        if self.whitelist_keys and client_api_key not in self.whitelist_keys:
            print("not authorized")
    else:
        self.email = config.get("email", None)
        self.password = config.get("password", None)
    self.session = initialize_session(self.proxy_user, self.proxy_password,
                                      self.access_header)
    if self.login():
        self.logged_in = True


def login(self):
    """Authenticate and store the bearer token on the session.

    Returns True on success, False on any request/response failure.
    """
    url = f"{self.base_url}/login"
    payload = {"email": self.email, "password": self.password}
    try:
        response = self.session.post(url, json=payload)
        # Do NOT print response.content: it contains the auth token and
        # would leak it to stdout/logs (previous code did exactly that).
        response.raise_for_status()
        data = response.json()
        self.token = data['data']['token']
        # Rebuild (never append to) the Authorization header so repeated
        # logins do not accumulate stale ", Bearer ..." fragments.
        if self.access_header is None and self.proxy_user and self.proxy_password:
            basic = base64.b64encode(
                f"{self.proxy_user}:{self.proxy_password}".encode()).decode()
            self.session.headers['Authorization'] = f'Basic {basic}, Bearer {self.token}'
        else:
            self.session.headers['Authorization'] = f'Bearer {self.token}'
        self.chat_id = None
        print("✅ Login successful.")
        return True
    except (requests.exceptions.RequestException, KeyError, TypeError, ValueError) as e:
        print(f"❌ Login failed: {e}")
        return False


def create_chat(self, user_input):
    """Start a new chat session.

    POSTs to '/chats' with the initial question and stores the returned
    chat id for follow-up calls via query_private_gpt().
    """
    url = f"{self.base_url}/chats"
    payload = {
        "language": self.language,
        "question": user_input,
        "usePublic": self.use_public,
        "groups": self.chosen_groups,
    }
    try:
        response = self.session.post(url, json=payload)
        response.raise_for_status()
        resp = response.json()
        self.chat_id = resp['data']['chatId']
        print("✅ Chat initialized.")
        try:
            answer = resp.get('data', None).get('answer', "error")
        except AttributeError:
            print(resp)
            resp = {"data": {"answer": "error"}}
            answer = "error"
        # Some backends wrap the reply in a serialized chat message.
        if answer.startswith("{\"role\":"):
            answerj = json.loads(answer)
            resp["data"]["answer"] = answerj["content"]
            resp["data"]["chatId"] = "0"
        print(f"💡 Response: {answer}")
        return resp
    except requests.exceptions.RequestException as e:
        # Transient disconnects happen from time to time; retry once.
        try:
            response = self.session.patch(url, json=payload)
            response.raise_for_status()
            data = response.json()
            answer = data.get('data', {}).get('answer', "No answer provided.")
            print(f"💡 Response: {answer}")
            return data
        except (requests.exceptions.RequestException, ValueError):
            print(f"❌ Failed to get response: {e}")
            return {"error": f"❌ Failed to get response: {e}"}
def list_personal_groups(self):
    """Return the caller's personal groups, or [] on any failure."""
    url = f"{self.base_url}/groups"
    try:
        resp = self.session.get(url)
        data_block = resp.json().get("data")
        if not data_block:
            return []
        return data_block.get("personalGroups", [])
    # The original nested a bare except inside a handler for
    # httpcore.NetworkError, which requests never raises; catch the
    # exceptions that can actually occur.
    except (requests.exceptions.RequestException, ValueError, KeyError):
        return []


def get_document_info(self, id):
    """Fetch details for a single source id; returns [] on failure.

    NOTE(review): parameter name 'id' shadows the builtin but is kept for
    backward compatibility with keyword callers.
    """
    url = f"{self.base_url}/sources/{id}"
    try:
        resp = self.session.get(url)
        data_block = resp.json().get("data")
        return data_block if data_block else []
    except (requests.exceptions.RequestException, ValueError, KeyError):
        return []


def query_private_gpt(self, user_input):
    """Send a follow-up question to the active chat.

    Returns the decoded response dict, an {"error": ...} dict on failure,
    or False when no chat has been initialized yet.
    """
    if not self.chat_id:
        print("❌ Chat session not initialized.")
        return False
    url = f"{self.base_url}/chats/{self.chat_id}"
    payload = {"question": user_input}
    try:
        response = self.session.patch(url, json=payload)
        resp = response.json()
        try:
            answer = resp.get('data', None).get('answer', "error")
        except AttributeError:
            print(resp)
            resp = {"data": {"answer": "error"}}
            answer = "error"
        if answer.startswith("{\"role\":"):
            answerj = json.loads(answer)
            resp["data"]["answer"] = answerj["content"]
            resp["data"]["chatId"] = "0"
        print(f"💡 Response: {answer}")
        return resp
    except requests.exceptions.RequestException as e:
        # Transient disconnects happen from time to time; retry once.
        try:
            response = self.session.patch(url, json=payload)
            response.raise_for_status()
            data = response.json()
            answer = data.get('data', {}).get('answer', "No answer provided.")
            print(f"💡 Response: {answer}")
            return data
        except (requests.exceptions.RequestException, ValueError):
            print(f"❌ Failed to get response: {e}")
            return {"error": f"❌ Failed to get response: {e}"}


def add_user(self, userName, userEmail, userPassword, userGroups):
    """Create a new (non-public) user with the 'documents' role.

    Parameter names kept camelCase for backward compatibility.
    """
    url = f"{self.base_url}/users"
    try:
        payload = {
            # required: name, email, password, usePublic
            "name": userName,
            "email": userEmail,
            "password": userPassword,
            "usePublic": False,
            "language": "en",          # optional - defaults to "en"
            # "timezone": "UTC",       # optional - defaults to "Europe/Berlin"
            "groups": userGroups,
            "roles": ["documents"],
            # "activateFtp": true,
            # "ftpPassword": "myFTP-Password1337"
        }
        resp = self.session.post(url, json=payload)
        # .get avoids the KeyError the original raised on message-less replies.
        message = resp.json().get("message")
        return message if message else "failed"
    except requests.exceptions.RequestException as e:
        print(f"❌ Failed to get response: {e}")
        return {"error": f"❌ Failed to get response: {e}"}


def add_source(self, markdown, groups, name):
    """Create a new source from markdown content (PGPT >= 1.3.3).

    (The original docstring wrongly described this as a lookup.)
    """
    url = f"{self.base_url}/sources"
    try:
        payload = {"name": name, "groups": groups, "content": markdown}
        resp = self.session.post(url, json=payload)
        data_block = resp.json().get("data")
        return data_block if data_block else []
    except requests.exceptions.RequestException as e:
        print(f"❌ Failed to get response: {e}")
        return {"error": f"❌ Failed to get response: {e}"}


def delete_source(self, source_id):
    """Delete a source by id; returns the server message or 'failed'."""
    url = f"{self.base_url}/sources/{source_id}"
    try:
        resp = self.session.delete(url)
        message = resp.json().get("message")
        return message if message else "failed"
    except requests.exceptions.RequestException as e:
        print(f"❌ Failed to get response: {e}")
        return {"error": f"❌ Failed to get response: {e}"}
Working with version 1.3.3 and newer""" url = f"{self.base_url}/sources/{source_id}" try: resp = self.session.delete(url) j = json.loads(resp.content) message = j["message"] if not message: return "failed" return message except requests.exceptions.RequestException as e: print(f"❌ Failed to get response: {e}") return {"error": f"❌ Failed to get response: {e}"} def get_sources_from_group(self, group): """Send a source id to retrieve details. Working with version 1.3.3 and newer""" url = f"{self.base_url}/sources/groups" try: payload = { "groupName": group } resp = self.session.post(url, json=payload) j = json.loads(resp.content) data_block = j["data"] if not data_block: return [] sources = [] for source in data_block["sources"]: doc = self.get_document_info(source) sources.append(doc) return sources except requests.exceptions.RequestException as e: print(f"❌ Failed to get response: {e}") return [] def respond_with_context(self, messages, response_format=None, request_tools=None): last_user_message = next((p for p in reversed(messages) if p["role"] == "user"), None) user_input = "" for message in messages: if message["role"] == "system": user_input = str(message) + "\n" if last_user_message is not None: user_input += last_user_message["content"] last_assistant_message = next((p for p in reversed(messages) if p["role"] == "assistant"), None) last_tool_message = next((p for p in reversed(messages) if p["role"] == "tool"), None) hastoolresult = False if last_tool_message is not None and last_assistant_message is not None and last_assistant_message.tool_calls is not None and len( last_assistant_message.tool_calls) > 0: user_input += "\nYou called the tool: " + str( last_assistant_message.tool_calls[0]) + ". The result was: " + last_tool_message.content hastoolresult = True print(f"💁 Request: " + user_input) # PGPT manages history and context itself so we don't need to forward the history. 
add_context = False if add_context: messages.pop() user_input += "\nHere is some context about the previous conversation:\n" for message in messages: user_input += f"{message.role}: {message.content}\n" if response_format is not None: print("Response format: " + str(response_format)) user_input += add_response_format(response_format) if request_tools is not None and not hastoolresult: user_input += add_tools(request_tools, last_tool_message) if not self.logged_in: self.login() else: if self.chat_id is None: result = self.create_chat(user_input) else: result = self.query_private_gpt(user_input) if 'data' in result: response_data = result.get("data") if request_tools is not None and not hastoolresult and is_json( clean_response(response_data.get("answer"))): response_data["tool_call"] = clean_response(response_data.get("answer", "")) return response_data elif 'error' in result: # Try to login again and send the query once more on error. if self.login(): if self.chat_id is None: result = self.create_chat(user_input) else: result = self.query_private_gpt(user_input) if 'data' in result: return result['data'] else: return result else: return result def is_json(myjson): try: json.loads(myjson) except ValueError as e: return False return True def add_response_format(response_format): # prompt = "\nPlease fill in the following template with realistic and appropriate information. Be creative. The field 'type' defines the output format. In your reply, only return the generated json\n" prompt = "\nPlease fill in the following json template with realistic and appropriate information. In your reply, only return the generated json. If you can't answer return an empty json.\n" prompt += json.dumps(response_format) return prompt def add_tools(response_tools, last_tool_message): prompt = "\nPlease select the fitting provided tool to create your answer. Only return the generated result of the tool. 
Do not describe what you are doing, just return the json.\n" index = 1 for tool in response_tools: prompt += "\n" + json.dumps(tool) + "\n" index += 1 return prompt def clean_response(response): # Remove artefacts from reply here response = response.replace("[TOOL_CALLS]", "") return response def decrypt_api_key(api_key): """ This is PoC code and methods should be replaced with a more secure way to deal with credentials (e.g. in a db) """ try: base64_bytes = api_key.encode("ascii") decoded_string_bytes = base64.b64decode(base64_bytes) decoded_key = decoded_string_bytes.decode("ascii") except Exception as e: print(e) decoded_key = "invalid:invalid" return decoded_key.split(":")[0], decoded_key.split(":")[1] def main(): """Main function to run the chat application.""" config_file = Path.absolute(Path(__file__).parent.parent / "pgpt_openai_api_proxy.json") config = Config(config_file=config_file, required_fields=["base_url"]) chat = PrivateGPTAPI(config) print("Type your questions below. Type 'quit' to exit.") while True: try: question = input("❓ Question: ").strip() if question.lower() == 'quit': break if question: chat.query_private_gpt(question) except KeyboardInterrupt: print("\nExiting chat...") break except Exception as e: print(f"❌ Error: {str(e)}") break if __name__ == "__main__": main() ```