This is page 5 of 11. Use http://codebase.md/tuananh/hyper-mcp?lines=true&page={x} to view the full context.
# Directory Structure
```
├── .cursor
│ └── rules
│ └── print-ctx-size.mdc
├── .dockerignore
├── .github
│ ├── renovate.json5
│ └── workflows
│ ├── ci.yml
│ ├── nightly.yml
│ └── release.yml
├── .gitignore
├── .gitmodules
├── .hadolint.yaml
├── .pre-commit-config.yaml
├── .windsurf
│ └── rules
│ ├── print-ctx-size.md
│ └── think.md
├── assets
│ ├── cursor-mcp-1.png
│ ├── cursor-mcp.png
│ ├── eval-py.jpg
│ └── logo.png
├── Cargo.lock
├── Cargo.toml
├── config.example.json
├── config.example.yaml
├── CREATING_PLUGINS.md
├── DEPLOYMENT.md
├── Dockerfile
├── examples
│ └── plugins
│ ├── v1
│ │ ├── arxiv
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── context7
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── crates-io
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── crypto-price
│ │ │ ├── Dockerfile
│ │ │ ├── go.mod
│ │ │ ├── go.sum
│ │ │ ├── main.go
│ │ │ ├── pdk.gen.go
│ │ │ └── README.md
│ │ ├── eval-py
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── fetch
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── fs
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── github
│ │ │ ├── .gitignore
│ │ │ ├── branches.go
│ │ │ ├── Dockerfile
│ │ │ ├── files.go
│ │ │ ├── gists.go
│ │ │ ├── go.mod
│ │ │ ├── go.sum
│ │ │ ├── issues.go
│ │ │ ├── main.go
│ │ │ ├── pdk.gen.go
│ │ │ ├── README.md
│ │ │ └── repo.go
│ │ ├── gitlab
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── gomodule
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── hash
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.lock
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── maven
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── meme-generator
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── generate_embedded.py
│ │ │ ├── README.md
│ │ │ ├── src
│ │ │ │ ├── embedded.rs
│ │ │ │ ├── lib.rs
│ │ │ │ └── pdk.rs
│ │ │ └── templates.json
│ │ ├── memory
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── myip
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.lock
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── qdrant
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ ├── pdk.rs
│ │ │ └── qdrant_client.rs
│ │ ├── qr-code
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.lock
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── serper
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── sqlite
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── think
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── time
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ ├── src
│ │ │ │ ├── lib.rs
│ │ │ │ └── pdk.rs
│ │ │ └── time.wasm
│ │ └── tool-list-changed
│ │ ├── .gitignore
│ │ ├── Cargo.toml
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ ├── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ └── tool_list_changed.wasm
│ └── v2
│ └── rstime
│ ├── .cargo
│ │ └── config.toml
│ ├── .gitignore
│ ├── Cargo.toml
│ ├── Dockerfile
│ ├── README.md
│ ├── rstime.wasm
│ └── src
│ ├── lib.rs
│ └── pdk
│ ├── exports.rs
│ ├── imports.rs
│ ├── mod.rs
│ └── types.rs
├── iac
│ ├── .terraform.lock.hcl
│ ├── main.tf
│ ├── outputs.tf
│ └── variables.tf
├── justfile
├── LICENSE
├── README.md
├── RUNTIME_CONFIG.md
├── rust-toolchain.toml
├── server.json
├── SKIP_TOOLS_GUIDE.md
├── src
│ ├── cli.rs
│ ├── config.rs
│ ├── https_auth.rs
│ ├── logging.rs
│ ├── main.rs
│ ├── naming.rs
│ ├── plugin.rs
│ ├── service.rs
│ └── wasm
│ ├── http.rs
│ ├── mod.rs
│ ├── oci.rs
│ └── s3.rs
├── templates
│ └── plugins
│ ├── go
│ │ ├── .gitignore
│ │ ├── Dockerfile
│ │ ├── exports.go
│ │ ├── go.mod
│ │ ├── go.sum
│ │ ├── imports.go
│ │ ├── main.go
│ │ ├── README.md
│ │ └── types.go
│ ├── README.md
│ └── rust
│ ├── .cargo
│ │ └── config.toml
│ ├── .gitignore
│ ├── Cargo.toml
│ ├── Dockerfile
│ ├── README.md
│ └── src
│ ├── lib.rs
│ └── pdk
│ ├── exports.rs
│ ├── imports.rs
│ ├── mod.rs
│ └── types.rs
├── tests
│ └── fixtures
│ ├── config_with_auths.json
│ ├── config_with_auths.yaml
│ ├── documentation_example.json
│ ├── documentation_example.yaml
│ ├── invalid_auth_config.yaml
│ ├── invalid_plugin_name.yaml
│ ├── invalid_structure.yaml
│ ├── invalid_url.yaml
│ ├── keyring_auth_config.yaml
│ ├── skip_tools_examples.yaml
│ ├── unsupported_config.txt
│ ├── valid_config.json
│ └── valid_config.yaml
└── xtp-plugin-schema.json
```
# Files
--------------------------------------------------------------------------------
/examples/plugins/v1/github/repo.go:
--------------------------------------------------------------------------------
```go
1 | package main
2 |
3 | import (
4 | "encoding/json"
5 | "fmt"
6 | "strings"
7 |
8 | "github.com/extism/go-pdk"
9 | )
10 |
11 | var (
12 | GetRepositoryContributorsTool = ToolDescription{
13 | Name: "gh-get-repo-contributors",
14 | Description: "Get the list of contributors for a GitHub repository, including their contributions count and profile details",
15 | InputSchema: schema{
16 | "type": "object",
17 | "properties": props{
18 | "owner": prop("string", "The owner of the repository"),
19 | "repo": prop("string", "The repository name"),
20 | "per_page": prop("integer", "Number of results per page (max 100)"),
21 | "page": prop("integer", "Page number for pagination"),
22 | },
23 | "required": []string{"owner", "repo"},
24 | },
25 | }
26 | GetRepositoryCollaboratorsTool = ToolDescription{
27 | Name: "gh-get-repo-collaborators",
28 | Description: "Get the list of collaborators for a GitHub repository, including their permissions and profile details",
29 | InputSchema: schema{
30 | "type": "object",
31 | "properties": props{
32 | "owner": prop("string", "The owner of the repository"),
33 | "repo": prop("string", "The repository name"),
34 | "per_page": prop("integer", "Number of results per page (max 100)"),
35 | "page": prop("integer", "Page number for pagination"),
36 | },
37 | "required": []string{"owner", "repo"},
38 | },
39 | }
40 | GetRepositoryDetailsTool = ToolDescription{
41 | Name: "gh-get-repo-details",
42 | Description: "Get detailed information about a GitHub repository, including stars, forks, issues, and more",
43 | InputSchema: schema{
44 | "type": "object",
45 | "properties": props{
46 | "owner": prop("string", "The owner of the repository"),
47 | "repo": prop("string", "The repository name"),
48 | },
49 | "required": []string{"owner", "repo"},
50 | },
51 | }
52 | ListReposTool = ToolDescription{
53 | Name: "gh-list-repos",
54 | Description: "List repositories for a GitHub user or organization",
55 | InputSchema: schema{
56 | "type": "object",
57 | "properties": props{
58 | "username": prop("string", "The GitHub username or organization name"),
59 | "type": prop("string", "The type of repositories to list (all, owner, member)"),
60 | "sort": prop("string", "The sort field (created, updated, pushed, full_name)"),
61 | "direction": prop("string", "The sort direction (asc or desc)"),
62 | "per_page": prop("integer", "Number of results per page (max 100)"),
63 | "page": prop("integer", "Page number for pagination"),
64 | },
65 | "required": []string{"username"},
66 | },
67 | }
68 | RepoTools = []ToolDescription{
69 | GetRepositoryContributorsTool,
70 | GetRepositoryCollaboratorsTool,
71 | GetRepositoryDetailsTool,
72 | ListReposTool,
73 | }
74 | )
75 |
76 | type Contributor struct {
77 | Login string `json:"login"`
78 | ID int `json:"id"`
79 | NodeID string `json:"node_id"`
80 | AvatarURL string `json:"avatar_url"`
81 | GravatarID string `json:"gravatar_id"`
82 | URL string `json:"url"`
83 | HTMLURL string `json:"html_url"`
84 | FollowersURL string `json:"followers_url"`
85 | FollowingURL string `json:"following_url"`
86 | GistsURL string `json:"gists_url"`
87 | StarredURL string `json:"starred_url"`
88 | SubscriptionsURL string `json:"subscriptions_url"`
89 | OrganizationsURL string `json:"organizations_url"`
90 | ReposURL string `json:"repos_url"`
91 | EventsURL string `json:"events_url"`
92 | ReceivedEventsURL string `json:"received_events_url"`
93 | Type string `json:"type"`
94 | SiteAdmin bool `json:"site_admin"`
95 | Contributions int `json:"contributions"`
96 | }
97 |
98 | func reposGetContributors(apiKey string, owner, repo string, args map[string]interface{}) (CallToolResult, error) {
99 | baseURL := fmt.Sprintf("https://api.github.com/repos/%s/%s/contributors", owner, repo)
100 | params := make([]string, 0)
101 |
102 | // Pagination parameters
103 | perPage := 30 // Default value
104 | if value, ok := args["per_page"].(float64); ok {
105 | if value > 100 {
106 | perPage = 100 // Max value
107 | } else if value > 0 {
108 | perPage = int(value)
109 | }
110 | }
111 | params = append(params, fmt.Sprintf("per_page=%d", perPage))
112 |
113 | page := 1 // Default value
114 | if value, ok := args["page"].(float64); ok && value > 0 {
115 | page = int(value)
116 | }
117 | params = append(params, fmt.Sprintf("page=%d", page))
118 |
119 | // Build final URL
120 | url := baseURL
121 | if len(params) > 0 {
122 | url = fmt.Sprintf("%s?%s", baseURL, strings.Join(params, "&"))
123 | }
124 |
125 | pdk.Log(pdk.LogDebug, fmt.Sprint("Fetching contributors: ", url))
126 |
127 | // Make request
128 | req := pdk.NewHTTPRequest(pdk.MethodGet, url)
129 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
130 | req.SetHeader("Accept", "application/vnd.github+json")
131 | req.SetHeader("User-Agent", "github-mcpx-servlet")
132 |
133 | resp := req.Send()
134 | if resp.Status() != 200 {
135 | return CallToolResult{
136 | IsError: some(true),
137 | Content: []Content{{
138 | Type: ContentTypeText,
139 | Text: some(fmt.Sprintf("Failed to fetch contributors: %d %s", resp.Status(), string(resp.Body()))),
140 | }},
141 | }, nil
142 | }
143 |
144 | // Parse the response
145 | var contributors []Contributor
146 | if err := json.Unmarshal(resp.Body(), &contributors); err != nil {
147 | return CallToolResult{
148 | IsError: some(true),
149 | Content: []Content{{
150 | Type: ContentTypeText,
151 | Text: some(fmt.Sprintf("Failed to parse contributors: %s", err)),
152 | }},
153 | }, nil
154 | }
155 |
156 | // Marshal the response
157 | responseJSON, err := json.Marshal(contributors)
158 | if err != nil {
159 | return CallToolResult{
160 | IsError: some(true),
161 | Content: []Content{{
162 | Type: ContentTypeText,
163 | Text: some(fmt.Sprintf("Failed to marshal response: %s", err)),
164 | }},
165 | }, nil
166 | }
167 |
168 | return CallToolResult{
169 | Content: []Content{{
170 | Type: ContentTypeText,
171 | Text: some(string(responseJSON)),
172 | }},
173 | }, nil
174 | }
175 |
176 | type Collaborator struct {
177 | Login string `json:"login"`
178 | ID int `json:"id"`
179 | NodeID string `json:"node_id"`
180 | AvatarURL string `json:"avatar_url"`
181 | GravatarID string `json:"gravatar_id"`
182 | URL string `json:"url"`
183 | HTMLURL string `json:"html_url"`
184 | FollowersURL string `json:"followers_url"`
185 | FollowingURL string `json:"following_url"`
186 | GistsURL string `json:"gists_url"`
187 | StarredURL string `json:"starred_url"`
188 | SubscriptionsURL string `json:"subscriptions_url"`
189 | OrganizationsURL string `json:"organizations_url"`
190 | ReposURL string `json:"repos_url"`
191 | EventsURL string `json:"events_url"`
192 | ReceivedEventsURL string `json:"received_events_url"`
193 | Type string `json:"type"`
194 | SiteAdmin bool `json:"site_admin"`
195 | Permissions struct {
196 | Admin bool `json:"admin"`
197 | Push bool `json:"push"`
198 | Pull bool `json:"pull"`
199 | } `json:"permissions"`
200 | }
201 |
202 | func reposGetCollaborators(apiKey string, owner, repo string, args map[string]interface{}) (CallToolResult, error) {
203 | baseURL := fmt.Sprintf("https://api.github.com/repos/%s/%s/collaborators", owner, repo)
204 | params := make([]string, 0)
205 |
206 | // Pagination parameters
207 | perPage := 30 // Default value
208 | if value, ok := args["per_page"].(float64); ok {
209 | if value > 100 {
210 | perPage = 100 // Max value
211 | } else if value > 0 {
212 | perPage = int(value)
213 | }
214 | }
215 | params = append(params, fmt.Sprintf("per_page=%d", perPage))
216 |
217 | page := 1 // Default value
218 | if value, ok := args["page"].(float64); ok && value > 0 {
219 | page = int(value)
220 | }
221 | params = append(params, fmt.Sprintf("page=%d", page))
222 |
223 | // Build final URL
224 | url := baseURL
225 | if len(params) > 0 {
226 | url = fmt.Sprintf("%s?%s", baseURL, strings.Join(params, "&"))
227 | }
228 |
229 | pdk.Log(pdk.LogDebug, fmt.Sprint("Fetching collaborators: ", url))
230 |
231 | // Make request
232 | req := pdk.NewHTTPRequest(pdk.MethodGet, url)
233 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
234 | req.SetHeader("Accept", "application/vnd.github+json")
235 | req.SetHeader("User-Agent", "github-mcpx-servlet")
236 |
237 | resp := req.Send()
238 | if resp.Status() != 200 {
239 | return CallToolResult{
240 | IsError: some(true),
241 | Content: []Content{{
242 | Type: ContentTypeText,
243 | Text: some(fmt.Sprintf("Failed to fetch collaborators: %d %s", resp.Status(), string(resp.Body()))),
244 | }},
245 | }, nil
246 | }
247 |
248 | // Parse the response
249 | var collaborators []Collaborator
250 | if err := json.Unmarshal(resp.Body(), &collaborators); err != nil {
251 | return CallToolResult{
252 | IsError: some(true),
253 | Content: []Content{{
254 | Type: ContentTypeText,
255 | Text: some(fmt.Sprintf("Failed to parse collaborators: %s", err)),
256 | }},
257 | }, nil
258 | }
259 |
260 | // Marshal the response
261 | responseJSON, err := json.Marshal(collaborators)
262 | if err != nil {
263 | return CallToolResult{
264 | IsError: some(true),
265 | Content: []Content{{
266 | Type: ContentTypeText,
267 | Text: some(fmt.Sprintf("Failed to marshal response: %s", err)),
268 | }},
269 | }, nil
270 | }
271 |
272 | return CallToolResult{
273 | Content: []Content{{
274 | Type: ContentTypeText,
275 | Text: some(string(responseJSON)),
276 | }},
277 | }, nil
278 | }
279 |
280 | type RepositoryDetails struct {
281 | Name string `json:"name"`
282 | FullName string `json:"full_name"`
283 | Description string `json:"description"`
284 | Private bool `json:"private"`
285 | Owner struct {
286 | Login string `json:"login"`
287 | } `json:"owner"`
288 | HTMLURL string `json:"html_url"`
289 | Stargazers int `json:"stargazers_count"`
290 | Watchers int `json:"watchers_count"`
291 | Forks int `json:"forks_count"`
292 | OpenIssues int `json:"open_issues_count"`
293 | DefaultBranch string `json:"default_branch"`
294 | CreatedAt string `json:"created_at"`
295 | UpdatedAt string `json:"updated_at"`
296 | PushedAt string `json:"pushed_at"`
297 | }
298 |
299 | func reposGetDetails(apiKey string, owner, repo string) (CallToolResult, error) {
300 | url := fmt.Sprintf("https://api.github.com/repos/%s/%s", owner, repo)
301 | pdk.Log(pdk.LogDebug, fmt.Sprint("Fetching repository details: ", url))
302 |
303 | req := pdk.NewHTTPRequest(pdk.MethodGet, url)
304 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
305 | req.SetHeader("Accept", "application/vnd.github+json")
306 | req.SetHeader("User-Agent", "github-mcpx-servlet")
307 |
308 | resp := req.Send()
309 | if resp.Status() != 200 {
310 | return CallToolResult{
311 | IsError: some(true),
312 | Content: []Content{{
313 | Type: ContentTypeText,
314 | Text: some(fmt.Sprintf("Failed to fetch repository details: %d %s", resp.Status(), string(resp.Body()))),
315 | }},
316 | }, nil
317 | }
318 |
319 | var repoDetails RepositoryDetails
320 | if err := json.Unmarshal(resp.Body(), &repoDetails); err != nil {
321 | return CallToolResult{
322 | IsError: some(true),
323 | Content: []Content{{
324 | Type: ContentTypeText,
325 | Text: some(fmt.Sprintf("Failed to parse repository details: %s", err)),
326 | }},
327 | }, nil
328 | }
329 |
330 | responseJSON, err := json.Marshal(repoDetails)
331 | if err != nil {
332 | return CallToolResult{
333 | IsError: some(true),
334 | Content: []Content{{
335 | Type: ContentTypeText,
336 | Text: some(fmt.Sprintf("Failed to marshal response: %s", err)),
337 | }},
338 | }, nil
339 | }
340 |
341 | return CallToolResult{
342 | Content: []Content{{
343 | Type: ContentTypeText,
344 | Text: some(string(responseJSON)),
345 | }},
346 | }, nil
347 | }
348 |
349 | func reposList(apiKey string, username string, args map[string]interface{}) (CallToolResult, error) {
350 | baseURL := fmt.Sprintf("https://api.github.com/users/%s/repos", username)
351 | params := make([]string, 0)
352 |
353 | // Optional parameters
354 | if value, ok := args["type"].(string); ok && value != "" {
355 | params = append(params, fmt.Sprintf("type=%s", value))
356 | }
357 | if value, ok := args["sort"].(string); ok && value != "" {
358 | params = append(params, fmt.Sprintf("sort=%s", value))
359 | }
360 | if value, ok := args["direction"].(string); ok && value != "" {
361 | params = append(params, fmt.Sprintf("direction=%s", value))
362 | }
363 |
364 | // Pagination parameters
365 | perPage := 30 // Default value
366 | if value, ok := args["per_page"].(float64); ok {
367 | if value > 100 {
368 | perPage = 100 // Max value
369 | } else if value > 0 {
370 | perPage = int(value)
371 | }
372 | }
373 | params = append(params, fmt.Sprintf("per_page=%d", perPage))
374 |
375 | page := 1 // Default value
376 | if value, ok := args["page"].(float64); ok && value > 0 {
377 | page = int(value)
378 | }
379 | params = append(params, fmt.Sprintf("page=%d", page))
380 |
381 | // Build final URL
382 | url := baseURL
383 | if len(params) > 0 {
384 | url = fmt.Sprintf("%s?%s", baseURL, strings.Join(params, "&"))
385 | }
386 |
387 | pdk.Log(pdk.LogDebug, fmt.Sprint("Fetching repositories: ", url))
388 |
389 | // Make request
390 | req := pdk.NewHTTPRequest(pdk.MethodGet, url)
391 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
392 | req.SetHeader("Accept", "application/vnd.github+json")
393 | req.SetHeader("User-Agent", "github-mcpx-servlet")
394 |
395 | resp := req.Send()
396 | if resp.Status() != 200 {
397 | return CallToolResult{
398 | IsError: some(true),
399 | Content: []Content{{
400 | Type: ContentTypeText,
401 | Text: some(fmt.Sprintf("Failed to fetch repositories: %d %s", resp.Status(), string(resp.Body()))),
402 | }},
403 | }, nil
404 | }
405 |
406 | return CallToolResult{
407 | Content: []Content{{
408 | Type: ContentTypeText,
409 | Text: some(string(resp.Body())),
410 | }},
411 | }, nil
412 | }
413 |
```
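The helpers `schema`, `props`, `prop`, and `some` used throughout this plugin are defined in its shared/generated code (main.go / pdk.gen.go, shown on other pages of this export). Below is a minimal, hypothetical sketch of what they plausibly look like, for reading convenience only; the real definitions may differ.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Hypothetical approximations of the helpers used by repo.go; the actual
// definitions live in the plugin's main.go / pdk.gen.go and may differ.
type schema = map[string]interface{}
type props = map[string]SchemaProperty

// SchemaProperty mirrors the JSON-schema property shape used in the tool
// descriptions (type, description, and optional array items).
type SchemaProperty struct {
	Type        string  `json:"type"`
	Description string  `json:"description,omitempty"`
	Items       *schema `json:"items,omitempty"`
}

// prop builds a simple typed property with a human-readable description.
func prop(t, description string) SchemaProperty {
	return SchemaProperty{Type: t, Description: description}
}

// some returns a pointer to its argument, used for optional result fields
// such as CallToolResult.IsError and Content.Text.
func some[T any](v T) *T { return &v }

func main() {
	// Build the same kind of input schema as GetRepositoryDetailsTool above.
	s := schema{
		"type": "object",
		"properties": props{
			"owner": prop("string", "The owner of the repository"),
			"repo":  prop("string", "The repository name"),
		},
		"required": []string{"owner", "repo"},
	}
	out, _ := json.MarshalIndent(s, "", "  ")
	fmt.Println(string(out), *some(true))
}
```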
--------------------------------------------------------------------------------
/examples/plugins/v1/github/files.go:
--------------------------------------------------------------------------------
```go
1 | package main
2 |
3 | import (
4 | "encoding/base64"
5 | "encoding/json"
6 | "fmt"
7 | "net/url"
8 |
9 | "github.com/extism/go-pdk"
10 | )
11 |
12 | var (
13 | GetFileContentsTool = ToolDescription{
14 | Name: "gh-get-file-contents",
15 | Description: "Get the contents of a file or a directory in a GitHub repository",
16 | InputSchema: schema{
17 | "type": "object",
18 | "properties": props{
19 | "owner": prop("string", "The owner of the repository"),
20 | "repo": prop("string", "The repository name"),
21 | "path": prop("string", "The path of the file"),
22 | "branch": prop("string", "(optional string): Branch to get contents from"),
23 | },
24 | "required": []string{"owner", "repo", "path"},
25 | },
26 | }
27 | CreateOrUpdateFileTool = ToolDescription{
28 | Name: "gh-create-or-update-file",
29 | Description: "Create or update a file in a GitHub repository",
30 | InputSchema: schema{
31 | "type": "object",
32 | "properties": props{
33 | "owner": prop("string", "The owner of the repository"),
34 | "repo": prop("string", "The repository name"),
35 | "path": prop("string", "The path of the file"),
36 | "content": prop("string", "The content of the file"),
37 | "message": prop("string", "The commit message"),
38 | "branch": prop("string", "The branch name"),
39 | "sha": prop("string", "(optional) The sha of the file, required for updates"),
40 | },
41 | "required": []string{"owner", "repo", "path", "content", "message", "branch"},
42 | },
43 | }
44 | PushFilesTool = ToolDescription{
45 | Name: "gh-push-files",
46 | Description: "Push files to a GitHub repository",
47 | InputSchema: schema{
48 | "type": "object",
49 | "properties": props{
50 | "owner": prop("string", "The owner of the repository"),
51 | "repo": prop("string", "The repository name"),
52 | "branch": prop("string", "The branch name to push to"),
53 | "message": prop("string", "The commit message"),
54 | "files": SchemaProperty{
55 | Type: "array",
56 | Description: "Array of files to push",
57 | Items: &schema{
58 | "type": "object",
59 | "properties": props{
60 | "path": prop("string", "The path of the file"),
61 | "content": prop("string", "The content of the file"),
62 | },
63 | },
64 | },
65 | },
66 | },
67 | }
68 | FileTools = []ToolDescription{
69 | GetFileContentsTool,
70 | CreateOrUpdateFileTool,
71 | PushFilesTool,
72 | }
73 | )
74 |
75 | type FileCreate struct {
76 | Content string `json:"content"`
77 | Message string `json:"message"`
78 | Branch string `json:"branch"`
79 | Sha *string `json:"sha,omitempty"`
80 | }
81 |
82 | func fileCreateFromArgs(args map[string]interface{}) FileCreate {
83 | file := FileCreate{}
84 | if content, ok := args["content"].(string); ok {
85 | b64c := base64.StdEncoding.EncodeToString([]byte(content))
86 | file.Content = b64c
87 | }
88 | if message, ok := args["message"].(string); ok {
89 | file.Message = message
90 | }
91 | if branch, ok := args["branch"].(string); ok {
92 | file.Branch = branch
93 | }
94 | if sha, ok := args["sha"].(string); ok {
95 | file.Sha = some(sha)
96 | }
97 | return file
98 | }
99 |
100 | func filesCreateOrUpdate(apiKey string, owner string, repo string, path string, file FileCreate) (CallToolResult, error) {
101 | if file.Sha == nil {
102 | uc, err := filesGetContentsInternal(apiKey, owner, repo, path, &file.Branch)
103 | if err != nil {
104 | pdk.Log(pdk.LogDebug, "File does not exist, creating it")
105 | } else if !uc.isArray {
106 | sha := uc.FileContent.Sha
107 | file.Sha = &sha
108 | }
109 | }
110 |
111 | url := fmt.Sprint("https://api.github.com/repos/", owner, "/", repo, "/contents/", path)
112 | req := pdk.NewHTTPRequest(pdk.MethodPut, url)
113 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
114 | req.SetHeader("Accept", "application/vnd.github.v3+json")
115 | req.SetHeader("User-Agent", "github-mcpx-servlet")
116 |
117 | res, err := json.Marshal(file)
118 | if err != nil {
119 | return CallToolResult{
120 | IsError: some(true),
121 | Content: []Content{{
122 | Type: ContentTypeText,
123 | Text: some(fmt.Sprint("Failed to marshal file: ", err)),
124 | }},
125 | }, nil
126 | }
127 |
128 | req.SetBody([]byte(res))
129 | resp := req.Send()
130 | if resp.Status() != 200 && resp.Status() != 201 { // 200 = file updated, 201 = file created
131 | return CallToolResult{
132 | IsError: some(true),
133 | Content: []Content{{
134 | Type: ContentTypeText,
135 | Text: some(fmt.Sprint("Failed to create or update file: ", resp.Status(), " ", string(resp.Body()))),
136 | }},
137 | }, nil
138 | }
139 |
140 | return CallToolResult{
141 | Content: []Content{{
142 | Type: ContentTypeText,
143 | Text: some(string(resp.Body())),
144 | }},
145 | }, nil
146 |
147 | }
148 |
149 | type UnionContent struct {
150 | isArray bool
151 | FileContent FileContent
152 | DirectoryContents []DirectoryContent
153 | }
154 |
155 | type FileContent struct {
156 | Type string `json:"type"`
157 | Encoding string `json:"encoding"`
158 | Size int `json:"size"`
159 | Name string `json:"name"`
160 | Path string `json:"path"`
161 | Content string `json:"content"`
162 | Sha string `json:"sha"`
163 | Url string `json:"url"`
164 | GitUrl string `json:"git_url"`
165 | HtmlUrl string `json:"html_url"`
166 | DownloadUrl string `json:"download_url"`
167 | }
168 |
169 | type DirectoryContent struct {
170 | Type string `json:"type"`
171 | Size int `json:"size"`
172 | Name string `json:"name"`
173 | Path string `json:"path"`
174 | Sha string `json:"sha"`
175 | Url string `json:"url"`
176 | GitUrl string `json:"git_url"`
177 | HtmlUrl string `json:"html_url"`
178 | DownloadUrl *string `json:"download_url"`
179 | }
180 |
181 | func filesGetContents(apiKey string, owner string, repo string, path string, branch *string) CallToolResult {
182 | res, err := filesGetContentsInternal(apiKey, owner, repo, path, branch)
183 | if err == nil {
184 | var v []byte
185 | if res.isArray {
186 | v, err = json.Marshal(res.DirectoryContents)
187 | } else {
188 | v, err = json.Marshal(res.FileContent)
189 | }
190 | if err == nil {
191 | return CallToolResult{
192 | Content: []Content{{
193 | Type: ContentTypeText,
194 | Text: some(string(v)),
195 | }},
196 | }
197 | }
198 | }
199 | return CallToolResult{
200 | IsError: some(true),
201 | Content: []Content{{
202 | Type: ContentTypeText,
203 | Text: some(err.Error()),
204 | }},
205 | }
206 | }
207 |
208 | func filesGetContentsInternal(apiKey string, owner string, repo string, path string, branch *string) (UnionContent, error) {
209 | u := fmt.Sprint("https://api.github.com/repos/", owner, "/", repo, "/contents/", path)
210 |
211 | params := url.Values{}
212 | if branch != nil {
213 | params.Add("ref", *branch)
214 | }
215 | u = fmt.Sprint(u, "?", params.Encode())
216 |
217 | req := pdk.NewHTTPRequest(pdk.MethodGet, u)
218 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
219 | req.SetHeader("Accept", "application/vnd.github.v3+json")
220 | req.SetHeader("User-Agent", "github-mcpx-servlet")
221 |
222 | resp := req.Send()
223 | if resp.Status() != 200 {
224 | return UnionContent{}, fmt.Errorf("Failed to get file contents: %d %s (%s)", resp.Status(), string(resp.Body()), u)
225 | }
226 |
227 | // attempt to parse this as a file
228 | uc := UnionContent{}
229 | fc := &uc.FileContent
230 | if err := json.Unmarshal(resp.Body(), fc); err == nil {
231 | decoded, _ := base64.StdEncoding.DecodeString(fc.Content)
232 | // replace it with the decoded content
233 | fc.Content = string(decoded)
234 | return uc, nil
235 | } else {
236 | // if it's not a file, try to parse it as a directory
237 | d := []DirectoryContent{}
238 | if err := json.Unmarshal(resp.Body(), &d); err != nil {
239 | return UnionContent{}, fmt.Errorf("Failed to unmarshal directory contents: %w", err)
240 | }
241 | uc.DirectoryContents = d
242 | uc.isArray = true
243 | return uc, nil
244 | }
245 | }
246 |
247 | type FileOperation struct {
248 | Path string `json:"path"`
249 | Content string `json:"content"`
250 | }
251 |
252 | func filePushFromArgs(args map[string]interface{}) []FileOperation {
253 | files := []FileOperation{}
254 | if f, ok := args["files"].([]interface{}); ok {
255 | for _, file := range f {
256 | if file, ok := file.(map[string]interface{}); ok {
257 | files = append(files, FileOperation{
258 | Path: file["path"].(string),
259 | Content: file["content"].(string),
260 | })
261 | }
262 | }
263 | }
264 | return files
265 | }
266 |
267 | func filesPush(apiKey, owner, repo, branch, message string, files []FileOperation) CallToolResult {
268 | url := fmt.Sprint("https://api.github.com/repos/", owner, "/", repo, "/git/refs/heads/", branch)
269 | req := pdk.NewHTTPRequest(pdk.MethodGet, url)
270 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
271 | req.SetHeader("Accept", "application/vnd.github.v3+json")
272 | req.SetHeader("User-Agent", "github-mcpx-servlet")
273 |
274 | resp := req.Send()
275 | if resp.Status() != 200 {
276 | return CallToolResult{
277 | IsError: some(true),
278 | Content: []Content{{
279 | Type: ContentTypeText,
280 | Text: some(fmt.Sprint("Failed to get branch: ", resp.Status())),
281 | }},
282 | }
283 | }
284 |
285 | ref := RefSchema{}
286 | json.Unmarshal(resp.Body(), &ref)
287 |
288 | commitSha := ref.Object.Sha
289 | if tree, err := createTree(apiKey, owner, repo, files, commitSha); err != nil {
290 | return CallToolResult{
291 | IsError: some(true),
292 | Content: []Content{{
293 | Type: ContentTypeText,
294 | Text: some(fmt.Sprint("Failed to create tree: ", err)),
295 | }},
296 | }
297 | } else if commit, err := createCommit(apiKey, owner, repo, message, tree.Sha, []string{commitSha}); err != nil {
298 | return CallToolResult{
299 | IsError: some(true),
300 | Content: []Content{{
301 | Type: ContentTypeText,
302 | Text: some(fmt.Sprint("Failed to create commit: ", err)),
303 | }},
304 | }
305 | } else {
306 | return updateRef(apiKey, owner, repo, "heads/"+branch, commit.Sha)
307 | }
308 | }
309 |
310 | type TreeSchema struct {
311 | BaseTree string `json:"base_tree,omitempty"`
312 | Tree []TreeEntry `json:"tree"`
313 | Truncated bool `json:"truncated,omitempty"`
314 | Url string `json:"url,omitempty"`
315 | Sha string `json:"sha,omitempty"`
316 | }
317 | type TreeEntry struct {
318 | Path string `json:"path"`
319 | Mode string `json:"mode"`
320 | Type string `json:"type"`
321 | Content string `json:"content,omitempty"`
322 | Size int `json:"size,omitempty"`
323 | Sha string `json:"sha,omitempty"`
324 | Url string `json:"url,omitempty"`
325 | }
326 |
327 | func createTree(apiKey, owner, repo string, files []FileOperation, baseTree string) (TreeSchema, error) {
328 | tree := TreeSchema{
329 | BaseTree: baseTree,
330 | Tree: []TreeEntry{},
331 | }
332 |
333 | for _, file := range files {
334 | tree.Tree = append(tree.Tree, TreeEntry{
335 | Path: file.Path, Mode: "100644", Type: "blob", Content: file.Content})
336 | }
337 |
338 | url := fmt.Sprint("https://api.github.com/repos/", owner, "/", repo, "/git/trees")
339 | req := pdk.NewHTTPRequest(pdk.MethodPost, url)
340 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
341 | req.SetHeader("Accept", "application/vnd.github.v3+json")
342 | req.SetHeader("User-Agent", "github-mcpx-servlet")
343 | req.SetHeader("Content-Type", "application/json")
344 |
345 | res, err := json.Marshal(tree)
346 |
347 | if err != nil {
348 | return TreeSchema{}, fmt.Errorf("Failed to marshal tree: %w", err)
349 | }
350 | req.SetBody(res)
351 |
352 | resp := req.Send()
353 | if resp.Status() != 201 {
354 | return TreeSchema{}, fmt.Errorf("Failed to create tree: %d %s", resp.Status(), string(resp.Body()))
355 | }
356 |
357 | ts := TreeSchema{}
358 | err = json.Unmarshal(resp.Body(), &ts)
359 | return ts, err
360 | }
361 |
362 | type Author struct {
363 | Name string `json:"name"`
364 | Email string `json:"email"`
365 | Date string `json:"date"`
366 | }
367 |
368 | type Commit struct {
369 | Sha string `json:"sha"`
370 | NodeID string `json:"node_id"`
371 | Url string `json:"url"`
372 | Author Author `json:"author"`
373 | Committer Author `json:"committer"`
374 | Message string `json:"message"`
375 | Tree []struct {
376 | Sha string `json:"sha"`
377 | Url string `json:"url"`
378 | } `json:"tree"`
379 | Parents []struct {
380 | Sha string `json:"sha"`
381 | Url string `json:"url"`
382 | } `json:"parents"`
383 | }
384 |
385 | func createCommit(apiKey, owner, repo, message, tree string, parents []string) (Commit, error) {
386 | commit := map[string]interface{}{
387 | "message": message,
388 | "tree": tree,
389 | "parents": parents,
390 | }
391 |
392 | url := fmt.Sprint("https://api.github.com/repos/", owner, "/", repo, "/git/commits")
393 | req := pdk.NewHTTPRequest(pdk.MethodPost, url)
394 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
395 | req.SetHeader("Accept", "application/vnd.github.v3+json")
396 | req.SetHeader("User-Agent", "github-mcpx-servlet")
397 | req.SetHeader("Content-Type", "application/json")
398 |
399 | res, _ := json.Marshal(commit)
400 | req.SetBody(res)
401 |
402 | resp := req.Send()
403 | if resp.Status() != 201 {
404 | return Commit{}, fmt.Errorf("Failed to create commit: %d %s", resp.Status(), string(resp.Body()))
405 | }
406 |
407 | cs := Commit{}
408 | json.Unmarshal(resp.Body(), &cs)
409 | return cs, nil
410 | }
411 |
412 | func updateRef(apiKey, owner, repo, ref, sha string) CallToolResult {
413 | url := fmt.Sprint("https://api.github.com/repos/", owner, "/", repo, "/git/refs/", ref)
414 | req := pdk.NewHTTPRequest(pdk.MethodPatch, url)
415 | req.SetHeader("Authorization", fmt.Sprint("token ", apiKey))
416 | req.SetHeader("Accept", "application/vnd.github.v3+json")
417 | req.SetHeader("User-Agent", "github-mcpx-servlet")
418 | req.SetHeader("Content-Type", "application/json")
419 |
420 | res, _ := json.Marshal(map[string]any{"sha": sha, "force": true})
421 | req.SetBody(res)
422 |
423 | resp := req.Send()
424 | if resp.Status() != 200 {
425 | return CallToolResult{
426 | IsError: some(true),
427 | Content: []Content{{
428 | Type: ContentTypeText,
429 | Text: some(fmt.Sprint("Failed to update ref: ", resp.Status(), " ", string(resp.Body()))),
430 | }},
431 | }
432 | }
433 |
434 | return CallToolResult{
435 | Content: []Content{{
436 | Type: ContentTypeText,
437 | Text: some(string(resp.Body())),
438 | }},
439 | }
440 | }
441 |
```
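For orientation, here is a hedged sketch of how the gh-push-files tool defined above might be dispatched from the plugin's call handler. The real wiring lives in main.go (not on this page), and the `apiKey` extraction from the plugin config is assumed; the sketch builds on `filesPush` and `filePushFromArgs` from files.go above plus the shared `CallToolResult` types.

```go
// Hypothetical dispatch for the gh-push-files tool; assumes the definitions
// from files.go above and the plugin's shared CallToolResult types.
func handlePushFiles(apiKey string, args map[string]interface{}) CallToolResult {
	owner, _ := args["owner"].(string)
	repo, _ := args["repo"].(string)
	branch, _ := args["branch"].(string)
	message, _ := args["message"].(string)

	// filesPush walks GitHub's Git data API:
	//   GET   /git/refs/heads/<branch>  -> current commit SHA
	//   POST  /git/trees                -> tree containing the new file contents
	//   POST  /git/commits              -> commit pointing at that tree
	//   PATCH /git/refs/heads/<branch>  -> move the branch ref to the new commit (force)
	return filesPush(apiKey, owner, repo, branch, message, filePushFromArgs(args))
}
```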
--------------------------------------------------------------------------------
/src/wasm/oci.rs:
--------------------------------------------------------------------------------
```rust
1 | use crate::config::{OciConfig, PluginName};
2 | use anyhow::{Result, anyhow};
3 | use docker_credential::{CredentialRetrievalError, DockerCredential};
4 | use flate2::read::GzDecoder;
5 | use oci_client::{
6 | Client, Reference, client::ClientConfig, manifest, manifest::OciDescriptor,
7 | secrets::RegistryAuth,
8 | };
9 | use sha2::{Digest, Sha256};
10 | use sigstore::{
11 | cosign::{
12 | ClientBuilder, CosignCapabilities,
13 | verification_constraint::{
14 | CertSubjectEmailVerifier, CertSubjectUrlVerifier, VerificationConstraintVec,
15 | cert_subject_email_verifier::StringVerifier,
16 | },
17 | verify_constraints,
18 | },
19 | errors::SigstoreVerifyConstraintsError,
20 | registry::{Auth, OciReference},
21 | trust::{ManualTrustRoot, TrustRoot, sigstore::SigstoreTrustRoot},
22 | };
23 | use std::{fs, io::Read, path::Path, str::FromStr};
24 | use tar::Archive;
25 | use tokio::sync::OnceCell;
26 | use url::Url;
27 |
28 | static OCI_CLIENT: OnceCell<Client> = OnceCell::const_new();
29 |
30 | fn build_auth(reference: &Reference) -> RegistryAuth {
31 | let server = reference
32 | .resolve_registry()
33 | .strip_suffix('/')
34 | .unwrap_or_else(|| reference.resolve_registry());
35 |
36 | // if cli.anonymous {
37 | // return RegistryAuth::Anonymous;
38 | // }
39 |
40 | match docker_credential::get_credential(server) {
41 | Err(CredentialRetrievalError::ConfigNotFound) => RegistryAuth::Anonymous,
42 | Err(CredentialRetrievalError::NoCredentialConfigured) => RegistryAuth::Anonymous,
43 | Err(e) => {
44 | tracing::info!("Error retrieving docker credentials: {e}. Using anonymous auth");
45 | RegistryAuth::Anonymous
46 | }
47 | Ok(DockerCredential::UsernamePassword(username, password)) => {
48 | tracing::info!("Found docker credentials");
49 | RegistryAuth::Basic(username, password)
50 | }
51 | Ok(DockerCredential::IdentityToken(_)) => {
52 | tracing::info!(
53 | "Cannot use contents of docker config, identity token not supported. Using anonymous auth"
54 | );
55 | RegistryAuth::Anonymous
56 | }
57 | }
58 | }
59 |
60 | pub async fn load_wasm(url: &Url, config: &OciConfig, plugin_name: &PluginName) -> Result<Vec<u8>> {
61 | let image_reference = url.as_str().strip_prefix("oci://").unwrap();
62 | let target_file_path = "/plugin.wasm";
63 | let mut hasher = Sha256::new();
64 | hasher.update(image_reference);
65 | let hash = hasher.finalize();
66 | let short_hash = &hex::encode(hash)[..7];
67 | let cache_dir = dirs::cache_dir()
68 | .map(|mut path| {
69 | path.push("hyper-mcp");
70 | path
71 | })
72 | .unwrap();
73 | std::fs::create_dir_all(&cache_dir)?;
74 |
75 | let local_output_path = cache_dir.join(format!("{plugin_name}-{short_hash}.wasm"));
76 | let local_output_path = local_output_path.to_str().unwrap();
77 |
78 | if let Err(e) =
79 | pull_and_extract_oci_image(config, image_reference, target_file_path, local_output_path)
80 | .await
81 | {
82 | tracing::error!("Error pulling oci plugin: {e}");
83 | return Err(anyhow::anyhow!("Failed to pull OCI plugin: {e}"));
84 | }
85 | tracing::info!("cache plugin `{plugin_name}` to : {local_output_path}");
86 | tokio::fs::read(local_output_path)
87 | .await
88 | .map_err(|e| e.into())
89 | }
90 |
91 | async fn setup_trust_repository(config: &OciConfig) -> Result<Box<dyn TrustRoot>> {
92 | if config.use_sigstore_tuf_data {
93 | // Use Sigstore TUF data from the official repository
94 | tracing::info!("Using Sigstore TUF data for verification");
95 | match SigstoreTrustRoot::new(None).await {
96 | Ok(repo) => return Ok(Box::new(repo)),
97 | Err(e) => {
98 | tracing::error!("Failed to initialize TUF trust repository: {e}");
99 | if !config.insecure_skip_signature {
100 | return Err(anyhow!(
101 | "Failed to initialize TUF trust repository and signature verification is required"
102 | ));
103 | }
104 | tracing::info!("Falling back to manual trust repository");
105 | }
106 | }
107 | }
108 |
109 | // Create a manual trust repository
110 | let mut data = ManualTrustRoot::default();
111 |
112 | // Add Rekor public keys if provided
113 | if let Some(rekor_keys_path) = &config.rekor_pub_keys {
114 | if rekor_keys_path.exists() {
115 | match fs::read(rekor_keys_path) {
116 | Ok(content) => {
117 | tracing::info!("Added Rekor public key");
118 | if let Some(path_str) = rekor_keys_path.to_str() {
119 | data.rekor_keys.insert(path_str.to_string(), content);
120 | tracing::info!("Added Rekor public key from: {}", path_str);
121 | }
122 | }
123 | Err(e) => tracing::warn!("Failed to read Rekor public keys file: {e}"),
124 | }
125 | } else {
126 | tracing::warn!("Rekor public keys file not found: {rekor_keys_path:?}");
127 | }
128 | }
129 |
130 | // Add Fulcio certificates if provided
131 | if let Some(fulcio_certs_path) = &config.fulcio_certs {
132 | if fulcio_certs_path.exists() {
133 | match fs::read(fulcio_certs_path) {
134 | Ok(content) => {
135 | let certificate = sigstore::registry::Certificate {
136 | encoding: sigstore::registry::CertificateEncoding::Pem,
137 | data: content,
138 | };
139 |
140 | match certificate.try_into() {
141 | Ok(cert) => {
142 | tracing::info!("Added Fulcio certificate");
143 | data.fulcio_certs.push(cert);
144 | }
145 | Err(e) => tracing::warn!("Failed to parse Fulcio certificate: {e}"),
146 | }
147 | }
148 | Err(e) => tracing::warn!("Failed to read Fulcio certificates file: {e}"),
149 | }
150 | } else {
151 | tracing::warn!("Fulcio certificates file not found: {fulcio_certs_path:?}");
152 | }
153 | }
154 |
155 | Ok(Box::new(data))
156 | }
157 |
158 | async fn verify_image_signature(config: &OciConfig, image_reference: &str) -> Result<bool> {
159 | tracing::info!("Verifying signature for {image_reference}");
160 |
161 | // Set up the trust repository based on CLI arguments
162 | let repo = setup_trust_repository(config).await?;
163 | let auth = &Auth::Anonymous;
164 |
165 | // Create a client builder
166 | let client_builder = ClientBuilder::default();
167 |
168 | // Create client with trust repository
169 | let client_builder = match client_builder.with_trust_repository(repo.as_ref()) {
170 | Ok(builder) => builder,
171 | Err(e) => return Err(anyhow!("Failed to set up trust repository: {e}")),
172 | };
173 |
174 | // Build the client
175 | let mut client = match client_builder.build() {
176 | Ok(client) => client,
177 | Err(e) => return Err(anyhow!("Failed to build Sigstore client: {e}")),
178 | };
179 |
180 | // Parse the reference
181 | let image_ref = match OciReference::from_str(image_reference) {
182 | Ok(reference) => reference,
183 | Err(e) => return Err(anyhow!("Invalid image reference: {e}")),
184 | };
185 |
186 | // Triangulate to find the signature image and source digest
187 | let (cosign_signature_image, source_image_digest) =
188 | match client.triangulate(&image_ref, auth).await {
189 | Ok((sig_image, digest)) => (sig_image, digest),
190 | Err(e) => {
191 | tracing::warn!("Failed to triangulate image: {e}");
192 | return Ok(false); // No signatures found
193 | }
194 | };
195 |
196 | // Get trusted signature layers
197 | let signature_layers = match client
198 | .trusted_signature_layers(auth, &source_image_digest, &cosign_signature_image)
199 | .await
200 | {
201 | Ok(layers) => layers,
202 | Err(e) => {
203 | tracing::warn!("Failed to get trusted signature layers: {e}");
204 | return Ok(false);
205 | }
206 | };
207 |
208 | if signature_layers.is_empty() {
209 | tracing::warn!("No valid signatures found for {image_reference}");
210 | return Ok(false);
211 | }
212 |
213 | // Build verification constraints based on CLI options
214 | let mut verification_constraints: VerificationConstraintVec = Vec::new();
215 |
216 | if let Some(cert_email) = &config.cert_email {
217 | let issuer = config
218 | .cert_issuer
219 | .as_ref()
220 | .map(|i| StringVerifier::ExactMatch(i.to_string()));
221 |
222 | verification_constraints.push(Box::new(CertSubjectEmailVerifier {
223 | email: StringVerifier::ExactMatch(cert_email.to_string()),
224 | issuer,
225 | }));
226 | }
227 |
228 | if let Some(cert_url) = &config.cert_url {
229 | match config.cert_issuer.as_ref() {
230 | Some(issuer) => {
231 | verification_constraints.push(Box::new(CertSubjectUrlVerifier {
232 | url: cert_url.to_string(),
233 | issuer: issuer.to_string(),
234 | }));
235 | }
236 | None => {
237 | tracing::warn!("'cert-issuer' is required when 'cert-url' is specified");
238 | }
239 | }
240 | }
241 |
242 | // Verify the constraints
243 | match verify_constraints(&signature_layers, verification_constraints.iter()) {
244 | Ok(()) => {
245 | tracing::info!("Signature verification successful for {image_reference}");
246 | Ok(true)
247 | }
248 | Err(SigstoreVerifyConstraintsError {
249 | unsatisfied_constraints,
250 | }) => {
251 | tracing::warn!(
252 | "Signature verification failed for {image_reference}: {unsatisfied_constraints:?}"
253 | );
254 | Ok(false)
255 | }
256 | }
257 | }
258 |
259 | async fn pull_and_extract_oci_image(
260 | config: &OciConfig,
261 | image_reference: &str,
262 | target_file_path: &str,
263 | local_output_path: &str,
264 | ) -> Result<(), Box<dyn std::error::Error>> {
265 | if Path::new(local_output_path).exists() {
266 | tracing::info!(
267 | "Plugin {image_reference} already cached at: {local_output_path}. Skipping downloading."
268 | );
269 | return Ok(());
270 | }
271 |
272 | tracing::info!("Pulling {image_reference} ...");
273 |
274 | let reference = Reference::try_from(image_reference)?;
275 | let auth = build_auth(&reference);
276 |
277 | // Verify the image signature if it's an OCI image and verification is enabled
278 | if !config.insecure_skip_signature {
279 | tracing::info!("Signature verification enabled for {image_reference}");
280 | match verify_image_signature(config, image_reference).await {
281 | Ok(verified) => {
282 | if !verified {
283 | return Err(format!(
284 | "No valid signatures found for the image {image_reference}"
285 | )
286 | .into());
287 | }
288 | }
289 | Err(e) => {
290 | return Err(format!("Image signature verification failed: {e}").into());
291 | }
292 | }
293 | } else {
294 | tracing::warn!("Signature verification disabled for {image_reference}");
295 | }
296 |
297 | let client = OCI_CLIENT
298 | .get_or_init(|| async { Client::new(ClientConfig::default()) })
299 | .await;
300 |
301 | // Accept both OCI and Docker manifest types
302 | let manifest = client
303 | .pull(
304 | &reference,
305 | &auth,
306 | vec![
307 | manifest::IMAGE_MANIFEST_MEDIA_TYPE,
308 | manifest::IMAGE_DOCKER_LAYER_GZIP_MEDIA_TYPE,
309 | manifest::IMAGE_LAYER_GZIP_MEDIA_TYPE,
310 | ],
311 | )
312 | .await?;
313 |
314 | for layer in manifest.layers.iter() {
315 | let mut buf = Vec::new();
316 | let desc = OciDescriptor {
317 | digest: layer.sha256_digest().clone(),
318 | media_type: "application/vnd.docker.image.rootfs.diff.tar.gzip".to_string(),
319 | ..Default::default()
320 | };
321 | client.pull_blob(&reference, &desc, &mut buf).await.unwrap();
322 |
323 | let gz_extract = GzDecoder::new(&buf[..]);
324 | let mut archive_extract = Archive::new(gz_extract);
325 |
326 | for entry_result in archive_extract.entries()? {
327 | match entry_result {
328 | Ok(mut entry) => {
329 | if let Ok(path) = entry.path() {
330 | let path_str = path.to_string_lossy();
331 | if path_str.ends_with(target_file_path) || path_str.ends_with("plugin.wasm")
332 | {
333 | if let Some(parent) = Path::new(local_output_path).parent() {
334 | fs::create_dir_all(parent)?;
335 | }
336 | let mut content = Vec::new();
337 | entry.read_to_end(&mut content)?;
338 | fs::write(local_output_path, content)?;
339 | tracing::info!("Successfully extracted to: {local_output_path}");
340 | return Ok(());
341 | }
342 | }
343 | }
344 | Err(e) => tracing::info!("Error during extraction: {e}"),
345 | }
346 | }
347 | }
348 |
349 | Err("Target file not found in any layer".into())
350 | }
351 |
```
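A rough sketch of how `load_wasm` above might be driven when a plugin's URL uses the `oci://` scheme. The actual call site lives elsewhere in src/ (e.g. src/wasm/mod.rs), and the way `OciConfig` and `PluginName` are obtained here is an assumption.

```rust
// Hypothetical caller; OciConfig and PluginName come from src/config.rs and
// are passed in rather than constructed, since their exact shape may differ.
use url::Url;

async fn fetch_plugin_bytes(name: &PluginName, cfg: &OciConfig) -> anyhow::Result<Vec<u8>> {
    // Any reference accepted by oci_client::Reference works once the
    // `oci://` prefix is stripped inside load_wasm; this image ref is a placeholder.
    let url = Url::parse("oci://ghcr.io/example/some-plugin:latest")?;

    // Pulls the image (verifying its cosign signature unless
    // cfg.insecure_skip_signature is set), extracts plugin.wasm into the
    // hyper-mcp cache directory, and returns the wasm bytes.
    load_wasm(&url, cfg, name).await
}
```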
--------------------------------------------------------------------------------
/src/plugin.rs:
--------------------------------------------------------------------------------
```rust
1 | use crate::config::PluginName;
2 | use async_trait::async_trait;
3 | use rmcp::{
4 | ErrorData as McpError,
5 | model::*,
6 | service::{NotificationContext, RequestContext, RoleServer},
7 | };
8 | use serde::{Deserialize, Serialize, de::DeserializeOwned};
9 | use serde_json::{Value, json};
10 | use std::{
11 | fmt::Debug,
12 | ops::Deref,
13 | sync::{Arc, Mutex},
14 | };
15 | use tokio_util::sync::CancellationToken;
16 |
17 | type PluginHandle = Arc<Mutex<extism::Plugin>>;
18 |
19 | #[derive(Clone, Debug, Serialize, Deserialize)]
20 | struct PluginRequestContext {
21 | pub id: NumberOrString,
22 | #[serde(rename = "_meta")]
23 | pub meta: Meta,
24 | }
25 |
26 | impl<'a> From<&'a RequestContext<RoleServer>> for PluginRequestContext {
27 | fn from(context: &'a RequestContext<RoleServer>) -> Self {
28 | PluginRequestContext {
29 | id: context.id.clone(),
30 | meta: context.meta.clone(),
31 | }
32 | }
33 | }
34 |
35 | #[derive(Clone, Debug, Serialize, Deserialize)]
36 | struct PluginNotificationContext {
37 | #[serde(rename = "_meta")]
38 | pub meta: Meta,
39 | }
40 |
41 | impl<'a> From<&'a NotificationContext<RoleServer>> for PluginNotificationContext {
42 | fn from(context: &'a NotificationContext<RoleServer>) -> Self {
43 | PluginNotificationContext {
44 | meta: context.meta.clone(),
45 | }
46 | }
47 | }
48 |
49 | #[async_trait]
50 | #[allow(unused_variables)]
51 | pub trait Plugin: Send + Sync + Debug {
52 | async fn call_tool(
53 | &self,
54 | request: CallToolRequestParam,
55 | context: RequestContext<RoleServer>,
56 | ) -> Result<CallToolResult, McpError>;
57 |
58 | async fn complete(
59 | &self,
60 | request: CompleteRequestParam,
61 | context: RequestContext<RoleServer>,
62 | ) -> Result<CompleteResult, McpError> {
63 | Ok(CompleteResult::default())
64 | }
65 |
66 | async fn get_prompt(
67 | &self,
68 | request: GetPromptRequestParam,
69 | context: RequestContext<RoleServer>,
70 | ) -> Result<GetPromptResult, McpError> {
71 | Err(McpError::method_not_found::<GetPromptRequestMethod>())
72 | }
73 |
74 | async fn list_prompts(
75 | &self,
76 | request: Option<PaginatedRequestParam>,
77 | context: RequestContext<RoleServer>,
78 | ) -> Result<ListPromptsResult, McpError> {
79 | Ok(ListPromptsResult::default())
80 | }
81 |
82 | async fn list_resources(
83 | &self,
84 | request: Option<PaginatedRequestParam>,
85 | context: RequestContext<RoleServer>,
86 | ) -> Result<ListResourcesResult, McpError> {
87 | Ok(ListResourcesResult::default())
88 | }
89 |
90 | async fn list_resource_templates(
91 | &self,
92 | request: Option<PaginatedRequestParam>,
93 | context: RequestContext<RoleServer>,
94 | ) -> Result<ListResourceTemplatesResult, McpError> {
95 | Ok(ListResourceTemplatesResult::default())
96 | }
97 |
98 | async fn list_tools(
99 | &self,
100 | request: Option<PaginatedRequestParam>,
101 | context: RequestContext<RoleServer>,
102 | ) -> Result<ListToolsResult, McpError>;
103 |
104 | fn name(&self) -> &PluginName;
105 |
106 | async fn on_roots_list_changed(
107 | &self,
108 | context: NotificationContext<RoleServer>,
109 | ) -> Result<(), McpError> {
110 | Ok(())
111 | }
112 |
113 | fn plugin(&self) -> &PluginHandle;
114 |
115 | async fn read_resource(
116 | &self,
117 | request: ReadResourceRequestParam,
118 | context: RequestContext<RoleServer>,
119 | ) -> Result<ReadResourceResult, McpError> {
120 | Err(McpError::method_not_found::<ReadResourceRequestMethod>())
121 | }
122 | }
123 |
124 | async fn call_plugin<R>(
125 | plugin: &dyn Plugin,
126 | name: &str,
127 | payload: String,
128 | ct: CancellationToken,
129 | ) -> Result<R, McpError>
130 | where
131 | R: DeserializeOwned + Send + 'static,
132 | {
133 | let plugin_name = plugin.name().to_string();
134 | if !function_exists_plugin(plugin, name) {
135 | return Err(McpError::invalid_request(
136 | format!("Method {name} not found for plugin {plugin_name}"),
137 | None,
138 | ));
139 | }
140 | let plugin = Arc::clone(plugin.plugin());
141 | let cancel_handle = {
142 | let guard = plugin.lock().unwrap();
143 | guard.cancel_handle()
144 | };
145 |
146 | let name = name.to_string();
147 | let mut join = tokio::task::spawn_blocking(move || {
148 | let mut plugin = plugin.lock().unwrap();
149 | let result: Result<String, extism::Error> = plugin.call(&name, payload);
150 | match result {
151 | Ok(res) => match serde_json::from_str::<R>(&res) {
152 | Ok(parsed) => Ok(parsed),
153 | Err(e) => Err(McpError::internal_error(
154 | format!("Failed to deserialize data: {e}"),
155 | None,
156 | )),
157 | },
158 | Err(e) => Err(McpError::internal_error(
159 | format!("Failed to call plugin: {e}"),
160 | None,
161 | )),
162 | }
163 | });
164 |
165 | tokio::select! {
166 | // Finished normally
167 | res = &mut join => {
168 | match res {
169 | Ok(Ok(result)) => Ok(result),
170 | Ok(Err(e)) => Err(e),
171 | Err(e) => Err(McpError::internal_error(
172 | format!("Failed to spawn blocking task for plugin {plugin_name}: {e}"),
173 | None,
174 | )),
175 | }
176 | }
177 |
178 | // Cancellation requested
179 | _ = ct.cancelled() => {
180 | if let Err(e) = cancel_handle.cancel() {
181 | tracing::error!("Failed to cancel plugin {plugin_name}: {e}");
182 | return Err(McpError::internal_error(
183 | format!("Failed to cancel plugin {plugin_name}: {e}"),
184 | None,
185 | ));
186 | }
187 | match tokio::time::timeout(std::time::Duration::from_millis(250), join).await {
188 | Ok(Ok(Ok(_))) => Err(McpError::internal_error(
189 | format!("Plugin {plugin_name} was cancelled"),
190 | None,
191 | )),
192 | Ok(Ok(Err(e))) => Err(McpError::internal_error(
193 | format!("Failed to execute plugin {plugin_name}: {e}"),
194 | None,
195 | )),
196 | Ok(Err(e)) => Err(McpError::internal_error(
197 | format!("Join error for plugin {plugin_name}: {e}"),
198 | None,
199 | )),
200 | Err(_) => Err(McpError::internal_error(
201 | format!("Timeout waiting for plugin {plugin_name} to cancel"),
202 | None,
203 | )),
204 | }
205 | }
206 | }
207 | }
208 |
209 | fn function_exists_plugin(plugin: &dyn Plugin, name: &str) -> bool {
210 | let plugin = Arc::clone(plugin.plugin());
211 | plugin.lock().unwrap().function_exists(name)
212 | }
213 |
214 | async fn notify_plugin(plugin: &dyn Plugin, name: &str, payload: String) -> Result<(), McpError> {
215 | let plugin_name = plugin.name().to_string();
216 | if !function_exists_plugin(plugin, name) {
217 | return Err(McpError::invalid_request(
218 | format!("Method {name} not found for plugin {plugin_name}"),
219 | None,
220 | ));
221 | }
222 | let plugin = Arc::clone(plugin.plugin());
223 | let name = name.to_string();
224 | tokio::task::spawn_blocking(move || {
225 | let mut plugin = plugin.lock().unwrap();
226 | let result: Result<String, extism::Error> = plugin.call(&name, payload);
227 | if let Err(e) = result {
228 | tracing::error!("Failed to notify plugin {plugin_name}: {e}");
229 | }
230 | });
231 | Ok(())
232 | }
233 |
234 | #[derive(Debug)]
235 | pub struct PluginBase {
236 | pub name: PluginName,
237 | pub plugin: PluginHandle,
238 | }
239 |
240 | #[derive(Debug)]
241 | pub struct PluginV1(pub PluginBase);
242 |
243 | impl Deref for PluginV1 {
244 | type Target = PluginBase;
245 |
246 | fn deref(&self) -> &Self::Target {
247 | &self.0
248 | }
249 | }
250 |
251 | #[async_trait]
252 | impl Plugin for PluginV1 {
253 | async fn call_tool(
254 | &self,
255 | request: CallToolRequestParam,
256 | context: RequestContext<RoleServer>,
257 | ) -> Result<CallToolResult, McpError> {
258 | call_plugin::<CallToolResult>(
259 | self,
260 | "call",
261 | serde_json::to_string(&json!({
262 | "params": request,
263 | }))
264 | .expect("Failed to serialize request"),
265 | context.ct,
266 | )
267 | .await
268 | }
269 |
270 | async fn list_tools(
271 | &self,
272 | _request: Option<PaginatedRequestParam>,
273 | context: RequestContext<RoleServer>,
274 | ) -> Result<ListToolsResult, McpError> {
275 | call_plugin::<ListToolsResult>(self, "describe", "".to_string(), context.ct).await
276 | }
277 |
278 | fn name(&self) -> &PluginName {
279 | &self.name
280 | }
281 |
282 | fn plugin(&self) -> &PluginHandle {
283 | &self.plugin
284 | }
285 | }
286 |
287 | impl PluginV1 {
288 | pub fn new(name: PluginName, plugin: PluginHandle) -> Self {
289 | Self(PluginBase { name, plugin })
290 | }
291 | }
292 |
293 | #[derive(Debug)]
294 | pub struct PluginV2(pub PluginBase);
295 |
296 | impl Deref for PluginV2 {
297 | type Target = PluginBase;
298 |
299 | fn deref(&self) -> &Self::Target {
300 | &self.0
301 | }
302 | }
303 |
304 | #[async_trait]
305 | impl Plugin for PluginV2 {
306 | async fn call_tool(
307 | &self,
308 | request: CallToolRequestParam,
309 | context: RequestContext<RoleServer>,
310 | ) -> Result<CallToolResult, McpError> {
311 | call_plugin::<CallToolResult>(
312 | self,
313 | "call_tool",
314 | serde_json::to_string(&json!({
315 | "request": request,
316 | "context": PluginRequestContext::from(&context),
317 | }))
318 | .expect("Failed to serialize request"),
319 | context.ct,
320 | )
321 | .await
322 | }
323 |
324 | async fn complete(
325 | &self,
326 | request: CompleteRequestParam,
327 | context: RequestContext<RoleServer>,
328 | ) -> Result<CompleteResult, McpError> {
329 | #[derive(Debug, Clone)]
330 | struct Helper(CompleteRequestParam);
331 |
332 | impl Serialize for Helper {
333 | fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
334 | where
335 | S: serde::Serializer,
336 | {
337 | let mut value = serde_json::to_value(&self.0).map_err(serde::ser::Error::custom)?;
338 |
339 | if let Value::Object(root) = &mut value
340 | && let Some(Value::Object(ref_obj)) = root.get_mut("ref")
341 | && let Some(Value::String(t)) = ref_obj.get_mut("type")
342 | && let Some(stripped) = t.strip_prefix("ref/")
343 | {
344 | *t = stripped.to_string();
345 | }
346 |
347 | value.serialize(serializer)
348 | }
349 | }
350 |
351 | call_plugin::<CompleteResult>(
352 | self,
353 | "complete",
354 | serde_json::to_string(&json!({
355 | "request": Helper(request),
356 | "context": PluginRequestContext::from(&context),
357 | }))
358 | .expect("Failed to serialize request"),
359 | context.ct,
360 | )
361 | .await
362 | }
363 |
364 | async fn get_prompt(
365 | &self,
366 | request: GetPromptRequestParam,
367 | context: RequestContext<RoleServer>,
368 | ) -> Result<GetPromptResult, McpError> {
369 | call_plugin::<GetPromptResult>(
370 | self,
371 | "get_prompt",
372 | serde_json::to_string(&json!({
373 | "request": request,
374 | "context": PluginRequestContext::from(&context),
375 | }))
376 | .expect("Failed to serialize request"),
377 | context.ct,
378 | )
379 | .await
380 | }
381 |
382 | async fn list_prompts(
383 | &self,
384 | _request: Option<PaginatedRequestParam>,
385 | context: RequestContext<RoleServer>,
386 | ) -> Result<ListPromptsResult, McpError> {
387 | if !function_exists_plugin(self, "list_prompts") {
388 | return Ok(ListPromptsResult::default());
389 | }
390 | call_plugin::<ListPromptsResult>(
391 | self,
392 | "list_prompts",
393 | serde_json::to_string(&json!({
394 | "context": PluginRequestContext::from(&context),
395 | }))
396 | .expect("Failed to serialize context"),
397 | context.ct,
398 | )
399 | .await
400 | }
401 |
402 | async fn list_resources(
403 | &self,
404 | _request: Option<PaginatedRequestParam>,
405 | context: RequestContext<RoleServer>,
406 | ) -> Result<ListResourcesResult, McpError> {
407 | if !function_exists_plugin(self, "list_resources") {
408 | return Ok(ListResourcesResult::default());
409 | }
410 | call_plugin::<ListResourcesResult>(
411 | self,
412 | "list_resources",
413 | serde_json::to_string(&json!({
414 | "context": PluginRequestContext::from(&context),
415 | }))
416 | .expect("Failed to serialize context"),
417 | context.ct,
418 | )
419 | .await
420 | }
421 |
422 | async fn list_resource_templates(
423 | &self,
424 | _request: Option<PaginatedRequestParam>,
425 | context: RequestContext<RoleServer>,
426 | ) -> Result<ListResourceTemplatesResult, McpError> {
427 | if !function_exists_plugin(self, "list_resource_templates") {
428 | return Ok(ListResourceTemplatesResult::default());
429 | }
430 | call_plugin::<ListResourceTemplatesResult>(
431 | self,
432 | "list_resource_templates",
433 | serde_json::to_string(&json!({
434 | "context": PluginRequestContext::from(&context),
435 | }))
436 | .expect("Failed to serialize context"),
437 | context.ct,
438 | )
439 | .await
440 | }
441 |
442 | async fn list_tools(
443 | &self,
444 | _request: Option<PaginatedRequestParam>,
445 | context: RequestContext<RoleServer>,
446 | ) -> Result<ListToolsResult, McpError> {
447 | if !function_exists_plugin(self, "list_tools") {
448 | return Ok(ListToolsResult::default());
449 | }
450 | call_plugin::<ListToolsResult>(
451 | self,
452 | "list_tools",
453 | serde_json::to_string(&json!({
454 | "context": PluginRequestContext::from(&context),
455 | }))
456 | .expect("Failed to serialize context"),
457 | context.ct,
458 | )
459 | .await
460 | }
461 |
462 | fn name(&self) -> &PluginName {
463 | &self.name
464 | }
465 |
466 | async fn on_roots_list_changed(
467 | &self,
468 | context: NotificationContext<RoleServer>,
469 | ) -> Result<(), McpError> {
470 | if !function_exists_plugin(self, "on_roots_list_changed") {
471 | return Ok(());
472 | }
473 | notify_plugin(
474 | self,
475 | "on_roots_list_changed",
476 | serde_json::to_string(&json!({
477 | "context": PluginNotificationContext::from(&context),
478 | }))
479 | .expect("Failed to serialize context"),
480 | )
481 | .await
482 | }
483 |
484 | fn plugin(&self) -> &PluginHandle {
485 | &self.plugin
486 | }
487 |
488 | async fn read_resource(
489 | &self,
490 | request: ReadResourceRequestParam,
491 | context: RequestContext<RoleServer>,
492 | ) -> Result<ReadResourceResult, McpError> {
493 | call_plugin::<ReadResourceResult>(
494 | self,
495 | "read_resource",
496 | serde_json::to_string(&json!({
497 | "request": request,
498 | "context": PluginRequestContext::from(&context),
499 | }))
500 | .expect("Failed to serialize request"),
501 | context.ct,
502 | )
503 | .await
504 | }
505 | }
506 |
507 | impl PluginV2 {
508 | pub fn new(name: PluginName, plugin: PluginHandle) -> Self {
509 | Self(PluginBase { name, plugin })
510 | }
511 | }
512 |
```
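A note on the two payload shapes used above: `PluginV1` serializes only the request under a `params` key and talks to a single pair of exports (`call`/`describe`), while `PluginV2` also serializes a request context and dispatches to per-capability exports (`call_tool`, `list_tools`, `get_prompt`, ...). The sketch below merely reconstructs those two JSON shapes with `serde_json`; the tool name and arguments are invented for illustration, and the fields of `PluginRequestContext` are not shown on this page.

```rust
use serde_json::json;

fn main() {
    // PluginV1: only the request is sent, nested under "params", to the "call" export.
    let v1_payload = json!({
        "params": {
            "name": "example_tool",          // hypothetical tool name
            "arguments": { "query": "demo" } // hypothetical arguments
        }
    });

    // PluginV2: the request is wrapped together with a serialized request context
    // and sent to per-capability exports such as "call_tool".
    let v2_payload = json!({
        "request": {
            "name": "example_tool",
            "arguments": { "query": "demo" }
        },
        "context": {} // PluginRequestContext fields are not shown on this page
    });

    println!("{v1_payload}");
    println!("{v2_payload}");
}
```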
--------------------------------------------------------------------------------
/examples/plugins/v1/context7/src/lib.rs:
--------------------------------------------------------------------------------
```rust
1 | mod pdk;
2 |
3 | use extism_pdk::*;
4 | use pdk::types::{
5 | CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription,
6 | };
7 | use serde_json::{Value as JsonValue, json};
8 | use urlencoding::encode;
9 |
10 | const CONTEXT7_API_BASE_URL: &str = "https://context7.com/api"; // Guessed API base URL
11 |
12 | pub(crate) fn call(input: CallToolRequest) -> Result<CallToolResult, Error> {
13 | match input.params.name.as_str() {
14 | "c7_resolve_library_id" => c7_resolve_library_id(input),
15 | "c7_get_library_docs" => c7_get_library_docs(input),
16 | _ => Ok(CallToolResult {
17 | is_error: Some(true),
18 | content: vec![Content {
19 | annotations: None,
20 | text: Some(format!("Unknown tool: {}", input.params.name)),
21 | mime_type: None,
22 | r#type: ContentType::Text,
23 | data: None,
24 | }],
25 | }),
26 | }
27 | }
28 |
29 | fn c7_resolve_library_id(input: CallToolRequest) -> Result<CallToolResult, Error> {
30 | let args = input.params.arguments.unwrap_or_default();
31 | let library_name_val = args.get("library_name").unwrap_or(&JsonValue::Null);
32 |
33 | if let JsonValue::String(library_name_as_query) = library_name_val {
34 | let encoded_query = encode(library_name_as_query);
35 | let url = format!(
36 | "{}/v1/search?query={}",
37 | CONTEXT7_API_BASE_URL, encoded_query
38 | );
39 |
40 | let mut req = HttpRequest::new(&url).with_method("GET");
41 | req.headers
42 | .insert("X-Context7-Source".to_string(), "mcp-server".to_string());
43 |
44 | match http::request::<()>(&req, None) {
45 | Ok(res) => {
46 | let body_str = String::from_utf8_lossy(&res.body()).to_string();
47 | if res.status_code() >= 200 && res.status_code() < 300 {
48 | match serde_json::from_str::<JsonValue>(&body_str) {
49 | Ok(parsed_json) => {
50 | let mut results_text_parts = Vec::new();
51 |
52 | // Check if the root is an object and has a "results" field which is an array
53 | if let Some(results_node) = parsed_json.get("results") {
54 | if let JsonValue::Array(results_array) = results_node {
55 | if results_array.is_empty() {
56 | results_text_parts.push(
57 | "No libraries found matching your query.".to_string(),
58 | );
59 | } else {
60 | for result_item in results_array {
61 | let mut item_details = Vec::new();
62 |
63 | let title = result_item
64 | .get("title")
65 | .and_then(JsonValue::as_str)
66 | .unwrap_or("N/A");
67 | item_details.push(format!("- Title: {}", title));
68 |
69 | let id = result_item
70 | .get("id")
71 | .and_then(JsonValue::as_str)
72 | .unwrap_or("N/A");
73 | item_details.push(format!(
74 | "- Context7-compatible library ID: {}",
75 | id
76 | ));
77 |
78 | let description = result_item
79 | .get("description")
80 | .and_then(JsonValue::as_str)
81 | .unwrap_or("N/A");
82 | item_details
83 | .push(format!("- Description: {}", description));
84 |
85 | if let Some(v) = result_item
86 | .get("totalSnippets")
87 | .and_then(JsonValue::as_i64)
88 | .filter(|&v| v >= 0)
89 | {
90 | item_details.push(format!("- Code Snippets: {}", v))
91 | }
92 |
93 | if let Some(v) = result_item
94 | .get("stars")
95 | .and_then(JsonValue::as_i64)
96 | .filter(|&v| v >= 0)
97 | {
98 | item_details.push(format!("- GitHub Stars: {}", v))
99 | }
100 |
101 | results_text_parts.push(item_details.join("\n"));
102 | }
103 | }
104 | } else {
105 | results_text_parts.push("API response 'results' field was not an array as expected.".to_string());
106 | }
107 | } else {
108 | results_text_parts.push(
109 | "API response did not contain a 'results' field as expected."
110 | .to_string(),
111 | );
112 | }
113 |
114 | let header = "Available Libraries (top matches):\n\nEach result includes information like:\n- Title: Library or package name\n- Context7-compatible library ID: Identifier (format: /org/repo)\n- Description: Short summary\n- Code Snippets: Number of available code examples (if available)\n- GitHub Stars: Popularity indicator (if available)\n\nFor best results, select libraries based on name match, popularity (stars), snippet coverage, and relevance to your use case.\n\n---\n";
115 | let final_text =
116 | format!("{}{}", header, results_text_parts.join("\n\n"));
117 |
118 | Ok(CallToolResult {
119 | is_error: None,
120 | content: vec![Content {
121 | annotations: None,
122 | text: Some(final_text),
123 | mime_type: Some("text/markdown".to_string()),
124 | r#type: ContentType::Text,
125 | data: None,
126 | }],
127 | })
128 | }
129 | Err(e) => {
130 | // Failed to parse the JSON body
131 | Ok(CallToolResult {
132 | is_error: Some(true),
133 | content: vec![Content {
134 | annotations: None,
135 | text: Some(format!(
136 | "Failed to parse API response JSON: {}. Body: {}",
137 | e, body_str
138 | )),
139 | mime_type: None,
140 | r#type: ContentType::Text,
141 | data: None,
142 | }],
143 | })
144 | }
145 | }
146 | } else {
147 | Ok(CallToolResult {
148 | is_error: Some(true),
149 | content: vec![Content {
150 | annotations: None,
151 | text: Some(format!(
152 | "API request failed with status {}: {}",
153 | res.status_code(),
154 | body_str
155 | )),
156 | mime_type: None,
157 | r#type: ContentType::Text,
158 | data: None,
159 | }],
160 | })
161 | }
162 | }
163 | Err(e) => Ok(CallToolResult {
164 | is_error: Some(true),
165 | content: vec![Content {
166 | annotations: None,
167 | text: Some(format!("HTTP request failed: {}", e)),
168 | mime_type: None,
169 | r#type: ContentType::Text,
170 | data: None,
171 | }],
172 | }),
173 | }
174 | } else {
175 | Ok(CallToolResult {
176 | is_error: Some(true),
177 | content: vec![Content {
178 | annotations: None,
179 | text: Some(
180 | "Missing required parameter: library_name (or not a string)".to_string(),
181 | ),
182 | mime_type: None,
183 | r#type: ContentType::Text,
184 | data: None,
185 | }],
186 | })
187 | }
188 | }
189 |
190 | fn c7_get_library_docs(input: CallToolRequest) -> Result<CallToolResult, Error> {
191 | let args = input.params.arguments.unwrap_or_default();
192 | let library_id_json_val = args
193 | .get("context7_compatible_library_id")
194 | .unwrap_or(&JsonValue::Null);
195 |
196 | if let JsonValue::String(original_id_str) = library_id_json_val {
197 | let mut id_for_path = original_id_str.clone();
198 | let mut folders_value_opt: Option<String> = None;
199 |
200 | if let Some(idx) = original_id_str.rfind("?folders=") {
201 | let (id_part, folders_part_with_query) = original_id_str.split_at(idx);
202 | id_for_path = id_part.to_string();
203 | folders_value_opt = Some(
204 | folders_part_with_query
205 | .trim_start_matches("?folders=")
206 | .to_string(),
207 | );
208 | }
209 |
210 | let mut query_params_vec = vec![format!(
211 | "context7CompatibleLibraryID={}",
212 | encode(original_id_str) // Use the original, full ID string for this query parameter
213 | )];
214 |
215 | if let Some(folders_val) = &folders_value_opt {
216 | if !folders_val.is_empty() {
217 | query_params_vec.push(format!("folders={}", encode(folders_val)));
218 | }
219 | }
220 |
221 | if let Some(JsonValue::String(topic_str)) = args.get("topic") {
222 | if !topic_str.is_empty() {
223 | // Ensure topic is not empty before adding
224 | query_params_vec.push(format!("topic={}", encode(topic_str)));
225 | }
226 | }
227 |
228 | if let Some(JsonValue::Number(tokens_num_json)) = args.get("tokens") {
229 | if let Some(tokens_f64) = tokens_num_json.as_f64() {
230 | query_params_vec.push(format!("tokens={}", tokens_f64 as i64));
231 | } else if let Some(tokens_i64) = tokens_num_json.as_i64() {
232 | query_params_vec.push(format!("tokens={}", tokens_i64));
233 | }
234 | }
235 |
236 | let final_id_for_path_segment = id_for_path.strip_prefix("/").unwrap_or(&id_for_path);
237 |
238 | let query_params = query_params_vec.join("&");
239 | let url = format!(
240 | "{}/v1/{}/?{}", // Corrected URL: ensure '?' before query parameters
241 | CONTEXT7_API_BASE_URL, final_id_for_path_segment, query_params
242 | );
243 |
244 | let mut req = HttpRequest::new(&url).with_method("GET");
245 | req.headers
246 | .insert("X-Context7-Source".to_string(), "mcp-server".to_string());
247 |
248 | match http::request::<()>(&req, None) {
249 | Ok(res) => {
250 | let body_str = String::from_utf8_lossy(&res.body()).to_string();
251 | if res.status_code() >= 200 && res.status_code() < 300 {
252 | // Directly use the body_str as markdown content
253 | Ok(CallToolResult {
254 | is_error: None,
255 | content: vec![Content {
256 | annotations: None,
257 | text: Some(body_str),
258 | mime_type: Some("text/markdown".to_string()), // Assuming it's still markdown
259 | r#type: ContentType::Text,
260 | data: None,
261 | }],
262 | })
263 | } else {
264 | Ok(CallToolResult {
265 | is_error: Some(true),
266 | content: vec![Content {
267 | annotations: None,
268 | text: Some(format!(
269 | "API request for docs (URL: {}) failed with status {}: {}",
270 | url,
271 | res.status_code(),
272 | body_str
273 | )),
274 | mime_type: None,
275 | r#type: ContentType::Text,
276 | data: None,
277 | }],
278 | })
279 | }
280 | }
281 | Err(e) => Ok(CallToolResult {
282 | is_error: Some(true),
283 | content: vec![Content {
284 | annotations: None,
285 | text: Some(format!("HTTP request for docs failed: {}, URL: {}", e, url)),
286 | mime_type: None,
287 | r#type: ContentType::Text,
288 | data: None,
289 | }],
290 | }),
291 | }
292 | } else {
293 | Ok(CallToolResult {
294 | is_error: Some(true),
295 | content: vec![Content {
296 | annotations: None,
297 | text: Some(
298 | "Missing required parameter: context7_compatible_library_id (or not a string)"
299 | .to_string(),
300 | ),
301 | mime_type: None,
302 | r#type: ContentType::Text,
303 | data: None,
304 | }],
305 | })
306 | }
307 | }
308 |
309 | pub(crate) fn describe() -> Result<ListToolsResult, Error> {
310 | Ok(ListToolsResult {
311 | tools: vec![
312 | ToolDescription {
313 | name: "c7_resolve_library_id".into(),
314 | description: "Resolves a package name to a Context7-compatible library ID and returns a list of matching libraries. You MUST call this function before 'c7_get_library_docs' to obtain a valid Context7-compatible library ID. When selecting the best match, consider: - Name similarity to the query - Description relevance - Code Snippet count (documentation coverage) - GitHub Stars (popularity) Return the selected library ID and explain your choice. If there are multiple good matches, mention this but proceed with the most relevant one.".into(),
315 | input_schema: json!({
316 | "type": "object",
317 | "properties": {
318 | "library_name": {
319 | "type": "string",
320 | "description": "Library name to search for and retrieve a Context7-compatible library ID.",
321 | },
322 | },
323 | "required": ["library_name"],
324 | })
325 | .as_object()
326 | .unwrap()
327 | .clone(),
328 | },
329 | ToolDescription {
330 | name: "c7_get_library_docs".into(),
331 | description: "Fetches up-to-date documentation for a library. You must call 'c7_resolve_library_id' first to obtain the exact Context7-compatible library ID required to use this tool.".into(),
332 | input_schema: json!({
333 | "type": "object",
334 | "properties": {
335 | "context7_compatible_library_id": {
336 | "type": "string",
337 | "description": "Exact Context7-compatible library ID (e.g., 'mongodb/docs', 'vercel/nextjs') retrieved from 'c7_resolve_library_id'.",
338 | },
339 | "topic": {
340 | "type": "string",
341 | "description": "Topic to focus documentation on (e.g., 'hooks', 'routing').",
342 | },
343 | "tokens": {
344 | "type": "integer",
345 | "description": "Maximum number of tokens of documentation to retrieve (default: 10000). Higher values provide more context but consume more tokens.",
346 | },
347 | },
348 | "required": ["context7_compatible_library_id"],
349 | })
350 | .as_object()
351 | .unwrap()
352 | .clone(),
353 | },
354 | ],
355 | })
356 | }
357 |
```
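To make the `describe()` schema above concrete, here is a small sketch of the argument objects a client would pass to the two tools: resolve a name first, then fetch docs with the resolved ID. The query string and topic are illustrative; `vercel/nextjs` is one of the example IDs quoted in the tool description itself.

```rust
use serde_json::json;

fn main() {
    // Step 1: c7_resolve_library_id — resolve a human-readable name to a library ID.
    // "next.js" is an illustrative query, not a value taken from the plugin source.
    let resolve_args = json!({ "library_name": "next.js" });

    // Step 2: c7_get_library_docs — fetch docs for the resolved ID. "topic" and
    // "tokens" are optional per the schema above.
    let docs_args = json!({
        "context7_compatible_library_id": "vercel/nextjs",
        "topic": "routing",
        "tokens": 10000
    });

    println!("{resolve_args}");
    println!("{docs_args}");
}
```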
--------------------------------------------------------------------------------
/examples/plugins/v1/qdrant/src/qdrant_client.rs:
--------------------------------------------------------------------------------
```rust
1 | use anyhow::{Error, anyhow, bail};
2 | use extism_pdk::*;
3 | use serde::{Deserialize, Serialize};
4 | use serde_json::json;
5 | use serde_json::{Map, Value};
6 | use std::collections::BTreeMap;
7 | use std::fmt::Display;
8 |
9 | #[derive(Debug, Clone, Serialize, Deserialize)]
10 | #[serde(untagged)]
11 | pub enum PointId {
12 | Uuid(String),
13 | Num(u64),
14 | }
15 | impl From<u64> for PointId {
16 | fn from(num: u64) -> Self {
17 | PointId::Num(num)
18 | }
19 | }
20 | impl From<String> for PointId {
21 | fn from(uuid: String) -> Self {
22 | PointId::Uuid(uuid)
23 | }
24 | }
25 | impl Display for PointId {
26 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
27 | match self {
28 | PointId::Uuid(uuid) => write!(f, "{}", uuid),
29 | PointId::Num(num) => write!(f, "{}", num),
30 | }
31 | }
32 | }
33 |
34 | /// The point struct.
35 | /// A point is a record consisting of a vector and an optional payload.
36 | #[derive(Debug, Serialize, Deserialize)]
37 | #[serde(rename_all = "camelCase")]
38 | pub struct Point {
39 | /// Id of the point
40 | pub id: PointId,
41 |
42 | /// Vectors
43 | pub vector: Vec<f32>,
44 |
45 | /// Additional information along with vectors
46 | pub payload: Option<Map<String, Value>>,
47 | }
48 |
49 | /// The point struct with the score returned by searching
50 | #[derive(Debug, Serialize, Deserialize)]
51 | #[serde(rename_all = "camelCase")]
52 | pub struct ScoredPoint {
53 | /// Id of the point
54 | pub id: PointId,
55 |
56 | /// Vectors
57 | pub vector: Option<Vec<f32>>,
58 |
59 | /// Additional information along with vectors
60 | pub payload: Option<Map<String, Value>>,
61 |
62 | /// Points vector distance to the query vector
63 | pub score: f32,
64 | }
65 |
66 | pub struct QdrantClient {
67 | url_base: String,
68 | api_key: Option<String>,
69 | }
70 |
71 | impl QdrantClient {
72 | pub fn new_with_url(url_base_: String) -> QdrantClient {
73 | QdrantClient {
74 | url_base: url_base_,
75 | api_key: None,
76 | }
77 | }
78 |
79 | pub fn new() -> QdrantClient {
80 | QdrantClient::new_with_url("http://localhost:6333".to_string())
81 | }
82 |
83 | pub fn set_api_key(&mut self, api_key: impl Into<String>) {
84 | self.api_key = Some(api_key.into());
85 | }
86 | }
87 |
88 | impl Default for QdrantClient {
89 | fn default() -> Self {
90 | Self::new()
91 | }
92 | }
93 |
94 | /// Shortcut functions
95 | impl QdrantClient {
96 | /// Returns the number of points stored in the collection.
97 | pub fn collection_info(&self, collection_name: &str) -> Result<u64, Error> {
98 | let v = self.collection_info_api(collection_name)?;
99 | v.get("result")
100 | .and_then(|v| v.get("points_count"))
101 | .and_then(|v| v.as_u64())
102 | .ok_or_else(|| anyhow!("[qdrant] Invalid response format"))
103 | }
104 |
105 | pub fn create_collection(&self, collection_name: &str, size: u32) -> Result<(), Error> {
106 | match self.collection_exists(collection_name)? {
107 | false => (),
108 | true => {
109 | let err_msg = format!("Collection '{}' already exists", collection_name);
110 | bail!(err_msg);
111 | }
112 | }
113 |
114 | let params = json!({
115 | "vectors": {
116 | "size": size,
117 | "distance": "Cosine",
118 | "on_disk": true,
119 | }
120 | });
121 | if !self.create_collection_api(collection_name, ¶ms)? {
122 | bail!("Failed to create collection '{}'", collection_name);
123 | }
124 | Ok(())
125 | }
126 |
127 | pub fn list_collections(&self) -> Result<Vec<String>, Error> {
128 | self.list_collections_api()
129 | }
130 |
131 | pub fn collection_exists(&self, collection_name: &str) -> Result<bool, Error> {
132 | let collection_names = self.list_collections()?;
133 | Ok(collection_names.contains(&collection_name.to_string()))
134 | }
135 |
136 | pub fn delete_collection(&self, collection_name: &str) -> Result<(), Error> {
137 | match self.collection_exists(collection_name)? {
138 | true => (),
139 | false => {
140 | let err_msg = format!("Not found collection '{}'", collection_name);
141 | bail!(err_msg);
142 | }
143 | }
144 |
145 | if !self.delete_collection_api(collection_name)? {
146 | bail!("Failed to delete collection '{}'", collection_name);
147 | }
148 | Ok(())
149 | }
150 |
151 | pub fn upsert_points(&self, collection_name: &str, points: Vec<Point>) -> Result<(), Error> {
152 | let params = json!({
153 | "points": points,
154 | });
155 | self.upsert_points_api(collection_name, ¶ms)
156 | }
157 |
158 | pub fn search_points(
159 | &self,
160 | collection_name: &str,
161 | vector: Vec<f32>,
162 | limit: u64,
163 | score_threshold: Option<f32>,
164 | ) -> Result<Vec<ScoredPoint>, Error> {
165 | let score_threshold = score_threshold.unwrap_or(0.0);
166 |
167 | let params = json!({
168 | "vector": vector,
169 | "limit": limit,
170 | "with_payload": true,
171 | "with_vector": true,
172 | "score_threshold": score_threshold,
173 | });
174 |
175 | match self.search_points_api(collection_name, ¶ms) {
176 | Ok(v) => match v.get("result") {
177 | Some(v) => match v.as_array() {
178 | Some(rs) => {
179 | let mut sps: Vec<ScoredPoint> = Vec::<ScoredPoint>::new();
180 | for r in rs {
181 | let sp: ScoredPoint = serde_json::from_value(r.clone())?;
182 | sps.push(sp);
183 | }
184 | Ok(sps)
185 | }
186 | None => {
187 | bail!(
188 | "[qdrant] The value corresponding to the 'result' key is not an array."
189 | )
190 | }
191 | },
192 | None => Ok(vec![]),
193 | },
194 | Err(_) => Ok(vec![]),
195 | }
196 | }
197 |
198 | pub fn get_points(&self, collection_name: &str, ids: &[PointId]) -> Result<Vec<Point>, Error> {
199 | let params = json!({
200 | "ids": ids,
201 | "with_payload": true,
202 | "with_vector": true,
203 | });
204 |
205 | let v = self.get_points_api(collection_name, ¶ms)?;
206 | let rs = v
207 | .get("result")
208 | .and_then(|v| v.as_array())
209 | .ok_or_else(|| anyhow!("[qdrant] Invalid response format"))?;
210 |
211 | let mut ps: Vec<Point> = Vec::new();
212 | for r in rs {
213 | let p: Point = serde_json::from_value(r.clone())?;
214 | ps.push(p);
215 | }
216 | Ok(ps)
217 | }
218 |
219 | pub fn get_point(&self, collection_name: &str, id: &PointId) -> Result<Point, Error> {
220 | let v = self.get_point_api(collection_name, id)?;
221 | let r = v
222 | .get("result")
223 | .ok_or_else(|| anyhow!("[qdrant] Invalid response format"))?;
224 | Ok(serde_json::from_value(r.clone())?)
225 | }
226 |
227 | pub fn delete_points(&self, collection_name: &str, ids: &[PointId]) -> Result<(), Error> {
228 | let params = json!({
229 | "points": ids,
230 | });
231 | self.delete_points_api(collection_name, ¶ms)
232 | }
233 |
234 | /// REST API functions
235 | pub fn collection_info_api(&self, collection_name: &str) -> Result<Value, Error> {
236 | let url = format!("{}/collections/{}", self.url_base, collection_name);
237 |
238 | let mut headers = BTreeMap::new();
239 | headers.insert("Content-Type".to_string(), "application/json".to_string());
240 | if let Some(api_key) = &self.api_key {
241 | headers.insert("api-key".to_string(), api_key.clone());
242 | }
243 |
244 | let response: HttpResponse = http::request::<()>(
245 | &HttpRequest {
246 | url: url.clone(),
247 | headers,
248 | method: Some("GET".to_string()),
249 | },
250 | None,
251 | )?;
252 |
253 | let json: Value = serde_json::from_slice(&response.body())?;
254 | Ok(json)
255 | }
256 |
257 | pub fn create_collection_api(
258 | &self,
259 | collection_name: &str,
260 | params: &Value,
261 | ) -> Result<bool, Error> {
262 | let url = format!("{}/collections/{}", self.url_base, collection_name);
263 | let mut headers = BTreeMap::new();
264 | headers.insert("Content-Type".to_string(), "application/json".to_string());
265 | if let Some(api_key) = &self.api_key {
266 | headers.insert("api-key".to_string(), api_key.clone());
267 | }
268 |
269 | let body = serde_json::to_vec(params)?;
270 | let response = http::request::<Vec<u8>>(
271 | &HttpRequest {
272 | url: url.clone(),
273 | headers,
274 | method: Some("PUT".to_string()),
275 | },
276 | Some(body),
277 | )?;
278 |
279 | let json: Value = serde_json::from_slice(&response.body())?;
280 | let success = json
281 | .get("result")
282 | .and_then(|v| v.as_bool())
283 | .ok_or_else(|| anyhow!("[qdrant] Invalid response format"))?;
284 | Ok(success)
285 | }
286 |
287 | pub fn list_collections_api(&self) -> Result<Vec<String>, Error> {
288 | let url = format!("{}/collections", self.url_base);
289 | let mut headers = BTreeMap::new();
290 | headers.insert("Content-Type".to_string(), "application/json".to_string());
291 | if let Some(api_key) = &self.api_key {
292 | headers.insert("api-key".to_string(), api_key.clone());
293 | }
294 |
295 | let response = http::request::<()>(
296 | &HttpRequest {
297 | url: url.clone(),
298 | headers,
299 | method: Some("GET".to_string()),
300 | },
301 | None,
302 | )?;
303 |
304 | let json: Value = serde_json::from_slice(&response.body())?;
305 |
306 | match json.get("result") {
307 | Some(result) => match result.get("collections") {
308 | Some(collections) => match collections.as_array() {
309 | Some(collections) => {
310 | let mut collection_names = Vec::new();
311 | for collection in collections {
312 | if let Some(name) = collection.get("name").and_then(|n| n.as_str()) {
313 | collection_names.push(name.to_string());
314 | }
315 | }
316 | Ok(collection_names)
317 | }
318 | None => bail!(
319 | "[qdrant] The value corresponding to the 'collections' key is not an array."
320 | ),
321 | },
322 | None => bail!("[qdrant] The given key 'collections' does not exist."),
323 | },
324 | None => bail!("[qdrant] The given key 'result' does not exist."),
325 | }
326 | }
327 |
328 | pub fn collection_exists_api(&self, collection_name: &str) -> Result<bool, Error> {
329 | let url = format!("{}/collections/{}/exists", self.url_base, collection_name);
330 | let mut headers = BTreeMap::new();
331 | headers.insert("Content-Type".to_string(), "application/json".to_string());
332 | if let Some(api_key) = &self.api_key {
333 | headers.insert("api-key".to_string(), api_key.clone());
334 | }
335 |
336 | let response = http::request::<()>(
337 | &HttpRequest {
338 | url: url.clone(),
339 | headers,
340 | method: Some("GET".to_string()),
341 | },
342 | None,
343 | )?;
344 |
345 | let json: Value = serde_json::from_slice(&response.body())?;
346 | match json.get("result") {
347 | Some(result) => {
348 | let exists = result
349 | .get("exists")
350 | .and_then(|v| v.as_bool())
351 | .ok_or_else(|| anyhow!("[qdrant] Invalid response format"))?;
352 | Ok(exists)
353 | }
354 | None => Err(anyhow!("[qdrant] Failed to check collection existence")),
355 | }
356 | }
357 |
358 | pub fn delete_collection_api(&self, collection_name: &str) -> Result<bool, Error> {
359 | let url = format!("{}/collections/{}", self.url_base, collection_name);
360 | let mut headers = BTreeMap::new();
361 | headers.insert("Content-Type".to_string(), "application/json".to_string());
362 | if let Some(api_key) = &self.api_key {
363 | headers.insert("api-key".to_string(), api_key.clone());
364 | }
365 |
366 | let response = http::request::<()>(
367 | &HttpRequest {
368 | url: url.clone(),
369 | headers,
370 | method: Some("DELETE".to_string()),
371 | },
372 | None,
373 | )?;
374 |
375 | let json: Value = serde_json::from_slice(&response.body())?;
376 | let success = json
377 | .get("result")
378 | .and_then(|v| v.as_bool())
379 | .ok_or_else(|| anyhow!("[qdrant] Invalid response format"))?;
380 | Ok(success)
381 | }
382 |
383 | pub fn upsert_points_api(&self, collection_name: &str, params: &Value) -> Result<(), Error> {
384 | let url = format!(
385 | "{}/collections/{}/points?wait=true",
386 | self.url_base, collection_name,
387 | );
388 | let mut headers = BTreeMap::new();
389 | headers.insert("Content-Type".to_string(), "application/json".to_string());
390 | if let Some(api_key) = &self.api_key {
391 | headers.insert("api-key".to_string(), api_key.clone());
392 | }
393 |
394 | let body = serde_json::to_vec(params)?;
395 | let response = http::request(
396 | &HttpRequest {
397 | url: url.clone(),
398 | headers,
399 | method: Some("PUT".to_string()),
400 | },
401 | Some(&body),
402 | )?;
403 |
404 | let json: Value = serde_json::from_slice(&response.body())?;
405 | let status = json
406 | .get("status")
407 | .and_then(|v| v.as_str())
408 | .ok_or_else(|| anyhow!("[qdrant] Invalid response format"))?;
409 |
410 | if status == "ok" {
411 | Ok(())
412 | } else {
413 | Err(anyhow!(
414 | "[qdrant] Failed to upsert points. Status = {}",
415 | status
416 | ))
417 | }
418 | }
419 |
420 | pub fn search_points_api(&self, collection_name: &str, params: &Value) -> Result<Value, Error> {
421 | let url = format!(
422 | "{}/collections/{}/points/search",
423 | self.url_base, collection_name,
424 | );
425 | let mut headers = BTreeMap::new();
426 | headers.insert("Content-Type".to_string(), "application/json".to_string());
427 | if let Some(api_key) = &self.api_key {
428 | headers.insert("api-key".to_string(), api_key.clone());
429 | }
430 |
431 | let body = serde_json::to_vec(params)?;
432 | let response = http::request(
433 | &HttpRequest {
434 | url: url.clone(),
435 | headers,
436 | method: Some("POST".to_string()),
437 | },
438 | Some(&body),
439 | )?;
440 |
441 | let json: Value = serde_json::from_slice(&response.body())?;
442 | Ok(json)
443 | }
444 |
445 | pub fn get_points_api(&self, collection_name: &str, params: &Value) -> Result<Value, Error> {
446 | let url = format!("{}/collections/{}/points", self.url_base, collection_name);
447 | let mut headers = BTreeMap::new();
448 | headers.insert("Content-Type".to_string(), "application/json".to_string());
449 | if let Some(api_key) = &self.api_key {
450 | headers.insert("api-key".to_string(), api_key.clone());
451 | }
452 |
453 | let body = serde_json::to_vec(params)?;
454 | let response = http::request(
455 | &HttpRequest {
456 | url: url.clone(),
457 | headers,
458 | method: Some("POST".to_string()),
459 | },
460 | Some(&body),
461 | )?;
462 |
463 | let json: Value = serde_json::from_slice(&response.body())?;
464 | Ok(json)
465 | }
466 |
467 | pub fn get_point_api(&self, collection_name: &str, id: &PointId) -> Result<Value, Error> {
468 | let url = format!(
469 | "{}/collections/{}/points/{}",
470 | self.url_base, collection_name, id,
471 | );
472 | let mut headers = BTreeMap::new();
473 | headers.insert("Content-Type".to_string(), "application/json".to_string());
474 | if let Some(api_key) = &self.api_key {
475 | headers.insert("api-key".to_string(), api_key.clone());
476 | }
477 |
478 | let response = http::request::<()>(
479 | &HttpRequest {
480 | url: url.clone(),
481 | headers,
482 | method: Some("GET".to_string()),
483 | },
484 | None,
485 | )?;
486 |
487 | let json: Value = serde_json::from_slice(&response.body())?;
488 | Ok(json)
489 | }
490 |
491 | pub fn delete_points_api(&self, collection_name: &str, params: &Value) -> Result<(), Error> {
492 | let url = format!(
493 | "{}/collections/{}/points/delete?wait=true",
494 | self.url_base, collection_name,
495 | );
496 | let mut headers = BTreeMap::new();
497 | headers.insert("Content-Type".to_string(), "application/json".to_string());
498 | if let Some(api_key) = &self.api_key {
499 | headers.insert("api-key".to_string(), api_key.clone());
500 | }
501 |
502 | let body = serde_json::to_vec(params)?;
503 | let _response = http::request(
504 | &HttpRequest {
505 | url: url.clone(),
506 | headers,
507 | method: Some("POST".to_string()),
508 | },
509 | Some(&body),
510 | )?;
511 |
512 | Ok(())
513 | }
514 | }
515 |
```
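A rough usage sketch of the shortcut API above, assuming the `QdrantClient`, `Point`, and `PointId` types are in scope. Because the HTTP calls go through `extism_pdk`, this only runs inside a Wasm plugin host; the collection name, vector size, and API key are placeholders.

```rust
use anyhow::Error;

// Illustrative only: drives the shortcut methods defined on QdrantClient above.
fn example(client: &mut QdrantClient) -> Result<(), Error> {
    client.set_api_key("qdrant-api-key"); // placeholder key

    if !client.collection_exists("notes")? {
        client.create_collection("notes", 384)?; // 384-dim vectors, Cosine distance
    }

    client.upsert_points(
        "notes",
        vec![Point {
            id: PointId::Num(1),
            vector: vec![0.0; 384],
            payload: None,
        }],
    )?;

    // Nearest-neighbour search; a score_threshold of None defaults to 0.0.
    let hits = client.search_points("notes", vec![0.0; 384], 5, None)?;
    println!("{} points scored", hits.len());
    Ok(())
}
```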
--------------------------------------------------------------------------------
/RUNTIME_CONFIG.md:
--------------------------------------------------------------------------------
```markdown
1 | # Runtime Configuration
2 |
3 | ## Structure
4 |
5 | The configuration is structured as follows:
6 |
7 | - **auths** (`object`, optional): Authentication configurations for HTTPS requests, keyed by URL.
8 | - **plugins**: A map of plugin names to plugin configuration objects.
9 | - **url** (`string`): OCI reference, HTTP(S) URL, or local file path to the plugin.
10 | - **runtime_config** (`object`, optional): Plugin-specific runtime configuration. The available fields are:
11 | - **skip_tools** (`array[string]`, optional): List of regex patterns for tool names to skip loading at runtime. Each pattern is automatically anchored to match the entire tool name (equivalent to wrapping with `^` and `$`). Supports full regex syntax for powerful pattern matching.
12 | - **allowed_hosts** (`array[string]`, optional): List of allowed hosts for the plugin (e.g., `["1.1.1.1"]` or `["*"]`).
13 | - **allowed_paths** (`array[string]`, optional): List of allowed file system paths.
14 | - **env_vars** (`object`, optional): Key-value pairs of environment variables for the plugin.
15 | - **memory_limit** (`string`, optional): Memory limit for the plugin (e.g., `"512Mi"`).
16 |
17 | ## Plugin Names
18 |
19 | Plugin names must follow strict naming conventions to ensure consistency and avoid conflicts:
20 |
21 | ### Allowed Characters
22 | - **Letters**: A-Z, a-z (case-sensitive)
23 | - **Numbers**: 0-9
24 | - **Underscores**: _ (as separators only)
25 |
26 | ### Naming Rules
27 | - Must start with a letter or number (not underscore)
28 | - Must end with a letter or number (not underscore)
29 | - Cannot contain consecutive underscores
30 | - Cannot contain hyphens or other special characters
31 | - Cannot contain spaces or whitespace
32 |
33 | ### Valid Examples
34 | ```
35 | ✅ plugin
36 | ✅ myPlugin
37 | ✅ plugin_name
38 | ✅ plugin123
39 | ✅ my_awesome_plugin_v2
40 | ✅ Plugin_Name_123
41 | ```
42 |
43 | ### Invalid Examples
44 | ```
45 | ❌ plugin-name (hyphens not allowed)
46 | ❌ plugin_ (cannot end with underscore)
47 | ❌ _plugin (cannot start with underscore)
48 | ❌ plugin__name (consecutive underscores)
49 | ❌ plugin name (spaces not allowed)
50 | ❌ plugin@name (special characters not allowed)
51 | ```
52 |
53 | ### Best Practices
54 | - Use descriptive, meaningful names
55 | - Follow consistent naming conventions within your organization
56 | - Consider using prefixes for related plugins (e.g., `company_auth`, `company_logging`)
57 | - Use underscores to separate logical components (e.g., `api_client`, `data_processor`)
58 |
59 | ## Authentication Configuration
60 |
61 | The `auths` field allows you to configure authentication for HTTPS requests made by plugins. Authentication is matched by URL prefix, with longer prefixes taking precedence.
62 |
63 | ### Supported Authentication Types
64 |
65 | #### Basic Authentication
66 | ```yaml
67 | auths:
68 | "https://api.example.com":
69 | type: basic
70 | username: "your-username"
71 | password: "your-password"
72 | ```
73 |
74 | #### Bearer Token Authentication
75 | ```yaml
76 | auths:
77 | "https://api.example.com":
78 | type: token
79 | token: "your-bearer-token"
80 | ```
81 |
82 | #### Keyring Authentication
83 | ```yaml
84 | auths:
85 | "https://private.registry.io":
86 | type: keyring
87 | service: "my-app"
88 | user: "registry-user"
89 | ```
90 |
91 | ### Keyring Setup Examples
92 |
93 | For keyring authentication, you need to store the actual auth configuration JSON in your system keyring. This provides secure credential storage without exposing sensitive data in config files.
94 |
95 | #### macOS (using Keychain Access or security command)
96 |
97 | **Using the `security` command:**
98 | ```bash
99 | # Store basic auth credentials
100 | security add-generic-password -a "registry-user" -s "my-app" -w '{"type":"basic","username":"actual-user","password":"actual-pass"}'
101 |
102 | # Store token auth credentials
103 | security add-generic-password -a "api-user" -s "my-service" -w '{"type":"token","token":"actual-bearer-token"}'
104 |
105 | # Verify the entry was created
106 | security find-generic-password -a "registry-user" -s "my-app"
107 | ```
108 |
109 | **Using Keychain Access GUI:**
110 | 1. Open Keychain Access (Applications → Utilities → Keychain Access)
111 | 2. Click "File" → "New Password Item"
112 | 3. Set "Keychain Item Name" to your service name (e.g., "my-app")
113 | 4. Set "Account Name" to your user name (e.g., "registry-user")
114 | 5. Set "Password" to the JSON auth config: `{"type":"basic","username":"actual-user","password":"actual-pass"}`
115 | 6. Click "Add"
116 |
117 | #### Linux (using libsecret/gnome-keyring)
118 |
119 | **Install required tools:**
120 | ```bash
121 | # Ubuntu/Debian
122 | sudo apt-get install libsecret-tools
123 |
124 | # RHEL/CentOS/Fedora
125 | sudo dnf install libsecret
126 | ```
127 |
128 | **Using `secret-tool`:**
129 | ```bash
130 | # Store basic auth credentials
131 | echo '{"type":"basic","username":"actual-user","password":"actual-pass"}' | secret-tool store --label="my-app credentials" service "my-app" username "registry-user"
132 |
133 | # Store token auth credentials
134 | echo '{"type":"token","token":"actual-bearer-token"}' | secret-tool store --label="my-service token" service "my-service" username "api-user"
135 |
136 | # Verify the entry was created
137 | secret-tool lookup service "my-app" username "registry-user"
138 | ```
139 |
140 | #### Windows (using Windows Credential Manager)
141 |
142 | **Using `cmdkey` (Command Prompt as Administrator):**
143 | ```cmd
144 | REM Store basic auth credentials (escape quotes for JSON)
145 | cmdkey /generic:"my-app" /user:"registry-user" /pass:"{\"type\":\"basic\",\"username\":\"actual-user\",\"password\":\"actual-pass\"}"
146 |
147 | REM Store token auth credentials
148 | cmdkey /generic:"my-service" /user:"api-user" /pass:"{\"type\":\"token\",\"token\":\"actual-bearer-token\"}"
149 |
150 | REM Verify the entry was created
151 | cmdkey /list:"my-app"
152 | ```
153 |
154 | **Using Credential Manager GUI:**
155 | 1. Open "Credential Manager" from Control Panel → User Accounts → Credential Manager
156 | 2. Click "Add a generic credential"
157 | 3. Set "Internet or network address" to your service name (e.g., "my-app")
158 | 4. Set "User name" to your user name (e.g., "registry-user")
159 | 5. Set "Password" to the JSON auth config: `{"type":"basic","username":"actual-user","password":"actual-pass"}`
160 | 6. Click "OK"
161 |
162 | **Using PowerShell:**
163 | ```powershell
164 | # Store basic auth credentials (New-StoredCredential requires the CredentialManager module: Install-Module CredentialManager)
165 | $cred = New-Object System.Management.Automation.PSCredential("registry-user", (ConvertTo-SecureString '{"type":"basic","username":"actual-user","password":"actual-pass"}' -AsPlainText -Force))
166 | New-StoredCredential -Target "my-app" -Credential $cred -Type Generic
167 | ```
168 |
169 | ### URL Matching Behavior
170 |
171 | Authentication is applied based on URL prefix matching:
172 | - Longer prefixes take precedence over shorter ones
173 | - Exact matches take highest precedence
174 | - URLs are matched case-sensitively
175 |
176 | **Example:**
177 | ```yaml
178 | auths:
179 | "https://example.com":
180 | type: basic
181 | username: "broad-user"
182 | password: "broad-pass"
183 | "https://example.com/api":
184 | type: token
185 | token: "api-token"
186 | "https://example.com/api/v1":
187 | type: basic
188 | username: "v1-user"
189 | password: "v1-pass"
190 | ```
191 |
192 | - Request to `https://example.com/api/v1/users` → uses v1 basic auth (longest match)
193 | - Request to `https://example.com/api/data` → uses api token auth
194 | - Request to `https://example.com/public` → uses broad basic auth
195 |
196 | ### Keyring Authentication Example
197 |
198 | **Configuration file:**
199 | ```yaml
200 | auths:
201 | "https://private.registry.io":
202 | type: keyring
203 | service: "private-registry"
204 | user: "registry-user"
205 | "https://internal.company.com":
206 | type: keyring
207 | service: "company-api"
208 | user: "api-user"
209 |
210 | plugins:
211 | secure-plugin:
212 | url: "https://private.registry.io/secure-plugin"
213 | runtime_config:
214 | allowed_hosts:
215 | - "private.registry.io"
216 | ```
217 |
218 | **Corresponding keyring entries (stored separately):**
219 | - Service: `private-registry`, User: `registry-user`, Password: `{"type":"basic","username":"real-user","password":"real-pass"}`
220 | - Service: `company-api`, User: `api-user`, Password: `{"type":"token","token":"company-jwt-token"}`
221 |
222 | ### Real-World Keyring Scenarios
223 |
224 | #### Scenario 1: Corporate Environment
225 | ```yaml
226 | auths:
227 | "https://artifactory.company.com":
228 | type: keyring
229 | service: "company-artifactory"
230 | user: "build-service"
231 | "https://nexus.company.com":
232 | type: keyring
233 | service: "company-nexus"
234 | user: "deployment-bot"
235 | ```
236 |
237 | Setup corporate credentials once:
238 | ```bash
239 | # macOS
240 | security add-generic-password -a "build-service" -s "company-artifactory" -w '{"type":"basic","username":"corp_user","password":"corp_secret"}'
241 |
242 | # Linux
243 | echo '{"type":"basic","username":"corp_user","password":"corp_secret"}' | secret-tool store --label="Company Artifactory" service "company-artifactory" username "build-service"
244 |
245 | # Windows
246 | cmdkey /generic:"company-artifactory" /user:"build-service" /pass:"{\"type\":\"basic\",\"username\":\"corp_user\",\"password\":\"corp_secret\"}"
247 | ```
248 |
249 | #### Scenario 2: Multi-Environment Setup
250 | ```yaml
251 | auths:
252 | "https://staging-api.example.com":
253 | type: keyring
254 | service: "example-staging"
255 | user: "staging-user"
256 | "https://prod-api.example.com":
257 | type: keyring
258 | service: "example-prod"
259 | user: "prod-user"
260 | ```
261 |
262 | Store different credentials for each environment:
263 | ```bash
264 | # Staging credentials
265 | security add-generic-password -a "staging-user" -s "example-staging" -w '{"type":"token","token":"staging-jwt-token"}'
266 |
267 | # Production credentials
268 | security add-generic-password -a "prod-user" -s "example-prod" -w '{"type":"token","token":"prod-jwt-token"}'
269 | ```
270 |
271 | #### Scenario 3: Team Shared Configuration
272 | ```yaml
273 | # Team members can share this config file safely
274 | auths:
275 | "https://shared-registry.team.com":
276 | type: keyring
277 | service: "team-registry"
278 | user: "developer"
279 | ```
280 |
281 | Each team member stores their own credentials:
282 | ```bash
283 | # Developer A
284 | security add-generic-password -a "developer" -s "team-registry" -w '{"type":"basic","username":"alice","password":"alice_key"}'
285 |
286 | # Developer B
287 | security add-generic-password -a "developer" -s "team-registry" -w '{"type":"basic","username":"bob","password":"bob_key"}'
288 | ```
289 |
290 | ### Keyring Best Practices
291 |
292 | 1. **Service Naming Convention**: Use descriptive, consistent service names (e.g., `company-artifactory`, `project-registry`)
293 | 2. **User Identification**: Use role-based usernames (e.g., `build-service`, `deployment-bot`) rather than personal names
294 | 3. **Credential Rotation**: Update keyring entries when rotating credentials - no config file changes needed
295 | 4. **Environment Separation**: Use different service names for different environments
296 | 5. **Team Coordination**: Document your service/user naming conventions for team members
297 | 6. **Backup Strategy**: Consider backing up keyring entries for critical services
298 | 7. **Testing**: Use non-production credentials in keyring for testing
299 |
300 | ## Example (YAML)
301 |
302 | ```yaml
303 | auths:
304 | "https://private.registry.io":
305 | type: basic
306 | username: "registry-user"
307 | password: "registry-pass"
308 | "https://api.github.com":
309 | type: token
310 | token: "ghp_1234567890abcdef"
311 | "https://enterprise.api.com":
312 | type: basic
313 | username: "enterprise-user"
314 | password: "enterprise-pass"
315 |
316 | plugins:
317 | time:
318 | url: oci://ghcr.io/tuananh/time-plugin:latest
319 | myip:
320 | url: oci://ghcr.io/tuananh/myip-plugin:latest
321 | runtime_config:
322 | allowed_hosts:
323 | - "1.1.1.1"
324 | skip_tools:
325 | - "debug_tool" # Skip exact tool name
326 | - "temp_.*" # Skip tools starting with "temp_"
327 | - ".*_backup" # Skip tools ending with "_backup"
328 | - "test_[0-9]+" # Skip tools like "test_1", "test_42"
329 | env_vars:
330 | FOO: "bar"
331 | memory_limit: "512Mi"
332 | private_plugin:
333 | url: "https://private.registry.io/my-plugin"
334 | runtime_config:
335 | allowed_hosts:
336 | - "private.registry.io"
337 | ```
338 |
339 | ## Example (JSON)
340 |
341 | ```json
342 | {
343 | "auths": {
344 | "https://private.registry.io": {
345 | "type": "basic",
346 | "username": "registry-user",
347 | "password": "registry-pass"
348 | },
349 | "https://api.github.com": {
350 | "type": "token",
351 | "token": "ghp_1234567890abcdef"
352 | },
353 | "https://enterprise.api.com": {
354 | "type": "basic",
355 | "username": "enterprise-user",
356 | "password": "enterprise-pass"
357 | }
358 | },
359 | "plugins": {
360 | "time": {
361 | "url": "oci://ghcr.io/tuananh/time-plugin:latest"
362 | },
363 | "myip": {
364 | "url": "oci://ghcr.io/tuananh/myip-plugin:latest",
365 | "runtime_config": {
366 | "allowed_hosts": ["1.1.1.1"],
367 | "skip_tools": [
368 | "debug_tool",
369 | "temp_.*",
370 | ".*_backup",
371 | "test_[0-9]+"
372 | ],
373 | "env_vars": {"FOO": "bar"},
374 | "memory_limit": "512Mi"
375 | }
376 | },
377 | "private_plugin": {
378 | "url": "https://private.registry.io/my-plugin",
379 | "runtime_config": {
380 | "allowed_hosts": ["private.registry.io"]
381 | }
382 | }
383 | }
384 | }
385 | ```
386 |
387 | ## Loading Configuration
388 |
389 | Configuration is loaded at runtime from a file with a `.json`, `.yaml`, `.yml`, or `.toml` extension. The loader parses the file according to its extension; if the file does not exist or the extension is unsupported, an error is raised. (A TOML sketch is included after this document.)
390 |
391 | ## Security Considerations
392 |
393 | ### Credential Storage
394 | - **Basic/Token auth**: Credentials are stored directly in the config file. Ensure proper file permissions (e.g., `chmod 600`).
395 | - **Keyring auth**: Credentials are stored securely in the system keyring. The config file only contains service/user identifiers.
396 |
397 | ### Best Practices
398 | - Use keyring authentication for production environments
399 | - Rotate credentials regularly
400 | - Use environment-specific config files
401 | - Never commit credentials to version control
402 | - Consider using short-lived tokens when possible
403 |
404 | ## Troubleshooting Keyring Authentication
405 |
406 | ### Common Issues
407 |
408 | #### "No matching entry found in secure storage"
409 | This error occurs when the keyring entry doesn't exist or can't be accessed.
410 |
411 | **Solutions:**
412 | 1. Verify the service and user names match exactly between config and keyring
413 | 2. Check that the keyring entry exists:
414 | ```bash
415 | # macOS
416 | security find-generic-password -a "your-user" -s "your-service"
417 |
418 | # Linux
419 | secret-tool lookup service "your-service" username "your-user"
420 |
421 | # Windows
422 | cmdkey /list:"your-service"
423 | ```
424 | 3. Ensure the current user has permission to access the keyring entry
425 |
426 | #### "Failed to parse JSON from keyring"
427 | This error occurs when the stored password isn't valid JSON or doesn't match the expected AuthConfig format.
428 |
429 | **Solutions:**
430 | 1. Verify the stored password is valid JSON:
431 | ```bash
432 | # macOS - retrieve and validate
433 | security find-generic-password -a "your-user" -s "your-service" -w | jq .
434 | ```
435 | 2. Ensure the JSON matches one of these formats:
436 | - `{"type":"basic","username":"real-user","password":"real-pass"}`
437 | - `{"type":"token","token":"real-token"}`
438 |
439 | #### Platform-Specific Issues
440 |
441 | **macOS:**
442 | - Keychain may be locked - unlock it manually or use `security unlock-keychain`
443 | - Application may not have keychain access permissions
444 |
445 | **Linux:**
446 | - GNOME Keyring service may not be running: `systemctl --user status gnome-keyring`
447 | - D-Bus session may not be available in non-graphical environments
448 |
449 | **Windows:**
450 | - Credential Manager may require administrator privileges for certain operations
451 | - Windows Credential Manager has size limits for stored passwords
452 |
453 | ### Debugging Tips
454 |
455 | 1. **Test keyring access independently:**
456 | ```bash
457 | # Create a test entry
458 | security add-generic-password -a "test-user" -s "test-service" -w '{"type":"token","token":"test"}'
459 |
460 | # Retrieve it
461 | security find-generic-password -a "test-user" -s "test-service" -w
462 |
463 | # Clean up
464 | security delete-generic-password -a "test-user" -s "test-service"
465 | ```
466 |
467 | 2. **Validate JSON format:**
468 | ```bash
469 | echo '{"type":"basic","username":"user","password":"pass"}' | jq .
470 | ```
471 |
472 | 3. **Check permissions:**
473 | ```bash
474 | # Ensure config file is readable
475 | ls -la config.yaml
476 |
477 | # Set appropriate permissions
478 | chmod 600 config.yaml
479 | ```
480 |
481 | ## Skip Tools Pattern Matching
482 |
483 | The `skip_tools` field supports powerful regex pattern matching for filtering out unwanted tools at runtime.
484 |
485 | > 📖 **For comprehensive examples, advanced patterns, and detailed use cases, see [SKIP_TOOLS_GUIDE.md](./SKIP_TOOLS_GUIDE.md)**
486 |
487 | ### Pattern Behavior
488 | - **Automatic Anchoring**: Patterns are automatically anchored to match the entire tool name (wrapped with `^` and `$`)
489 | - **Regex Support**: Full regex syntax is supported, including wildcards, character classes, and quantifiers
490 | - **Case Sensitive**: Pattern matching is case-sensitive
491 | - **Compilation**: All patterns are compiled into a single optimized regex set for efficient matching
492 |
493 | ### Pattern Examples
494 |
495 | #### Exact Matches
496 | ```yaml
497 | skip_tools:
498 | - "debug_tool" # Matches only "debug_tool"
499 | - "test_runner" # Matches only "test_runner"
500 | ```
501 |
502 | #### Wildcard Patterns
503 | ```yaml
504 | skip_tools:
505 | - "temp_.*" # Matches "temp_file", "temp_data", etc.
506 | - ".*_backup" # Matches "data_backup", "file_backup", etc.
507 | - "debug.*" # Matches "debug", "debugger", "debug_info", etc.
508 | ```
509 |
510 | #### Advanced Regex Patterns
511 | ```yaml
512 | skip_tools:
513 | - "tool_[0-9]+" # Matches "tool_1", "tool_42", etc.
514 | - "test_(unit|integration)" # Matches "test_unit" and "test_integration"
515 | - "[a-z]+_helper" # Matches lowercase word + "_helper"
516 | - "system_(admin|user)_.*" # Matches tools starting with "system_admin_" or "system_user_"
517 | ```
518 |
519 | #### Explicit Anchoring
520 | ```yaml
521 | skip_tools:
522 | - "^prefix_.*" # Explicit start anchor (same as "prefix_.*" due to auto-anchoring)
523 | - ".*_suffix$" # Explicit end anchor (same as ".*_suffix" due to auto-anchoring)
524 | - "^exact_only$" # Fully explicit anchoring (same as "exact_only")
525 | ```
526 |
527 | #### Special Characters
528 | ```yaml
529 | skip_tools:
530 | - "file\\.exe" # Matches "file.exe" literally (escaped dot)
531 | - "script\\?" # Matches "script?" literally (escaped question mark)
532 | - "temp\\*data" # Matches "temp*data" literally (escaped asterisk)
533 | ```
534 |
535 | #### Common Use Cases
536 | ```yaml
537 | skip_tools:
538 | - ".*_test" # Skip all test tools
539 | - "dev_.*" # Skip all development tools
540 | - "mock_.*" # Skip all mock tools
541 | - ".*_deprecated" # Skip all deprecated tools
542 | - "admin_.*" # Skip all admin tools
543 | - "debug.*" # Skip all debug-related tools
544 | ```
545 |
546 | ### Error Handling
547 | - Invalid regex patterns will cause configuration loading to fail with a descriptive error
548 | - Empty pattern arrays are allowed and will skip no tools
549 | - The `skip_tools` field can be omitted entirely to skip no tools
550 |
551 | ### Performance Notes
552 | - All patterns are compiled into a single `RegexSet`, so each tool name is checked in one pass regardless of how many patterns are configured (see the sketch after this document)
553 | - Pattern compilation happens once at startup, not per tool evaluation
554 | - Large numbers of patterns have minimal runtime performance impact
555 |
556 | ## Notes
557 |
558 | - Fields marked as `optional` can be omitted.
559 | - Plugin authors may extend `runtime_config` with additional fields, but only the above are officially recognized.
560 | - Authentication applies to all HTTPS requests made by plugins, including plugin downloads and runtime API calls.
561 | - URL matching is case-sensitive and based on string prefix matching.
562 | - Keyring authentication requires platform-specific keyring services to be available and accessible.
563 | - Skip tools patterns use full regex syntax with automatic anchoring for precise tool filtering.
564 |
```
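RUNTIME_CONFIG.md notes that `.toml` files are accepted as well, but only shows YAML and JSON. As a sketch, the plugin section of the example above could look like this in TOML, assuming the loader maps TOML tables onto the same structure (the `auths` section is omitted here):

```toml
[plugins.time]
url = "oci://ghcr.io/tuananh/time-plugin:latest"

[plugins.myip]
url = "oci://ghcr.io/tuananh/myip-plugin:latest"

[plugins.myip.runtime_config]
allowed_hosts = ["1.1.1.1"]
skip_tools = ["debug_tool", "temp_.*", ".*_backup", "test_[0-9]+"]
memory_limit = "512Mi"

[plugins.myip.runtime_config.env_vars]
FOO = "bar"
```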
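The skip-tools section above describes automatic anchoring and compilation into a single `RegexSet`. A minimal sketch of that approach with the `regex` crate — not the project's actual implementation — looks like this:

```rust
use regex::RegexSet;

/// Anchor each user-supplied pattern and compile them into one RegexSet,
/// so a tool name is checked with a single scan regardless of pattern count.
fn build_skip_set(patterns: &[&str]) -> Result<RegexSet, regex::Error> {
    let anchored: Vec<String> = patterns
        .iter()
        .map(|p| format!("^(?:{p})$")) // wrap with ^...$ to match the whole name
        .collect();
    RegexSet::new(anchored)
}

fn main() -> Result<(), regex::Error> {
    let skip = build_skip_set(&["debug_tool", "temp_.*", "test_[0-9]+"])?;
    assert!(skip.is_match("temp_file"));
    assert!(!skip.is_match("my_temp_file")); // anchoring prevents partial matches
    Ok(())
}
```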
--------------------------------------------------------------------------------
/src/naming.rs:
--------------------------------------------------------------------------------
```rust
1 | use crate::config::{PluginName, PluginNameParseError};
2 | use anyhow::Result;
3 | use std::fmt;
4 | use std::str::FromStr;
5 | use url::Url;
6 |
7 | #[derive(Debug, Clone)]
8 | pub struct NamespacedNameParseError;
9 |
10 | impl fmt::Display for NamespacedNameParseError {
11 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
12 | write!(f, "Failed to parse name")
13 | }
14 | }
15 |
16 | impl std::error::Error for NamespacedNameParseError {}
17 |
18 | impl From<PluginNameParseError> for NamespacedNameParseError {
19 | fn from(_: PluginNameParseError) -> Self {
20 | NamespacedNameParseError
21 | }
22 | }
23 |
24 | pub fn create_namespaced_name(plugin_name: &PluginName, name: &str) -> String {
25 | format!("{plugin_name}-{name}")
26 | }
27 |
28 | pub fn create_namespaced_uri(plugin_name: &PluginName, uri: &str) -> Result<String> {
29 | let mut uri = Url::parse(uri)?;
30 | uri.set_path(&format!(
31 | "{}/{}",
32 | plugin_name.as_str(),
33 | uri.path().trim_start_matches('/')
34 | ));
35 | Ok(uri.to_string())
36 | }
37 |
38 | pub fn parse_namespaced_name(namespaced_name: String) -> Result<(PluginName, String)> {
39 | if let Some((plugin_name, tool_name)) = namespaced_name.split_once("-") {
40 | return Ok((PluginName::from_str(plugin_name)?, tool_name.to_string()));
41 | }
42 | Err(NamespacedNameParseError.into())
43 | }
44 |
45 | pub fn parse_namespaced_uri(namespaced_uri: String) -> Result<(PluginName, String)> {
46 | let mut uri = Url::parse(namespaced_uri.as_str())?;
47 | let mut segments = uri
48 | .path_segments()
49 | .ok_or(url::ParseError::RelativeUrlWithoutBase)?
50 | .collect::<Vec<&str>>();
51 | if segments.is_empty() {
52 | return Err(NamespacedNameParseError.into());
53 | }
54 | let plugin_name = PluginName::from_str(segments.remove(0))?;
55 | uri.set_path(&segments.join("/"));
56 | Ok((plugin_name, uri.to_string()))
57 | }
58 |
59 | #[cfg(test)]
60 | mod tests {
61 | use super::*;
62 |
63 | #[test]
64 | fn test_create_tool_name() {
65 | let plugin_name = PluginName::from_str("example_plugin").unwrap();
66 | let tool_name = "example_tool";
67 | let expected = "example_plugin-example_tool";
68 | assert_eq!(create_namespaced_name(&plugin_name, tool_name), expected);
69 | }
70 |
71 | #[test]
72 | fn test_parse_tool_name() {
73 | let tool_name = "example_plugin-example_tool".to_string();
74 | let result = parse_namespaced_name(tool_name);
75 | assert!(result.is_ok());
76 | let (plugin_name, tool) = result.unwrap();
77 | assert_eq!(plugin_name.as_str(), "example_plugin");
78 | assert_eq!(tool, "example_tool");
79 | }
80 |
81 | #[test]
82 | fn test_create_tool_name_invalid() {
83 | let plugin_name = PluginName::from_str("example_plugin").unwrap();
84 | let tool_name = "invalid-tool";
85 | let result = create_namespaced_name(&plugin_name, tool_name);
86 | assert_eq!(result, "example_plugin-invalid-tool");
87 | }
88 |
89 | #[test]
90 | fn test_create_namespaced_tool_name_with_special_chars() {
91 | let plugin_name = PluginName::from_str("test_plugin_123").unwrap();
92 | let tool_name = "tool_name_with_underscores";
93 | let result = create_namespaced_name(&plugin_name, tool_name);
94 | assert_eq!(result, "test_plugin_123-tool_name_with_underscores");
95 | }
96 |
97 | #[test]
98 | fn test_create_namespaced_tool_name_empty_tool_name() {
99 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
100 | let tool_name = "";
101 | let result = create_namespaced_name(&plugin_name, tool_name);
102 | assert_eq!(result, "test_plugin-");
103 | }
104 |
105 | #[test]
106 | fn test_create_namespaced_tool_name_multiple_hyphens() {
107 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
108 | let tool_name = "invalid-tool-name";
109 | let result = create_namespaced_name(&plugin_name, tool_name);
110 | assert_eq!(result, "test_plugin-invalid-tool-name");
111 | }
112 |
113 | #[test]
114 | fn test_parse_namespaced_tool_name_with_special_chars() {
115 | let tool_name = "plugin_name_123-tool_name_456".to_string();
116 | let result = parse_namespaced_name(tool_name).unwrap();
117 | assert_eq!(result.0.as_str(), "plugin_name_123");
118 | assert_eq!(result.1, "tool_name_456");
119 | }
120 |
121 | #[test]
122 | fn test_parse_namespaced_tool_name_no_separator() {
123 | let tool_name = "invalid_tool_name".to_string();
124 | let result = parse_namespaced_name(tool_name);
125 | assert!(result.is_err());
126 | }
127 |
128 | #[test]
129 | fn test_parse_namespaced_tool_name_multiple_separators() {
130 | let tool_name = "plugin-tool-extra".to_string();
131 | let result = parse_namespaced_name(tool_name).unwrap();
132 | assert_eq!(result.0.as_str(), "plugin");
133 | assert_eq!(result.1, "tool-extra");
134 | }
135 |
136 | #[test]
137 | fn test_parse_namespaced_tool_name_empty_parts() {
138 | let tool_name = "-tool".to_string();
139 | let result = parse_namespaced_name(tool_name);
140 | // This should still work but with empty plugin name
141 | if result.is_ok() {
142 | let (plugin, _) = result.unwrap();
143 | assert!(plugin.as_str().is_empty());
144 | }
145 | }
146 |
147 | #[test]
148 | fn test_parse_namespaced_tool_name_only_separator() {
149 | let tool_name = "-".to_string();
150 | let result = parse_namespaced_name(tool_name);
151 | // Should result in empty plugin and tool names
152 | if let Ok((plugin, tool)) = result {
153 | assert!(plugin.as_str().is_empty());
154 | assert!(tool.is_empty());
155 | }
156 | }
157 |
158 | #[test]
159 | fn test_parse_namespaced_tool_name_empty_string() {
160 | let tool_name = "".to_string();
161 | let result = parse_namespaced_name(tool_name);
162 | assert!(result.is_err());
163 | }
164 |
165 | #[test]
166 | fn test_tool_name_parse_error_display() {
167 | let error = NamespacedNameParseError;
168 | assert_eq!(format!("{error}"), "Failed to parse name");
169 | }
170 |
171 | #[test]
172 | fn test_tool_name_parse_error_from_plugin_name_error() {
173 | let plugin_error = PluginNameParseError;
174 | let tool_error: NamespacedNameParseError = plugin_error.into();
175 | assert_eq!(format!("{tool_error}"), "Failed to parse name");
176 | }
177 |
178 | #[test]
179 | fn test_round_trip_tool_name_operations() {
180 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
181 | let original_tool = "my_tool";
182 |
183 | let namespaced = create_namespaced_name(&plugin_name, original_tool);
184 | let (parsed_plugin, parsed_tool) = parse_namespaced_name(namespaced).unwrap();
185 |
186 | assert_eq!(parsed_plugin.as_str(), "test_plugin");
187 | assert_eq!(parsed_tool, "my_tool");
188 | }
189 |
190 | #[test]
191 | fn test_tool_name_with_unicode() {
192 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
193 | let tool_name = "тест_工具"; // Cyrillic and Chinese characters
194 |
195 | let result = create_namespaced_name(&plugin_name, tool_name);
196 | assert_eq!(result, "test_plugin-тест_工具");
197 | }
198 |
199 | #[test]
200 | fn test_very_long_tool_names() {
201 | let plugin_name = PluginName::from_str("plugin").unwrap();
202 | let very_long_tool = "a".repeat(1000);
203 |
204 | let namespaced = create_namespaced_name(&plugin_name, &very_long_tool);
205 |
206 | let (parsed_plugin, parsed_tool) = parse_namespaced_name(namespaced).unwrap();
207 |
208 | assert_eq!(parsed_plugin.as_str(), "plugin");
209 | assert_eq!(parsed_tool.len(), 1000);
210 | }
211 |
212 | #[test]
213 | fn test_plugin_name_error_conversion() {
214 | let plugin_error = PluginNameParseError;
215 | let tool_error: NamespacedNameParseError = plugin_error.into();
216 |
217 | // Test that the error implements standard error traits
218 | assert!(std::error::Error::source(&tool_error).is_none());
219 | assert!(!format!("{tool_error}").is_empty());
220 | }
221 |
222 | #[test]
223 | fn test_tool_name_with_numbers_and_special_chars() {
224 | let plugin_name = PluginName::from_str("plugin_123").unwrap();
225 | let tool_name = "tool_456_test";
226 |
227 | let result = create_namespaced_name(&plugin_name, tool_name);
228 | assert_eq!(result, "plugin_123-tool_456_test");
229 |
230 | let (parsed_plugin, parsed_tool) = parse_namespaced_name(result).unwrap();
231 | assert_eq!(parsed_plugin.as_str(), "plugin_123");
232 | assert_eq!(parsed_tool, "tool_456_test");
233 | }
234 |
235 | #[test]
236 | fn test_borrowed_vs_owned_cow_strings() {
237 |         // Note: parse_namespaced_name takes an owned String, so both calls below pass String values
238 | let borrowed_result = parse_namespaced_name("plugin-tool".to_string());
239 | assert!(borrowed_result.is_ok());
240 |
241 | // Test with owned string
242 | let owned_result = parse_namespaced_name("plugin-tool".to_string());
243 | assert!(owned_result.is_ok());
244 |
245 | let (plugin1, tool1) = borrowed_result.unwrap();
246 | let (plugin2, tool2) = owned_result.unwrap();
247 |
248 | assert_eq!(plugin1.as_str(), plugin2.as_str());
249 | assert_eq!(tool1, tool2);
250 | }
251 |
252 | #[test]
253 | fn test_namespaced_tool_format_invariants() {
254 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
255 | let tool_name = "test_tool";
256 |
257 | let namespaced = create_namespaced_name(&plugin_name, tool_name);
258 |
259 | // Should contain at least one "-" (the separator)
260 | let hyphen_count = namespaced.matches("-").count();
261 | assert!(hyphen_count >= 1, "Should contain at least one '-'");
262 |
263 | // Should start with plugin name
264 | assert!(
265 | namespaced.starts_with("test_plugin"),
266 | "Should start with plugin name"
267 | );
268 |
269 | // Should end with tool name
270 | assert!(
271 | namespaced.ends_with("test_tool"),
272 | "Should end with tool name"
273 | );
274 |
275 | // Should be in the format "plugin-tool"
276 | assert_eq!(namespaced, "test_plugin-test_tool");
277 |
278 | // Test parsing works correctly with the first hyphen as separator
279 | let (parsed_plugin, parsed_tool) = parse_namespaced_name(namespaced).unwrap();
280 | assert_eq!(parsed_plugin.as_str(), "test_plugin");
281 | assert_eq!(parsed_tool, "test_tool");
282 | }
283 |
284 | // Tests for create_namespaced_uri and parse_namespaced_uri
285 |
286 | #[test]
287 | fn test_create_namespaced_uri_basic() {
288 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
289 | let uri = "http://example.com/api/endpoint";
290 |
291 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
292 | assert_eq!(result, "http://example.com/test_plugin/api/endpoint");
293 | }
294 |
295 | #[test]
296 | fn test_create_namespaced_uri_root_path() {
297 | let plugin_name = PluginName::from_str("my_plugin").unwrap();
298 | let uri = "http://example.com/";
299 |
300 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
301 | assert_eq!(result, "http://example.com/my_plugin/");
302 | }
303 |
304 | #[test]
305 | fn test_create_namespaced_uri_no_path() {
306 | let plugin_name = PluginName::from_str("my_plugin").unwrap();
307 | let uri = "http://example.com";
308 |
309 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
310 | assert_eq!(result, "http://example.com/my_plugin/");
311 | }
312 |
313 | #[test]
314 | fn test_create_namespaced_uri_with_query_string() {
315 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
316 | let uri = "http://example.com/api/endpoint?key=value&foo=bar";
317 |
318 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
319 | // Query string should be preserved
320 | assert!(result.contains("test_plugin/api/endpoint"));
321 | assert!(result.contains("key=value"));
322 | assert!(result.contains("foo=bar"));
323 | }
324 |
325 | #[test]
326 | fn test_create_namespaced_uri_with_fragment() {
327 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
328 | let uri = "http://example.com/api/endpoint#section";
329 |
330 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
331 | assert!(result.contains("test_plugin/api/endpoint"));
332 | assert!(result.contains("#section"));
333 | }
334 |
335 | #[test]
336 | fn test_create_namespaced_uri_with_port() {
337 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
338 | let uri = "http://example.com:8080/api/endpoint";
339 |
340 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
341 | assert_eq!(result, "http://example.com:8080/test_plugin/api/endpoint");
342 | }
343 |
344 | #[test]
345 | fn test_create_namespaced_uri_https() {
346 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
347 | let uri = "https://secure.example.com/api/endpoint";
348 |
349 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
350 | assert_eq!(
351 | result,
352 | "https://secure.example.com/test_plugin/api/endpoint"
353 | );
354 | }
355 |
356 | #[test]
357 | fn test_create_namespaced_uri_leading_slash_path() {
358 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
359 | let uri = "http://example.com//api/endpoint";
360 |
361 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
362 | assert!(result.contains("test_plugin"));
363 | }
364 |
365 | #[test]
366 | fn test_create_namespaced_uri_deep_path() {
367 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
368 | let uri = "http://example.com/v1/api/v2/endpoint/deep";
369 |
370 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
371 | assert_eq!(
372 | result,
373 | "http://example.com/test_plugin/v1/api/v2/endpoint/deep"
374 | );
375 | }
376 |
377 | #[test]
378 | fn test_create_namespaced_uri_invalid_url() {
379 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
380 | let uri = "not a valid url";
381 |
382 | let result = create_namespaced_uri(&plugin_name, uri);
383 | assert!(result.is_err());
384 | }
385 |
386 | #[test]
387 | fn test_create_namespaced_uri_with_underscores_in_plugin_name() {
388 | let plugin_name = PluginName::from_str("my_test_plugin_123").unwrap();
389 | let uri = "http://example.com/api";
390 |
391 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
392 | assert_eq!(result, "http://example.com/my_test_plugin_123/api");
393 | }
394 |
395 | #[test]
396 | fn test_parse_namespaced_uri_basic() {
397 | let namespaced_uri = "http://example.com/test_plugin/api/endpoint".to_string();
398 |
399 | let (plugin_name, uri) = parse_namespaced_uri(namespaced_uri).unwrap();
400 | assert_eq!(plugin_name.as_str(), "test_plugin");
401 | assert_eq!(uri, "http://example.com/api/endpoint");
402 | }
403 |
404 | #[test]
405 | fn test_parse_namespaced_uri_root_path() {
406 | let namespaced_uri = "http://example.com/my_plugin/".to_string();
407 |
408 | let (plugin_name, uri) = parse_namespaced_uri(namespaced_uri).unwrap();
409 | assert_eq!(plugin_name.as_str(), "my_plugin");
410 | assert_eq!(uri, "http://example.com/");
411 | }
412 |
413 | #[test]
414 | fn test_parse_namespaced_uri_with_query_string() {
415 | let namespaced_uri = "http://example.com/test_plugin/api/endpoint?key=value".to_string();
416 |
417 | let (plugin_name, uri) = parse_namespaced_uri(namespaced_uri).unwrap();
418 | assert_eq!(plugin_name.as_str(), "test_plugin");
419 | assert!(uri.contains("api/endpoint"));
420 | assert!(uri.contains("key=value"));
421 | }
422 |
423 | #[test]
424 | fn test_parse_namespaced_uri_with_fragment() {
425 | let namespaced_uri = "http://example.com/test_plugin/api/endpoint#section".to_string();
426 |
427 | let (plugin_name, uri) = parse_namespaced_uri(namespaced_uri).unwrap();
428 | assert_eq!(plugin_name.as_str(), "test_plugin");
429 | assert!(uri.contains("api/endpoint"));
430 | assert!(uri.contains("#section"));
431 | }
432 |
433 | #[test]
434 | fn test_parse_namespaced_uri_with_port() {
435 | let namespaced_uri = "http://example.com:8080/test_plugin/api/endpoint".to_string();
436 |
437 | let (plugin_name, uri) = parse_namespaced_uri(namespaced_uri).unwrap();
438 | assert_eq!(plugin_name.as_str(), "test_plugin");
439 | assert_eq!(uri, "http://example.com:8080/api/endpoint");
440 | }
441 |
442 | #[test]
443 | fn test_parse_namespaced_uri_https() {
444 | let namespaced_uri = "https://secure.example.com/test_plugin/api/endpoint".to_string();
445 |
446 | let (plugin_name, uri) = parse_namespaced_uri(namespaced_uri).unwrap();
447 | assert_eq!(plugin_name.as_str(), "test_plugin");
448 | assert_eq!(uri, "https://secure.example.com/api/endpoint");
449 | }
450 |
451 | #[test]
452 | fn test_parse_namespaced_uri_deep_path() {
453 | let namespaced_uri = "http://example.com/test_plugin/v1/api/v2/endpoint/deep".to_string();
454 |
455 | let (plugin_name, uri) = parse_namespaced_uri(namespaced_uri).unwrap();
456 | assert_eq!(plugin_name.as_str(), "test_plugin");
457 | assert_eq!(uri, "http://example.com/v1/api/v2/endpoint/deep");
458 | }
459 |
460 | #[test]
461 | fn test_parse_namespaced_uri_invalid_url() {
462 | let namespaced_uri = "not a valid url".to_string();
463 |
464 | let result = parse_namespaced_uri(namespaced_uri);
465 | assert!(result.is_err());
466 | }
467 |
468 | #[test]
469 | fn test_parse_namespaced_uri_no_path() {
470 | let namespaced_uri = "http://example.com".to_string();
471 |
472 | let result = parse_namespaced_uri(namespaced_uri);
473 | // Should fail because there's no path segment for plugin name
474 | assert!(result.is_err());
475 | }
476 |
477 | #[test]
478 | fn test_parse_namespaced_uri_only_plugin() {
479 | let namespaced_uri = "http://example.com/test_plugin".to_string();
480 |
481 | let (plugin_name, uri) = parse_namespaced_uri(namespaced_uri).unwrap();
482 | assert_eq!(plugin_name.as_str(), "test_plugin");
483 | assert_eq!(uri, "http://example.com/");
484 | }
485 |
486 | #[test]
487 | fn test_round_trip_uri_operations() {
488 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
489 | let original_uri = "http://example.com/api/endpoint";
490 |
491 | let namespaced = create_namespaced_uri(&plugin_name, original_uri).unwrap();
492 | let (parsed_plugin, parsed_uri) = parse_namespaced_uri(namespaced).unwrap();
493 |
494 | assert_eq!(parsed_plugin.as_str(), "test_plugin");
495 | assert_eq!(parsed_uri, original_uri);
496 | }
497 |
498 | #[test]
499 | fn test_round_trip_uri_with_query_and_fragment() {
500 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
501 | let original_uri = "http://example.com/api/endpoint?key=value#section";
502 |
503 | let namespaced = create_namespaced_uri(&plugin_name, original_uri).unwrap();
504 | let (parsed_plugin, parsed_uri) = parse_namespaced_uri(namespaced).unwrap();
505 |
506 | assert_eq!(parsed_plugin.as_str(), "test_plugin");
507 | assert_eq!(parsed_uri, original_uri);
508 | }
509 |
510 | #[test]
511 | fn test_uri_with_special_characters_in_path() {
512 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
513 | let uri = "http://example.com/api/resource-123_test";
514 |
515 | let namespaced = create_namespaced_uri(&plugin_name, uri).unwrap();
516 | assert_eq!(
517 | namespaced,
518 | "http://example.com/test_plugin/api/resource-123_test"
519 | );
520 |
521 | let (parsed_plugin, parsed_uri) = parse_namespaced_uri(namespaced).unwrap();
522 | assert_eq!(parsed_plugin.as_str(), "test_plugin");
523 | assert_eq!(parsed_uri, uri);
524 | }
525 |
526 | #[test]
527 | fn test_create_namespaced_uri_with_empty_path() {
528 | let plugin_name = PluginName::from_str("test_plugin").unwrap();
529 | let uri = "http://example.com/";
530 |
531 | let result = create_namespaced_uri(&plugin_name, uri).unwrap();
532 | assert_eq!(result, "http://example.com/test_plugin/");
533 | }
534 |
535 | #[test]
536 | fn test_parse_namespaced_uri_with_underscores_in_plugin() {
537 | let namespaced_uri = "http://example.com/my_test_plugin_123/api/resource".to_string();
538 |
539 | let (plugin_name, uri) = parse_namespaced_uri(namespaced_uri).unwrap();
540 | assert_eq!(plugin_name.as_str(), "my_test_plugin_123");
541 | assert_eq!(uri, "http://example.com/api/resource");
542 | }
543 | }
544 |
```
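
A minimal, dependency-free sketch of the namespacing convention the module above implements (illustration only; the real helpers validate the plugin name via the crate's `PluginName` type and use the `url` crate for URIs):

```rust
// Tool names are "<plugin>-<tool>"; parsing splits on the FIRST hyphen,
// so hyphens inside the tool name survive a round trip.
fn main() {
    let namespaced = format!("{}-{}", "example_plugin", "tool-with-hyphens");
    let (plugin, tool) = namespaced.split_once('-').unwrap();
    assert_eq!(plugin, "example_plugin");
    assert_eq!(tool, "tool-with-hyphens");

    // Resource URIs get the plugin name prepended as the first path segment:
    // http://example.com/api/endpoint -> http://example.com/example_plugin/api/endpoint,
    // and parse_namespaced_uri strips that segment back off.
}
```

Splitting on the first hyphen only is what lets tool names such as `invalid-tool-name` round-trip, as the `test_parse_namespaced_tool_name_multiple_separators` test above checks.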
--------------------------------------------------------------------------------
/examples/plugins/v1/fs/src/lib.rs:
--------------------------------------------------------------------------------
```rust
1 | mod pdk;
2 |
3 | use std::fs::{self, OpenOptions};
4 | use std::io::{self, Write};
5 | use std::path::Path;
6 | use std::time::SystemTime;
7 |
8 | use extism_pdk::*;
9 | use json::Value;
10 | use pdk::types::{
11 | CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription,
12 | };
13 | use serde_json::json;
14 |
15 | pub(crate) fn call(input: CallToolRequest) -> Result<CallToolResult, Error> {
16 | info!("call: {:?}", input);
17 | match input.params.name.as_str() {
18 | "read_file" => read_file(input),
19 | "read_multiple_files" => read_multiple_files(input),
20 | "write_file" => write_file(input),
21 | "edit_file" => edit_file(input),
22 | "create_dir" => create_dir(input),
23 | "list_dir" => list_dir(input),
24 | "move_file" => move_file(input),
25 | "search_files" => search_files(input),
26 | "get_file_info" => get_file_info(input),
27 | _ => Ok(CallToolResult {
28 | is_error: Some(true),
29 | content: vec![Content {
30 | annotations: None,
31 | text: Some(format!("Unknown operation: {}", input.params.name)),
32 | mime_type: None,
33 | r#type: ContentType::Text,
34 | data: None,
35 | }],
36 | }),
37 | }
38 | }
39 |
40 | fn read_file(input: CallToolRequest) -> Result<CallToolResult, Error> {
41 | let args = input.params.arguments.clone().unwrap_or_default();
42 | if let Some(Value::String(path)) = args.get("path") {
43 | match fs::read_to_string(path) {
44 | Ok(content) => Ok(CallToolResult {
45 | is_error: None,
46 | content: vec![Content {
47 | annotations: None,
48 | text: Some(content),
49 | mime_type: Some("text/plain".to_string()),
50 | r#type: ContentType::Text,
51 | data: None,
52 | }],
53 | }),
54 | Err(e) => Ok(CallToolResult {
55 | is_error: Some(true),
56 | content: vec![Content {
57 | annotations: None,
58 | text: Some(format!("Failed to read file: {}", e)),
59 | mime_type: None,
60 | r#type: ContentType::Text,
61 | data: None,
62 | }],
63 | }),
64 | }
65 | } else {
66 | Ok(CallToolResult {
67 | is_error: Some(true),
68 | content: vec![Content {
69 | annotations: None,
70 | text: Some("Please provide a path".into()),
71 | mime_type: None,
72 | r#type: ContentType::Text,
73 | data: None,
74 | }],
75 | })
76 | }
77 | }
78 |
79 | fn read_multiple_files(input: CallToolRequest) -> Result<CallToolResult, Error> {
80 | let args = input.params.arguments.clone().unwrap_or_default();
81 | if let Some(Value::Array(paths)) = args.get("paths") {
82 | let mut results = Vec::new();
83 | for path in paths {
84 | if let Value::String(path_str) = path {
85 | match fs::read_to_string(path_str) {
86 | Ok(content) => results.push(json!({
87 | "path": path_str,
88 | "content": content,
89 | "error": null
90 | })),
91 | Err(e) => results.push(json!({
92 | "path": path_str,
93 | "content": null,
94 | "error": e.to_string()
95 | })),
96 | }
97 | }
98 | }
99 | Ok(CallToolResult {
100 | is_error: None,
101 | content: vec![Content {
102 | annotations: None,
103 | text: Some(serde_json::to_string(&results)?),
104 | mime_type: Some("application/json".to_string()),
105 | r#type: ContentType::Text,
106 | data: None,
107 | }],
108 | })
109 | } else {
110 | Ok(CallToolResult {
111 | is_error: Some(true),
112 | content: vec![Content {
113 | annotations: None,
114 | text: Some("Please provide an array of paths".into()),
115 | mime_type: None,
116 | r#type: ContentType::Text,
117 | data: None,
118 | }],
119 | })
120 | }
121 | }
122 |
123 | fn write_file(input: CallToolRequest) -> Result<CallToolResult, Error> {
124 | let args = input.params.arguments.clone().unwrap_or_default();
125 | if let (Some(Value::String(path)), Some(Value::String(content))) =
126 | (args.get("path"), args.get("content"))
127 | {
128 | match fs::write(path, content) {
129 | Ok(_) => Ok(CallToolResult {
130 | is_error: None,
131 | content: vec![Content {
132 | annotations: None,
133 | text: Some("File written successfully".into()),
134 | mime_type: None,
135 | r#type: ContentType::Text,
136 | data: None,
137 | }],
138 | }),
139 | Err(e) => Ok(CallToolResult {
140 | is_error: Some(true),
141 | content: vec![Content {
142 | annotations: None,
143 | text: Some(format!("Failed to write file: {}", e)),
144 | mime_type: None,
145 | r#type: ContentType::Text,
146 | data: None,
147 | }],
148 | }),
149 | }
150 | } else {
151 | Ok(CallToolResult {
152 | is_error: Some(true),
153 | content: vec![Content {
154 | annotations: None,
155 | text: Some("Please provide path and content".into()),
156 | mime_type: None,
157 | r#type: ContentType::Text,
158 | data: None,
159 | }],
160 | })
161 | }
162 | }
163 |
164 | fn edit_file(input: CallToolRequest) -> Result<CallToolResult, Error> {
165 | let args = input.params.arguments.clone().unwrap_or_default();
166 | if let (Some(Value::String(path)), Some(Value::String(content))) =
167 | (args.get("path"), args.get("content"))
168 | {
169 | let mut file = OpenOptions::new().write(true).truncate(true).open(path)?;
170 | file.write_all(content.as_bytes())?;
171 | Ok(CallToolResult {
172 | is_error: None,
173 | content: vec![Content {
174 | annotations: None,
175 | text: Some("File edited successfully".into()),
176 | mime_type: None,
177 | r#type: ContentType::Text,
178 | data: None,
179 | }],
180 | })
181 | } else {
182 | Ok(CallToolResult {
183 | is_error: Some(true),
184 | content: vec![Content {
185 | annotations: None,
186 | text: Some("Please provide path and content".into()),
187 | mime_type: None,
188 | r#type: ContentType::Text,
189 | data: None,
190 | }],
191 | })
192 | }
193 | }
194 |
195 | fn create_dir(input: CallToolRequest) -> Result<CallToolResult, Error> {
196 | let args = input.params.arguments.clone().unwrap_or_default();
197 | if let Some(Value::String(path)) = args.get("path") {
198 | match fs::create_dir_all(path) {
199 | Ok(_) => Ok(CallToolResult {
200 | is_error: None,
201 | content: vec![Content {
202 | annotations: None,
203 | text: Some("Directory created successfully".into()),
204 | mime_type: None,
205 | r#type: ContentType::Text,
206 | data: None,
207 | }],
208 | }),
209 | Err(e) => Ok(CallToolResult {
210 | is_error: Some(true),
211 | content: vec![Content {
212 | annotations: None,
213 | text: Some(format!("Failed to create directory: {}", e)),
214 | mime_type: None,
215 | r#type: ContentType::Text,
216 | data: None,
217 | }],
218 | }),
219 | }
220 | } else {
221 | Ok(CallToolResult {
222 | is_error: Some(true),
223 | content: vec![Content {
224 | annotations: None,
225 | text: Some("Please provide a path".into()),
226 | mime_type: None,
227 | r#type: ContentType::Text,
228 | data: None,
229 | }],
230 | })
231 | }
232 | }
233 |
234 | fn list_dir(input: CallToolRequest) -> Result<CallToolResult, Error> {
235 | let args = input.params.arguments.clone().unwrap_or_default();
236 | if let Some(Value::String(path)) = args.get("path") {
237 | match fs::read_dir(path) {
238 | Ok(entries) => {
239 | let mut items = Vec::new();
240 | for entry in entries {
241 | if let Ok(entry) = entry {
242 | let path = entry.path();
243 | let metadata = entry.metadata()?;
244 | items.push(json!({
245 | "name": entry.file_name().to_string_lossy(),
246 | "path": path.to_string_lossy(),
247 | "is_file": metadata.is_file(),
248 | "is_dir": metadata.is_dir(),
249 | "size": metadata.len(),
250 | "modified": metadata.modified()?.duration_since(SystemTime::UNIX_EPOCH)?.as_secs()
251 | }));
252 | }
253 | }
254 | Ok(CallToolResult {
255 | is_error: None,
256 | content: vec![Content {
257 | annotations: None,
258 | text: Some(serde_json::to_string(&items)?),
259 | mime_type: Some("application/json".to_string()),
260 | r#type: ContentType::Text,
261 | data: None,
262 | }],
263 | })
264 | }
265 | Err(e) => Ok(CallToolResult {
266 | is_error: Some(true),
267 | content: vec![Content {
268 | annotations: None,
269 | text: Some(format!("Failed to list directory: {}", e)),
270 | mime_type: None,
271 | r#type: ContentType::Text,
272 | data: None,
273 | }],
274 | }),
275 | }
276 | } else {
277 | Ok(CallToolResult {
278 | is_error: Some(true),
279 | content: vec![Content {
280 | annotations: None,
281 | text: Some("Please provide a path".into()),
282 | mime_type: None,
283 | r#type: ContentType::Text,
284 | data: None,
285 | }],
286 | })
287 | }
288 | }
289 |
290 | fn move_file(input: CallToolRequest) -> Result<CallToolResult, Error> {
291 | let args = input.params.arguments.clone().unwrap_or_default();
292 | if let (Some(Value::String(from)), Some(Value::String(to))) = (args.get("from"), args.get("to"))
293 | {
294 | match fs::rename(from, to) {
295 | Ok(_) => Ok(CallToolResult {
296 | is_error: None,
297 | content: vec![Content {
298 | annotations: None,
299 | text: Some("File moved successfully".into()),
300 | mime_type: None,
301 | r#type: ContentType::Text,
302 | data: None,
303 | }],
304 | }),
305 | Err(e) => Ok(CallToolResult {
306 | is_error: Some(true),
307 | content: vec![Content {
308 | annotations: None,
309 | text: Some(format!("Failed to move file: {}", e)),
310 | mime_type: None,
311 | r#type: ContentType::Text,
312 | data: None,
313 | }],
314 | }),
315 | }
316 | } else {
317 | Ok(CallToolResult {
318 | is_error: Some(true),
319 | content: vec![Content {
320 | annotations: None,
321 | text: Some("Please provide from and to paths".into()),
322 | mime_type: None,
323 | r#type: ContentType::Text,
324 | data: None,
325 | }],
326 | })
327 | }
328 | }
329 |
330 | fn search_files(input: CallToolRequest) -> Result<CallToolResult, Error> {
331 | let args = input.params.arguments.clone().unwrap_or_default();
332 | if let (Some(Value::String(dir)), Some(Value::String(pattern))) =
333 | (args.get("directory"), args.get("pattern"))
334 | {
335 | let mut results = Vec::new();
336 | fn search_dir(dir: &Path, pattern: &str, results: &mut Vec<String>) -> io::Result<()> {
337 | for entry in fs::read_dir(dir)? {
338 | let entry = entry?;
339 | let path = entry.path();
340 | if path.is_dir() {
341 | search_dir(&path, pattern, results)?;
342 | } else if path
343 | .file_name()
344 | .unwrap_or_default()
345 | .to_string_lossy()
346 | .contains(pattern)
347 | {
348 | results.push(path.to_string_lossy().into_owned());
349 | }
350 | }
351 | Ok(())
352 | }
353 | match search_dir(Path::new(dir), pattern, &mut results) {
354 | Ok(_) => Ok(CallToolResult {
355 | is_error: None,
356 | content: vec![Content {
357 | annotations: None,
358 | text: Some(serde_json::to_string(&results)?),
359 | mime_type: Some("application/json".to_string()),
360 | r#type: ContentType::Text,
361 | data: None,
362 | }],
363 | }),
364 | Err(e) => Ok(CallToolResult {
365 | is_error: Some(true),
366 | content: vec![Content {
367 | annotations: None,
368 | text: Some(format!("Failed to search files: {}", e)),
369 | mime_type: None,
370 | r#type: ContentType::Text,
371 | data: None,
372 | }],
373 | }),
374 | }
375 | } else {
376 | Ok(CallToolResult {
377 | is_error: Some(true),
378 | content: vec![Content {
379 | annotations: None,
380 | text: Some("Please provide directory and pattern".into()),
381 | mime_type: None,
382 | r#type: ContentType::Text,
383 | data: None,
384 | }],
385 | })
386 | }
387 | }
388 |
389 | fn get_file_info(input: CallToolRequest) -> Result<CallToolResult, Error> {
390 | let args = input.params.arguments.clone().unwrap_or_default();
391 | if let Some(Value::String(path)) = args.get("path") {
392 | match fs::metadata(path) {
393 | Ok(metadata) => {
394 | let info = json!({
395 | "size": metadata.len(),
396 | "is_file": metadata.is_file(),
397 | "is_dir": metadata.is_dir(),
398 | "modified": metadata.modified()?.duration_since(SystemTime::UNIX_EPOCH)?.as_secs(),
399 | "created": metadata.created()?.duration_since(SystemTime::UNIX_EPOCH)?.as_secs(),
400 | "accessed": metadata.accessed()?.duration_since(SystemTime::UNIX_EPOCH)?.as_secs(),
401 | });
402 | Ok(CallToolResult {
403 | is_error: None,
404 | content: vec![Content {
405 | annotations: None,
406 | text: Some(serde_json::to_string(&info)?),
407 | mime_type: Some("application/json".to_string()),
408 | r#type: ContentType::Text,
409 | data: None,
410 | }],
411 | })
412 | }
413 | Err(e) => Ok(CallToolResult {
414 | is_error: Some(true),
415 | content: vec![Content {
416 | annotations: None,
417 | text: Some(format!("Failed to get file info: {}", e)),
418 | mime_type: None,
419 | r#type: ContentType::Text,
420 | data: None,
421 | }],
422 | }),
423 | }
424 | } else {
425 | Ok(CallToolResult {
426 | is_error: Some(true),
427 | content: vec![Content {
428 | annotations: None,
429 | text: Some("Please provide a path".into()),
430 | mime_type: None,
431 | r#type: ContentType::Text,
432 | data: None,
433 | }],
434 | })
435 | }
436 | }
437 |
438 | pub(crate) fn describe() -> Result<ListToolsResult, Error> {
439 | Ok(ListToolsResult {
440 | tools: vec![
441 | ToolDescription {
442 | name: "read_file".into(),
443 | description: "Read the contents of a file".into(),
444 | input_schema: json!({
445 | "type": "object",
446 | "properties": {
447 | "path": {
448 | "type": "string",
449 | "description": "Path to the file to read",
450 | },
451 | },
452 | "required": ["path"],
453 | })
454 | .as_object()
455 | .unwrap()
456 | .clone(),
457 | },
458 | ToolDescription {
459 | name: "read_multiple_files".into(),
460 | description: "Read contents of multiple files".into(),
461 | input_schema: json!({
462 | "type": "object",
463 | "properties": {
464 | "paths": {
465 | "type": "array",
466 | "items": {
467 | "type": "string"
468 | },
469 | "description": "Array of file paths to read",
470 | },
471 | },
472 | "required": ["paths"],
473 | })
474 | .as_object()
475 | .unwrap()
476 | .clone(),
477 | },
478 | ToolDescription {
479 | name: "write_file".into(),
480 | description: "Write content to a file".into(),
481 | input_schema: json!({
482 | "type": "object",
483 | "properties": {
484 | "path": {
485 | "type": "string",
486 | "description": "Path where to write the file",
487 | },
488 | "content": {
489 | "type": "string",
490 | "description": "Content to write to the file",
491 | },
492 | },
493 | "required": ["path", "content"],
494 | })
495 | .as_object()
496 | .unwrap()
497 | .clone(),
498 | },
499 | ToolDescription {
500 | name: "edit_file".into(),
501 | description: "Edit an existing file's content".into(),
502 | input_schema: json!({
503 | "type": "object",
504 | "properties": {
505 | "path": {
506 | "type": "string",
507 | "description": "Path to the file to edit",
508 | },
509 | "content": {
510 | "type": "string",
511 | "description": "New content for the file",
512 | },
513 | },
514 | "required": ["path", "content"],
515 | })
516 | .as_object()
517 | .unwrap()
518 | .clone(),
519 | },
520 | ToolDescription {
521 | name: "create_dir".into(),
522 | description: "Create a new directory".into(),
523 | input_schema: json!({
524 | "type": "object",
525 | "properties": {
526 | "path": {
527 | "type": "string",
528 | "description": "Path where to create the directory",
529 | },
530 | },
531 | "required": ["path"],
532 | })
533 | .as_object()
534 | .unwrap()
535 | .clone(),
536 | },
537 | ToolDescription {
538 | name: "list_dir".into(),
539 | description: "List contents of a directory".into(),
540 | input_schema: json!({
541 | "type": "object",
542 | "properties": {
543 | "path": {
544 | "type": "string",
545 | "description": "Path to the directory to list",
546 | },
547 | },
548 | "required": ["path"],
549 | })
550 | .as_object()
551 | .unwrap()
552 | .clone(),
553 | },
554 | ToolDescription {
555 | name: "move_file".into(),
556 | description: "Move a file from one location to another".into(),
557 | input_schema: json!({
558 | "type": "object",
559 | "properties": {
560 | "from": {
561 | "type": "string",
562 | "description": "Source path of the file",
563 | },
564 | "to": {
565 | "type": "string",
566 | "description": "Destination path for the file",
567 | },
568 | },
569 | "required": ["from", "to"],
570 | })
571 | .as_object()
572 | .unwrap()
573 | .clone(),
574 | },
575 | ToolDescription {
576 | name: "search_files".into(),
577 | description: "Search for files matching a pattern in a directory".into(),
578 | input_schema: json!({
579 | "type": "object",
580 | "properties": {
581 | "directory": {
582 | "type": "string",
583 | "description": "Directory to search in",
584 | },
585 | "pattern": {
586 | "type": "string",
587 | "description": "Pattern to match against filenames",
588 | },
589 | },
590 | "required": ["directory", "pattern"],
591 | })
592 | .as_object()
593 | .unwrap()
594 | .clone(),
595 | },
596 | ToolDescription {
597 | name: "get_file_info".into(),
598 | description: "Get information about a file or directory".into(),
599 | input_schema: json!({
600 | "type": "object",
601 | "properties": {
602 | "path": {
603 | "type": "string",
604 | "description": "Path to get information about",
605 | },
606 | },
607 | "required": ["path"],
608 | })
609 | .as_object()
610 | .unwrap()
611 | .clone(),
612 | },
613 | ],
614 | })
615 | }
616 |
```
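
The `describe()` result above is the contract clients see: each tool's `input_schema` lists the required argument keys. Below is a hedged sketch of argument payloads matching two of those schemas, built with `serde_json::json!` (the paths are made up for illustration; the surrounding `CallToolRequest` envelope comes from the plugin's generated `pdk` types and is not reproduced here):

```rust
use serde_json::json;

fn main() {
    // "write_file" requires "path" and "content".
    let write_args = json!({
        "path": "/tmp/example.txt",
        "content": "hello from the fs plugin"
    });

    // "search_files" requires "directory" and "pattern"; the plugin walks
    // the directory recursively and matches the pattern as a substring of
    // each file name.
    let search_args = json!({
        "directory": "/tmp",
        "pattern": "example"
    });

    println!("{write_args}\n{search_args}");
}
```

Note that the `search_files` handler performs a plain substring match on file names (`.contains(pattern)`) rather than glob matching, which keeps the recursive `search_dir` helper dependency-free.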