This is page 6 of 8. Use http://codebase.md/tuananh/hyper-mcp?lines=false&page={x} to view the full context.
# Directory Structure
```
├── .cursor
│ └── rules
│ └── print-ctx-size.mdc
├── .dockerignore
├── .github
│ ├── renovate.json5
│ └── workflows
│ ├── ci.yml
│ ├── nightly.yml
│ └── release.yml
├── .gitignore
├── .gitmodules
├── .hadolint.yaml
├── .pre-commit-config.yaml
├── .windsurf
│ └── rules
│ ├── print-ctx-size.md
│ └── think.md
├── assets
│ ├── cursor-mcp-1.png
│ ├── cursor-mcp.png
│ ├── eval-py.jpg
│ └── logo.png
├── Cargo.lock
├── Cargo.toml
├── config.example.json
├── config.example.yaml
├── CREATING_PLUGINS.md
├── DEPLOYMENT.md
├── Dockerfile
├── examples
│ └── plugins
│ ├── v1
│ │ ├── arxiv
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── context7
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── crates-io
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── crypto-price
│ │ │ ├── Dockerfile
│ │ │ ├── go.mod
│ │ │ ├── go.sum
│ │ │ ├── main.go
│ │ │ ├── pdk.gen.go
│ │ │ └── README.md
│ │ ├── eval-py
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── fetch
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── fs
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── github
│ │ │ ├── .gitignore
│ │ │ ├── branches.go
│ │ │ ├── Dockerfile
│ │ │ ├── files.go
│ │ │ ├── gists.go
│ │ │ ├── go.mod
│ │ │ ├── go.sum
│ │ │ ├── issues.go
│ │ │ ├── main.go
│ │ │ ├── pdk.gen.go
│ │ │ ├── README.md
│ │ │ └── repo.go
│ │ ├── gitlab
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── gomodule
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── hash
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.lock
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── maven
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── meme-generator
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── generate_embedded.py
│ │ │ ├── README.md
│ │ │ ├── src
│ │ │ │ ├── embedded.rs
│ │ │ │ ├── lib.rs
│ │ │ │ └── pdk.rs
│ │ │ └── templates.json
│ │ ├── memory
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── myip
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.lock
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── qdrant
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ ├── pdk.rs
│ │ │ └── qdrant_client.rs
│ │ ├── qr-code
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.lock
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── serper
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── sqlite
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── think
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ └── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ ├── time
│ │ │ ├── .cargo
│ │ │ │ └── config.toml
│ │ │ ├── .gitignore
│ │ │ ├── Cargo.toml
│ │ │ ├── Dockerfile
│ │ │ ├── README.md
│ │ │ ├── src
│ │ │ │ ├── lib.rs
│ │ │ │ └── pdk.rs
│ │ │ └── time.wasm
│ │ └── tool-list-changed
│ │ ├── .gitignore
│ │ ├── Cargo.toml
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ ├── src
│ │ │ ├── lib.rs
│ │ │ └── pdk.rs
│ │ └── tool_list_changed.wasm
│ └── v2
│ └── rstime
│ ├── .cargo
│ │ └── config.toml
│ ├── .gitignore
│ ├── Cargo.toml
│ ├── Dockerfile
│ ├── README.md
│ ├── rstime.wasm
│ └── src
│ ├── lib.rs
│ └── pdk
│ ├── exports.rs
│ ├── imports.rs
│ ├── mod.rs
│ └── types.rs
├── iac
│ ├── .terraform.lock.hcl
│ ├── main.tf
│ ├── outputs.tf
│ └── variables.tf
├── justfile
├── LICENSE
├── README.md
├── RUNTIME_CONFIG.md
├── rust-toolchain.toml
├── server.json
├── SKIP_TOOLS_GUIDE.md
├── src
│ ├── cli.rs
│ ├── config.rs
│ ├── https_auth.rs
│ ├── logging.rs
│ ├── main.rs
│ ├── naming.rs
│ ├── plugin.rs
│ ├── service.rs
│ └── wasm
│ ├── http.rs
│ ├── mod.rs
│ ├── oci.rs
│ └── s3.rs
├── templates
│ └── plugins
│ ├── go
│ │ ├── .gitignore
│ │ ├── Dockerfile
│ │ ├── exports.go
│ │ ├── go.mod
│ │ ├── go.sum
│ │ ├── imports.go
│ │ ├── main.go
│ │ ├── README.md
│ │ └── types.go
│ ├── README.md
│ └── rust
│ ├── .cargo
│ │ └── config.toml
│ ├── .gitignore
│ ├── Cargo.toml
│ ├── Dockerfile
│ ├── README.md
│ └── src
│ ├── lib.rs
│ └── pdk
│ ├── exports.rs
│ ├── imports.rs
│ ├── mod.rs
│ └── types.rs
├── tests
│ └── fixtures
│ ├── config_with_auths.json
│ ├── config_with_auths.yaml
│ ├── documentation_example.json
│ ├── documentation_example.yaml
│ ├── invalid_auth_config.yaml
│ ├── invalid_plugin_name.yaml
│ ├── invalid_structure.yaml
│ ├── invalid_url.yaml
│ ├── keyring_auth_config.yaml
│ ├── skip_tools_examples.yaml
│ ├── unsupported_config.txt
│ ├── valid_config.json
│ └── valid_config.yaml
└── xtp-plugin-schema.json
```
# Files
--------------------------------------------------------------------------------
/examples/plugins/v1/gitlab/src/lib.rs:
--------------------------------------------------------------------------------
```rust
mod pdk;
use std::collections::BTreeMap;
use base64::prelude::*;
use extism_pdk::*;
use serde_json::Value;
use pdk::types::{
CallToolRequest, CallToolResult, Content, ContentType, ListToolsResult, ToolDescription,
};
use serde_json::json;
use termtree::Tree;
// Helper struct for deserializing GitLab API response
// https://docs.gitlab.com/api/repositories/#list-repository-tree
#[derive(serde::Deserialize)]
struct GitLabRepoEntry {
id: String,
name: String,
r#type: String, // "tree" or "blob"
path: String,
mode: String,
}
// Helper struct for building the tree
#[derive(Debug)]
struct FileTreeNode {
name: String,
children: BTreeMap<String, FileTreeNode>,
}
impl FileTreeNode {
fn new(name: &str) -> Self {
FileTreeNode {
name: name.to_string(),
children: BTreeMap::new(),
}
}
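// Insert a path (already split into segments) into the tree, creating any
// intermediate nodes that do not exist yet.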
fn insert_path(&mut self, path_segments: &[&str]) {
if path_segments.is_empty() {
return;
}
let current_segment = path_segments[0];
let node = self
.children
.entry(current_segment.to_string())
.or_insert_with(|| FileTreeNode::new(current_segment));
if path_segments.len() > 1 {
node.insert_path(&path_segments[1..]);
}
}
}
// Convert a FileTreeNode into a termtree::Tree<String> for rendering
fn convert_file_tree_to_termtree(file_node: &FileTreeNode) -> Tree<String> {
let mut tree_node = Tree::new(file_node.name.clone());
// BTreeMap values iterate in key order, so children are emitted sorted by name
for child_file_node in file_node.children.values() {
tree_node.push(convert_file_tree_to_termtree(child_file_node));
}
tree_node
}
// Build a FileTreeNode hierarchy from the flat GitLab tree entries and format it as a string
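// Rendered with termtree's default glyphs the result looks roughly like this
// (illustrative names only):
//     my-project
//     ├── src
//     │   ├── lib.rs
//     │   └── pdk.rs
//     └── Cargo.toml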
fn build_and_format_tree_from_entries(
entries: Vec<GitLabRepoEntry>,
requested_path_opt: Option<&str>,
project_id_str: &str,
) -> Result<String, String> {
if entries.is_empty() {
return Ok("Repository tree is empty or path not found.".to_string());
}
let root_display_name = match requested_path_opt {
Some(req_path) if !req_path.is_empty() => req_path
.split('/')
.next_back()
.unwrap_or("root")
.to_string(),
_ => project_id_str
.split('/')
.next_back()
.unwrap_or("root")
.to_string(),
};
let mut root_node = FileTreeNode::new(&root_display_name);
for entry in entries {
let effective_path = match requested_path_opt {
Some(base_path_val)
if !base_path_val.is_empty() && entry.path.starts_with(base_path_val) =>
{
entry
.path
.strip_prefix(base_path_val)
.unwrap_or(&entry.path)
.trim_start_matches('/')
.to_string()
}
_ => entry.path.clone(),
};
if effective_path.is_empty() {
continue;
}
let path_segments: Vec<&str> = effective_path
.split('/')
.filter(|s| !s.is_empty())
.collect();
if !path_segments.is_empty() {
root_node.insert_path(&path_segments);
}
}
let termtree_root = convert_file_tree_to_termtree(&root_node);
Ok(termtree_root.to_string())
}
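// Read GITLAB_TOKEN (required) and GITLAB_URL (optional, defaults to
// https://gitlab.com/api/v4) from the plugin configuration.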
fn get_gitlab_config() -> Result<(String, String), Error> {
let token = config::get("GITLAB_TOKEN")?
.ok_or_else(|| Error::msg("GITLAB_TOKEN configuration is required but not set"))?;
let url = config::get("GITLAB_URL")?.unwrap_or_else(|| "https://gitlab.com/api/v4".to_string());
Ok((token, url))
}
/// Helper function to check if an HTTP status code represents success (200-299)
fn is_success_status(status_code: u16) -> bool {
(200..300).contains(&status_code)
}
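// Project identifiers may be numeric IDs or "namespace/project" paths; values still
// containing a '/' are percent-encoded so they fit in a URL path segment, while
// numeric or already-encoded values pass through unchanged.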
fn urlencode_if_needed(input: &str) -> String {
if input.contains("/") {
urlencoding::encode(input).to_string()
} else {
input.to_string()
}
}
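// Entry point for tool invocations: route the request to the handler matching the tool name.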
pub(crate) fn call(input: CallToolRequest) -> Result<CallToolResult, Error> {
info!("call: {:?}", input);
match input.params.name.as_str() {
// Issues
"gl_create_issue" => create_issue(input),
"gl_get_issue" => get_issue(input),
"gl_update_issue" => update_issue(input),
"gl_add_issue_comment" => add_issue_comment(input),
"gl_list_issues" => gl_list_issues(input),
// Files
"gl_get_file_contents" => get_file_contents(input),
"gl_create_or_update_file" => create_or_update_file(input),
"gl_delete_file" => delete_file(input),
// Branches
"gl_create_branch" => create_branch(input),
"gl_list_branches" => gl_list_branches(input),
"gl_create_merge_request" => create_merge_request(input),
"gl_update_merge_request" => update_merge_request(input),
"gl_get_merge_request" => gl_get_merge_request(input),
// Snippets (GitLab equivalent of Gists)
"gl_create_snippet" => create_snippet(input),
"gl_update_snippet" => update_snippet(input),
"gl_get_snippet" => get_snippet(input),
"gl_delete_snippet" => delete_snippet(input),
// Repository tree
"gl_get_repo_tree" => gl_get_repo_tree(input),
// Repository members
"gl_get_repo_members" => gl_get_repo_members(input),
_ => Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Unknown operation: {}", input.params.name)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
}),
}
}
fn create_issue(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (
Some(Value::String(project_id)),
Some(Value::String(title)),
Some(Value::String(description)),
) = (
args.get("project_id"),
args.get("title"),
args.get("description"),
) {
let url = format!(
"{}/projects/{}/issues",
gitlab_url,
urlencode_if_needed(project_id)
);
let mut body = json!({
"title": title,
"description": description,
});
// Add labels if provided
if let Some(Value::String(labels)) = args.get("labels") {
body.as_object_mut()
.unwrap()
.insert("labels".to_string(), Value::String(labels.clone()));
}
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("Content-Type".to_string(), "application/json".to_string());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("POST".to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to create issue: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id, title, and description".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn get_issue(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (Some(Value::String(project_id)), Some(Value::String(issue_iid))) =
(args.get("project_id"), args.get("issue_iid"))
{
let url = format!(
"{}/projects/{}/issues/{}",
gitlab_url,
urlencode_if_needed(project_id),
issue_iid
);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("GET".to_string()),
};
let res = http::request::<()>(&req, None)?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to get issue: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id and issue_iid".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn update_issue(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (Some(Value::String(project_id)), Some(Value::String(issue_iid))) =
(args.get("project_id"), args.get("issue_iid"))
{
let url = format!(
"{}/projects/{}/issues/{}",
gitlab_url,
urlencode_if_needed(project_id),
issue_iid
);
let mut body_map = serde_json::Map::new();
if let Some(Value::String(title)) = args.get("title") {
body_map.insert("title".to_string(), json!(title));
}
if let Some(Value::String(description)) = args.get("description") {
body_map.insert("description".to_string(), json!(description));
}
if let Some(Value::String(add_labels)) = args.get("add_labels") {
body_map.insert("add_labels".to_string(), json!(add_labels));
}
if let Some(Value::String(remove_labels)) = args.get("remove_labels") {
body_map.insert("remove_labels".to_string(), json!(remove_labels));
}
if let Some(Value::String(due_date)) = args.get("due_date") {
body_map.insert("due_date".to_string(), json!(due_date));
}
if body_map.is_empty() {
return Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide at least one field to update (e.g., title, description, add_labels, remove_labels, due_date)".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
});
}
let body = Value::Object(body_map);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("Content-Type".to_string(), "application/json".to_string());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("PUT".to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to update issue: {} - Response: {}",
res.status_code(),
String::from_utf8_lossy(&res.body())
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id, issue_iid, and at least one field to update (title, description, add_labels, remove_labels, or due_date)".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn add_issue_comment(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (
Some(Value::String(project_id)),
Some(Value::String(issue_iid)),
Some(Value::String(comment)),
) = (
args.get("project_id"),
args.get("issue_iid"),
args.get("comment"),
) {
let url = format!(
"{}/projects/{}/issues/{}/notes",
gitlab_url,
urlencode_if_needed(project_id),
issue_iid
);
let body = json!({
"body": comment,
});
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("Content-Type".to_string(), "application/json".to_string());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("POST".to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to add comment: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id, issue_iid, and comment".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn get_file_contents(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (Some(Value::String(project_id)), Some(Value::String(file_path))) =
(args.get("project_id"), args.get("file_path"))
{
let ref_name = args.get("ref").and_then(|v| v.as_str()).unwrap_or("HEAD");
let url = format!(
"{}/projects/{}/repository/files/{}?ref={}",
gitlab_url,
urlencode_if_needed(project_id),
urlencode_if_needed(file_path),
ref_name
);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url: url.clone(),
headers,
method: Some("GET".to_string()),
};
let res = http::request::<()>(&req, None)?;
if is_success_status(res.status_code()) {
// Parse the response to get the file content from the "content" field
if let Ok(json) = serde_json::from_slice::<Value>(&res.body()) {
if let Some(content) = json.get("content").and_then(|v| v.as_str()) {
// Decode base64 content
match BASE64_STANDARD.decode(content.as_bytes()) {
Ok(decoded_bytes) => {
if let Ok(decoded_content) = String::from_utf8(decoded_bytes) {
return Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(decoded_content),
mime_type: Some("text/plain".to_string()),
r#type: ContentType::Text,
data: None,
}],
});
}
}
Err(e) => {
return Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to decode base64 content: {}", e)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
});
}
}
}
}
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Failed to parse file contents from response".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to get file contents: {} {}",
url.clone(),
res.status_code()
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id and file_path".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn delete_file(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (
Some(Value::String(project_id)),
Some(Value::String(file_path)),
Some(Value::String(branch)),
) = (
args.get("project_id"),
args.get("file_path"),
args.get("branch"),
) {
let commit_message = args
.get("commit_message")
.and_then(|v| v.as_str())
.unwrap_or("Delete file via API")
.to_string();
let url = format!(
"{}/projects/{}/repository/files/{}",
gitlab_url,
urlencode_if_needed(project_id),
urlencode_if_needed(file_path)
);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("Content-Type".to_string(), "application/json".to_string());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let mut body_map = serde_json::Map::new();
body_map.insert("branch".to_string(), json!(branch));
body_map.insert("commit_message".to_string(), json!(commit_message));
if let Some(Value::String(author_email)) = args.get("author_email") {
body_map.insert("author_email".to_string(), json!(author_email));
}
if let Some(Value::String(author_name)) = args.get("author_name") {
body_map.insert("author_name".to_string(), json!(author_name));
}
let body = Value::Object(body_map);
let req = HttpRequest {
url,
headers,
method: Some("DELETE".to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to delete file (status {}): {}",
res.status_code(),
String::from_utf8_lossy(&res.body())
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id, file_path, and branch".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn create_or_update_file(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (
Some(Value::String(project_id)),
Some(Value::String(file_path)),
Some(Value::String(content)),
Some(Value::String(branch)),
) = (
args.get("project_id"),
args.get("file_path"),
args.get("content"),
args.get("branch"),
) {
let commit_message = args
.get("commit_message")
.and_then(|v| v.as_str())
.unwrap_or("Update file via API")
.to_string();
// URL for checking file existence. Note: GitLab GET file API needs ref in query.
let check_file_url = format!(
"{}/projects/{}/repository/files/{}?ref={}",
gitlab_url,
urlencode_if_needed(project_id),
urlencode_if_needed(file_path),
branch
);
let mut headers_check = BTreeMap::new();
headers_check.insert("PRIVATE-TOKEN".to_string(), token.clone());
headers_check.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let check_req = HttpRequest {
url: check_file_url,
headers: headers_check,
method: Some("GET".to_string()),
};
let check_res = http::request::<()>(&check_req, None)?;
let http_method = match check_res.status_code() {
200 => "PUT", // File exists, so update
404 => "POST", // File does not exist, so create
_ => {
return Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to check file existence (status {} on GET {}): {}",
check_res.status_code(),
check_req.url,
String::from_utf8_lossy(&check_res.body())
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
});
}
};
// URL for POST/PUT operations (does not have ref in query string, ref is in body via 'branch' parameter)
// Ensure file_path is URL encoded for this URL as well.
let operation_url = format!(
"{}/projects/{}/repository/files/{}",
gitlab_url,
urlencode_if_needed(project_id),
urlencode_if_needed(file_path)
);
let mut body_map = serde_json::Map::new();
body_map.insert("branch".to_string(), json!(branch));
body_map.insert("content".to_string(), json!(content));
body_map.insert("commit_message".to_string(), json!(commit_message));
if let Some(Value::String(author_email)) = args.get("author_email") {
body_map.insert("author_email".to_string(), json!(author_email));
}
if let Some(Value::String(author_name)) = args.get("author_name") {
body_map.insert("author_name".to_string(), json!(author_name));
}
// Note: For 'POST' (create), 'encoding' can be 'base64'.
// GitLab API often expects content to be base64 encoded for new files if not plain text.
// For simplicity, we assume content is plain text, and GitLab handles it.
// If issues arise with binary or special characters, 'content' might need explicit base64 encoding
// and adding "encoding": "base64" to body_map.
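// A minimal sketch of that fallback (not wired up here; `use_base64` is a
// hypothetical flag, not an existing argument of this tool):
//     if use_base64 {
//         body_map.insert("content".to_string(), json!(BASE64_STANDARD.encode(content)));
//         body_map.insert("encoding".to_string(), json!("base64"));
//     }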
let body = Value::Object(body_map);
let mut headers_op = BTreeMap::new();
headers_op.insert("PRIVATE-TOKEN".to_string(), token);
headers_op.insert("Content-Type".to_string(), "application/json".to_string());
headers_op.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url: operation_url.clone(),
headers: headers_op,
method: Some(http_method.to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to {} file (method {}, status {} on {}): Response: {}",
if http_method == "POST" {
"create"
} else {
"update"
},
http_method,
res.status_code(),
req.url,
String::from_utf8_lossy(&res.body())
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id, file_path, content, and branch".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn create_branch(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (
Some(Value::String(project_id)),
Some(Value::String(branch_name)),
Some(Value::String(ref_name)),
) = (
args.get("project_id"),
args.get("branch_name"),
args.get("ref"),
) {
let url = format!(
"{}/projects/{}/repository/branches",
gitlab_url,
urlencode_if_needed(project_id)
);
let body = json!({
"branch": branch_name,
"ref": ref_name
});
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("Content-Type".to_string(), "application/json".to_string());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("POST".to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to create branch: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id, branch_name, and ref".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn create_merge_request(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (
Some(Value::String(project_id)),
Some(Value::String(source_branch)),
Some(Value::String(target_branch)),
) = (
args.get("project_id"),
args.get("source_branch"),
args.get("target_branch"),
) {
let url = format!(
"{}/projects/{}/merge_requests",
gitlab_url,
urlencode_if_needed(project_id)
);
// Use provided title if present, otherwise use default format
let title = args
.get("title")
.and_then(|t| t.as_str())
.map(|t| t.to_string())
.unwrap_or_else(|| format!("Merge {} into {}", source_branch, target_branch));
let body = json!({
"source_branch": source_branch,
"target_branch": target_branch,
"title": title,
});
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("Content-Type".to_string(), "application/json".to_string());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("POST".to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to create merge request: {}",
res.status_code()
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id, source_branch, and target_branch".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn update_merge_request(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (
Some(Value::String(project_id)),
Some(Value::String(merge_request_iid)),
Some(Value::String(title)),
Some(Value::String(description)),
) = (
args.get("project_id"),
args.get("merge_request_iid"),
args.get("title"),
args.get("description"),
) {
let url = format!(
"{}/projects/{}/merge_requests/{}",
gitlab_url,
urlencode_if_needed(project_id),
merge_request_iid
);
let mut body_map = serde_json::Map::new();
body_map.insert("title".to_string(), json!(title));
body_map.insert("description".to_string(), json!(description));
let body = Value::Object(body_map);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("Content-Type".to_string(), "application/json".to_string());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("PUT".to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to update merge request: {} - Response: {}",
res.status_code(),
String::from_utf8_lossy(&res.body())
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(
"Please provide project_id, merge_request_iid, title, and description".into(),
),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn gl_get_merge_request(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (Some(Value::String(project_id)), Some(Value::String(merge_request_iid))) =
(args.get("project_id"), args.get("merge_request_iid"))
{
let url = format!(
"{}/projects/{}/merge_requests/{}",
gitlab_url,
urlencode_if_needed(project_id),
merge_request_iid
);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("GET".to_string()),
};
let res = http::request::<()>(&req, None)?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to get merge request: {} - Response: {}",
res.status_code(),
String::from_utf8_lossy(&res.body())
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id and merge_request_iid".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn create_snippet(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (Some(Value::String(title)), Some(Value::String(content))) =
(args.get("title"), args.get("content"))
{
let url = format!("{}/snippets", gitlab_url);
// Get visibility from args or default to "private"
let visibility = args
.get("visibility")
.and_then(|v| v.as_str())
.unwrap_or("private");
let body = json!({
"title": title,
"file_name": format!("{}.txt", title.to_lowercase().replace(" ", "_")),
"content": content,
"visibility": visibility
});
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("Content-Type".to_string(), "application/json".to_string());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("POST".to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to create snippet: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide title and content".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn update_snippet(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let (
Some(Value::String(snippet_id)),
Some(Value::String(title)),
Some(Value::String(content)),
) = (
args.get("snippet_id"),
args.get("title"),
args.get("content"),
) {
let url = format!("{}/snippets/{}", gitlab_url, snippet_id);
let body = json!({
"title": title,
"file_name": format!("{}.txt", title.to_lowercase().replace(" ", "_")),
"content": content,
});
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("Content-Type".to_string(), "application/json".to_string());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("PUT".to_string()),
};
let res = http::request(&req, Some(&body.to_string()))?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to update snippet: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide snippet_id, title, and content".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn get_snippet(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let Some(Value::String(snippet_id)) = args.get("snippet_id") {
let url = format!("{}/snippets/{}", gitlab_url, snippet_id);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("GET".to_string()),
};
let res = http::request::<()>(&req, None)?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to get snippet: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide snippet_id".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn delete_snippet(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let Some(Value::String(snippet_id)) = args.get("snippet_id") {
let url = format!("{}/snippets/{}", gitlab_url, snippet_id);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("DELETE".to_string()),
};
let res = http::request::<()>(&req, None)?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some("Snippet deleted successfully".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to delete snippet: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide snippet_id".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn gl_list_branches(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let Some(Value::String(project_id)) = args.get("project_id") {
let url = format!(
"{}/projects/{}/repository/branches",
gitlab_url,
urlencode_if_needed(project_id)
);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("GET".to_string()),
};
let res = http::request::<()>(&req, None)?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to list branches: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn gl_list_issues(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let Some(Value::String(project_id)) = args.get("project_id") {
let mut url_params = vec![];
if let Some(Value::String(state)) = args.get("state") {
url_params.push(format!("state={}", state));
}
if let Some(Value::String(labels)) = args.get("labels") {
url_params.push(format!("labels={}", urlencoding::encode(labels)));
}
let query_string = if url_params.is_empty() {
"".to_string()
} else {
format!("?{}", url_params.join("&"))
};
let url = format!(
"{}/projects/{}/issues{}",
gitlab_url,
urlencode_if_needed(project_id),
query_string
);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token);
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url,
headers,
method: Some("GET".to_string()),
};
let res = http::request::<()>(&req, None)?;
if is_success_status(res.status_code()) {
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(String::from_utf8_lossy(&res.body()).to_string()),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!("Failed to list issues: {}", res.status_code())),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn gl_get_repo_tree(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let Some(Value::String(project_id_val)) = args.get("project_id") {
let project_id = project_id_val.as_str();
let requested_path_opt = args.get("path").and_then(|v| v.as_str());
let ref_name_opt = args.get("ref").and_then(|v| v.as_str());
let recursive_opt = args.get("recursive").and_then(|v| v.as_bool());
let mut all_entries: Vec<GitLabRepoEntry> = Vec::new();
let mut current_page_number: u32 = 1;
const PER_PAGE_COUNT: u32 = 100; // GitLab's typical max per_page
const MAX_PAGES: u32 = 100; // Safety break: 100 pages * 100 items/page = 10,000 items
loop {
if current_page_number > MAX_PAGES {
// Log this or return a partial result with a warning if desired
// For now, just break and use what we have.
// Consider returning an error if this limit is hit.
break;
}
let mut url_params = vec![
format!("per_page={}", PER_PAGE_COUNT),
format!("page={}", current_page_number),
];
if let Some(path_str) = requested_path_opt {
if !path_str.is_empty() {
url_params.push(format!("path={}", urlencoding::encode(path_str)));
}
}
if let Some(ref_name_str) = ref_name_opt {
url_params.push(format!("ref={}", urlencoding::encode(ref_name_str)));
}
if let Some(recursive_bool) = recursive_opt {
if recursive_bool {
url_params.push("recursive=true".to_string());
}
}
let query_string = format!("?{}", url_params.join("&"));
let url = format!(
"{}/projects/{}/repository/tree{}",
gitlab_url,
urlencode_if_needed(project_id),
query_string
);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token.clone()); // Clone token for loop
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url: url.clone(),
headers,
method: Some("GET".to_string()),
};
let res = http::request::<()>(&req, None)?;
if !is_success_status(res.status_code()) {
return Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to get repository tree page {} from {}: {} - Response: {}",
current_page_number,
req.url,
res.status_code(),
String::from_utf8_lossy(&res.body())
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
});
}
match serde_json::from_slice::<Vec<GitLabRepoEntry>>(&res.body()) {
Ok(page_entries) => {
let num_fetched = page_entries.len();
all_entries.extend(page_entries);
if num_fetched < PER_PAGE_COUNT as usize {
break; // Last page fetched
}
}
Err(e) => {
return Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to parse repository tree data from GitLab API (page {}): {}",
current_page_number, e
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
});
}
}
current_page_number += 1;
}
// Proceed with building the tree from all_entries
match build_and_format_tree_from_entries(all_entries, requested_path_opt, project_id) {
Ok(tree_string) => Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(tree_string),
mime_type: Some("text/plain".to_string()),
r#type: ContentType::Text,
data: None,
}],
}),
Err(e_str) => Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(e_str),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
}),
}
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
fn gl_get_repo_members(input: CallToolRequest) -> Result<CallToolResult, Error> {
let args = input.params.arguments.clone().unwrap_or_default();
let (token, gitlab_url) = get_gitlab_config()?;
if let Some(Value::String(project_id_val)) = args.get("project_id") {
let project_id = project_id_val.as_str();
let include_inherited = args
.get("include_inherited_members")
.and_then(|v| v.as_bool())
.unwrap_or(false);
let query_opt = args.get("query").and_then(|v| v.as_str());
let members_path = if include_inherited {
"members/all"
} else {
"members"
};
let mut all_members_json: Vec<Value> = Vec::new();
let mut current_page_number: u32 = 1;
const PER_PAGE_COUNT: u32 = 100; // GitLab's typical max per_page
const MAX_PAGES: u32 = 100; // Safety break: 100 pages * 100 items/page = 10,000 members
loop {
if current_page_number > MAX_PAGES {
// Log this or return a partial result with a warning if desired
break;
}
let mut url_params = vec![
format!("per_page={}", PER_PAGE_COUNT),
format!("page={}", current_page_number),
];
if let Some(query_str) = query_opt {
url_params.push(format!("query={}", urlencoding::encode(query_str)));
}
let query_string = format!("?{}", url_params.join("&"));
let url = format!(
"{}/projects/{}/{}{}",
gitlab_url,
urlencode_if_needed(project_id),
members_path,
query_string
);
let mut headers = BTreeMap::new();
headers.insert("PRIVATE-TOKEN".to_string(), token.clone());
headers.insert("User-Agent".to_string(), "hyper-mcp/0.1.0".to_string());
let req = HttpRequest {
url: url.clone(),
headers,
method: Some("GET".to_string()),
};
let res = http::request::<()>(&req, None)?;
if !is_success_status(res.status_code()) {
return Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to get repository members page {} from {}: {} - Response: {}",
current_page_number,
req.url,
res.status_code(),
String::from_utf8_lossy(&res.body())
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
});
}
match serde_json::from_slice::<Vec<Value>>(&res.body()) {
Ok(page_members) => {
let num_fetched = page_members.len();
all_members_json.extend(page_members);
if num_fetched < PER_PAGE_COUNT as usize {
break; // Last page fetched
}
}
Err(e) => {
return Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some(format!(
"Failed to parse repository members data from GitLab API (page {}): {}",
current_page_number, e
)),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
});
}
}
current_page_number += 1;
}
Ok(CallToolResult {
is_error: None,
content: vec![Content {
annotations: None,
text: Some(serde_json::to_string(&all_members_json)?),
mime_type: Some("application/json".to_string()),
r#type: ContentType::Text,
data: None,
}],
})
} else {
Ok(CallToolResult {
is_error: Some(true),
content: vec![Content {
annotations: None,
text: Some("Please provide project_id".into()),
mime_type: None,
r#type: ContentType::Text,
data: None,
}],
})
}
}
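// Advertise the GitLab tools exposed by this plugin, each with a JSON Schema describing its arguments.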
pub(crate) fn describe() -> Result<ListToolsResult, Error> {
Ok(ListToolsResult {
tools: vec![
ToolDescription {
name: "gl_delete_file".into(),
description: "Delete a file in a GitLab project repository. Requires project_id, file_path, branch, and optional commit_message.".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"file_path": {
"type": "string",
"description": "The path to the file in the project",
},
"branch": {
"type": "string",
"description": "The name of the branch to delete the file from",
},
"commit_message": {
"type": "string",
"description": "The commit message. Optional, defaults to 'Delete file via API'",
},
"author_email": {
"type": "string",
"description": "The email of the commit author. Optional.",
},
"author_name": {
"type": "string",
"description": "The name of the commit author. Optional.",
},
},
"required": ["project_id", "file_path", "branch"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_create_issue".into(),
description: "Create a new issue in a GitLab project".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"title": {
"type": "string",
"description": "The title of the issue",
},
"description": {
"type": "string",
"description": "The description of the issue",
},
"labels": {
"type": "string",
"description": "Comma-separated list of labels",
},
},
"required": ["project_id", "title", "description"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_get_issue".into(),
description: "Get details of a specific issue".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"issue_iid": {
"type": "string",
"description": "The internal ID of the issue",
},
},
"required": ["project_id", "issue_iid"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_update_issue".into(),
description: "Update an existing issue in a GitLab project".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"issue_iid": {
"type": "string",
"description": "The internal ID of the issue",
},
"title": {
"type": "string",
"description": "The new title of the issue",
},
"description": {
"type": "string",
"description": "The new description of the issue",
},
"add_labels": {
"type": "string",
"description": "Comma-separated list of labels to add to the issue",
},
"remove_labels": {
"type": "string",
"description": "Comma-separated list of labels to remove from the issue",
},
"due_date": {
"type": "string",
"description": "The due date of the issue in YYYY-MM-DD format (e.g., 2024-03-11)",
},
},
"required": ["project_id", "issue_iid"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_add_issue_comment".into(),
description: "Add a comment to an issue in a GitLab project".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"issue_iid": {
"type": "string",
"description": "The internal ID of the issue",
},
"comment": {
"type": "string",
"description": "The comment to add to the issue",
},
},
"required": ["project_id", "issue_iid", "comment"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_list_issues".into(),
description: "List issues for a project in GitLab. Supports filtering by state and labels.".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"state": {
"type": "string",
"description": "Filter by state: 'opened', 'closed', or 'all'. Defaults to 'opened' if not specified by GitLab.",
},
"labels": {
"type": "string",
"description": "Comma-separated list of label names to filter by.",
},
},
"required": ["project_id"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_get_file_contents".into(),
description: "Get the contents of a file in a GitLab project".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"file_path": {
"type": "string",
"description": "The path to the file in the project",
},
"ref": {
"type": "string",
"description": "The name of the branch, tag or commit (defaults to HEAD)",
},
},
"required": ["project_id", "file_path"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_create_or_update_file".into(),
description: "Create or update a file in a GitLab project".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"file_path": {
"type": "string",
"description": "The path to the file in the project",
},
"content": {
"type": "string",
"description": "The content to write to the file",
},
"branch": {
"type": "string",
"description": "The name of the branch to create or update the file in",
},
"author_email": {
"type": "string",
"description": "The email of the commit author",
},
"author_name": {
"type": "string",
"description": "The name of the commit author",
},
"commit_message": {
"type": "string",
"description": "The commit message. Defaults to 'Update file via API' if not specified.",
},
},
"required": ["project_id", "file_path", "content", "branch"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_create_branch".into(),
description: "Create a new branch in a GitLab project".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"branch_name": {
"type": "string",
"description": "The name of the new branch",
},
"ref": {
"type": "string",
"description": "The branch name or commit SHA to create the new branch from",
},
},
"required": ["project_id", "branch_name", "ref"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_create_merge_request".into(),
description: "Create a new merge request in a GitLab project".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"source_branch": {
"type": "string",
"description": "The name of the source branch",
},
"target_branch": {
"type": "string",
"description": "The name of the target branch",
},
},
"required": ["project_id", "source_branch", "target_branch"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_update_merge_request".into(),
description: "Update an existing merge request in a GitLab project.".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"merge_request_iid": {
"type": "string",
"description": "The internal ID (IID) of the merge request to update",
},
"title": {
"type": "string",
"description": "The new title for the merge request.",
},
"description": {
"type": "string",
"description": "The new description for the merge request.",
},
// Consider adding other common updatable fields like:
// "target_branch": { "type": "string", "description": "The target branch" },
// "state_event": { "type": "string", "description": "Event to change MR state (e.g., 'close', 'reopen')" }
},
"required": ["project_id", "merge_request_iid", "title", "description"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_get_merge_request".into(),
description: "Get details of a specific merge request in a GitLab project.".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"merge_request_iid": {
"type": "string",
"description": "The internal ID (IID) of the merge request",
},
},
"required": ["project_id", "merge_request_iid"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_create_snippet".into(),
description: "Create a new snippet".into(),
input_schema: json!({
"type": "object",
"properties": {
"title": {
"type": "string",
"description": "The title of the snippet",
},
"content": {
"type": "string",
"description": "The content of the snippet",
},
"visibility": {
"type": "string",
"description": "The visibility level of the snippet (private, internal, or public). Defaults to private if not specified.",
},
},
"required": ["title", "content"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_update_snippet".into(),
description: "Update an existing snippet".into(),
input_schema: json!({
"type": "object",
"properties": {
"snippet_id": {
"type": "string",
"description": "The ID of the snippet",
},
"title": {
"type": "string",
"description": "The new title of the snippet",
},
"content": {
"type": "string",
"description": "The new content of the snippet",
},
},
"required": ["snippet_id", "title", "content"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_get_snippet".into(),
description: "Get details of a specific snippet".into(),
input_schema: json!({
"type": "object",
"properties": {
"snippet_id": {
"type": "string",
"description": "The ID of the snippet",
},
},
"required": ["snippet_id"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_delete_snippet".into(),
description: "Delete a snippet".into(),
input_schema: json!({
"type": "object",
"properties": {
"snippet_id": {
"type": "string",
"description": "The ID of the snippet",
},
},
"required": ["snippet_id"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_list_branches".into(),
description: "List all branches in a GitLab project".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
},
"required": ["project_id"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_get_repo_tree".into(),
description: "Get the list of files and directories in a project repository. Handles pagination internally.".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"path": {
"type": "string",
"description": "The path inside the repository. Used to get content of subdirectories. Optional.",
},
"ref": {
"type": "string",
"description": "The name of a repository branch or tag or if not given the default branch. Optional.",
},
"recursive": {
"type": "boolean",
"description": "Boolean value used to get a recursive tree. If you want a complete tree, set this to true. Default is false. Optional.",
},
},
"required": ["project_id"],
})
.as_object()
.unwrap()
.clone(),
},
ToolDescription {
name: "gl_get_repo_members".into(),
description: "Get a list of members for a GitLab project. Supports fetching direct or inherited members and filtering by query. Handles pagination internally.".into(),
input_schema: json!({
"type": "object",
"properties": {
"project_id": {
"type": "string",
"description": "The project identifier - can be a numeric project ID (e.g. '123') or a URL-encoded path (e.g. 'group%2Fproject')",
},
"include_inherited_members": {
"type": "boolean",
"description": "Set to true to include inherited members (e.g., from groups). Defaults to false (direct members only). Optional.",
},
"query": {
"type": "string",
"description": "Filter by username, name, or public email. Optional.",
},
},
"required": ["project_id"],
})
.as_object()
.unwrap()
.clone(),
},
],
})
}
```
--------------------------------------------------------------------------------
/src/config.rs:
--------------------------------------------------------------------------------
```rust
use crate::cli::Cli;
use anyhow::{Context, Result};
use once_cell::sync::Lazy;
use regex::{Regex, RegexSet};
use serde::{Deserialize, Serialize};
use std::{collections::HashMap, convert::TryFrom, fmt, path::PathBuf, str::FromStr};
use url::Url;
#[derive(Clone, Debug, Eq, Hash, PartialEq, Serialize)]
pub struct PluginName(String);
#[derive(Clone, Debug)]
pub struct PluginNameParseError;
impl fmt::Display for PluginNameParseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Failed to parse plugin name")
}
}
impl std::error::Error for PluginNameParseError {}
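// Plugin names are limited to alphanumeric segments separated by single underscores:
// e.g. "my_plugin_v2" is accepted, while "my-plugin", "_plugin" and "plugin__name" are rejected.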
static PLUGIN_NAME_REGEX: Lazy<Regex> = Lazy::new(|| {
Regex::new(r"^[A-Za-z0-9]+(?:[_][A-Za-z0-9]+)*$").expect("Failed to compile plugin name regex")
});
impl PluginName {
#[allow(dead_code)]
pub fn as_str(&self) -> &str {
&self.0
}
}
impl<'de> Deserialize<'de> for PluginName {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
PluginName::try_from(s.as_str()).map_err(serde::de::Error::custom)
}
}
impl TryFrom<&str> for PluginName {
type Error = PluginNameParseError;
fn try_from(value: &str) -> Result<Self, Self::Error> {
if PLUGIN_NAME_REGEX.is_match(value) {
Ok(PluginName(value.to_owned()))
} else {
Err(PluginNameParseError)
}
}
}
impl TryFrom<String> for PluginName {
type Error = PluginNameParseError;
fn try_from(value: String) -> Result<Self, Self::Error> {
PluginName::try_from(value.as_str())
}
}
impl TryFrom<&String> for PluginName {
type Error = PluginNameParseError;
fn try_from(value: &String) -> Result<Self, Self::Error> {
PluginName::try_from(value.as_str())
}
}
impl FromStr for PluginName {
type Err = PluginNameParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
PluginName::try_from(s)
}
}
impl fmt::Display for PluginName {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
#[derive(Clone, Debug, Serialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum AuthConfig {
Basic { username: String, password: String },
Token { token: String },
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(tag = "type", rename_all = "lowercase")]
enum InternalAuthConfig {
Basic { username: String, password: String },
Keyring { service: String, user: String },
Token { token: String },
}
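// AuthConfig deserializes via InternalAuthConfig so that a "keyring" entry can be
// resolved at load time: the secret stored in the OS keyring under (service, user)
// must itself be a JSON-encoded "basic" or "token" auth config, for example
// (illustrative value): {"type":"token","token":"my-secret"}.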
impl<'de> Deserialize<'de> for AuthConfig {
fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let internal = InternalAuthConfig::deserialize(deserializer)?;
match internal {
InternalAuthConfig::Basic { username, password } => {
Ok(AuthConfig::Basic { username, password })
}
InternalAuthConfig::Token { token } => Ok(AuthConfig::Token { token }),
InternalAuthConfig::Keyring { service, user } => {
use keyring::Entry;
use serde::de;
let entry =
Entry::new(service.as_str(), user.as_str()).map_err(de::Error::custom)?;
let secret = entry.get_secret().map_err(de::Error::custom)?;
Ok(serde_json::from_slice::<AuthConfig>(secret.as_slice())
.map_err(de::Error::custom)?)
}
}
}
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct Config {
#[serde(skip_serializing_if = "Option::is_none")]
pub auths: Option<HashMap<Url, AuthConfig>>,
#[serde(default)]
pub oci: OciConfig,
pub plugins: HashMap<PluginName, PluginConfig>,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct OciConfig {
#[serde(skip_serializing_if = "Option::is_none")]
pub cert_email: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub cert_issuer: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub cert_url: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub fulcio_certs: Option<PathBuf>,
pub insecure_skip_signature: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub rekor_pub_keys: Option<PathBuf>,
pub use_sigstore_tuf_data: bool,
}
impl Default for OciConfig {
fn default() -> Self {
OciConfig {
cert_email: None,
cert_issuer: None,
cert_url: None,
fulcio_certs: None,
insecure_skip_signature: false,
rekor_pub_keys: None,
use_sigstore_tuf_data: true,
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct PluginConfig {
#[serde(rename = "url", alias = "path")]
pub url: Url,
pub runtime_config: Option<RuntimeConfig>,
}
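// Serde helpers for the skip_* fields below: patterns are written as a plain list of
// strings in the config file and compiled into a RegexSet on load. Patterns that are
// not already anchored get wrapped as ^pattern$, so e.g. "fetch" matches only the
// name "fetch", not "prefetch" or "fetch_all".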
mod skip_serde {
use super::*;
use serde::{Deserializer, Serializer};
pub fn serialize<S>(set: &Option<RegexSet>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match set {
Some(set) => serializer.serialize_some(set.patterns()),
None => serializer.serialize_none(),
}
}
fn anchor_pattern(pattern: &String) -> String {
// Anchor the pattern to match the entire string
// only if it is not already anchored
if pattern.starts_with("^")
|| pattern.starts_with("\\A")
|| pattern.ends_with("$")
|| pattern.ends_with("\\z")
{
pattern.clone()
} else {
format!("^{}$", pattern)
}
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Option<RegexSet>, D::Error>
where
D: Deserializer<'de>,
{
let patterns: Option<Vec<String>> = Option::deserialize(deserializer)?;
match patterns {
Some(patterns) => RegexSet::new(
patterns
.into_iter()
.map(|p| anchor_pattern(&p))
.collect::<Vec<_>>(),
)
.map(Some)
.map_err(serde::de::Error::custom),
None => Ok(None),
}
}
}
#[derive(Clone, Debug, Default, Deserialize, Serialize)]
pub struct RuntimeConfig {
// List of prompts to skip loading at runtime.
#[serde(with = "skip_serde", default)]
pub skip_prompts: Option<RegexSet>,
// List of resource templates to skip loading at runtime.
#[serde(with = "skip_serde", default)]
pub skip_resource_templates: Option<RegexSet>,
// List of resources to skip loading at runtime.
#[serde(with = "skip_serde", default)]
pub skip_resources: Option<RegexSet>,
// List of tools to skip loading at runtime.
#[serde(with = "skip_serde", default)]
pub skip_tools: Option<RegexSet>,
pub allowed_hosts: Option<Vec<String>>,
pub allowed_paths: Option<Vec<String>>,
pub env_vars: Option<HashMap<String, String>>,
pub memory_limit: Option<String>,
}
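// Illustrative YAML for a per-plugin runtime_config block (values are examples only):
//
//   runtime_config:
//     skip_tools: ["gl_delete_snippet"]
//     allowed_hosts: ["gitlab.com"]
//     allowed_paths: ["/tmp"]
//     env_vars:
//       LOG_LEVEL: "info"
//     memory_limit: "512Mi"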
pub async fn load_config(cli: &Cli) -> Result<Config> {
// Get default config path in the user's config directory
let default_config_path = dirs::config_dir()
.map(|mut path| {
path.push("hyper-mcp");
path.push("config.json");
path
})
.unwrap();
let config_path = cli.config_file.as_ref().unwrap_or(&default_config_path);
if !config_path.exists() {
return Err(anyhow::anyhow!(
"Config file not found at: {}. Please create a config file first.",
config_path.display()
));
}
tracing::info!("Using config file at {}", config_path.display());
let ext = config_path
.extension()
.and_then(|e| e.to_str())
.unwrap_or("");
let content = tokio::fs::read_to_string(config_path)
.await
.with_context(|| format!("Failed to read config file at {}", config_path.display()))?;
let mut config: Config = match ext {
"json" => serde_json::from_str(&content)?,
"yaml" | "yml" => serde_yaml::from_str(&content)?,
"toml" => toml::from_str(&content)?,
_ => return Err(anyhow::anyhow!("Unsupported config format: {ext}")),
};
let mut oci = config.oci.clone();
if let Some(skip) = cli.insecure_skip_signature {
oci.insecure_skip_signature = skip;
}
if let Some(use_tuf) = cli.use_sigstore_tuf_data {
oci.use_sigstore_tuf_data = use_tuf;
}
if let Some(rekor_keys) = &cli.rekor_pub_keys {
oci.rekor_pub_keys = Some(rekor_keys.clone());
}
if let Some(fulcio_certs) = &cli.fulcio_certs {
oci.fulcio_certs = Some(fulcio_certs.clone());
}
if let Some(issuer) = &cli.cert_issuer {
oci.cert_issuer = Some(issuer.clone());
}
if let Some(email) = &cli.cert_email {
oci.cert_email = Some(email.clone());
}
if let Some(url) = &cli.cert_url {
oci.cert_url = Some(url.clone());
}
config.oci = oci;
Ok(config)
}
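// Note: the config format is chosen purely by file extension (.json, .yaml/.yml, .toml),
// and OCI-related CLI flags take precedence over the values read from the file.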
#[cfg(test)]
mod tests {
use super::*;
use std::path::Path;
use tokio::runtime::Runtime;
#[test]
fn test_plugin_name_valid() {
let valid_names = vec!["plugin1", "plugin_name", "PluginName", "plugin123"];
for name in valid_names {
assert!(
PluginName::try_from(name).is_ok(),
"Failed to parse valid name: {name}"
);
}
}
#[test]
fn test_plugin_name_invalid_comprehensive() {
// Test various hyphen scenarios - hyphens are no longer allowed
let hyphen_cases = vec![
("plugin-name", "single hyphen"),
("plugin-name-test", "multiple hyphens"),
("-plugin", "leading hyphen"),
("plugin-", "trailing hyphen"),
("--plugin", "leading double hyphen"),
("plugin--", "trailing double hyphen"),
("plugin--name", "consecutive hyphens"),
("plugin-_name", "hyphen before underscore"),
("plugin_-name", "hyphen after underscore"),
("my-plugin-123", "hyphens with numbers"),
("Plugin-Name", "hyphens with capitals"),
];
for (name, description) in hyphen_cases {
assert!(
PluginName::try_from(name).is_err(),
"Should reject plugin name '{name}' ({description})"
);
}
// Test underscore edge cases
let underscore_cases = vec![
("_plugin", "leading underscore"),
("plugin_", "trailing underscore"),
("__plugin", "leading double underscore"),
("plugin__", "trailing double underscore"),
("plugin__name", "consecutive underscores"),
("_plugin_", "leading and trailing underscores"),
];
for (name, description) in underscore_cases {
assert!(
PluginName::try_from(name).is_err(),
"Should reject plugin name '{name}' ({description})"
);
}
// Test special characters
let special_char_cases = vec![
("plugin@name", "at symbol"),
("plugin#name", "hash symbol"),
("plugin$name", "dollar sign"),
("plugin%name", "percent sign"),
("plugin&name", "ampersand"),
("plugin*name", "asterisk"),
("plugin(name)", "parentheses"),
("plugin+name", "plus sign"),
("plugin=name", "equals sign"),
("plugin[name]", "square brackets"),
("plugin{name}", "curly braces"),
("plugin|name", "pipe symbol"),
("plugin\\name", "backslash"),
("plugin:name", "colon"),
("plugin;name", "semicolon"),
("plugin\"name", "double quote"),
("plugin'name", "single quote"),
("plugin<name>", "angle brackets"),
("plugin,name", "comma"),
("plugin.name", "period"),
("plugin/name", "forward slash"),
("plugin?name", "question mark"),
];
for (name, description) in special_char_cases {
assert!(
PluginName::try_from(name).is_err(),
"Should reject plugin name '{name}' ({description})"
);
}
// Test whitespace cases
let whitespace_cases = vec![
("plugin name", "space in middle"),
(" plugin", "leading space"),
("plugin ", "trailing space"),
(" plugin", "leading double space"),
("plugin ", "trailing double space"),
("plugin name", "double space in middle"),
("plugin\tname", "tab character"),
("plugin\nname", "newline character"),
("plugin\rname", "carriage return"),
];
for (name, description) in whitespace_cases {
assert!(
PluginName::try_from(name).is_err(),
"Should reject plugin name '{name}' ({description})"
);
}
// Test empty and minimal cases
let empty_cases = vec![
("", "empty string"),
("_", "single underscore"),
("-", "single hyphen"),
("__", "double underscore"),
("--", "double hyphen"),
("_-", "underscore-hyphen"),
("-_", "hyphen-underscore"),
];
for (name, description) in empty_cases {
assert!(
PluginName::try_from(name).is_err(),
"Should reject plugin name '{name}' ({description})"
);
}
// Test unicode and non-ASCII cases
let unicode_cases = vec![
("plugín", "accented character"),
("plügïn", "umlaut characters"),
("плагин", "cyrillic characters"),
("プラグイン", "japanese characters"),
("插件", "chinese characters"),
("plugin名前", "mixed ASCII and japanese"),
("café-plugin", "accented character with hyphen"),
];
for (name, description) in unicode_cases {
assert!(
PluginName::try_from(name).is_err(),
"Should reject plugin name '{name}' ({description})"
);
}
}
#[test]
fn test_plugin_name_valid_comprehensive() {
// Test basic alphanumeric names
let basic_cases = vec![
("plugin", "simple lowercase"),
("Plugin", "simple capitalized"),
("PLUGIN", "simple uppercase"),
("MyPlugin", "camelCase"),
("plugin123", "with numbers"),
("123plugin", "starting with numbers"),
("p", "single character"),
("P", "single uppercase character"),
("1", "single number"),
];
for (name, description) in basic_cases {
assert!(
PluginName::try_from(name).is_ok(),
"Should accept valid plugin name '{name}' ({description})"
);
}
// Test names with underscores as separators
let underscore_cases = vec![
("plugin_name", "simple underscore"),
("my_plugin", "underscore separator"),
("plugin_name_test", "multiple underscores"),
("Plugin_Name", "underscore with capitals"),
("plugin_123", "underscore with numbers"),
("my_plugin_v2", "complex with version"),
("a_b", "minimal underscore case"),
("test_plugin_name_123", "long with mixed content"),
];
for (name, description) in underscore_cases {
assert!(
PluginName::try_from(name).is_ok(),
"Should accept valid plugin name '{name}' ({description})"
);
}
// Test mixed alphanumeric cases
let mixed_cases = vec![
("plugin1", "letters and single digit"),
("plugin123", "letters and multiple digits"),
("Plugin1Name", "mixed case with digits"),
("myPlugin2", "camelCase with digit"),
("testPlugin123", "longer mixed case"),
("ABC123", "all caps with numbers"),
("plugin1_test2", "mixed with underscore"),
("My_Plugin_V123", "complex mixed case"),
];
for (name, description) in mixed_cases {
assert!(
PluginName::try_from(name).is_ok(),
"Should accept valid plugin name '{name}' ({description})"
);
}
// Test longer valid names
let longer_cases = vec![
(
"very_long_plugin_name_that_should_be_valid",
"very long name",
),
(
"plugin_with_many_underscores_and_numbers_123",
"long mixed content",
),
("MyVeryLongPluginNameThatShouldWork", "long camelCase"),
("VERY_LONG_UPPERCASE_PLUGIN_NAME", "long uppercase"),
];
for (name, description) in longer_cases {
assert!(
PluginName::try_from(name).is_ok(),
"Should accept valid plugin name '{name}' ({description})"
);
}
// Test edge cases that should be valid
let edge_cases = vec![
("a1", "minimal valid case"),
("1a", "number then letter"),
("a_1", "letter underscore number"),
("1_a", "number underscore letter"),
];
for (name, description) in edge_cases {
assert!(
PluginName::try_from(name).is_ok(),
"Should accept valid plugin name '{name}' ({description})"
);
}
}
#[test]
fn test_plugin_name_display() {
let name_str = "plugin_name_123";
let plugin_name = PluginName::try_from(name_str).unwrap();
assert_eq!(plugin_name.to_string(), name_str);
}
#[test]
fn test_plugin_name_serialize_deserialize() {
let name_str = "plugin_name_123";
let plugin_name = PluginName::try_from(name_str).unwrap();
// Serialize
let serialized = serde_json::to_string(&plugin_name).unwrap();
assert_eq!(serialized, format!("\"{name_str}\""));
// Deserialize
let deserialized: PluginName = serde_json::from_str(&serialized).unwrap();
assert_eq!(deserialized, plugin_name);
}
#[test]
fn test_load_valid_yaml_config() {
let rt = Runtime::new().unwrap();
// Read the test fixture file
let path = Path::new("tests/fixtures/valid_config.yaml");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
// Load the config
let config_result = rt.block_on(load_config(&cli));
assert!(config_result.is_ok(), "Failed to load valid YAML config");
let config = config_result.unwrap();
assert_eq!(config.plugins.len(), 3, "Expected 3 plugins in the config");
// Verify plugin names
assert!(
config
.plugins
.contains_key(&PluginName("test_plugin".to_string()))
);
assert!(
config
.plugins
.contains_key(&PluginName("another_plugin".to_string()))
);
assert!(
config
.plugins
.contains_key(&PluginName("minimal_plugin".to_string()))
);
// Verify plugin configs
let test_plugin = &config.plugins[&PluginName("test_plugin".to_string())];
assert_eq!(test_plugin.url.to_string(), "file:///path/to/plugin");
let runtime_config = test_plugin.runtime_config.as_ref().unwrap();
assert_eq!(runtime_config.skip_tools.as_ref().unwrap().len(), 2);
assert_eq!(runtime_config.allowed_hosts.as_ref().unwrap().len(), 2);
assert_eq!(runtime_config.allowed_paths.as_ref().unwrap().len(), 2);
assert_eq!(runtime_config.env_vars.as_ref().unwrap().len(), 2);
assert_eq!(runtime_config.memory_limit.as_ref().unwrap(), "1GB");
// Verify minimal plugin has no runtime config
let minimal_plugin = &config.plugins[&PluginName("minimal_plugin".to_string())];
assert!(minimal_plugin.runtime_config.is_none());
}
#[test]
fn test_load_valid_json_config() {
let rt = Runtime::new().unwrap();
// Read the test fixture file
let path = Path::new("tests/fixtures/valid_config.json");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
// Load the config
let config_result = rt.block_on(load_config(&cli));
assert!(config_result.is_ok(), "Failed to load valid JSON config");
let config = config_result.unwrap();
assert_eq!(config.plugins.len(), 3, "Expected 3 plugins in the config");
// Verify plugin names
assert!(
config
.plugins
.contains_key(&PluginName("test_plugin".to_string()))
);
assert!(
config
.plugins
.contains_key(&PluginName("another_plugin".to_string()))
);
assert!(
config
.plugins
.contains_key(&PluginName("minimal_plugin".to_string()))
);
// Verify env vars
let test_plugin = &config.plugins[&PluginName("test_plugin".to_string())];
let runtime_config = test_plugin.runtime_config.as_ref().unwrap();
assert_eq!(runtime_config.env_vars.as_ref().unwrap()["DEBUG"], "true");
assert_eq!(
runtime_config.env_vars.as_ref().unwrap()["LOG_LEVEL"],
"info"
);
}
#[test]
fn test_load_invalid_plugin_name() {
let rt = Runtime::new().unwrap();
// Read the test fixture file
let path = Path::new("tests/fixtures/invalid_plugin_name.yaml");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
// Load the config
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_err(),
"Expected error for invalid plugin name"
);
}
#[test]
fn test_load_invalid_url() {
let rt = Runtime::new().unwrap();
// Read the test fixture file
let path = Path::new("tests/fixtures/invalid_url.yaml");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
// Load the config
let config_result = rt.block_on(load_config(&cli));
assert!(config_result.is_err(), "Expected error for invalid URL");
let error = config_result.unwrap_err();
assert!(
error.to_string().contains("not a valid url")
|| error.to_string().contains("invalid URL"),
"Error should mention the invalid URL"
);
}
#[test]
fn test_load_invalid_structure() {
let rt = Runtime::new().unwrap();
// Read the test fixture file
let path = Path::new("tests/fixtures/invalid_structure.yaml");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
// Load the config
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_err(),
"Expected error for invalid structure"
);
}
#[test]
fn test_load_nonexistent_file() {
let rt = Runtime::new().unwrap();
// Create a path that doesn't exist
let nonexistent_path = Path::new("/tmp/definitely_not_a_real_config_file_12345.yaml");
let cli = Cli {
config_file: Some(nonexistent_path.to_path_buf()),
..Default::default()
};
// Load the config
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_err(),
"Expected error for nonexistent file"
);
let error = config_result.unwrap_err();
assert!(
error.to_string().contains("not found"),
"Error should mention file not found"
);
}
#[test]
fn test_load_unsupported_extension() {
let rt = Runtime::new().unwrap();
let path = Path::new("tests/fixtures/unsupported_config.txt");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
// Load the config
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_err(),
"Expected error for unsupported extension"
);
let error = config_result.unwrap_err();
assert!(
error.to_string().contains("Unsupported config format"),
"Error should mention unsupported format"
);
}
#[test]
fn test_auth_config_basic_serialization() {
let auth_config = AuthConfig::Basic {
username: "testuser".to_string(),
password: "testpass".to_string(),
};
let serialized = serde_json::to_string(&auth_config).unwrap();
let expected = r#"{"type":"basic","username":"testuser","password":"testpass"}"#;
assert_eq!(serialized, expected);
}
#[test]
fn test_auth_config_token_serialization() {
let auth_config = AuthConfig::Token {
token: "test-token-123".to_string(),
};
let serialized = serde_json::to_string(&auth_config).unwrap();
let expected = r#"{"type":"token","token":"test-token-123"}"#;
assert_eq!(serialized, expected);
}
#[test]
fn test_auth_config_basic_deserialization() {
let json = r#"{"type":"basic","username":"testuser","password":"testpass"}"#;
let auth_config: AuthConfig = serde_json::from_str(json).unwrap();
match auth_config {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "testuser");
assert_eq!(password, "testpass");
}
_ => panic!("Expected Basic auth config"),
}
}
#[test]
fn test_auth_config_token_deserialization() {
let json = r#"{"type":"token","token":"test-token-123"}"#;
let auth_config: AuthConfig = serde_json::from_str(json).unwrap();
match auth_config {
AuthConfig::Token { token } => {
assert_eq!(token, "test-token-123");
}
_ => panic!("Expected Token auth config"),
}
}
#[test]
fn test_auth_config_yaml_basic_deserialization() {
let yaml = r#"
type: basic
username: testuser
password: testpass
"#;
let auth_config: AuthConfig = serde_yaml::from_str(yaml).unwrap();
match auth_config {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "testuser");
assert_eq!(password, "testpass");
}
_ => panic!("Expected Basic auth config"),
}
}
#[test]
fn test_auth_config_yaml_token_deserialization() {
let yaml = r#"
type: token
token: test-token-123
"#;
let auth_config: AuthConfig = serde_yaml::from_str(yaml).unwrap();
match auth_config {
AuthConfig::Token { token } => {
assert_eq!(token, "test-token-123");
}
_ => panic!("Expected Token auth config"),
}
}
#[test]
fn test_auth_config_invalid_type() {
let json = r#"{"type":"invalid","data":"test"}"#;
let result: Result<AuthConfig, _> = serde_json::from_str(json);
assert!(result.is_err(), "Expected error for invalid auth type");
}
#[test]
fn test_auth_config_missing_fields() {
// Missing username for basic auth
let json = r#"{"type":"basic","password":"testpass"}"#;
let result: Result<AuthConfig, _> = serde_json::from_str(json);
assert!(result.is_err(), "Expected error for missing username");
// Missing password for basic auth
let json = r#"{"type":"basic","username":"testuser"}"#;
let result: Result<AuthConfig, _> = serde_json::from_str(json);
assert!(result.is_err(), "Expected error for missing password");
// Missing token for token auth
let json = r#"{"type":"token"}"#;
let result: Result<AuthConfig, _> = serde_json::from_str(json);
assert!(result.is_err(), "Expected error for missing token");
}
#[test]
fn test_config_with_auths_deserialization() {
let json = r#"
{
"auths": {
"https://api.example.com": {
"type": "basic",
"username": "testuser",
"password": "testpass"
},
"https://secure.api.com": {
"type": "token",
"token": "bearer-token-123"
}
},
"plugins": {
"test_plugin": {
"url": "file:///path/to/plugin"
}
}
}
"#;
let config: Config = serde_json::from_str(json).unwrap();
assert!(config.auths.is_some());
let auths = config.auths.unwrap();
assert_eq!(auths.len(), 2);
let api_url = Url::parse("https://api.example.com").unwrap();
let secure_url = Url::parse("https://secure.api.com").unwrap();
assert!(auths.contains_key(&api_url));
assert!(auths.contains_key(&secure_url));
match &auths[&api_url] {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "testuser");
assert_eq!(password, "testpass");
}
_ => panic!("Expected Basic auth for api.example.com"),
}
match &auths[&secure_url] {
AuthConfig::Token { token } => {
assert_eq!(token, "bearer-token-123");
}
_ => panic!("Expected Token auth for secure.api.com"),
}
}
#[test]
fn test_config_with_auths_yaml_deserialization() {
let yaml = r#"
auths:
"https://api.example.com":
type: basic
username: testuser
password: testpass
"https://secure.api.com":
type: token
token: bearer-token-123
plugins:
test_plugin:
url: "file:///path/to/plugin"
"#;
let config: Config = serde_yaml::from_str(yaml).unwrap();
assert!(config.auths.is_some());
let auths = config.auths.unwrap();
assert_eq!(auths.len(), 2);
let api_url = Url::parse("https://api.example.com").unwrap();
let secure_url = Url::parse("https://secure.api.com").unwrap();
assert!(auths.contains_key(&api_url));
assert!(auths.contains_key(&secure_url));
}
#[test]
fn test_config_without_auths() {
let json = r#"
{
"plugins": {
"test_plugin": {
"url": "file:///path/to/plugin"
}
}
}
"#;
let config: Config = serde_json::from_str(json).unwrap();
assert!(config.auths.is_none());
assert_eq!(config.plugins.len(), 1);
}
#[test]
fn test_auth_config_clone() {
let auth_config = AuthConfig::Basic {
username: "testuser".to_string(),
password: "testpass".to_string(),
};
let cloned = auth_config.clone();
match cloned {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "testuser");
assert_eq!(password, "testpass");
}
_ => panic!("Expected Basic auth config"),
}
}
#[test]
fn test_auth_config_debug_format() {
let auth_config = AuthConfig::Token {
token: "secret-token".to_string(),
};
let debug_str = format!("{auth_config:?}");
assert!(debug_str.contains("Token"));
assert!(debug_str.contains("secret-token"));
}
#[test]
fn test_internal_auth_config_keyring_deserialization() {
let json = r#"{"type":"keyring","service":"test-service","user":"test-user"}"#;
let result: Result<InternalAuthConfig, _> = serde_json::from_str(json);
// This should deserialize successfully as InternalAuthConfig
assert!(result.is_ok());
match result.unwrap() {
InternalAuthConfig::Keyring { service, user } => {
assert_eq!(service, "test-service");
assert_eq!(user, "test-user");
}
_ => panic!("Expected Keyring auth config"),
}
}
#[test]
fn test_auth_config_empty_values() {
// Test with empty username
let json = r#"{"type":"basic","username":"","password":"testpass"}"#;
let auth_config: AuthConfig = serde_json::from_str(json).unwrap();
match auth_config {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "");
assert_eq!(password, "testpass");
}
_ => panic!("Expected Basic auth config"),
}
// Test with empty token
let json = r#"{"type":"token","token":""}"#;
let auth_config: AuthConfig = serde_json::from_str(json).unwrap();
match auth_config {
AuthConfig::Token { token } => {
assert_eq!(token, "");
}
_ => panic!("Expected Token auth config"),
}
}
#[test]
fn test_load_config_with_auths_yaml() {
let rt = Runtime::new().unwrap();
let path = Path::new("tests/fixtures/config_with_auths.yaml");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_ok(),
"Failed to load config with auths from YAML"
);
let config = config_result.unwrap();
assert!(config.auths.is_some(), "Expected auths to be present");
let auths = config.auths.unwrap();
assert_eq!(auths.len(), 4, "Expected 4 auth configurations");
// Test basic auth
let api_url = Url::parse("https://api.example.com").unwrap();
assert!(auths.contains_key(&api_url));
match &auths[&api_url] {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "testuser");
assert_eq!(password, "testpass");
}
_ => panic!("Expected Basic auth for api.example.com"),
}
// Test token auth
let secure_url = Url::parse("https://secure.api.com").unwrap();
assert!(auths.contains_key(&secure_url));
match &auths[&secure_url] {
AuthConfig::Token { token } => {
assert_eq!(token, "bearer-token-123");
}
_ => panic!("Expected Token auth for secure.api.com"),
}
}
#[test]
fn test_load_config_with_auths_json() {
let rt = Runtime::new().unwrap();
let path = Path::new("tests/fixtures/config_with_auths.json");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_ok(),
"Failed to load config with auths from JSON"
);
let config = config_result.unwrap();
assert!(config.auths.is_some(), "Expected auths to be present");
let auths = config.auths.unwrap();
assert_eq!(auths.len(), 4, "Expected 4 auth configurations");
// Test that all URLs are present
let expected_urls = vec![
"https://api.example.com",
"https://secure.api.com",
"https://private.registry.io",
"https://oauth.service.com",
];
for url_str in expected_urls {
let url = Url::parse(url_str).unwrap();
assert!(auths.contains_key(&url), "Missing auth for {url_str}");
}
}
#[test]
fn test_load_invalid_auth_config() {
let rt = Runtime::new().unwrap();
let path = Path::new("tests/fixtures/invalid_auth_config.yaml");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_err(),
"Expected error for invalid auth config"
);
let error = config_result.unwrap_err();
let error_msg = error.to_string();
// The error should be related to deserialization
assert!(
error_msg.contains("unknown variant")
|| error_msg.contains("missing field")
|| error_msg.contains("invalid"),
"Error should indicate invalid auth configuration: {error_msg}"
);
}
#[test]
fn test_auth_config_url_matching() {
let mut auths = HashMap::new();
// Add auth for specific API endpoint
let api_url = Url::parse("https://api.example.com").unwrap();
auths.insert(
api_url,
AuthConfig::Token {
token: "api-token".to_string(),
},
);
// Add auth for broader domain
let domain_url = Url::parse("https://example.com").unwrap();
auths.insert(
domain_url,
AuthConfig::Basic {
username: "user".to_string(),
password: "pass".to_string(),
},
);
let config = Config {
auths: Some(auths),
plugins: HashMap::new(),
..Default::default()
};
// Serialize and deserialize to test round-trip
let json = serde_json::to_string(&config).unwrap();
let deserialized: Config = serde_json::from_str(&json).unwrap();
assert!(deserialized.auths.is_some());
assert_eq!(deserialized.auths.unwrap().len(), 2);
}
#[test]
fn test_auth_config_special_characters() {
// Test with special characters in passwords and tokens
let auth_basic = AuthConfig::Basic {
username: "[email protected]".to_string(),
password: "p@ssw0rd!#$%".to_string(),
};
let auth_token = AuthConfig::Token {
token: "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ".to_string(),
};
// Test serialization
let basic_json = serde_json::to_string(&auth_basic).unwrap();
let token_json = serde_json::to_string(&auth_token).unwrap();
// Test deserialization
let basic_deserialized: AuthConfig = serde_json::from_str(&basic_json).unwrap();
let token_deserialized: AuthConfig = serde_json::from_str(&token_json).unwrap();
match basic_deserialized {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "[email protected]");
assert_eq!(password, "p@ssw0rd!#$%");
}
_ => panic!("Expected Basic auth config"),
}
match token_deserialized {
AuthConfig::Token { token } => {
assert!(token.starts_with("eyJ"));
}
_ => panic!("Expected Token auth config"),
}
}
#[test]
fn test_config_auths_optional() {
// Test config without auths field
let json_without_auths = r#"
{
"plugins": {
"test_plugin": {
"url": "file:///path/to/plugin"
}
}
}
"#;
let config: Config = serde_json::from_str(json_without_auths).unwrap();
assert!(config.auths.is_none());
// Test config with empty auths
let json_empty_auths = r#"
{
"auths": {},
"plugins": {
"test_plugin": {
"url": "file:///path/to/plugin"
}
}
}
"#;
let config: Config = serde_json::from_str(json_empty_auths).unwrap();
assert!(config.auths.is_some());
assert_eq!(config.auths.unwrap().len(), 0);
}
#[test]
fn test_keyring_auth_config_deserialization() {
// Test that keyring config deserializes correctly as InternalAuthConfig
let json = r#"{"type":"keyring","service":"test-service","user":"test-user"}"#;
let internal_auth: InternalAuthConfig = serde_json::from_str(json).unwrap();
match internal_auth {
InternalAuthConfig::Keyring { service, user } => {
assert_eq!(service, "test-service");
assert_eq!(user, "test-user");
}
_ => panic!("Expected Keyring auth config"),
}
}
#[test]
fn test_documentation_example_yaml() {
let rt = Runtime::new().unwrap();
let path = Path::new("tests/fixtures/documentation_example.yaml");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_ok(),
"Documentation YAML example should be valid"
);
let config = config_result.unwrap();
// Verify auths are present and correct
assert!(config.auths.is_some());
let auths = config.auths.unwrap();
assert_eq!(
auths.len(),
3,
"Expected 3 auth configurations from documentation example"
);
// Verify basic auth
let registry_url = Url::parse("https://private.registry.io").unwrap();
match &auths[&registry_url] {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "registry-user");
assert_eq!(password, "registry-pass");
}
_ => panic!("Expected Basic auth for private.registry.io"),
}
// Verify token auth
let github_url = Url::parse("https://api.github.com").unwrap();
match &auths[&github_url] {
AuthConfig::Token { token } => {
assert_eq!(token, "ghp_1234567890abcdef");
}
_ => panic!("Expected Token auth for api.github.com"),
}
// Verify plugins
assert_eq!(
config.plugins.len(),
3,
"Expected 3 plugins from documentation example"
);
assert!(config.plugins.contains_key(&PluginName("time".to_string())));
assert!(config.plugins.contains_key(&PluginName("myip".to_string())));
assert!(
config
.plugins
.contains_key(&PluginName("private_plugin".to_string()))
);
// Verify private plugin config
let private_plugin = &config.plugins[&PluginName("private_plugin".to_string())];
assert_eq!(
private_plugin.url.to_string(),
"https://private.registry.io/my_plugin"
);
assert!(private_plugin.runtime_config.is_some());
}
#[test]
fn test_documentation_example_json() {
let rt = Runtime::new().unwrap();
let path = Path::new("tests/fixtures/documentation_example.json");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_ok(),
"Documentation JSON example should be valid"
);
let config = config_result.unwrap();
// Verify auths are present and correct
assert!(config.auths.is_some());
let auths = config.auths.unwrap();
assert_eq!(
auths.len(),
3,
"Expected 3 auth configurations from documentation example"
);
// Verify all auth URLs are present
let expected_auth_urls = vec![
"https://private.registry.io",
"https://api.github.com",
"https://enterprise.api.com",
];
for url_str in expected_auth_urls {
let url = Url::parse(url_str).unwrap();
assert!(auths.contains_key(&url), "Missing auth for {url_str}");
}
// Verify plugins match the documentation
assert_eq!(config.plugins.len(), 3);
let myip_plugin = &config.plugins[&PluginName("myip".to_string())];
let runtime_config = myip_plugin.runtime_config.as_ref().unwrap();
assert_eq!(runtime_config.env_vars.as_ref().unwrap()["FOO"], "bar");
assert_eq!(runtime_config.memory_limit.as_ref().unwrap(), "512Mi");
}
#[test]
fn test_url_prefix_matching_from_documentation() {
// Test the URL matching behavior described in documentation
let yaml = r#"
auths:
"https://example.com":
type: basic
username: "broad-user"
password: "broad-pass"
"https://example.com/api":
type: token
token: "api-token"
"https://example.com/api/v1":
type: basic
username: "v1-user"
password: "v1-pass"
plugins:
test_plugin:
url: "file:///test"
"#;
let config: Config = serde_yaml::from_str(yaml).unwrap();
assert!(config.auths.is_some());
let auths = config.auths.unwrap();
assert_eq!(auths.len(), 3);
// Verify all three auth configs are present
let base_url = Url::parse("https://example.com").unwrap();
let api_url = Url::parse("https://example.com/api").unwrap();
let v1_url = Url::parse("https://example.com/api/v1").unwrap();
assert!(auths.contains_key(&base_url));
assert!(auths.contains_key(&api_url));
assert!(auths.contains_key(&v1_url));
// Verify the specific auth types match documentation
match &auths[&base_url] {
AuthConfig::Basic { username, .. } => {
assert_eq!(username, "broad-user");
}
_ => panic!("Expected Basic auth for base URL"),
}
match &auths[&api_url] {
AuthConfig::Token { token } => {
assert_eq!(token, "api-token");
}
_ => panic!("Expected Token auth for API URL"),
}
match &auths[&v1_url] {
AuthConfig::Basic { username, .. } => {
assert_eq!(username, "v1-user");
}
_ => panic!("Expected Basic auth for v1 URL"),
}
}
#[test]
fn test_keyring_json_format_validation() {
// Test that the JSON formats shown in keyring documentation examples are valid
// Test basic auth JSON format from documentation
let basic_json = r#"{"type":"basic","username":"actual-user","password":"actual-pass"}"#;
let basic_auth: AuthConfig = serde_json::from_str(basic_json).unwrap();
match basic_auth {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "actual-user");
assert_eq!(password, "actual-pass");
}
_ => panic!("Expected Basic auth config from keyring JSON"),
}
// Test token auth JSON format from documentation
let token_json = r#"{"type":"token","token":"actual-bearer-token"}"#;
let token_auth: AuthConfig = serde_json::from_str(token_json).unwrap();
match token_auth {
AuthConfig::Token { token } => {
assert_eq!(token, "actual-bearer-token");
}
_ => panic!("Expected Token auth config from keyring JSON"),
}
// Test JWT-like token from documentation
let jwt_json = r#"{"type":"token","token":"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9"}"#;
let jwt_auth: AuthConfig = serde_json::from_str(jwt_json).unwrap();
match jwt_auth {
AuthConfig::Token { token } => {
assert_eq!(token, "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9");
}
_ => panic!("Expected Token auth config from keyring JWT JSON"),
}
// Test corporate example from documentation
let corp_json = r#"{"type":"basic","username":"corp_user","password":"corp_secret"}"#;
let corp_auth: AuthConfig = serde_json::from_str(corp_json).unwrap();
match corp_auth {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "corp_user");
assert_eq!(password, "corp_secret");
}
_ => panic!("Expected Basic auth config from corporate JSON"),
}
}
#[test]
#[ignore] // Requires system keyring access - run with `cargo test -- --ignored`
fn test_keyring_auth_integration() {
use std::process::Command;
use std::time::{SystemTime, UNIX_EPOCH};
// Generate unique service and user names to avoid conflicts
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs();
let service_name = format!("hyper-mcp-test-{timestamp}");
let user_name = format!("test-user-{timestamp}");
// Test auth config to store in keyring
let test_auth_json =
r#"{"type":"basic","username":"keyring-test-user","password":"keyring-test-pass"}"#;
// Platform-specific keyring operations
let (add_result, remove_result) = if cfg!(target_os = "macos") {
// macOS using security command
let add_result = Command::new("security")
.args([
"add-generic-password",
"-a",
&user_name,
"-s",
&service_name,
"-w",
test_auth_json,
])
.output();
let remove_result = Command::new("security")
.args([
"delete-generic-password",
"-a",
&user_name,
"-s",
&service_name,
])
.output();
(add_result, remove_result)
} else if cfg!(target_os = "linux") {
// Linux using secret-tool
let add_result = Command::new("bash")
.args([
"-c",
&format!("echo '{test_auth_json}' | secret-tool store --label='hyper-mcp test' service '{service_name}' username '{user_name}'"),
])
.output();
let remove_result = Command::new("secret-tool")
.args(["clear", "service", &service_name, "username", &user_name])
.output();
(add_result, remove_result)
} else if cfg!(target_os = "windows") {
// Windows using cmdkey
let escaped_json = test_auth_json.replace("\"", "\\\"");
let add_result = Command::new("cmdkey")
.args([
&format!("/generic:{service_name}"),
&format!("/user:{user_name}"),
&format!("/pass:{escaped_json}"),
])
.output();
let remove_result = Command::new("cmdkey")
.args([&format!("/delete:{service_name}")])
.output();
(add_result, remove_result)
} else {
// Unsupported platform
println!(
"Keyring test skipped on unsupported platform: {}",
std::env::consts::OS
);
return;
};
// Try to add the secret to keyring
let add_output = match add_result {
Ok(output) => output,
Err(e) => {
println!("Failed to execute keyring add command: {e}. Skipping test.");
return;
}
};
if !add_output.status.success() {
println!(
"Failed to add secret to keyring (exit code: {}). stdout: {}, stderr: {}. Skipping test.",
add_output.status.code().unwrap_or(-1),
String::from_utf8_lossy(&add_output.stdout),
String::from_utf8_lossy(&add_output.stderr)
);
return;
}
// Test keyring auth deserialization
let keyring_config_json =
format!(r#"{{"type":"keyring","service":"{service_name}","user":"{user_name}"}}"#);
let test_result = std::panic::catch_unwind(|| {
let internal_auth: InternalAuthConfig =
serde_json::from_str(&keyring_config_json).unwrap();
// This should trigger the keyring lookup and deserialize to AuthConfig
match internal_auth {
InternalAuthConfig::Keyring { service, user } => {
assert_eq!(service, service_name);
assert_eq!(user, user_name);
// Test the actual keyring deserialization through AuthConfig
let auth_config: Result<AuthConfig, _> =
serde_json::from_str(&keyring_config_json);
match auth_config {
Ok(AuthConfig::Basic { username, password }) => {
assert_eq!(username, "keyring-test-user");
assert_eq!(password, "keyring-test-pass");
}
Ok(AuthConfig::Token { .. }) => {
panic!("Expected Basic auth from keyring, got Token");
}
Err(e) => {
println!(
"Keyring lookup failed (this is expected if keyring service is not available): {e}"
);
}
}
}
_ => panic!("Expected Keyring internal auth config"),
}
});
// Always attempt cleanup regardless of test result
if let Ok(output) = remove_result {
if !output.status.success() {
println!(
"Warning: Failed to remove test secret from keyring (exit code: {}). stdout: {}, stderr: {}",
output.status.code().unwrap_or(-1),
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
);
}
}
// Re-panic if the test failed
if let Err(panic_info) = test_result {
std::panic::resume_unwind(panic_info);
}
}
#[test]
#[ignore] // Requires system keyring access and file creation - run with `cargo test -- --ignored`
fn test_keyring_auth_complete_config_integration() {
use std::process::Command;
use std::time::{SystemTime, UNIX_EPOCH};
use tokio::fs;
let rt = Runtime::new().unwrap();
// Generate unique identifiers
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs();
let service_name = format!("hyper-mcp-config-test-{timestamp}");
let user_name = format!("config-test-user-{timestamp}");
let temp_config_path = format!("test_config_{timestamp}.yaml");
// Auth config to store in keyring
let keyring_auth_json =
r#"{"type":"token","token":"test-keyring-token-from-complete-config"}"#;
// Create complete config with keyring auth
let config_content = format!(
r#"
auths:
"https://keyring-test.example.com":
type: keyring
service: "{service_name}"
user: "{user_name}"
"https://basic-test.example.com":
type: basic
username: "basic-user"
password: "basic-pass"
plugins:
test_plugin:
url: "file:///test/plugin"
runtime_config:
allowed_hosts:
- "keyring-test.example.com"
- "basic-test.example.com"
"#
);
// Platform-specific keyring operations
let (add_result, remove_result) = if cfg!(target_os = "macos") {
let add_result = Command::new("security")
.args([
"add-generic-password",
"-a",
&user_name,
"-s",
&service_name,
"-w",
keyring_auth_json,
])
.output();
let remove_result = Command::new("security")
.args([
"delete-generic-password",
"-a",
&user_name,
"-s",
&service_name,
])
.output();
(add_result, remove_result)
} else if cfg!(target_os = "linux") {
let add_result = Command::new("bash")
.args([
"-c",
&format!(
"echo '{keyring_auth_json}' | secret-tool store --label='hyper-mcp complete config test' service '{service_name}' username '{user_name}'"
),
])
.output();
let remove_result = Command::new("secret-tool")
.args(["clear", "service", &service_name, "username", &user_name])
.output();
(add_result, remove_result)
} else if cfg!(target_os = "windows") {
let escaped_json = keyring_auth_json.replace("\"", "\\\"");
let add_result = Command::new("cmdkey")
.args([
&format!("/generic:{service_name}"),
&format!("/user:{user_name}"),
&format!("/pass:{escaped_json}"),
])
.output();
let remove_result = Command::new("cmdkey")
.args([&format!("/delete:{service_name}")])
.output();
(add_result, remove_result)
} else {
println!(
"Keyring integration test skipped on unsupported platform: {}",
std::env::consts::OS
);
return;
};
// Create temporary config file
let config_path = Path::new(&temp_config_path);
let write_result = rt.block_on(fs::write(config_path, config_content));
if write_result.is_err() {
println!("Failed to create temporary config file. Skipping test.");
return;
}
// Try to add secret to keyring
let add_output = match add_result {
Ok(output) => output,
Err(e) => {
println!("Failed to execute keyring add command: {e}. Skipping test.");
let _ = rt.block_on(fs::remove_file(config_path));
return;
}
};
if !add_output.status.success() {
println!(
"Failed to add secret to keyring (exit code: {}). stdout: {}, stderr: {}. Skipping test.",
add_output.status.code().unwrap_or(-1),
String::from_utf8_lossy(&add_output.stdout),
String::from_utf8_lossy(&add_output.stderr)
);
let _ = rt.block_on(fs::remove_file(config_path));
return;
}
let cli = Cli {
config_file: Some(config_path.to_path_buf()),
..Default::default()
};
// Test loading the config file (this should trigger keyring lookup)
let load_result = rt.block_on(load_config(&cli));
// Cleanup keyring entry before checking results
if let Ok(output) = remove_result {
if !output.status.success() {
println!(
"Warning: Failed to remove test secret from keyring (exit code: {}). stdout: {}, stderr: {}. Manual cleanup may be required.",
output.status.code().unwrap_or(-1),
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr)
);
}
}
// Cleanup temporary config file
let _ = rt.block_on(fs::remove_file(config_path));
// Now check the test results
match load_result {
Ok(config) => {
// Verify auths are present
assert!(
config.auths.is_some(),
"Expected auths to be present in loaded config"
);
let auths = config.auths.unwrap();
assert_eq!(auths.len(), 2, "Expected 2 auth configurations");
// Verify keyring auth was resolved successfully
let keyring_url = Url::parse("https://keyring-test.example.com").unwrap();
assert!(
auths.contains_key(&keyring_url),
"Expected keyring auth URL to be present"
);
match &auths[&keyring_url] {
AuthConfig::Token { token } => {
assert_eq!(
token, "test-keyring-token-from-complete-config",
"Token from keyring should match stored value"
);
}
_ => panic!("Expected Token auth from keyring resolution"),
}
// Verify basic auth still works alongside keyring auth
let basic_url = Url::parse("https://basic-test.example.com").unwrap();
assert!(
auths.contains_key(&basic_url),
"Expected basic auth URL to be present"
);
match &auths[&basic_url] {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "basic-user");
assert_eq!(password, "basic-pass");
}
_ => panic!("Expected Basic auth config"),
}
// Verify plugins loaded correctly
assert_eq!(config.plugins.len(), 1, "Expected 1 plugin in config");
assert!(
config
.plugins
.contains_key(&PluginName("test_plugin".to_string()))
);
println!(
"✅ Keyring integration test passed on platform: {}",
std::env::consts::OS
);
}
Err(e) => {
// Check if this is a keyring-related error
let error_msg = e.to_string();
if error_msg.contains("keyring") || error_msg.contains("secure storage") {
println!(
"Keyring lookup failed (keyring service may not be available): {e}. This is acceptable for CI environments."
);
} else {
panic!("Unexpected error loading config with keyring auth: {e}");
}
}
}
}
#[test]
#[ignore] // Requires system keyring access - run with `cargo test -- --ignored`
fn test_keyring_auth_direct_deserialization() {
use std::process::Command;
use std::time::{SystemTime, UNIX_EPOCH};
// Generate unique service and user names to avoid conflicts
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap()
.as_secs();
let service_name = format!("hyper-mcp-direct-test-{timestamp}");
let user_name = format!("direct-test-user-{timestamp}");
// Test auth config to store in keyring (basic auth this time)
let test_auth_json =
r#"{"type":"basic","username":"direct-keyring-user","password":"direct-keyring-pass"}"#;
// Determine platform and execute appropriate keyring commands
if cfg!(target_os = "macos") {
// macOS: Add and test, then cleanup
let add_cmd = Command::new("security")
.args([
"add-generic-password",
"-a",
&user_name,
"-s",
&service_name,
"-w",
test_auth_json,
])
.output();
if let Ok(add_output) = add_cmd {
if add_output.status.success() {
// Test the keyring deserialization
let keyring_config_json = format!(
r#"{{"type":"keyring","service":"{service_name}","user":"{user_name}"}}"#
);
let auth_result: Result<AuthConfig, _> =
serde_json::from_str(&keyring_config_json);
// Cleanup first
let _ = Command::new("security")
.args([
"delete-generic-password",
"-a",
&user_name,
"-s",
&service_name,
])
.output();
// Verify result
match auth_result {
Ok(AuthConfig::Basic { username, password }) => {
assert_eq!(username, "direct-keyring-user");
assert_eq!(password, "direct-keyring-pass");
println!("✅ macOS keyring direct deserialization test passed");
}
Ok(_) => panic!("Expected Basic auth from keyring"),
Err(e) => {
println!(
"Keyring lookup failed on macOS (may not be available in CI): {e}"
);
}
}
} else {
println!("Failed to add secret to macOS keyring, skipping test");
}
}
} else if cfg!(target_os = "linux") {
// Linux: Add and test, then cleanup
let add_cmd = Command::new("bash")
.args([
"-c",
&format!(
"echo '{test_auth_json}' | secret-tool store --label='hyper-mcp direct test' service '{service_name}' username '{user_name}'"
),
])
.output();
if let Ok(add_output) = add_cmd {
if add_output.status.success() {
// Test the keyring deserialization
let keyring_config_json = format!(
r#"{{"type":"keyring","service":"{service_name}","user":"{user_name}"}}"#
);
let auth_result: Result<AuthConfig, _> =
serde_json::from_str(&keyring_config_json);
// Cleanup first
let _ = Command::new("secret-tool")
.args(["clear", "service", &service_name, "username", &user_name])
.output();
// Verify result
match auth_result {
Ok(AuthConfig::Basic { username, password }) => {
assert_eq!(username, "direct-keyring-user");
assert_eq!(password, "direct-keyring-pass");
println!("✅ Linux keyring direct deserialization test passed");
}
Ok(_) => panic!("Expected Basic auth from keyring"),
Err(e) => {
println!(
"Keyring lookup failed on Linux (may not be available in CI): {e}"
);
}
}
} else {
println!("Failed to add secret to Linux keyring, skipping test");
}
}
} else if cfg!(target_os = "windows") {
// Windows: Add and test, then cleanup
let escaped_json = test_auth_json.replace("\"", "\\\"");
let add_cmd = Command::new("cmdkey")
.args([
&format!("/generic:{service_name}"),
&format!("/user:{user_name}"),
&format!("/pass:{escaped_json}"),
])
.output();
if let Ok(add_output) = add_cmd {
if add_output.status.success() {
// Test the keyring deserialization
let keyring_config_json = format!(
r#"{{"type":"keyring","service":"{service_name}","user":"{user_name}"}}"#
);
let auth_result: Result<AuthConfig, _> =
serde_json::from_str(&keyring_config_json);
                    // Clean up before asserting so the test credential is removed even if an assertion fails
let _ = Command::new("cmdkey")
.args([&format!("/delete:{service_name}")])
.output();
// Verify result
match auth_result {
Ok(AuthConfig::Basic { username, password }) => {
assert_eq!(username, "direct-keyring-user");
assert_eq!(password, "direct-keyring-pass");
println!("✅ Windows keyring direct deserialization test passed");
}
Ok(_) => panic!("Expected Basic auth from keyring"),
Err(e) => {
println!(
"Keyring lookup failed on Windows (may not be available in CI): {e}"
);
}
}
} else {
println!("Failed to add secret to Windows keyring, skipping test");
}
}
} else {
println!(
"Direct keyring deserialization test skipped on unsupported platform: {}",
std::env::consts::OS
);
}
}
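    // Diagnostic-only test: probes whether the native keyring tooling is available on the
    // current platform (`security`, `secret-tool` plus a D-Bus session, or `cmdkey`) and
    // prints the result instead of failing, so it can run unconditionally in CI.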
#[test]
fn test_platform_detection_and_keyring_tool_availability() {
use std::process::Command;
println!(
"Running platform detection test on: {}",
std::env::consts::OS
);
if cfg!(target_os = "macos") {
// Test macOS security command availability
let security_check = Command::new("security").arg("help").output();
match security_check {
Ok(output) => {
if output.status.success() {
println!("✅ macOS security command is available");
// Test that we can list keychains (read-only operation)
let list_check = Command::new("security").args(["list-keychains"]).output();
match list_check {
Ok(list_output) if list_output.status.success() => {
println!("✅ macOS keychain access is functional");
}
_ => {
println!("⚠️ macOS keychain access may be limited");
}
}
} else {
println!("❌ macOS security command failed");
}
}
Err(e) => {
println!("❌ macOS security command not found: {e}");
}
}
} else if cfg!(target_os = "linux") {
// Test Linux secret-tool availability
let secret_tool_check = Command::new("secret-tool").arg("--help").output();
match secret_tool_check {
Ok(output) => {
if output.status.success() {
println!("✅ Linux secret-tool is available");
} else {
println!("❌ Linux secret-tool command failed");
}
}
Err(e) => {
println!(
"❌ Linux secret-tool not found: {e}. Install with: sudo apt-get install libsecret-tools"
);
}
}
// Check if dbus session is available (required for keyring)
let dbus_check = Command::new("dbus-send")
.args([
"--session",
"--dest=org.freedesktop.DBus",
"--print-reply",
"/org/freedesktop/DBus",
"org.freedesktop.DBus.ListNames",
])
.output();
match dbus_check {
Ok(output) if output.status.success() => {
println!("✅ Linux D-Bus session is available");
}
_ => {
println!("⚠️ Linux D-Bus session may not be available (required for keyring)");
}
}
} else if cfg!(target_os = "windows") {
// Test Windows cmdkey availability
let cmdkey_check = Command::new("cmdkey").arg("/?").output();
match cmdkey_check {
Ok(output) => {
if output.status.success() {
println!("✅ Windows cmdkey is available");
// Test that we can list credentials (read-only operation)
let list_check = Command::new("cmdkey").args(["/list"]).output();
match list_check {
Ok(list_output) if list_output.status.success() => {
println!("✅ Windows Credential Manager access is functional");
}
_ => {
println!("⚠️ Windows Credential Manager access may be limited");
}
}
} else {
println!("❌ Windows cmdkey command failed");
}
}
Err(e) => {
println!("❌ Windows cmdkey not found: {e}");
}
}
} else {
println!(
"ℹ️ Platform {} is not supported for keyring authentication",
std::env::consts::OS
);
}
}
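    // A keyring entry must declare both `service` and `user`; omitting either field is a
    // hard deserialization error.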
#[test]
fn test_keyring_auth_config_missing_service() {
let json = r#"{"type":"keyring","user":"test-user"}"#;
let result: Result<InternalAuthConfig, _> = serde_json::from_str(json);
assert!(result.is_err(), "Expected error for missing service field");
}
#[test]
fn test_keyring_auth_config_missing_user() {
let json = r#"{"type":"keyring","service":"test-service"}"#;
let result: Result<InternalAuthConfig, _> = serde_json::from_str(json);
assert!(result.is_err(), "Expected error for missing user field");
}
#[test]
fn test_keyring_auth_config_empty_values() {
let json = r#"{"type":"keyring","service":"","user":"test-user"}"#;
let internal_auth: InternalAuthConfig = serde_json::from_str(json).unwrap();
match internal_auth {
InternalAuthConfig::Keyring { service, user } => {
assert_eq!(service, "");
assert_eq!(user, "test-user");
}
_ => panic!("Expected Keyring auth config"),
}
}
#[test]
fn test_mixed_auth_types_config() {
let json = r#"
{
"auths": {
"https://basic.example.com": {
"type": "basic",
"username": "basicuser",
"password": "basicpass"
},
"https://token.example.com": {
"type": "token",
"token": "token-123"
}
},
"plugins": {
"test_plugin": {
"url": "file:///path/to/plugin"
}
}
}
"#;
let config: Config = serde_json::from_str(json).unwrap();
assert!(config.auths.is_some());
let auths = config.auths.unwrap();
assert_eq!(auths.len(), 2);
// Verify we have both auth types
let basic_url = Url::parse("https://basic.example.com").unwrap();
let token_url = Url::parse("https://token.example.com").unwrap();
match &auths[&basic_url] {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "basicuser");
assert_eq!(password, "basicpass");
}
_ => panic!("Expected Basic auth"),
}
match &auths[&token_url] {
AuthConfig::Token { token } => {
assert_eq!(token, "token-123");
}
_ => panic!("Expected Token auth"),
}
}
#[test]
fn test_auth_config_yaml_mixed_types() {
let yaml = r#"
auths:
"https://basic.example.com":
type: basic
username: basicuser
password: basicpass
"https://token.example.com":
type: token
token: token-123
plugins:
test_plugin:
url: "file:///path/to/plugin"
"#;
let config: Config = serde_yaml::from_str(yaml).unwrap();
assert!(config.auths.is_some());
let auths = config.auths.unwrap();
assert_eq!(auths.len(), 2);
}
#[test]
fn test_auth_config_special_urls() {
let mut auths = HashMap::new();
// Test with localhost URL
let localhost_url = Url::parse("http://localhost:8080").unwrap();
auths.insert(
localhost_url.clone(),
AuthConfig::Basic {
username: "localuser".to_string(),
password: "localpass".to_string(),
},
);
// Test with IP address URL
let ip_url = Url::parse("https://192.168.1.100:443").unwrap();
auths.insert(
ip_url.clone(),
AuthConfig::Token {
token: "ip-token".to_string(),
},
);
// Test with custom port
let custom_port_url = Url::parse("https://api.example.com:9000").unwrap();
auths.insert(
custom_port_url.clone(),
AuthConfig::Basic {
username: "portuser".to_string(),
password: "portpass".to_string(),
},
);
let config = Config {
auths: Some(auths),
plugins: HashMap::new(),
..Default::default()
};
// Test serialization and deserialization round-trip
let json = serde_json::to_string(&config).unwrap();
let deserialized: Config = serde_json::from_str(&json).unwrap();
assert!(deserialized.auths.is_some());
let deserialized_auths = deserialized.auths.unwrap();
assert_eq!(deserialized_auths.len(), 3);
assert!(deserialized_auths.contains_key(&localhost_url));
assert!(deserialized_auths.contains_key(&ip_url));
assert!(deserialized_auths.contains_key(&custom_port_url));
}
#[test]
fn test_auth_config_unicode_values() {
// Test with unicode characters in credentials
let auth_config = AuthConfig::Basic {
username: "用户名".to_string(),
password: "密码🔐".to_string(),
};
let json = serde_json::to_string(&auth_config).unwrap();
let deserialized: AuthConfig = serde_json::from_str(&json).unwrap();
match deserialized {
AuthConfig::Basic { username, password } => {
assert_eq!(username, "用户名");
assert_eq!(password, "密码🔐");
}
_ => panic!("Expected Basic auth config"),
}
}
#[test]
fn test_auth_config_long_token() {
// Test with very long token (JWT-like)
let long_token = "eyJhbGciOiJSUzI1NiIsImtpZCI6IjE2NzAyODYyNjMifQ.eyJhdWQiOiJodHRwczovL2FwaS5leGFtcGxlLmNvbSIsImV4cCI6MTYzNzI4NjI2MywiaWF0IjoxNjM3Mjc5MDYzLCJpc3MiOiJodHRwczovL2F1dGguZXhhbXBsZS5jb20iLCJzdWIiOiJ1c2VyQGV4YW1wbGUuY29tIn0.signature_here_would_be_much_longer";
let auth_config = AuthConfig::Token {
token: long_token.to_string(),
};
let json = serde_json::to_string(&auth_config).unwrap();
let deserialized: AuthConfig = serde_json::from_str(&json).unwrap();
match deserialized {
AuthConfig::Token { token } => {
assert_eq!(token, long_token);
assert!(token.len() > 200);
}
_ => panic!("Expected Token auth config"),
}
}
// Tests for skip_tools Option<RegexSet> functionality
#[test]
fn test_skip_tools_none() {
let runtime_config = RuntimeConfig {
skip_prompts: None,
skip_resource_templates: None,
skip_resources: None,
skip_tools: None,
allowed_hosts: None,
allowed_paths: None,
env_vars: None,
memory_limit: None,
};
// Test serialization
let json = serde_json::to_string(&runtime_config).unwrap();
assert!(json.contains("\"skip_tools\":null"));
// Test deserialization
let deserialized: RuntimeConfig = serde_json::from_str(&json).unwrap();
assert!(deserialized.skip_tools.is_none());
}
#[test]
fn test_skip_tools_some_basic() {
let json = r#"{
"skip_tools": ["tool1", "tool2", "tool3"]
}"#;
let runtime_config: RuntimeConfig = serde_json::from_str(json).unwrap();
let skip_tools = runtime_config.skip_tools.as_ref().unwrap();
assert_eq!(skip_tools.len(), 3);
assert!(skip_tools.is_match("tool1"));
assert!(skip_tools.is_match("tool2"));
assert!(skip_tools.is_match("tool3"));
assert!(!skip_tools.is_match("tool4"));
assert!(!skip_tools.is_match("tool1_extended"));
}
#[test]
fn test_skip_tools_regex_patterns() {
let json = r#"{
"skip_tools": ["tool.*", "debug_.*", "test_[0-9]+"]
}"#;
let runtime_config: RuntimeConfig = serde_json::from_str(json).unwrap();
let skip_tools = runtime_config.skip_tools.as_ref().unwrap();
// Test wildcard patterns
assert!(skip_tools.is_match("tool1"));
assert!(skip_tools.is_match("tool_anything"));
assert!(skip_tools.is_match("toolbox"));
// Test prefix patterns
assert!(skip_tools.is_match("debug_info"));
assert!(skip_tools.is_match("debug_error"));
// Test numbered patterns
assert!(skip_tools.is_match("test_1"));
assert!(skip_tools.is_match("test_99"));
// Test non-matches
assert!(!skip_tools.is_match("my_tool"));
assert!(!skip_tools.is_match("debug"));
assert!(!skip_tools.is_match("test_abc"));
// "tool" should match "tool.*" pattern since it becomes "^tool.*$"
assert!(skip_tools.is_match("tool"));
}
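    // Bare patterns behave as if anchored with `^...$` (exact match), while patterns
    // written with explicit `^` or `$` anchors keep their prefix/suffix semantics.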
#[test]
fn test_skip_tools_anchoring_behavior() {
let json = r#"{
"skip_tools": ["tool", "^prefix_.*", ".*_suffix$", "^exact_match$"]
}"#;
let runtime_config: RuntimeConfig = serde_json::from_str(json).unwrap();
let skip_tools = runtime_config.skip_tools.as_ref().unwrap();
// "tool" should be auto-anchored to "^tool$"
assert!(skip_tools.is_match("tool"));
assert!(!skip_tools.is_match("tool_extended"));
assert!(!skip_tools.is_match("my_tool"));
// "^prefix_.*" should match anything starting with "prefix_"
assert!(skip_tools.is_match("prefix_anything"));
assert!(skip_tools.is_match("prefix_"));
assert!(!skip_tools.is_match("my_prefix_tool"));
// ".*_suffix$" should match anything ending with "_suffix"
assert!(skip_tools.is_match("any_suffix"));
assert!(skip_tools.is_match("_suffix"));
assert!(!skip_tools.is_match("suffix_extended"));
// "^exact_match$" should only match exactly "exact_match"
assert!(skip_tools.is_match("exact_match"));
assert!(!skip_tools.is_match("exact_match_extended"));
// "prefix_exact_match" matches "^prefix_.*" pattern, not "^exact_match$"
assert!(skip_tools.is_match("prefix_exact_match"));
}
#[test]
fn test_skip_tools_serialization_roundtrip() {
let original_patterns = vec![
"tool1".to_string(),
"tool.*".to_string(),
"debug_.*".to_string(),
];
let regex_set = RegexSet::new(&original_patterns).unwrap();
let runtime_config = RuntimeConfig {
skip_prompts: None,
skip_resource_templates: None,
skip_resources: None,
skip_tools: Some(regex_set),
allowed_hosts: None,
allowed_paths: None,
env_vars: None,
memory_limit: None,
};
// Serialize
let json = serde_json::to_string(&runtime_config).unwrap();
// Deserialize
let deserialized: RuntimeConfig = serde_json::from_str(&json).unwrap();
let skip_tools = deserialized.skip_tools.as_ref().unwrap();
// Verify functionality is preserved
assert!(skip_tools.is_match("tool1"));
assert!(skip_tools.is_match("tool_anything"));
assert!(skip_tools.is_match("debug_info"));
assert!(!skip_tools.is_match("other_tool"));
}
#[test]
fn test_skip_tools_yaml_deserialization() {
let yaml = r#"
skip_tools:
- "tool1"
- "tool.*"
- "debug_.*"
allowed_hosts:
- "example.com"
"#;
let runtime_config: RuntimeConfig = serde_yaml::from_str(yaml).unwrap();
let skip_tools = runtime_config.skip_tools.as_ref().unwrap();
assert!(skip_tools.is_match("tool1"));
assert!(skip_tools.is_match("tool_test"));
assert!(skip_tools.is_match("debug_info"));
assert!(!skip_tools.is_match("other"));
}
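    // A single invalid pattern makes the whole `skip_tools` list fail to deserialize;
    // bad regexes are rejected up front rather than silently dropped.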
#[test]
fn test_skip_tools_invalid_regex() {
let json = r#"{
"skip_tools": ["valid_tool", "[unclosed_bracket", "another_valid"]
}"#;
let result: Result<RuntimeConfig, _> = serde_json::from_str(json);
assert!(result.is_err());
let error_msg = result.unwrap_err().to_string();
assert!(error_msg.contains("regex") || error_msg.contains("bracket"));
}
#[test]
fn test_skip_tools_empty_patterns() {
let json = r#"{
"skip_tools": []
}"#;
let runtime_config: RuntimeConfig = serde_json::from_str(json).unwrap();
let skip_tools = runtime_config.skip_tools.as_ref().unwrap();
assert_eq!(skip_tools.len(), 0);
assert!(!skip_tools.is_match("anything"));
}
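    // Regex metacharacters have to be escaped twice: once for the regex engine and once
    // for JSON, so `"tool\\.exe"` in the config becomes the pattern `tool\.exe` and
    // matches the literal tool name `tool.exe`.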
#[test]
fn test_skip_tools_special_regex_characters() {
let json = r#"{
"skip_tools": ["tool\\.exe", "script\\?", "temp\\*file"]
}"#;
let runtime_config: RuntimeConfig = serde_json::from_str(json).unwrap();
let skip_tools = runtime_config.skip_tools.as_ref().unwrap();
// Test literal matching of special characters
assert!(skip_tools.is_match("tool.exe"));
assert!(skip_tools.is_match("script?"));
assert!(skip_tools.is_match("temp*file"));
// These should not match due to anchoring
assert!(!skip_tools.is_match("my_tool.exe"));
assert!(!skip_tools.is_match("script?.bat"));
}
#[test]
fn test_skip_tools_case_sensitivity() {
let json = r#"{
"skip_tools": ["Tool", "DEBUG.*"]
}"#;
let runtime_config: RuntimeConfig = serde_json::from_str(json).unwrap();
let skip_tools = runtime_config.skip_tools.as_ref().unwrap();
// RegexSet is case sensitive by default
assert!(skip_tools.is_match("Tool"));
assert!(!skip_tools.is_match("tool"));
assert!(!skip_tools.is_match("TOOL"));
assert!(skip_tools.is_match("DEBUG_info"));
assert!(!skip_tools.is_match("debug_info"));
}
#[test]
fn test_skip_tools_default_behavior() {
// Test that skip_tools defaults to None when not specified
let json = r#"{
"allowed_hosts": ["example.com"]
}"#;
let runtime_config: RuntimeConfig = serde_json::from_str(json).unwrap();
assert!(runtime_config.skip_tools.is_none());
}
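    // Builds a RegexSet by hand with the same `^...$` wrapping that skip_tools patterns
    // receive, and exercises exact, prefix, and suffix matching directly.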
#[test]
fn test_skip_tools_matching_functionality() {
let patterns = vec![
"exact".to_string(),
"prefix.*".to_string(),
".*suffix".to_string(),
];
let regex_set = RegexSet::new(
patterns
.iter()
                .map(|p| format!("^{p}$"))
.collect::<Vec<_>>(),
)
.unwrap();
// Test exact match
assert!(regex_set.is_match("exact"));
assert!(!regex_set.is_match("exact_more"));
// Test prefix match
assert!(regex_set.is_match("prefix123"));
assert!(regex_set.is_match("prefixABC"));
assert!(!regex_set.is_match("not_prefix123"));
// Test suffix match
assert!(regex_set.is_match("anysuffix"));
assert!(regex_set.is_match("123suffix"));
assert!(!regex_set.is_match("suffix_more"));
}
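    // End-to-end check against tests/fixtures/skip_tools_examples.yaml: loads the fixture
    // through load_config and verifies the compiled skip_tools behavior of the example
    // plugins (exact match, wildcard, regex, anchored, case sensitivity, special
    // characters, empty list, no list, and full runtime config).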
#[test]
fn test_skip_tools_examples_integration() {
let rt = Runtime::new().unwrap();
// Load the skip_tools examples config
let path = Path::new("tests/fixtures/skip_tools_examples.yaml");
let cli = Cli {
config_file: Some(path.to_path_buf()),
..Default::default()
};
let config_result = rt.block_on(load_config(&cli));
assert!(
config_result.is_ok(),
"Failed to load skip_tools examples config"
);
let config = config_result.unwrap();
assert_eq!(
config.plugins.len(),
10,
"Expected 10 plugins in the config"
);
// Test exact_match_plugin
let exact_plugin = &config.plugins[&PluginName("exact_match_plugin".to_string())];
let exact_skip_tools = exact_plugin
.runtime_config
.as_ref()
.unwrap()
.skip_tools
.as_ref()
.unwrap();
assert!(exact_skip_tools.is_match("debug_tool"));
assert!(exact_skip_tools.is_match("test_runner"));
assert!(exact_skip_tools.is_match("deprecated_helper"));
assert!(!exact_skip_tools.is_match("other_tool"));
assert!(!exact_skip_tools.is_match("debug_tool_extended"));
// Test wildcard_plugin
let wildcard_plugin = &config.plugins[&PluginName("wildcard_plugin".to_string())];
let wildcard_skip_tools = wildcard_plugin
.runtime_config
.as_ref()
.unwrap()
.skip_tools
.as_ref()
.unwrap();
assert!(wildcard_skip_tools.is_match("temp_file"));
assert!(wildcard_skip_tools.is_match("temp_data"));
assert!(wildcard_skip_tools.is_match("file_backup"));
assert!(wildcard_skip_tools.is_match("data_backup"));
assert!(wildcard_skip_tools.is_match("debug"));
assert!(wildcard_skip_tools.is_match("debugger"));
assert!(!wildcard_skip_tools.is_match("backup_file"));
assert!(!wildcard_skip_tools.is_match("temp"));
// Test regex_plugin
let regex_plugin = &config.plugins[&PluginName("regex_plugin".to_string())];
let regex_skip_tools = regex_plugin
.runtime_config
.as_ref()
.unwrap()
.skip_tools
.as_ref()
.unwrap();
assert!(regex_skip_tools.is_match("tool_1"));
assert!(regex_skip_tools.is_match("tool_42"));
assert!(regex_skip_tools.is_match("test_unit"));
assert!(regex_skip_tools.is_match("test_integration"));
assert!(regex_skip_tools.is_match("data_helper"));
assert!(!regex_skip_tools.is_match("tool_abc"));
assert!(!regex_skip_tools.is_match("test_system"));
assert!(!regex_skip_tools.is_match("Data_helper"));
// Test anchored_plugin
let anchored_plugin = &config.plugins[&PluginName("anchored_plugin".to_string())];
let anchored_skip_tools = anchored_plugin
.runtime_config
.as_ref()
.unwrap()
.skip_tools
.as_ref()
.unwrap();
assert!(anchored_skip_tools.is_match("system_tool"));
assert!(anchored_skip_tools.is_match("data_internal"));
assert!(anchored_skip_tools.is_match("exact_only"));
assert!(!anchored_skip_tools.is_match("my_system_tool"));
assert!(!anchored_skip_tools.is_match("data_internal_ext"));
assert!(!anchored_skip_tools.is_match("exact_only_more"));
// Test case_sensitive_plugin
let case_plugin = &config.plugins[&PluginName("case_sensitive_plugin".to_string())];
let case_skip_tools = case_plugin
.runtime_config
.as_ref()
.unwrap()
.skip_tools
.as_ref()
.unwrap();
assert!(case_skip_tools.is_match("Tool"));
assert!(!case_skip_tools.is_match("tool"));
assert!(!case_skip_tools.is_match("TOOL"));
assert!(case_skip_tools.is_match("DEBUG_info"));
assert!(!case_skip_tools.is_match("debug_info"));
assert!(case_skip_tools.is_match("CamelCaseHelper"));
assert!(!case_skip_tools.is_match("camelCaseHelper"));
// Test special_chars_plugin
let special_plugin = &config.plugins[&PluginName("special_chars_plugin".to_string())];
let special_skip_tools = special_plugin
.runtime_config
.as_ref()
.unwrap()
.skip_tools
.as_ref()
.unwrap();
assert!(special_skip_tools.is_match("file.exe"));
assert!(special_skip_tools.is_match("script?"));
assert!(special_skip_tools.is_match("temp*data"));
assert!(special_skip_tools.is_match("path\\tool"));
assert!(!special_skip_tools.is_match("fileXexe"));
assert!(!special_skip_tools.is_match("script"));
// Test empty_skip_plugin
let empty_plugin = &config.plugins[&PluginName("empty_skip_plugin".to_string())];
let empty_skip_tools = empty_plugin
.runtime_config
.as_ref()
.unwrap()
.skip_tools
.as_ref()
.unwrap();
assert_eq!(empty_skip_tools.len(), 0);
assert!(!empty_skip_tools.is_match("anything"));
// Test no_skip_plugin
let no_skip_plugin = &config.plugins[&PluginName("no_skip_plugin".to_string())];
assert!(
no_skip_plugin
.runtime_config
.as_ref()
.unwrap()
.skip_tools
.is_none()
);
// Test full_config_plugin has all components
let full_plugin = &config.plugins[&PluginName("full_config_plugin".to_string())];
let full_runtime = full_plugin.runtime_config.as_ref().unwrap();
let full_skip_tools = full_runtime.skip_tools.as_ref().unwrap();
assert!(full_skip_tools.is_match("admin_tool"));
assert!(full_skip_tools.is_match("tool_dangerous"));
assert!(full_skip_tools.is_match("system_critical"));
assert!(!full_skip_tools.is_match("safe_tool"));
assert_eq!(full_runtime.allowed_hosts.as_ref().unwrap().len(), 2);
assert_eq!(full_runtime.allowed_paths.as_ref().unwrap().len(), 2);
assert_eq!(full_runtime.env_vars.as_ref().unwrap().len(), 2);
assert_eq!(full_runtime.memory_limit.as_ref().unwrap(), "2GB");
}
}
```