Unblock typed runtime integration config primitives

Add typed runtime-facing MCP and OAuth configuration models on top of the existing merged settings loader so later parity work can consume validated structures instead of ad hoc JSON traversal.

This keeps the first slice bounded to parsing, precedence, exports, and tests. While validating the slice under the repo's required clippy gate, I also fixed a handful of pre-existing clippy failures in runtime file operations so that the required verification command passes for this commit.

Constraint: Must keep scope to parity-unblocking primitives, not full MCP or OAuth flow execution
Constraint: cargo clippy --all-targets is a required verification gate for this repo
Rejected: Add a new integrations crate first | too much boundary churn for the first landing slice
Rejected: Leave existing clippy failures untouched | would block the required verification command for this commit
Confidence: medium
Scope-risk: moderate
Reversibility: clean
Directive: Keep future MCP/OAuth additions layered on these typed config surfaces before introducing transport orchestration
Tested: cargo fmt --all; cargo test -p runtime; cargo clippy -p runtime --all-targets -- -D warnings
Not-tested: workspace-wide clippy/test beyond the runtime crate; live MCP or OAuth network flows
This commit is contained in:
Yeachan-Heo
2026-03-31 19:17:16 +00:00
parent 4586764a0e
commit 6037aaeff1
3 changed files with 542 additions and 15 deletions

View File

@@ -138,9 +138,9 @@ pub fn read_file(
let content = fs::read_to_string(&absolute_path)?;
let lines: Vec<&str> = content.lines().collect();
let start_index = offset.unwrap_or(0).min(lines.len());
let end_index = limit
.map(|limit| start_index.saturating_add(limit).min(lines.len()))
.unwrap_or(lines.len());
let end_index = limit.map_or(lines.len(), |limit| {
start_index.saturating_add(limit).min(lines.len())
});
let selected = lines[start_index..end_index].join("\n");
Ok(ReadFileOutput {
@@ -296,12 +296,12 @@ pub fn grep_search(input: &GrepSearchInput) -> io::Result<GrepSearchOutput> {
continue;
}
let Ok(content) = fs::read_to_string(&file_path) else {
let Ok(file_content) = fs::read_to_string(&file_path) else {
continue;
};
if output_mode == "count" {
let count = regex.find_iter(&content).count();
let count = regex.find_iter(&file_content).count();
if count > 0 {
filenames.push(file_path.to_string_lossy().into_owned());
total_matches += count;
@@ -309,7 +309,7 @@ pub fn grep_search(input: &GrepSearchInput) -> io::Result<GrepSearchOutput> {
continue;
}
let lines: Vec<&str> = content.lines().collect();
let lines: Vec<&str> = file_content.lines().collect();
let mut matched_lines = Vec::new();
for (index, line) in lines.iter().enumerate() {
if regex.is_match(line) {
@@ -327,13 +327,13 @@ pub fn grep_search(input: &GrepSearchInput) -> io::Result<GrepSearchOutput> {
for index in matched_lines {
let start = index.saturating_sub(input.before.unwrap_or(context));
let end = (index + input.after.unwrap_or(context) + 1).min(lines.len());
for current in start..end {
for (current, line_content) in lines.iter().enumerate().take(end).skip(start) {
let prefix = if input.line_numbers.unwrap_or(true) {
format!("{}:{}:", file_path.to_string_lossy(), current + 1)
} else {
format!("{}:", file_path.to_string_lossy())
};
content_lines.push(format!("{prefix}{}", lines[current]));
content_lines.push(format!("{prefix}{line_content}"));
}
}
}
@@ -341,7 +341,7 @@ pub fn grep_search(input: &GrepSearchInput) -> io::Result<GrepSearchOutput> {
let (filenames, applied_limit, applied_offset) =
apply_limit(filenames, input.head_limit, input.offset);
let content = if output_mode == "content" {
let rendered_content = if output_mode == "content" {
let (lines, limit, offset) = apply_limit(content_lines, input.head_limit, input.offset);
return Ok(GrepSearchOutput {
mode: Some(output_mode),
@@ -361,7 +361,7 @@ pub fn grep_search(input: &GrepSearchInput) -> io::Result<GrepSearchOutput> {
mode: Some(output_mode.clone()),
num_files: filenames.len(),
filenames,
content,
content: rendered_content,
num_lines: None,
num_matches: (output_mode == "count").then_some(total_matches),
applied_limit,
@@ -376,8 +376,7 @@ fn collect_search_files(base_path: &Path) -> io::Result<Vec<PathBuf>> {
let mut files = Vec::new();
for entry in WalkDir::new(base_path) {
let entry =
entry.map_err(|error| io::Error::new(io::ErrorKind::Other, error.to_string()))?;
let entry = entry.map_err(|error| io::Error::other(error.to_string()))?;
if entry.file_type().is_file() {
files.push(entry.path().to_path_buf());
}