
Fix and improve paths and edit tool responses #774


Merged · 18 commits · May 16, 2025
Commits
d7a883b
fix: add message when deleting a binary file
humbertoyusta May 13, 2025
d1cb876
fix: if parent is empty, complain to ask for abs path
humbertoyusta May 13, 2025
ff61011
fix: add tool failed flag, so UI can know when tool call failed
humbertoyusta May 14, 2025
7539724
feat: getting fullpath of the file and then opening it in IDE & hidin…
alashchev17 May 14, 2025
4f23370
fix: updated fixtures & combobox tests
alashchev17 May 14, 2025
c48db73
fix: preprocess path for normalization in /v1/fullpath to make it mor…
humbertoyusta May 14, 2025
d90e949
fix: add tool_failed to test messages
humbertoyusta May 15, 2025
c36a70e
fix: add check to create textdoc to create files only inside project …
humbertoyusta May 15, 2025
f9ff7be
fix: improve path normalization in Windows for file edit tools
humbertoyusta May 15, 2025
025687f
fix: improve workdir handling, to disallow running commands outside o…
humbertoyusta May 15, 2025
e9594a3
fix: pass ccx to commant to match against confirm deny, so it can be …
humbertoyusta May 15, 2025
4d6f4ae
refactor: resolve shell workdir in a separate method
humbertoyusta May 15, 2025
908b9d1
fix: no need to try join with abs path in the right
humbertoyusta May 15, 2025
4c7c4b7
fix: make tool_failed optional in type definition
alashchev17 May 15, 2025
4c2c347
fix: made some fields optional in metering to avoid coins information…
alashchev17 May 15, 2025
1b6ce54
chore: assigning 0 to destructured elements instead of nullish operat…
alashchev17 May 15, 2025
4a6af8d
update libgit2 and shadow rs
humbertoyusta May 16, 2025
4d16efe
Merge branch 'dev' into fix-and-improve-paths-and-edit-tool-responses
humbertoyusta May 16, 2025
6 changes: 3 additions & 3 deletions refact-agent/engine/Cargo.toml
@@ -16,7 +16,7 @@ default = ["vecdb"]
vecdb = ["sqlite-vec"]

[build-dependencies]
shadow-rs = "0.36.0"
shadow-rs = "1.1.0"

[dependencies]
astral-tokio-tar = "0.5.2"
@@ -31,7 +31,7 @@ diff = "0.1.13"
dunce = "1.0.5"
dyn_partial_eq = "=0.1.2"
futures = "0.3"
git2 = "0.19.0"
git2 = "0.20.2"
glob = "0.3.1"
hashbrown = "0.15.2"
headless_chrome = "1.0.16"
@@ -63,7 +63,7 @@ serde_cbor = "0.11.2"
serde_json = { version = "1", features = ["preserve_order"] }
serde_yaml = "0.9.31"
# all features = ["compression", "docs", "event_log", "failpoints", "io_uring", "lock_free_delays", "measure_allocs", "miri_optimizations", "mutex", "no_inline", "no_logs", "pretty_backtrace", "testing"]
shadow-rs = { version = "0.36.0", features = [], default-features = false }
shadow-rs = { version = "1.1.0", features = [], default-features = false }
sha2 = "0.10.8"
shell-words = "1.1.0"
shell-escape = "0.1.5"
4 changes: 2 additions & 2 deletions refact-agent/engine/build.rs
@@ -1,4 +1,4 @@

fn main() -> shadow_rs::SdResult<()> {
shadow_rs::new()
fn main() {
shadow_rs::ShadowBuilder::builder().build().unwrap();
}
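
For context: shadow-rs 1.x drops the old `shadow_rs::new() -> SdResult<()>` entry point in favor of a builder, and the generated-constants module consumed elsewhere in this PR moves from `crate::version::build_info` to `crate::version::build`. A sketch of the consuming side, assuming the usual `shadow!` macro (the actual `version` module is not shown in this diff):

    // src/version.rs — sketch; shadow!(build) declares the module name that
    // the rest of the code references, hence the build_info -> build renames below.
    use shadow_rs::shadow;
    shadow!(build); // exposes build::PKG_VERSION, build::COMMIT_HASH, ...
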
6 changes: 4 additions & 2 deletions refact-agent/engine/src/call_validation.rs
@@ -175,6 +175,8 @@ pub struct ChatMessage {
#[serde(default, skip_serializing_if = "String::is_empty")]
pub tool_call_id: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tool_failed: Option<bool>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub usage: Option<ChatUsage>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub checkpoints: Vec<Checkpoint>,
@@ -187,7 +189,7 @@ pub struct ChatMessage {
pub enum ModelType {
Chat,
Completion,
Embedding,
Embedding,
}

#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq)]
@@ -289,7 +291,7 @@ impl ChatMode {
pub fn is_agentic(self) -> bool {
match self {
ChatMode::AGENT => true,
ChatMode::NO_TOOLS | ChatMode::EXPLORE | ChatMode::CONFIGURE |
ChatMode::NO_TOOLS | ChatMode::EXPLORE | ChatMode::CONFIGURE |
ChatMode::PROJECT_SUMMARY => false,
}
}
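
The new `tool_failed` flag is deliberately `Option<bool>` with `#[serde(default, skip_serializing_if = "Option::is_none")]`: it is omitted from JSON when unset, so existing fixtures and clients that lack the field keep deserializing. A standalone sketch of that round-trip behavior (a trimmed-down stand-in for `ChatMessage`):

    // Trimmed stand-in for ChatMessage, just to show the serde behavior.
    #[derive(serde::Serialize, serde::Deserialize)]
    struct Msg {
        role: String,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        tool_failed: Option<bool>,
    }

    fn main() {
        // None is omitted on the wire — old fixtures stay valid:
        let ok = Msg { role: "tool".into(), tool_failed: None };
        assert_eq!(serde_json::to_string(&ok).unwrap(), r#"{"role":"tool"}"#);

        // Some(true) serializes the flag, so the UI can mark the failed call:
        let failed = Msg { role: "tool".into(), tool_failed: Some(true) };
        assert!(serde_json::to_string(&failed).unwrap().contains(r#""tool_failed":true"#));

        // A message without the field deserializes to None via #[serde(default)]:
        let parsed: Msg = serde_json::from_str(r#"{"role":"tool"}"#).unwrap();
        assert!(parsed.tool_failed.is_none());
    }
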
2 changes: 1 addition & 1 deletion refact-agent/engine/src/caps/caps.rs
@@ -283,7 +283,7 @@ pub async fn load_caps_value_from_url(

if !cmdline.api_key.is_empty() {
headers.insert(reqwest::header::AUTHORIZATION, reqwest::header::HeaderValue::from_str(&format!("Bearer {}", cmdline.api_key)).unwrap());
headers.insert(reqwest::header::USER_AGENT, reqwest::header::HeaderValue::from_str(&format!("refact-lsp {}", crate::version::build_info::PKG_VERSION)).unwrap());
headers.insert(reqwest::header::USER_AGENT, reqwest::header::HeaderValue::from_str(&format!("refact-lsp {}", crate::version::build::PKG_VERSION)).unwrap());
}

let mut last_status = 0;
15 changes: 11 additions & 4 deletions refact-agent/engine/src/files_correction.rs
@@ -135,10 +135,6 @@ async fn complete_path_with_project_dir(
if path_exists(&candidate_path, is_dir) && candidate_path.starts_with(&p) {
return Some(candidate_path);
}
let j_path = p.join(&candidate_path);
if path_exists(&j_path, is_dir) {
return Some(j_path);
}

// This might save a roundtrip:
// .../project1/project1/1.cpp
@@ -456,6 +452,17 @@ pub fn canonicalize_normalized_path(p: PathBuf) -> PathBuf {
p.canonicalize().unwrap_or_else(|_| absolute(&p).unwrap_or(p))
}

pub async fn check_if_its_inside_a_workspace_or_config(gcx: Arc<ARwLock<GlobalContext>>, path: &Path) -> Result<(), String> {
let workspace_folders = get_project_dirs(gcx.clone()).await;
let config_dir = gcx.read().await.config_dir.clone();

if workspace_folders.iter().any(|d| path.starts_with(d)) || path.starts_with(&config_dir) {
Ok(())
} else {
Err(format!("Path '{path:?}' is outside of project directories:\n{workspace_folders:?}"))
}
}

pub fn any_glob_matches_path(globs: &[String], path: &Path) -> bool {
globs.iter().any(|glob| {
let pattern = glob::Pattern::new(glob).unwrap();
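
A sketch of how a file-creating tool can use the new guard (the caller name and body are illustrative; the real call site is in the create_textdoc edit-tool code, per the commit messages):

    // Sketch: guard a write path before creating a file (hypothetical caller).
    async fn create_file_guarded(
        gcx: Arc<ARwLock<GlobalContext>>,
        path: &Path,
        content: &str,
    ) -> Result<(), String> {
        // Rejects anything outside the workspace folders and the config dir.
        check_if_its_inside_a_workspace_or_config(gcx.clone(), path).await?;
        tokio::fs::write(path, content).await.map_err(|e| e.to_string())
    }
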
12 changes: 6 additions & 6 deletions refact-agent/engine/src/forward_to_openai_endpoint.rs
@@ -29,7 +29,7 @@ pub async fn forward_to_openai_style_endpoint(
headers.insert(AUTHORIZATION, HeaderValue::from_str(&format!("Bearer {}", model_rec.api_key)).unwrap());
}
if model_rec.support_metadata {
headers.insert(USER_AGENT, HeaderValue::from_str(&format!("refact-lsp {}", crate::version::build_info::PKG_VERSION)).unwrap());
headers.insert(USER_AGENT, HeaderValue::from_str(&format!("refact-lsp {}", crate::version::build::PKG_VERSION)).unwrap());
}
let mut data = json!({
"model": model_rec.name.clone(),
@@ -64,7 +64,7 @@ pub async fn forward_to_openai_style_endpoint(
if let Some(meta) = meta {
data["meta"] = json!(meta);
}

// When cancelling requests, the coroutine usually gets aborted here, on the following line.
let req = client.post(&model_rec.endpoint)
.headers(headers)
@@ -105,7 +105,7 @@ pub async fn forward_to_openai_style_endpoint_streaming(
headers.insert(AUTHORIZATION, HeaderValue::from_str(&format!("Bearer {}", model_rec.api_key)).unwrap());
}
if model_rec.support_metadata {
headers.insert(USER_AGENT, HeaderValue::from_str(format!("refact-lsp {}", crate::version::build_info::PKG_VERSION).as_str()).unwrap());
headers.insert(USER_AGENT, HeaderValue::from_str(format!("refact-lsp {}", crate::version::build::PKG_VERSION).as_str()).unwrap());
}

let mut data = json!({
@@ -146,7 +146,7 @@ pub async fn forward_to_openai_style_endpoint_streaming(
if let Some(meta) = meta {
data["meta"] = json!(meta);
}

if model_rec.endpoint.is_empty() {
return Err(format!("No endpoint configured for {}", model_rec.id));
}
@@ -252,7 +252,7 @@ pub async fn get_embedding_openai_style(
// info!("get_embedding_openai_style: {:?}", json);
// {"data":[{"embedding":[0.0121664945...],"index":0,"object":"embedding"}, {}, {}]}
// or {"data":[{"embedding":[0.0121664945...]}, {}, {}]} without index

let mut result: Vec<Vec<f32>> = vec![vec![]; B];
match serde_json::from_value::<Vec<EmbeddingsResultOpenAI>>(json["data"].clone()) {
Ok(unordered) => {
@@ -268,7 +268,7 @@
match serde_json::from_value::<Vec<EmbeddingsResultOpenAINoIndex>>(json["data"].clone()) {
Ok(ordered) => {
if ordered.len() != B {
return Err(format!("get_embedding_openai_style: response length mismatch: expected {}, got {}",
return Err(format!("get_embedding_openai_style: response length mismatch: expected {}, got {}",
B, ordered.len()));
}
for (i, res) in ordered.into_iter().enumerate() {
10 changes: 5 additions & 5 deletions refact-agent/engine/src/http/routers/info.rs
@@ -9,11 +9,11 @@ use crate::custom_error::ScratchError;

pub fn get_build_info() -> IndexMap<&'static str, &'static str> {
IndexMap::from([
("version", crate::version::build_info::PKG_VERSION),
("commit", crate::version::build_info::COMMIT_HASH),
("build_os", crate::version::build_info::BUILD_OS),
("rust_version", crate::version::build_info::RUST_VERSION),
("cargo_version", crate::version::build_info::CARGO_VERSION),
("version", crate::version::build::PKG_VERSION),
("commit", crate::version::build::COMMIT_HASH),
("build_os", crate::version::build::BUILD_OS),
("rust_version", crate::version::build::RUST_VERSION),
("cargo_version", crate::version::build::CARGO_VERSION),
])
}

9 changes: 5 additions & 4 deletions refact-agent/engine/src/http/routers/v1/gui_help_handlers.rs
@@ -7,7 +7,7 @@ use serde::Deserialize;
use tokio::sync::RwLock as ARwLock;
use crate::at_commands::at_file::{file_repair_candidates, return_one_candidate_or_a_good_error};
use crate::custom_error::ScratchError;
use crate::files_correction::correct_to_nearest_dir_path;
use crate::files_correction::{correct_to_nearest_dir_path, preprocess_path_for_normalization};
use crate::global_context::GlobalContext;

#[derive(Deserialize)]
@@ -22,11 +22,12 @@ pub async fn handle_v1_fullpath(
let post = serde_json::from_slice::<ResolveShortenedPathPost>(&body_bytes)
.map_err(|e| ScratchError::new(StatusCode::UNPROCESSABLE_ENTITY, format!("JSON problem: {}", e)))?;

let candidates_file = file_repair_candidates(gcx.clone(), &post.path, 10, false).await;
let candidates_dir = correct_to_nearest_dir_path(gcx.clone(), &post.path, false, 10).await;
let path = preprocess_path_for_normalization(post.path);
let candidates_file = file_repair_candidates(gcx.clone(), &path, 10, false).await;
let candidates_dir = correct_to_nearest_dir_path(gcx.clone(), &path, false, 10).await;
let candidates = candidates_file.into_iter().chain(candidates_dir.clone().into_iter()).collect::<HashSet<_>>().into_iter().collect::<Vec<_>>();

match return_one_candidate_or_a_good_error(gcx.clone(), &post.path, &candidates, &vec![], false).await {
match return_one_candidate_or_a_good_error(gcx.clone(), &path, &candidates, &vec![], false).await {
Ok(candidate) => {
let is_directory = candidates_dir.contains(&candidate);
Ok(Response::builder()
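
One detail in handle_v1_fullpath above: file and directory candidates are merged through a `HashSet`, which removes paths found by both searches but leaves the order unspecified. A minimal standalone sketch of the pattern (illustrative values):

    use std::collections::HashSet;

    fn main() {
        let candidates_file = vec!["a".to_string(), "b".to_string()];
        let candidates_dir = vec!["b".to_string(), "c".to_string()];
        let merged: Vec<String> = candidates_file
            .into_iter()
            .chain(candidates_dir.into_iter())
            .collect::<HashSet<_>>() // drops the duplicate "b"
            .into_iter()
            .collect();              // note: order is unspecified after this
        assert_eq!(merged.len(), 3);
    }
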
3 changes: 2 additions & 1 deletion refact-agent/engine/src/integrations/docker/integr_docker.rs
@@ -163,8 +163,9 @@ impl Tool for ToolDocker {
]))
}

fn command_to_match_against_confirm_deny(
async fn command_to_match_against_confirm_deny(
&self,
_ccx: Arc<AMutex<AtCommandsContext>>,
args: &HashMap<String, Value>,
) -> Result<String, String> {
let command = parse_command(args)?;
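
From here on, the same two-line change repeats across every integration: `command_to_match_against_confirm_deny` becomes `async` and receives the `AtCommandsContext`, which the shell tool (further below) uses to resolve its workdir against the workspace before confirm/deny matching. The trait presumably now reads along these lines (a sketch; the trait definition itself is not part of this diff view):

    // Sketch of the assumed Tool trait signature after this PR:
    #[async_trait]
    pub trait Tool {
        async fn command_to_match_against_confirm_deny(
            &self,
            ccx: Arc<AMutex<AtCommandsContext>>,
            args: &HashMap<String, serde_json::Value>,
        ) -> Result<String, String>;
        // ...other methods elided...
    }
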
3 changes: 2 additions & 1 deletion refact-agent/engine/src/integrations/integr_cmdline.rs
@@ -305,8 +305,9 @@ impl Tool for ToolCmdline {
}
}

fn command_to_match_against_confirm_deny(
async fn command_to_match_against_confirm_deny(
&self,
_ccx: Arc<AMutex<AtCommandsContext>>,
args: &HashMap<String, serde_json::Value>,
) -> Result<String, String> {
let (command, _workdir) = _parse_command_args(args, &self.cfg)?;
3 changes: 2 additions & 1 deletion refact-agent/engine/src/integrations/integr_github.rs
@@ -131,8 +131,9 @@ impl Tool for ToolGithub {
Ok((false, results))
}

fn command_to_match_against_confirm_deny(
async fn command_to_match_against_confirm_deny(
&self,
_ccx: Arc<AMutex<AtCommandsContext>>,
args: &HashMap<String, Value>,
) -> Result<String, String> {
let mut command_args = parse_command_args(args)?;
3 changes: 2 additions & 1 deletion refact-agent/engine/src/integrations/integr_gitlab.rs
@@ -129,8 +129,9 @@ impl Tool for ToolGitlab {
Ok((false, results))
}

fn command_to_match_against_confirm_deny(
async fn command_to_match_against_confirm_deny(
&self,
_ccx: Arc<AMutex<AtCommandsContext>>,
args: &HashMap<String, Value>,
) -> Result<String, String> {
let mut command_args = parse_command_args(args)?;
3 changes: 2 additions & 1 deletion refact-agent/engine/src/integrations/integr_mcp.rs
@@ -534,8 +534,9 @@ impl Tool for ToolMCP {
sanitized_yaml_name
}

fn command_to_match_against_confirm_deny(
async fn command_to_match_against_confirm_deny(
&self,
_ccx: Arc<AMutex<AtCommandsContext>>,
_args: &HashMap<String, serde_json::Value>,
) -> Result<String, String> {
let command = self.mcp_tool.name.clone();
3 changes: 2 additions & 1 deletion refact-agent/engine/src/integrations/integr_mysql.rs
@@ -137,8 +137,9 @@ impl Tool for ToolMysql {
Ok((true, results))
}

fn command_to_match_against_confirm_deny(
async fn command_to_match_against_confirm_deny(
&self,
_ccx: Arc<AMutex<AtCommandsContext>>,
args: &HashMap<String, Value>,
) -> Result<String, String> {
let query = match args.get("query") {
3 changes: 2 additions & 1 deletion refact-agent/engine/src/integrations/integr_pdb.rs
@@ -161,8 +161,9 @@ impl Tool for ToolPdb {
Ok(tool_answer(output, tool_call_id))
}

fn command_to_match_against_confirm_deny(
async fn command_to_match_against_confirm_deny(
&self,
_ccx: Arc<AMutex<AtCommandsContext>>,
args: &HashMap<String, Value>,
) -> Result<String, String> {
let (command, _) = parse_args(args)?;
3 changes: 2 additions & 1 deletion refact-agent/engine/src/integrations/integr_postgres.rs
@@ -136,8 +136,9 @@ impl Tool for ToolPostgres {
Ok((true, results))
}

fn command_to_match_against_confirm_deny(
async fn command_to_match_against_confirm_deny(
&self,
_ccx: Arc<AMutex<AtCommandsContext>>,
args: &HashMap<String, Value>,
) -> Result<String, String> {
let query = match args.get("query") {
52 changes: 39 additions & 13 deletions refact-agent/engine/src/integrations/integr_shell.rs
@@ -9,7 +9,14 @@ use async_trait::async_trait;
use tokio::process::Command;

use crate::at_commands::at_commands::AtCommandsContext;
use crate::at_commands::at_file::return_one_candidate_or_a_good_error;
use crate::files_correction::canonical_path;
use crate::files_correction::canonicalize_normalized_path;
use crate::files_correction::check_if_its_inside_a_workspace_or_config;
use crate::files_correction::correct_to_nearest_dir_path;
use crate::files_correction::get_active_project_path;
use crate::files_correction::get_project_dirs;
use crate::files_correction::preprocess_path_for_normalization;
use crate::files_correction::CommandSimplifiedDirExt;
use crate::global_context::GlobalContext;
use crate::integrations::process_io_utils::execute_command;
@@ -79,10 +86,10 @@ impl Tool for ToolShell {
tool_call_id: &String,
args: &HashMap<String, Value>,
) -> Result<(bool, Vec<ContextEnum>), String> {
let (command, workdir_maybe) = parse_args(args)?;
let gcx = ccx.lock().await.global_context.clone();
let (command, workdir_maybe) = parse_args(gcx.clone(), args).await?;
let timeout = self.cfg.timeout.parse::<u64>().unwrap_or(10);

let gcx = ccx.lock().await.global_context.clone();
let mut error_log = Vec::<YamlError>::new();
let env_variables = crate::integrations::setting_up_integrations::get_vars_for_replacements(gcx.clone(), &mut error_log).await;

@@ -137,10 +144,10 @@ impl Tool for ToolShell {

async fn match_against_confirm_deny(
&self,
_ccx: Arc<AMutex<AtCommandsContext>>,
ccx: Arc<AMutex<AtCommandsContext>>,
args: &HashMap<String, Value>
) -> Result<MatchConfirmDeny, String> {
let command_to_match = self.command_to_match_against_confirm_deny(&args).map_err(|e| {
let command_to_match = self.command_to_match_against_confirm_deny(ccx.clone(), &args).await.map_err(|e| {
format!("Error getting tool command to match: {}", e)
})?;
if command_to_match.is_empty() {
@@ -164,11 +171,13 @@
})
}

fn command_to_match_against_confirm_deny(
async fn command_to_match_against_confirm_deny(
&self,
ccx: Arc<AMutex<AtCommandsContext>>,
args: &HashMap<String, Value>,
) -> Result<String, String> {
let (command, _) = parse_args(args)?;
let gcx = ccx.lock().await.global_context.clone();
let (command, _) = parse_args(gcx, args).await?;
Ok(command)
}

@@ -221,7 +230,7 @@ pub async fn execute_shell_command(
Ok(out)
}

fn parse_args(args: &HashMap<String, Value>) -> Result<(String, Option<PathBuf>), String> {
async fn parse_args(gcx: Arc<ARwLock<GlobalContext>>, args: &HashMap<String, Value>) -> Result<(String, Option<PathBuf>), String> {
let command = match args.get("command") {
Some(Value::String(s)) => {
if s.is_empty() {
@@ -239,12 +248,7 @@
if s.is_empty() {
None
} else {
let workdir = crate::files_correction::canonical_path(s);
if !workdir.exists() {
return Err("Workdir doesn't exist".to_string());
} else {
Some(workdir)
}
Some(resolve_shell_workdir(gcx.clone(), s).await?)
}
},
Some(v) => return Err(format!("argument `workdir` is not a string: {:?}", v)),
@@ -254,6 +258,28 @@
Ok((command, workdir))
}

async fn resolve_shell_workdir(gcx: Arc<ARwLock<GlobalContext>>, raw_path: &str) -> Result<PathBuf, String> {
let path_str = preprocess_path_for_normalization(raw_path.to_string());
let path = PathBuf::from(&path_str);

let workdir = if path.is_absolute() {
let path = canonicalize_normalized_path(path);
check_if_its_inside_a_workspace_or_config(gcx.clone(), &path).await?;
path
} else {
let project_dirs = get_project_dirs(gcx.clone()).await;
let candidates = correct_to_nearest_dir_path(gcx.clone(), &path_str, false, 3).await;
canonical_path(
return_one_candidate_or_a_good_error(gcx.clone(), &path_str, &candidates, &project_dirs, true).await?
)
};
if !workdir.exists() {
Err("Workdir doesn't exist".to_string())
} else {
Ok(workdir)
}
}
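
The net effect of resolve_shell_workdir: an absolute workdir must canonicalize to somewhere inside a workspace folder or the config dir, and a relative one is corrected to the nearest matching project directory before the existence check — so shell commands can no longer run from arbitrary locations. A hedged test sketch of that contract (`test_gcx_with_workspace` is a hypothetical helper, not part of this PR):

    #[tokio::test]
    async fn workdir_is_confined_to_workspace() {
        // Hypothetical helper: builds a GlobalContext with one workspace folder.
        let gcx = test_gcx_with_workspace("/home/user/proj").await;
        // Absolute path inside the workspace: accepted (if it exists).
        assert!(resolve_shell_workdir(gcx.clone(), "/home/user/proj/engine").await.is_ok());
        // Absolute path outside workspace and config dir: rejected by the guard.
        assert!(resolve_shell_workdir(gcx.clone(), "/tmp").await.is_err());
        // Relative path: corrected to the nearest matching project dir first.
        assert!(resolve_shell_workdir(gcx.clone(), "engine").await.is_ok());
    }
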

pub const SHELL_INTEGRATION_SCHEMA: &str = r#"
fields:
timeout: