diff --git a/Cargo.lock b/Cargo.lock
index 725050c16..f75a8160d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1446,6 +1446,7 @@ dependencies = [
  "semver",
  "serde",
  "serde_json",
+ "serde_yaml",
  "sha2",
  "shell-color",
  "shell-words",
@@ -6514,6 +6515,19 @@ dependencies = [
  "serde",
 ]
 
+[[package]]
+name = "serde_yaml"
+version = "0.9.34+deprecated"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47"
+dependencies = [
+ "indexmap",
+ "itoa",
+ "ryu",
+ "serde",
+ "unsafe-libyaml",
+]
+
 [[package]]
 name = "sha1"
 version = "0.10.6"
@@ -7772,6 +7786,12 @@ version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
 
+[[package]]
+name = "unsafe-libyaml"
+version = "0.2.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
+
 [[package]]
 name = "untrusted"
 version = "0.9.0"
diff --git a/Cargo.toml b/Cargo.toml
index dff3ba057..c72527397 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -134,6 +134,7 @@ jsonschema = "0.30.0"
 zip = "2.2.0"
 rmcp = { version = "0.8.0", features = ["client", "transport-sse-client-reqwest", "reqwest", "transport-streamable-http-client-reqwest", "transport-child-process", "tower", "auth"] }
 chat-cli-ui = { path = "crates/chat-cli-ui" }
+serde_yaml = "0.9"
 
 [workspace.lints.rust]
 future_incompatible = "warn"
diff --git a/crates/chat-cli/Cargo.toml b/crates/chat-cli/Cargo.toml
index 52725c72e..c40ddb27a 100644
--- a/crates/chat-cli/Cargo.toml
+++ b/crates/chat-cli/Cargo.toml
@@ -121,6 +121,7 @@ jsonschema.workspace = true
 zip.workspace = true
 rmcp.workspace = true
 chat-cli-ui.workspace = true
+serde_yaml.workspace = true
 
 [target.'cfg(unix)'.dependencies]
 nix.workspace = true
diff --git a/crates/chat-cli/src/api_client/mod.rs b/crates/chat-cli/src/api_client/mod.rs
index f21b448b7..fab79c95c 100644
--- a/crates/chat-cli/src/api_client/mod.rs
+++ b/crates/chat-cli/src/api_client/mod.rs
@@ -382,6 +382,7 @@ impl ApiClient {
             conversation_id,
             user_input_message,
             history,
+            agent_continuation_id,
         } = conversation;
 
         let model_id_opt: Option<String> = user_input_message.model_id.clone();
@@ -400,6 +401,8 @@ impl ApiClient {
                     .map(|v| v.into_iter().map(|i| i.try_into()).collect::<Result<Vec<_>, _>>())
                     .transpose()?,
             )
+            .set_agent_continuation_id(agent_continuation_id)
+            .agent_task_type(amzn_codewhisperer_streaming_client::types::AgentTaskType::Vibe)
             .build()
             .expect("building conversation should not fail");
 
@@ -744,6 +747,7 @@ mod tests {
                     model_id: Some("model".to_owned()),
                 },
                 history: None,
+                agent_continuation_id: None,
             })
             .await
             .unwrap();
diff --git a/crates/chat-cli/src/api_client/model.rs b/crates/chat-cli/src/api_client/model.rs
index 808081ec6..b8dbf57c4 100644
--- a/crates/chat-cli/src/api_client/model.rs
+++ b/crates/chat-cli/src/api_client/model.rs
@@ -97,6 +97,7 @@ pub struct ConversationState {
     pub conversation_id: Option<String>,
     pub user_input_message: UserInputMessage,
     pub history: Option<Vec<ChatMessage>>,
+    pub agent_continuation_id: Option<String>,
 }
 
 #[derive(Debug, Clone)]
@@ -542,7 +543,7 @@ impl TryFrom for amzn_qdeveloper_streaming_client::typ
 }
 
 #[non_exhaustive]
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq)]
 pub enum ChatResponseStream {
     AssistantResponseEvent {
         content: String,
@@ -563,6 +564,16 @@ pub enum ChatResponseStream {
         conversation_id: Option<String>,
         utterance_id: Option<String>,
     },
+    MetadataEvent {
+        total_tokens: Option<i32>,
+        uncached_input_tokens: Option<i32>,
+        output_tokens: Option<i32>,
+        cache_read_input_tokens: Option<i32>,
+        cache_write_input_tokens: Option<i32>,
+    },
+    MeteringEvent {
+        usage: Option<f64>,
+    },
     SupplementaryWebLinksEvent(()),
     ToolUseEvent {
         tool_use_id: String,
@@ -609,6 +620,18 @@ impl From<amzn_codewhisperer_streaming_client::types::ChatResponseStream> for Ch
                 conversation_id,
                 utterance_id,
             },
+            amzn_codewhisperer_streaming_client::types::ChatResponseStream::MetadataEvent(
+                amzn_codewhisperer_streaming_client::types::MetadataEvent { token_usage, .. },
+            ) => ChatResponseStream::MetadataEvent {
+                total_tokens: token_usage.as_ref().map(|t| t.total_tokens),
+                uncached_input_tokens: token_usage.as_ref().map(|t| t.uncached_input_tokens),
+                output_tokens: token_usage.as_ref().map(|t| t.output_tokens),
+                cache_read_input_tokens: token_usage.as_ref().and_then(|t| t.cache_read_input_tokens),
+                cache_write_input_tokens: token_usage.as_ref().and_then(|t| t.cache_write_input_tokens),
+            },
+            amzn_codewhisperer_streaming_client::types::ChatResponseStream::MeteringEvent(
+                amzn_codewhisperer_streaming_client::types::MeteringEvent { usage, .. },
+            ) => ChatResponseStream::MeteringEvent { usage },
             amzn_codewhisperer_streaming_client::types::ChatResponseStream::ToolUseEvent(
                 amzn_codewhisperer_streaming_client::types::ToolUseEvent {
                     tool_use_id,
@@ -626,7 +649,7 @@
             amzn_codewhisperer_streaming_client::types::ChatResponseStream::SupplementaryWebLinksEvent(_) => {
                 ChatResponseStream::SupplementaryWebLinksEvent(())
             },
-            _ => ChatResponseStream::Unknown,
+            _other => ChatResponseStream::Unknown,
         }
     }
 }
@@ -665,6 +688,18 @@ impl From<amzn_qdeveloper_streaming_client::types::ChatResponseStream> for ChatR
                 conversation_id,
                 utterance_id,
             },
+            amzn_qdeveloper_streaming_client::types::ChatResponseStream::MetadataEvent(
+                amzn_qdeveloper_streaming_client::types::MetadataEvent { token_usage, .. },
+            ) => ChatResponseStream::MetadataEvent {
+                total_tokens: token_usage.as_ref().map(|t| t.total_tokens),
+                uncached_input_tokens: token_usage.as_ref().map(|t| t.uncached_input_tokens),
+                output_tokens: token_usage.as_ref().map(|t| t.output_tokens),
+                cache_read_input_tokens: token_usage.as_ref().and_then(|t| t.cache_read_input_tokens),
+                cache_write_input_tokens: token_usage.as_ref().and_then(|t| t.cache_write_input_tokens),
+            },
+            amzn_qdeveloper_streaming_client::types::ChatResponseStream::MeteringEvent(
+                amzn_qdeveloper_streaming_client::types::MeteringEvent { usage, .. },
+            ) => ChatResponseStream::MeteringEvent { usage },
             amzn_qdeveloper_streaming_client::types::ChatResponseStream::ToolUseEvent(
                 amzn_qdeveloper_streaming_client::types::ToolUseEvent {
                     tool_use_id,
@@ -682,7 +717,7 @@
             amzn_qdeveloper_streaming_client::types::ChatResponseStream::SupplementaryWebLinksEvent(_) => {
                 ChatResponseStream::SupplementaryWebLinksEvent(())
            },
-            _ => ChatResponseStream::Unknown,
+            _other => ChatResponseStream::Unknown,
         }
     }
 }
@@ -870,7 +905,8 @@ impl From<UserInputMessage> for amzn_codewhisperer_streaming_client::types::User
             .set_user_input_message_context(value.user_input_message_context.map(Into::into))
             .set_user_intent(value.user_intent.map(Into::into))
             .set_model_id(value.model_id)
-            .origin(amzn_codewhisperer_streaming_client::types::Origin::Cli)
+            //TODO: Setup new origin.
+            .origin(amzn_codewhisperer_streaming_client::types::Origin::AiEditor)
             .build()
             .expect("Failed to build UserInputMessage")
     }
@@ -884,7 +920,8 @@ impl From<UserInputMessage> for amzn_qdeveloper_streaming_client::types::UserInp
             .set_user_input_message_context(value.user_input_message_context.map(Into::into))
             .set_user_intent(value.user_intent.map(Into::into))
             .set_model_id(value.model_id)
-            .origin(amzn_qdeveloper_streaming_client::types::Origin::Cli)
+            //TODO: Setup new origin.
+            .origin(amzn_qdeveloper_streaming_client::types::Origin::AiEditor)
             .build()
             .expect("Failed to build UserInputMessage")
     }
diff --git a/crates/chat-cli/src/cli/agent/mod.rs b/crates/chat-cli/src/cli/agent/mod.rs
index ec7e41d44..1be9912ad 100644
--- a/crates/chat-cli/src/cli/agent/mod.rs
+++ b/crates/chat-cli/src/cli/agent/mod.rs
@@ -195,6 +195,7 @@ impl Default for Agent {
                 "file://AGENTS.md",
                 "file://README.md",
                 "file://.amazonq/rules/**/*.md",
+                "file://.kiro/steering/**/*.md",
             ]
             .into_iter()
             .map(Into::into)
diff --git a/crates/chat-cli/src/cli/chat/context.rs b/crates/chat-cli/src/cli/chat/context.rs
index 6cb760ac3..d93d729e6 100644
--- a/crates/chat-cli/src/cli/chat/context.rs
+++ b/crates/chat-cli/src/cli/chat/context.rs
@@ -366,7 +366,8 @@ async fn process_path(
 ///
 /// This method:
 /// 1. Reads the content of the file
-/// 2. Adds the (filename, content) pair to the context collection
+/// 2. Checks front matter inclusion rules for steering files
+/// 3. Adds the (filename, content) pair to the context collection if allowed
 ///
 /// # Arguments
 /// * `path` - The path to the file
@@ -377,10 +378,69 @@ async fn process_path(
 async fn add_file_to_context(os: &Os, path: &Path, context_files: &mut Vec<(String, String)>) -> Result<()> {
     let filename = path.to_string_lossy().to_string();
     let content = os.fs.read_to_string(path).await?;
+
+    // Check if this is a steering file that needs front matter filtering
+    if filename.contains(".kiro/steering") && filename.ends_with(".md") && !should_include_steering_file(&content)? {
+        return Ok(());
+    }
+
     context_files.push((filename, content));
     Ok(())
 }
 
+#[derive(Debug, Deserialize)]
+struct FrontMatter {
+    inclusion: Option<String>,
+}
+
+/// Check if a steering file should be included based on its front matter
+fn should_include_steering_file(content: &str) -> Result<bool> {
+    // Check if file has YAML front matter
+    if !content.starts_with("---\n") {
+        // No front matter - include the file
+        return Ok(true);
+    }
+
+    // Find the end of the front matter
+    let lines: Vec<&str> = content.lines().collect();
+    let mut end_index = None;
+
+    for (i, line) in lines.iter().enumerate().skip(1) {
+        if line.trim() == "---" {
+            end_index = Some(i);
+            break;
+        }
+    }
+
+    let end_index = match end_index {
+        Some(idx) => idx,
+        None => {
+            // Malformed front matter - include the file
+            return Ok(true);
+        },
+    };
+
+    // Extract and parse the front matter
+    let front_matter_lines = &lines[1..end_index];
+    let front_matter_yaml = front_matter_lines.join("\n");
+
+    match serde_yaml::from_str::<FrontMatter>(&front_matter_yaml) {
+        Ok(front_matter) => {
+            match front_matter.inclusion.as_deref() {
+                Some("always") => Ok(true),
+                Some("fileMatch") => Ok(false), // Exclude fileMatch files
+                Some("manual") => Ok(false),    // Exclude manual files
+                None => Ok(true),               // No inclusion field - include
+                Some(_) => Ok(true),            // Unknown inclusion value - include
+            }
+        },
+        Err(_) => {
+            // Failed to parse front matter - include the file
+            Ok(true)
+        },
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -458,4 +518,35 @@ mod tests {
             96_000
         );
     }
+
+    #[test]
+    fn test_should_include_steering_file() {
+        // Test file without front matter - should be included
+        let content_no_frontmatter = "# Regular markdown file\nSome content here.";
+        assert!(should_include_steering_file(content_no_frontmatter).unwrap());
+
+        // Test file with inclusion: always - should be included
+        let content_always = "---\ninclusion: always\n---\n# Always included\nContent here.";
+        assert!(should_include_steering_file(content_always).unwrap());
+
+        // Test file with inclusion: fileMatch - should be excluded
+        let content_filematch = "---\ninclusion: fileMatch\n---\n# File match only\nContent here.";
+        assert!(!should_include_steering_file(content_filematch).unwrap());
+
+        // Test file with inclusion: manual - should be excluded
+        let content_manual = "---\ninclusion: manual\n---\n# Manual only\nContent here.";
+        assert!(!should_include_steering_file(content_manual).unwrap());
+
+        // Test file with no inclusion field - should be included
+        let content_no_inclusion = "---\ntitle: Some Title\n---\n# No inclusion field\nContent here.";
+        assert!(should_include_steering_file(content_no_inclusion).unwrap());
+
+        // Test file with malformed front matter - should be included
+        let content_malformed = "---\ninvalid yaml: [\n---\n# Malformed\nContent here.";
+        assert!(should_include_steering_file(content_malformed).unwrap());
+
+        // Test file with incomplete front matter - should be included
+        let content_incomplete = "---\ninclusion: always\n# Missing closing ---\nContent here.";
+        assert!(should_include_steering_file(content_incomplete).unwrap());
+    }
 }
diff --git a/crates/chat-cli/src/cli/chat/conversation.rs b/crates/chat-cli/src/cli/chat/conversation.rs
index d36ddc6f2..30afc0ac4 100644
--- a/crates/chat-cli/src/cli/chat/conversation.rs
+++ b/crates/chat-cli/src/cli/chat/conversation.rs
@@ -102,6 +102,57 @@ pub struct McpServerInfo {
     pub config: CustomToolConfig,
 }
 
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct UserTurnMetadata {
+    continuation_id: String,
+    /// [RequestMetadata] about the ongoing operation.
+    requests: Vec<RequestMetadata>,
+}
+
+impl Default for UserTurnMetadata {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+/// Stores metadata about user turns
+impl UserTurnMetadata {
+    pub fn new() -> Self {
+        Self {
+            continuation_id: uuid::Uuid::new_v4().to_string(),
+            requests: vec![],
+        }
+    }
+
+    pub fn continuation_id(&self) -> &str {
+        &self.continuation_id
+    }
+
+    pub fn add_request(&mut self, request: RequestMetadata) {
+        self.requests.push(request);
+    }
+
+    pub fn first_request(&self) -> Option<RequestMetadata> {
+        self.requests.first().cloned()
+    }
+
+    pub fn last_request(&self) -> Option<RequestMetadata> {
+        self.requests.last().cloned()
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = &RequestMetadata> {
+        self.requests.iter()
+    }
+
+    pub fn first(&self) -> Option<&RequestMetadata> {
+        self.requests.first()
+    }
+
+    pub fn last(&self) -> Option<&RequestMetadata> {
+        self.requests.last()
+    }
+}
+
 /// Tracks state related to an ongoing conversation.
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct ConversationState {
@@ -149,6 +200,9 @@ pub struct ConversationState {
     /// Tangent mode checkpoint - stores main conversation when in tangent mode
     #[serde(default, skip_serializing_if = "Option::is_none")]
     tangent_state: Option<TangentState>,
+    /// Metadata about the ongoing user turn operation
+    #[serde(default)]
+    pub user_turn_metadata: UserTurnMetadata,
 }
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -212,6 +266,7 @@ impl ConversationState {
             checkpoint_manager: None,
             mcp_enabled,
             tangent_state: None,
+            user_turn_metadata: UserTurnMetadata::new(),
         }
     }
 
@@ -378,6 +433,10 @@ impl ConversationState {
         self.next_message = None;
     }
 
+    pub fn current_continuation_id(&self) -> &str {
+        self.user_turn_metadata.continuation_id()
+    }
+
     pub async fn set_next_user_message(&mut self, input: String) {
         debug_assert!(self.next_message.is_none(), "next_message should not exist");
         if let Some(next_message) = self.next_message.as_ref() {
@@ -614,6 +673,7 @@ impl ConversationState {
             dropped_context_files,
             tools: &self.tools,
             model_id: self.model_info.as_ref().map(|m| m.model_id.as_str()),
+            continuation_id: Some(self.user_turn_metadata.continuation_id()),
         })
     }
 
@@ -719,6 +779,7 @@ impl ConversationState {
                 .unwrap_or(UserMessage::new_prompt(summary_content, None)) // should not happen
                 .into_user_input_message(self.model_info.as_ref().map(|m| m.model_id.clone()), &tools),
             history: Some(flatten_history(history.iter())),
+            agent_continuation_id: Some(self.user_turn_metadata.continuation_id().to_string()),
         })
     }
 
@@ -777,6 +838,7 @@ Return only the JSON configuration, no additional text.",
             conversation_id: Some(self.conversation_id.clone()),
             user_input_message: generation_message.into_user_input_message(self.model.clone(), &tools),
             history: Some(flatten_history(history.iter())),
+            agent_continuation_id: Some(self.user_turn_metadata.continuation_id().to_string()),
         })
     }
 
@@ -992,6 +1054,7 @@ pub struct BackendConversationStateImpl<'a, T, U> {
     pub dropped_context_files: Vec<(String, String)>,
     pub tools: &'a HashMap<ToolOrigin, Vec<Tool>>,
     pub model_id: Option<&'a str>,
+    pub continuation_id: Option<&'a str>,
 }
 
 impl BackendConversationStateImpl<'_, std::collections::vec_deque::Iter<'_, HistoryEntry>, Option<Vec<ChatMessage>>> {
@@ -1007,6 +1070,7 @@
             conversation_id: Some(self.conversation_id.to_string()),
             user_input_message,
             history: Some(history),
+            agent_continuation_id: self.continuation_id.map(str::to_string),
         })
     }
 
diff --git a/crates/chat-cli/src/cli/chat/mod.rs b/crates/chat-cli/src/cli/chat/mod.rs
index 67b12867e..31612c38a 100644
--- a/crates/chat-cli/src/cli/chat/mod.rs
+++ b/crates/chat-cli/src/cli/chat/mod.rs
@@ -71,7 +71,10 @@ use cli::model::{
     select_model,
 };
 pub use conversation::ConversationState;
-use conversation::TokenWarningLevel;
+use conversation::{
+    TokenWarningLevel,
+    UserTurnMetadata,
+};
 use crossterm::style::{
     Attribute,
     Stylize,
@@ -588,8 +591,6 @@ pub struct ChatSession {
     /// Used to track the time taken from initially prompting the user to tool execute
     /// completion.
     tool_turn_start_time: Option<Instant>,
-    /// [RequestMetadata] about the ongoing operation.
-    user_turn_request_metadata: Vec<RequestMetadata>,
     /// Telemetry events to be sent as part of the conversation. The HashMap key is tool_use_id.
     tool_use_telemetry_events: HashMap<String, ToolUseEventBuilder>,
     /// State used to keep track of tool use relation
@@ -725,7 +726,6 @@ impl ChatSession {
             spinner: None,
             conversation,
             tool_uses: vec![],
-            user_turn_request_metadata: vec![],
             pending_tool_index: None,
             tool_turn_start_time: None,
             tool_use_telemetry_events: HashMap::new(),
@@ -797,7 +797,7 @@ impl ChatSession {
                 // Wait for handle_response to finish handling the ctrlc.
                 tokio::time::sleep(Duration::from_millis(5)).await;
                 if let Some(request_metadata) = request_metadata.lock().await.take() {
-                    self.user_turn_request_metadata.push(request_metadata);
+                    self.conversation.user_turn_metadata.add_request(request_metadata);
                 }
                 self.send_chat_telemetry(os, TelemetryResult::Cancelled, None, None, None, true).await;
                 Err(ChatError::Interrupted { tool_uses: None })
@@ -1376,7 +1376,7 @@ impl ChatSession {
                 // Wait for handle_response to finish handling the ctrlc.
                 tokio::time::sleep(Duration::from_millis(5)).await;
                 if let Some(request_metadata) = request_metadata.lock().await.take() {
-                    self.user_turn_request_metadata.push(request_metadata);
+                    self.conversation.user_turn_metadata.add_request(request_metadata);
                 }
                 self.send_chat_telemetry(
                     os,
@@ -1514,7 +1514,9 @@ impl ChatSession {
                     message,
                     request_metadata,
                 })) => {
-                    self.user_turn_request_metadata.push(request_metadata.clone());
+                    self.conversation
+                        .user_turn_metadata
+                        .add_request(request_metadata.clone());
                     break (message.content().to_string(), request_metadata);
                 },
                 Some(Ok(_)) => (),
@@ -1523,7 +1525,9 @@ impl ChatSession {
                         self.failed_request_ids.push(request_id.clone());
                     };
 
-                    self.user_turn_request_metadata.push(err.request_metadata.clone());
+                    self.conversation
+                        .user_turn_metadata
+                        .add_request(err.request_metadata.clone());
 
                     let (reason, reason_desc) = get_error_reason(&err);
                     self.send_chat_telemetry(
@@ -1653,7 +1657,7 @@ impl ChatSession {
                 // Wait for handle_response to finish handling the ctrlc.
                 tokio::time::sleep(Duration::from_millis(5)).await;
                 if let Some(request_metadata) = request_metadata.lock().await.take() {
-                    self.user_turn_request_metadata.push(request_metadata);
+                    self.conversation.user_turn_metadata.add_request(request_metadata);
                 }
                 self.send_chat_telemetry(
                     os,
@@ -1749,7 +1753,9 @@ impl ChatSession {
                     message,
                     request_metadata,
                 })) => {
-                    self.user_turn_request_metadata.push(request_metadata.clone());
+                    self.conversation
+                        .user_turn_metadata
+                        .add_request(request_metadata.clone());
                     break (message.content().to_string(), request_metadata);
                 },
                 Some(Ok(_)) => (),
@@ -1758,7 +1764,9 @@ impl ChatSession {
                         self.failed_request_ids.push(request_id.clone());
                     }
 
-                    self.user_turn_request_metadata.push(err.request_metadata.clone());
+                    self.conversation
+                        .user_turn_metadata
+                        .add_request(err.request_metadata.clone());
 
                     let (reason, reason_desc) = get_error_reason(&err);
                     self.send_chat_telemetry(
@@ -1941,13 +1949,13 @@ impl ChatSession {
     async fn handle_input(&mut self, os: &mut Os, mut user_input: String) -> Result<ChatState, ChatError> {
         queue!(self.stderr, style::Print('\n'))?;
         user_input = sanitize_unicode_tags(&user_input);
-        let input = user_input.trim();
+        let input_trimmed = user_input.trim().to_string();
 
         // handle image path
-        if let Some(chat_state) = does_input_reference_file(input) {
+        if let Some(chat_state) = does_input_reference_file(&input_trimmed) {
             return Ok(chat_state);
         }
 
-        if let Some(mut args) = input.strip_prefix("/").and_then(shlex::split) {
+        if let Some(mut args) = input_trimmed.strip_prefix("/").and_then(shlex::split) {
             // Required for printing errors correctly.
             let orig_args = args.clone();
 
@@ -2039,7 +2047,7 @@ impl ChatSession {
             Ok(ChatState::PromptUser {
                 skip_printing_tools: false,
             })
-        } else if let Some(command) = input.strip_prefix("@") {
+        } else if let Some(command) = input_trimmed.strip_prefix("@") {
             let input_parts =
                 shlex::split(command).ok_or(ChatError::Custom("Error splitting prompt command".into()))?;
 
@@ -2057,7 +2065,7 @@ impl ChatSession {
                 arguments,
             };
             return subcommand.execute(os, self).await;
-        } else if let Some(command) = input.strip_prefix("!") {
+        } else if let Some(command) = input_trimmed.strip_prefix("!") {
             // Use platform-appropriate shell
             let result = if cfg!(target_os = "windows") {
                 std::process::Command::new("cmd").args(["/C", command]).status()
@@ -2105,10 +2113,9 @@ impl ChatSession {
 
         // Check for a pending tool approval
        if let Some(index) = self.pending_tool_index {
-            let is_trust = ["t", "T"].contains(&input);
             let tool_use = &mut self.tool_uses[index];
-            if ["y", "Y"].contains(&input) || is_trust {
-                if is_trust {
+            if is_approval_response(&input_trimmed) {
+                if is_trust_response(&input_trimmed) {
                     let formatted_tool_name = self
                         .conversation
                         .tool_manager
@@ -2151,17 +2158,22 @@ impl ChatSession {
             // TODO: Update this flow to something that does *not* require two requests just to
             // get a meaningful response from the user - this is a short term solution before
             // we decide on a better flow.
-            let user_input = if ["n", "N"].contains(&user_input.trim()) {
+            let user_input = if is_reject_response(user_input.trim()) {
                 "I deny this tool request. Ask a follow up question clarifying the expected action".to_string()
             } else {
                 user_input
             };
+
             self.conversation.abandon_tool_use(&self.tool_uses, user_input);
         } else {
+            // For regular user messages, always generate new continuation ID
             self.conversation.set_next_user_message(user_input).await;
         }
-
-        self.reset_user_turn();
+        // For tool approval responses (y/n/t), preserve active turn
+        let preserve_turn = is_tool_permission_interaction(&input_trimmed);
+        if !preserve_turn {
+            self.reset_user_turn();
+        }
 
         let conv_state = self
             .conversation
@@ -2652,7 +2664,7 @@ impl ChatSession {
         );
 
         let mut response_prefix_printed = false;
-        let mut tool_uses = Vec::new();
+        let mut tool_uses: Vec<ToolUse> = Vec::new();
         let mut tool_name_being_recvd: Option<String> = None;
 
         if self.spinner.is_some() {
@@ -2713,7 +2725,7 @@ impl ChatSession {
                             error!(?request_id, ?message, "Encountered an unexpected model response");
                         }
                         self.conversation.push_assistant_message(os, message, Some(rm.clone()));
-                        self.user_turn_request_metadata.push(rm);
+                        self.conversation.user_turn_metadata.add_request(rm);
                         ended = true;
                     },
                 }
@@ -2723,8 +2735,9 @@ impl ChatSession {
                     self.failed_request_ids.push(request_id.clone());
                 };
 
-                self.user_turn_request_metadata
-                    .push(recv_error.request_metadata.clone());
+                self.conversation
+                    .user_turn_metadata
+                    .add_request(recv_error.request_metadata.clone());
 
                 let (reason, reason_desc) = get_error_reason(&recv_error);
                 let status_code = recv_error.status_code();
@@ -2964,6 +2977,16 @@ impl ChatSession {
            execute!(self.stdout, style::Print("\n"))?;
         }
 
+        // Display continuation ID if available and debug mode is enabled
+        if std::env::var_os("Q_SHOW_CONTINUATION_IDS").is_some() {
+            queue!(
+                self.stdout,
+                style::SetForegroundColor(Color::DarkGrey),
+                style::Print(format!("({})\n", self.conversation.current_continuation_id())),
+                style::SetForegroundColor(Color::Reset)
+            )?;
+        }
+
         for (i, citation) in &state.citations {
             queue!(
                 self.stdout,
@@ -3462,8 +3493,8 @@ impl ChatSession {
     /// This should *always* be called whenever a new user prompt is sent to the backend. Note
     /// that includes tool use rejections.
     fn reset_user_turn(&mut self) {
-        info!(?self.user_turn_request_metadata, "Resetting the current user turn");
-        self.user_turn_request_metadata.clear();
+        info!(?self.conversation.user_turn_metadata, "Resetting the current user turn");
+        self.conversation.user_turn_metadata = UserTurnMetadata::new();
     }
 
     /// Sends an "codewhispererterminal_addChatMessage" telemetry event.
@@ -3487,32 +3518,33 @@ impl ChatSession {
         is_end_turn: bool,
     ) {
         // Get metadata for the most recent request.
-        let md = self.user_turn_request_metadata.last();
-
+        let md = self.conversation.user_turn_metadata.last_request();
         let conversation_id = self.conversation.conversation_id().to_owned();
 
         let data = ChatAddedMessageParams {
-            request_id: md.and_then(|md| md.request_id.clone()),
-            message_id: md.map(|md| md.message_id.clone()),
+            request_id: md.as_ref().and_then(|md| md.request_id.clone()),
+            message_id: md.as_ref().map(|md| md.message_id.clone()),
             context_file_length: self.conversation.context_message_length(),
-            model: md.and_then(|m| m.model_id.clone()),
+            model: md.as_ref().and_then(|m| m.model_id.clone()),
             reason: reason.clone(),
             reason_desc: reason_desc.clone(),
             status_code,
-            time_to_first_chunk_ms: md.and_then(|md| md.time_to_first_chunk.map(|d| d.as_secs_f64() * 1000.0)),
-            time_between_chunks_ms: md.map(|md| {
+            time_to_first_chunk_ms: md
+                .as_ref()
+                .and_then(|md| md.time_to_first_chunk.map(|d| d.as_secs_f64() * 1000.0)),
+            time_between_chunks_ms: md.as_ref().map(|md| {
                 md.time_between_chunks
                     .iter()
                     .map(|d| d.as_secs_f64() * 1000.0)
                     .collect::<Vec<_>>()
             }),
-            chat_conversation_type: md.and_then(|md| md.chat_conversation_type),
+            chat_conversation_type: md.as_ref().and_then(|md| md.chat_conversation_type),
             tool_use_id: self.conversation.latest_tool_use_ids(),
             tool_name: self.conversation.latest_tool_use_names(),
-            assistant_response_length: md.map(|md| md.response_size as i32),
+            assistant_response_length: md.as_ref().map(|md| md.response_size as i32),
             message_meta_tags: {
-                let mut tags = md.map(|md| md.message_meta_tags.clone()).unwrap_or_default();
+                let mut tags = md.as_ref().map(|md| md.message_meta_tags.clone()).unwrap_or_default();
                 if self.conversation.is_in_tangent_mode() {
-                    tags.push(crate::telemetry::core::MessageMetaTag::TangentMode);
+                    tags.push(MessageMetaTag::TangentMode);
                 }
                 tags
             },
@@ -3523,11 +3555,11 @@ impl ChatSession {
             .ok();
 
         if is_end_turn {
-            let mds = &self.user_turn_request_metadata;
+            let mds = &self.conversation.user_turn_metadata;
 
             // Get the user turn duration.
-            let start_time = mds.first().map(|md| md.request_start_timestamp_ms);
-            let end_time = mds.last().map(|md| md.stream_end_timestamp_ms);
+            let start_time = mds.first_request().map(|md| md.request_start_timestamp_ms);
+            let end_time = mds.last_request().map(|md| md.stream_end_timestamp_ms);
             let user_turn_duration_seconds = match (start_time, end_time) {
                 // Convert ms back to seconds
                 (Some(start), Some(end)) => end.saturating_sub(start) as i64 / 1000,
@@ -3545,7 +3577,7 @@ impl ChatSession {
                     .iter()
                     .map(|md| md.time_to_first_chunk.map(|d| d.as_secs_f64() * 1000.0))
                    .collect::<_>(),
-                chat_conversation_type: md.and_then(|md| md.chat_conversation_type),
+                chat_conversation_type: md.as_ref().and_then(|md| md.chat_conversation_type),
                 assistant_response_length: mds.iter().map(|md| md.response_size as i64).sum(),
                 message_meta_tags: mds.last().map(|md| md.message_meta_tags.clone()).unwrap_or_default(),
                 user_prompt_length: mds.first().map(|md| md.user_prompt_length).unwrap_or_default() as i64,
@@ -3567,7 +3599,7 @@ impl ChatSession {
         reason_desc: Option<String>,
        status_code: Option<u16>,
     ) {
-        let md = self.user_turn_request_metadata.last();
+        let md = self.conversation.user_turn_metadata.last_request();
         os.telemetry
             .send_response_error(
                 &os.database,
@@ -3577,8 +3609,8 @@ impl ChatSession {
                 Some(reason),
                 reason_desc,
                 status_code,
-                md.and_then(|md| md.request_id.clone()),
-                md.map(|md| md.message_id.clone()),
+                md.as_ref().and_then(|md| md.request_id.clone()),
+                md.as_ref().map(|md| md.message_id.clone()),
             )
             .await
             .ok();
@@ -3741,6 +3773,32 @@ fn does_input_reference_file(input: &str) -> Option<ChatState> {
     None
 }
 
+/// Check if input is a "trust" response (t/T)
+fn is_trust_response(input: &str) -> bool {
+    ["t", "T"].contains(&input.trim())
+}
+
+/// Check if input is an "accept" response (y/Y)
+fn is_accept_response(input: &str) -> bool {
+    ["y", "Y"].contains(&input.trim())
+}
+
+/// Check if input is a "reject" response (n/N)
+fn is_reject_response(input: &str) -> bool {
+    ["n", "N"].contains(&input.trim())
+}
+
+/// Check if input is any simple tool interaction response (y/Y/n/N/t/T)
+/// These responses should preserve continuation ID
+fn is_tool_permission_interaction(input: &str) -> bool {
+    is_trust_response(input) || is_accept_response(input) || is_reject_response(input)
+}
+
+/// Check if input is any approval response (y/Y/t/T)
+fn is_approval_response(input: &str) -> bool {
+    is_accept_response(input) || is_trust_response(input)
+}
+
 // Helper method to save the agent config to file
 async fn save_agent_config(os: &mut Os, config: &Agent, agent_name: &str, is_global: bool) -> Result<(), ChatError> {
     let config_dir = if is_global {
diff --git a/crates/chat-cli/src/cli/chat/parser.rs b/crates/chat-cli/src/cli/chat/parser.rs
index 2e0cdfb03..a80d206d1 100644
--- a/crates/chat-cli/src/cli/chat/parser.rs
+++ b/crates/chat-cli/src/cli/chat/parser.rs
@@ -470,14 +470,24 @@ impl ResponseParser {
     async fn parse_tool_use(&mut self, id: String, name: String) -> Result<ToolUse, RecvError> {
         let mut tool_string = String::new();
         let start = Instant::now();
-        while let Some(ChatResponseStream::ToolUseEvent { .. }) = self.peek().await? {
-            if let Some(ChatResponseStream::ToolUseEvent { input, stop, .. }) = self.next().await? {
-                if let Some(i) = input {
-                    tool_string.push_str(&i);
-                }
-                if let Some(true) = stop {
+        loop {
+            match self.peek().await? {
+                Some(ChatResponseStream::ToolUseEvent { .. }) => {
+                    if let Some(ChatResponseStream::ToolUseEvent { input, stop, .. }) = self.next().await? {
+                        if let Some(i) = input {
+                            tool_string.push_str(&i);
+                        }
+                        if let Some(true) = stop {
+                            break;
+                        }
+                    }
+                },
+                Some(ChatResponseStream::MetadataEvent { .. } | ChatResponseStream::MeteringEvent { .. }) => {
+                    self.next().await?;
+                },
+                _other => {
                     break;
-                }
+                },
             }
         }
diff --git a/crates/chat-cli/src/util/consts.rs b/crates/chat-cli/src/util/consts.rs
index 41fbf5bef..0a3cd328a 100644
--- a/crates/chat-cli/src/util/consts.rs
+++ b/crates/chat-cli/src/util/consts.rs
@@ -67,7 +67,10 @@ pub mod env_var {
         Q_BUNDLE_METADATA_PATH = "Q_BUNDLE_METADATA_PATH",
 
         /// Identifier for the client application or service using the chat-cli
-        Q_CLI_CLIENT_APPLICATION = "Q_CLI_CLIENT_APPLICATION"
+        Q_CLI_CLIENT_APPLICATION = "Q_CLI_CLIENT_APPLICATION",
+
+        /// Shows continuation IDs in chat output for debugging/development
+        Q_SHOW_CONTINUATION_IDS = "Q_SHOW_CONTINUATION_IDS"
     }
 }
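
Note: a minimal sketch of the steering-file front matter that the new `should_include_steering_file` check recognizes (the file path and body below are hypothetical; only the `inclusion` field is read by the code above):

    ---
    inclusion: always
    ---
    # Style guidance
    Prefer small, focused modules.

Files declaring `inclusion: fileMatch` or `inclusion: manual` are skipped when default context files are gathered; files with no front matter, an unknown `inclusion` value, or unparsable front matter are still included. Separately, setting the `Q_SHOW_CONTINUATION_IDS` environment variable to any value makes the chat output print the current continuation ID after each response.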