diff --git a/.github/workflows/npm-publish-github-packages.yml b/.github/workflows/npm-publish-github-packages.yml new file mode 100644 index 00000000..ea2d329f --- /dev/null +++ b/.github/workflows/npm-publish-github-packages.yml @@ -0,0 +1,36 @@ +# This workflow will run tests using node and then publish a package to GitHub Packages when a release is created +# For more information see: https://docs.github.com/en/actions/publishing-packages/publishing-nodejs-packages + +name: Node.js Package + +on: + release: + types: [created] + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 20 + - run: npm ci + - run: npm test + + publish-gpr: + needs: build + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-node@v4 + with: + node-version: 20 + registry-url: https://npm.pkg.github.com/ + - run: npm ci + - run: npm publish + env: + NODE_AUTH_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/Cargo.lock b/Cargo.lock index b7e7baec..25968b79 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,6 +2,12 @@ # It is not intended for manual editing. 
version = 4 +[[package]] +name = "adler2" +version = "2.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" + [[package]] name = "aead" version = "0.5.2" @@ -143,6 +149,20 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "auto_generate_cdp" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "359220d0b9360b79d17d648d0a3ba1e792ec36bdbc227c8fd0351df3a0415704" +dependencies = [ + "convert_case 0.8.0", + "proc-macro2", + "quote", + "serde", + "serde_json", + "ureq", +] + [[package]] name = "autocfg" version = "1.5.0" @@ -339,6 +359,15 @@ dependencies = [ "static_assertions", ] +[[package]] +name = "convert_case" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baaaa0ecca5b51987b9423ccdc971514dd8b0bb7b4060b983d3664dad3f1f89f" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "convert_case" version = "0.10.0" @@ -383,6 +412,15 @@ dependencies = [ "libc", ] +[[package]] +name = "crc32fast" +version = "1.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9481c1c90cbf2ac953f07c8d4a58aa3945c425b7185c9154d67a65e4230da511" +dependencies = [ + "cfg-if", +] + [[package]] name = "crossbeam-channel" version = "0.5.15" @@ -524,6 +562,12 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "data-encoding" +version = "2.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7a1e2f27636f116493b8b860f5546edb47c8d8f8ea73e1d2a20be88e28d1fea" + [[package]] name = "deltae" version = "0.3.2" @@ -586,7 +630,7 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" dependencies = 
[ - "convert_case", + "convert_case 0.10.0", "proc-macro2", "quote", "rustc_version", @@ -726,6 +770,16 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" +[[package]] +name = "flate2" +version = "1.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" +dependencies = [ + "crc32fast", + "miniz_oxide", +] + [[package]] name = "fnv" version = "1.0.7" @@ -928,6 +982,7 @@ dependencies = [ "dotenvy", "futures-util", "google-workspace", + "headless_chrome", "hostname", "iana-time-zone", "keyring", @@ -993,6 +1048,29 @@ dependencies = [ "foldhash 0.2.0", ] +[[package]] +name = "headless_chrome" +version = "1.0.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "333344ecb4b6a91ddd2e6a3c4fdb54aaddfbd2c82847f9c58fe42dd88afcf08e" +dependencies = [ + "anyhow", + "auto_generate_cdp", + "base64", + "derive_builder", + "log", + "rand 0.9.2", + "regex", + "serde", + "serde_json", + "tempfile", + "thiserror 2.0.18", + "tungstenite", + "url", + "which", + "winreg", +] + [[package]] name = "heck" version = "0.5.0" @@ -1541,6 +1619,16 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" +[[package]] +name = "miniz_oxide" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1fa76a2c86f704bdb222d66965fb3d63269ce38518b83cb0575fca855ebb6316" +dependencies = [ + "adler2", + "simd-adler32", +] + [[package]] name = "mio" version = "1.2.0" @@ -2228,6 +2316,7 @@ version = "0.23.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" dependencies = [ + "log", "once_cell", "ring", "rustls-pki-types", @@ -2449,6 
+2538,17 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "sha1" +version = "0.10.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + [[package]] name = "sha1_smol" version = "1.0.1" @@ -2512,6 +2612,12 @@ dependencies = [ "libc", ] +[[package]] +name = "simd-adler32" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "703d5c7ef118737c72f1af64ad2f6f8c5e1921f818cdcb97b8fe6fc69bf66214" + [[package]] name = "siphasher" version = "1.0.2" @@ -2540,6 +2646,17 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "socks" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0c3dbbd9ae980613c6dd8e28a9407b50509d3803b57624d5dfe8315218cd58b" +dependencies = [ + "byteorder", + "libc", + "winapi", +] + [[package]] name = "stable_deref_trait" version = "1.2.1" @@ -3039,6 +3156,23 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" +dependencies = [ + "bytes", + "data-encoding", + "http", + "httparse", + "log", + "rand 0.9.2", + "sha1", + "thiserror 2.0.18", + "utf-8", +] + [[package]] name = "typenum" version = "1.19.0" @@ -3108,6 +3242,36 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" +[[package]] +name = "ureq" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dea7109cdcd5864d4eeb1b58a1648dc9bf520360d7af16ec26d0a9354bafcfc0" +dependencies = [ 
+ "base64", + "flate2", + "log", + "percent-encoding", + "rustls", + "rustls-pki-types", + "socks", + "ureq-proto", + "utf8-zero", + "webpki-roots", +] + +[[package]] +name = "ureq-proto" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e994ba84b0bd1b1b0cf92878b7ef898a5c1760108fe7b6010327e274917a808c" +dependencies = [ + "base64", + "http", + "httparse", + "log", +] + [[package]] name = "url" version = "2.5.8" @@ -3120,6 +3284,18 @@ dependencies = [ "serde", ] +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8-zero" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8c0a043c9540bae7c578c88f91dda8bd82e59ae27c21baca69c8b191aaf5a6e" + [[package]] name = "utf8_iter" version = "1.0.4" @@ -3321,6 +3497,15 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki-roots" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52f5ee44c96cf55f1b349600768e3ece3a8f26010c05265ab73f945bb1a2eb9d" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "wezterm-bidi" version = "0.2.3" @@ -3393,6 +3578,15 @@ dependencies = [ "wezterm-dynamic", ] +[[package]] +name = "which" +version = "8.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "81995fafaaaf6ae47a7d0cc83c67caf92aeb7e5331650ae6ff856f7c0c60c459" +dependencies = [ + "libc", +] + [[package]] name = "winapi" version = "0.3.9" @@ -3492,6 +3686,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-sys" 
version = "0.60.2" @@ -3705,6 +3908,16 @@ dependencies = [ "memchr", ] +[[package]] +name = "winreg" +version = "0.55.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb5a765337c50e9ec252c2069be9bf91c7df47afb103b642ba3a53bf8101be97" +dependencies = [ + "cfg-if", + "windows-sys 0.59.0", +] + [[package]] name = "wit-bindgen" version = "0.51.0" diff --git a/crates/google-workspace-cli/Cargo.toml b/crates/google-workspace-cli/Cargo.toml index 058b109e..d6af7c32 100644 --- a/crates/google-workspace-cli/Cargo.toml +++ b/crates/google-workspace-cli/Cargo.toml @@ -66,6 +66,13 @@ tracing-subscriber = { version = "0.3", features = ["env-filter", "json"] } tracing-appender = "0.2" uuid = { version = "1.22.0", features = ["v4", "v5"] } mime_guess2 = "2.3.1" +headless_chrome = { version = "1.0", optional = true } + +[features] +default = [] +# Enables the `browser` agent tool. Requires a Chrome/Chromium binary on PATH +# at runtime. +browser-agent = ["dep:headless_chrome"] [target.'cfg(target_os = "macos")'.dependencies] keyring = { version = "3.6.3", features = ["apple-native"] } diff --git a/crates/google-workspace-cli/src/agent/config.rs b/crates/google-workspace-cli/src/agent/config.rs new file mode 100644 index 00000000..60d71186 --- /dev/null +++ b/crates/google-workspace-cli/src/agent/config.rs @@ -0,0 +1,388 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Agent runtime configuration. +//! 
+//! Configuration is resolved from (in order of precedence): +//! 1. Command-line flags. +//! 2. Environment variables (`GWS_AGENT_*`, `OPENROUTER_API_KEY`, ...). +//! 3. Sensible defaults. +//! +//! No file-based configuration is read — agent settings are either explicit +//! flags or pulled from the environment so that credentials never end up on +//! disk implicitly. + +use crate::error::GwsError; + +/// Which LLM provider the agent talks to. +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum ProviderKind { + /// OpenRouter (OpenAI-compatible router for Claude, GPT, Llama, Gemini, ...). + OpenRouter, + /// Local Ollama daemon exposing its OpenAI-compatible endpoint. + Ollama, +} + +impl ProviderKind { + pub fn parse(s: &str) -> Result { + match s.to_ascii_lowercase().as_str() { + "openrouter" | "or" => Ok(ProviderKind::OpenRouter), + "ollama" | "local" | "llama" => Ok(ProviderKind::Ollama), + other => Err(GwsError::Validation(format!( + "Unknown agent provider '{other}'. Valid values: openrouter, ollama." + ))), + } + } + + pub fn as_str(self) -> &'static str { + match self { + ProviderKind::OpenRouter => "openrouter", + ProviderKind::Ollama => "ollama", + } + } +} + +/// Fully resolved agent configuration. +#[derive(Debug, Clone)] +pub struct AgentConfig { + pub provider: ProviderKind, + pub model: String, + pub base_url: String, + pub api_key: Option, + /// Referrer / app title headers sent to OpenRouter for usage attribution. + pub app_title: String, + pub app_referer: String, + /// Maximum agent iterations (LLM call + tool execution cycles) per user turn. + pub max_steps: u32, + /// Maximum tokens for the model response. + pub max_tokens: Option, + /// Temperature override. + pub temperature: Option, + /// System prompt prepended to every conversation. + pub system_prompt: String, + /// If true, the agent asks for confirmation before executing any tool. + pub approve_tools: bool, + /// Single-shot prompt. 
When present, the agent runs one turn and exits. + pub one_shot: Option, + /// Output format: "text" (default) or "json". + pub output_format: AgentOutputFormat, + /// Supabase URL used for persistent conversation memory. Falls back to + /// in-process memory when unset. + pub supabase_url: Option, + pub supabase_key: Option, + pub supabase_memory_table: String, + /// Conversation identifier for multi-turn memory. + pub conversation_id: String, + /// Enabled tool names. Empty means "all available". + pub enabled_tools: Vec, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AgentOutputFormat { + Text, + Json, +} + +impl AgentOutputFormat { + fn parse(s: &str) -> Result { + match s.to_ascii_lowercase().as_str() { + "text" | "plain" => Ok(AgentOutputFormat::Text), + "json" => Ok(AgentOutputFormat::Json), + other => Err(GwsError::Validation(format!( + "Unknown agent --output format '{other}'. Valid values: text, json." + ))), + } + } +} + +/// Default system prompt. Keep this concise; tools describe themselves. +const DEFAULT_SYSTEM_PROMPT: &str = + "You are GWS Agent, a precise, concise command-line assistant that \ +augments the Google Workspace CLI (`gws`). You can call tools to read and \ +modify Google Workspace resources, query Notion, trigger Zapier hooks, read \ +and write Supabase rows, and operate a headless browser. Always prefer using \ +a tool when it can ground the answer in real data. When a tool fails, \ +explain what happened and suggest next steps. Reply in plain text, no \ +markdown headings."; + +fn env_trimmed(name: &str) -> Option { + std::env::var(name).ok().and_then(|v| { + let t = v.trim(); + if t.is_empty() { + None + } else { + Some(t.to_string()) + } + }) +} + +/// Parsed CLI flags for `gws agent`. 
+#[derive(Default, Debug)] +pub(crate) struct RawFlags { + pub provider: Option, + pub model: Option, + pub base_url: Option, + pub prompt: Option, + pub max_steps: Option, + pub max_tokens: Option, + pub temperature: Option, + pub system: Option, + pub approve: bool, + pub output: Option, + pub conversation: Option, + pub tools: Vec, + pub help: bool, +} + +/// Parse the argument vector passed after `gws agent`. +pub(crate) fn parse_flags(args: &[String]) -> Result { + let mut flags = RawFlags::default(); + let mut iter = args.iter(); + while let Some(arg) = iter.next() { + match arg.as_str() { + "-h" | "--help" => flags.help = true, + "--provider" => flags.provider = Some(take_value(&mut iter, "--provider")?), + "--model" => flags.model = Some(take_value(&mut iter, "--model")?), + "--base-url" => flags.base_url = Some(take_value(&mut iter, "--base-url")?), + "-p" | "--prompt" => flags.prompt = Some(take_value(&mut iter, "--prompt")?), + "--max-steps" => { + flags.max_steps = Some(take_value(&mut iter, "--max-steps")?.parse().map_err( + |e: std::num::ParseIntError| { + GwsError::Validation(format!("Invalid --max-steps: {e}")) + }, + )?) + } + "--max-tokens" => { + flags.max_tokens = Some(take_value(&mut iter, "--max-tokens")?.parse().map_err( + |e: std::num::ParseIntError| { + GwsError::Validation(format!("Invalid --max-tokens: {e}")) + }, + )?) + } + "--temperature" => { + flags.temperature = Some(take_value(&mut iter, "--temperature")?.parse().map_err( + |e: std::num::ParseFloatError| { + GwsError::Validation(format!("Invalid --temperature: {e}")) + }, + )?) 
+ } + "--system" => flags.system = Some(take_value(&mut iter, "--system")?), + "--approve-tools" => flags.approve = true, + "--output" => flags.output = Some(take_value(&mut iter, "--output")?), + "--conversation" => flags.conversation = Some(take_value(&mut iter, "--conversation")?), + "--tool" => flags.tools.push(take_value(&mut iter, "--tool")?), + "--tools" => { + let raw = take_value(&mut iter, "--tools")?; + flags.tools.extend( + raw.split(',') + .map(|s| s.trim().to_string()) + .filter(|s| !s.is_empty()), + ); + } + other if other.starts_with("--") => { + return Err(GwsError::Validation(format!( + "Unknown flag '{other}' for `gws agent`. Run `gws agent --help`." + ))); + } + // Positional fallback: concatenate free text as the one-shot prompt. + other => { + if let Some(prev) = flags.prompt.take() { + flags.prompt = Some(format!("{prev} {other}")); + } else { + flags.prompt = Some(other.to_string()); + } + } + } + } + Ok(flags) +} + +fn take_value<'a, I: Iterator>( + iter: &mut I, + flag: &str, +) -> Result { + iter.next() + .cloned() + .ok_or_else(|| GwsError::Validation(format!("Flag '{flag}' requires a value."))) +} + +impl AgentConfig { + /// Resolve a full config from parsed flags + environment. 
+ pub(crate) fn resolve(flags: RawFlags) -> Result { + let provider_str = flags + .provider + .or_else(|| env_trimmed("GWS_AGENT_PROVIDER")) + .unwrap_or_else(|| "openrouter".to_string()); + let provider = ProviderKind::parse(&provider_str)?; + + let default_model = match provider { + ProviderKind::OpenRouter => "anthropic/claude-opus-4.6", + ProviderKind::Ollama => "llama3.1", + }; + let model = flags + .model + .or_else(|| env_trimmed("GWS_AGENT_MODEL")) + .unwrap_or_else(|| default_model.to_string()); + + let default_base = match provider { + ProviderKind::OpenRouter => "https://openrouter.ai/api/v1", + ProviderKind::Ollama => "http://localhost:11434/v1", + }; + let base_url = flags + .base_url + .or_else(|| env_trimmed("GWS_AGENT_BASE_URL")) + .or_else(|| match provider { + ProviderKind::OpenRouter => env_trimmed("OPENROUTER_BASE_URL"), + ProviderKind::Ollama => env_trimmed("OLLAMA_BASE_URL"), + }) + .unwrap_or_else(|| default_base.to_string()); + + let api_key = match provider { + ProviderKind::OpenRouter => env_trimmed("OPENROUTER_API_KEY"), + ProviderKind::Ollama => env_trimmed("OLLAMA_API_KEY"), + }; + + let output_format = match flags.output.as_deref() { + Some(s) => AgentOutputFormat::parse(s)?, + None => AgentOutputFormat::Text, + }; + + let conversation_id = flags + .conversation + .or_else(|| env_trimmed("GWS_AGENT_CONVERSATION")) + .unwrap_or_else(|| format!("cli-{}", uuid::Uuid::new_v4())); + + let system_prompt = flags + .system + .or_else(|| env_trimmed("GWS_AGENT_SYSTEM_PROMPT")) + .unwrap_or_else(|| DEFAULT_SYSTEM_PROMPT.to_string()); + + Ok(Self { + provider, + model, + base_url, + api_key, + app_title: env_trimmed("GWS_AGENT_APP_TITLE") + .unwrap_or_else(|| "google-workspace-cli".to_string()), + app_referer: env_trimmed("GWS_AGENT_APP_REFERER") + .unwrap_or_else(|| "https://github.com/googleworkspace/cli".to_string()), + max_steps: flags.max_steps.unwrap_or(8), + max_tokens: flags.max_tokens, + temperature: flags.temperature, + system_prompt, + 
approve_tools: flags.approve, + one_shot: flags.prompt, + output_format, + supabase_url: env_trimmed("SUPABASE_URL"), + supabase_key: env_trimmed("SUPABASE_SERVICE_ROLE_KEY") + .or_else(|| env_trimmed("SUPABASE_ANON_KEY")), + supabase_memory_table: env_trimmed("GWS_AGENT_MEMORY_TABLE") + .unwrap_or_else(|| "gws_agent_memory".to_string()), + conversation_id, + enabled_tools: flags.tools, + }) + } + + pub fn format(&self) -> AgentOutputFormat { + self.output_format + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn strs(v: &[&str]) -> Vec { + v.iter().map(|s| s.to_string()).collect() + } + + #[test] + fn parse_provider() { + assert_eq!( + ProviderKind::parse("OpenRouter").unwrap(), + ProviderKind::OpenRouter + ); + assert_eq!(ProviderKind::parse("ollama").unwrap(), ProviderKind::Ollama); + assert_eq!(ProviderKind::parse("local").unwrap(), ProviderKind::Ollama); + assert!(ProviderKind::parse("gpt5").is_err()); + } + + #[test] + fn parse_output_format() { + assert_eq!( + AgentOutputFormat::parse("text").unwrap(), + AgentOutputFormat::Text + ); + assert_eq!( + AgentOutputFormat::parse("JSON").unwrap(), + AgentOutputFormat::Json + ); + assert!(AgentOutputFormat::parse("yaml").is_err()); + } + + #[test] + fn parse_flags_positional_becomes_prompt() { + let f = parse_flags(&strs(&["what", "is", "2+2"])).unwrap(); + assert_eq!(f.prompt.as_deref(), Some("what is 2+2")); + assert!(!f.help); + } + + #[test] + fn parse_flags_tools_csv() { + let f = parse_flags(&strs(&["--tools", "notion,zapier", "--tool", "gws"])).unwrap(); + assert_eq!(f.tools, vec!["notion", "zapier", "gws"]); + } + + #[test] + fn parse_flags_requires_value() { + let err = parse_flags(&strs(&["--model"])).unwrap_err(); + assert!(err.to_string().contains("--model")); + } + + #[test] + fn parse_flags_rejects_unknown() { + let err = parse_flags(&strs(&["--nope"])).unwrap_err(); + assert!(err.to_string().contains("--nope")); + } + + #[test] + #[serial_test::serial] + fn resolve_defaults_for_openrouter() { 
+ std::env::remove_var("GWS_AGENT_PROVIDER"); + std::env::remove_var("GWS_AGENT_MODEL"); + std::env::remove_var("GWS_AGENT_BASE_URL"); + std::env::remove_var("OPENROUTER_API_KEY"); + let cfg = AgentConfig::resolve(RawFlags::default()).unwrap(); + assert_eq!(cfg.provider, ProviderKind::OpenRouter); + assert_eq!(cfg.base_url, "https://openrouter.ai/api/v1"); + assert!(cfg.model.starts_with("anthropic/")); + assert!(cfg.conversation_id.starts_with("cli-")); + } + + #[test] + #[serial_test::serial] + fn resolve_ollama_flag_wins_over_env() { + std::env::set_var("GWS_AGENT_PROVIDER", "openrouter"); + let flags = RawFlags { + provider: Some("ollama".to_string()), + model: Some("llama3.1:70b".to_string()), + ..Default::default() + }; + let cfg = AgentConfig::resolve(flags).unwrap(); + std::env::remove_var("GWS_AGENT_PROVIDER"); + assert_eq!(cfg.provider, ProviderKind::Ollama); + assert_eq!(cfg.model, "llama3.1:70b"); + assert_eq!(cfg.base_url, "http://localhost:11434/v1"); + } +} diff --git a/crates/google-workspace-cli/src/agent/llm.rs b/crates/google-workspace-cli/src/agent/llm.rs new file mode 100644 index 00000000..05b7a769 --- /dev/null +++ b/crates/google-workspace-cli/src/agent/llm.rs @@ -0,0 +1,454 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! OpenAI-compatible chat/completions client. +//! +//! Both OpenRouter and Ollama (via its built-in OpenAI compatibility layer +//! 
at `/v1/chat/completions`) accept this request shape, so a single client +//! serves both providers. The only difference is the `Authorization` header +//! and the attribution headers OpenRouter uses for usage tracking. + +use async_trait::async_trait; +use serde::{Deserialize, Serialize}; +use serde_json::Value; + +use crate::agent::config::{AgentConfig, ProviderKind}; +use crate::error::GwsError; + +/// Role of a single chat message. +#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "lowercase")] +pub enum Role { + System, + User, + Assistant, + Tool, +} + +impl Role { + #[allow(dead_code)] + pub fn as_str(self) -> &'static str { + match self { + Role::System => "system", + Role::User => "user", + Role::Assistant => "assistant", + Role::Tool => "tool", + } + } +} + +/// One chat-completions message. Optional fields match OpenAI semantics. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ChatMessage { + pub role: Role, + #[serde(skip_serializing_if = "Option::is_none")] + pub content: Option, + /// Assistant-initiated tool calls (populated on assistant messages). + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_calls: Option>, + /// For tool role: id of the call this result belongs to. + #[serde(skip_serializing_if = "Option::is_none")] + pub tool_call_id: Option, + /// For tool role: function name that was invoked (optional per spec). 
+ #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, +} + +impl ChatMessage { + pub fn system>(text: S) -> Self { + Self { + role: Role::System, + content: Some(text.into()), + tool_calls: None, + tool_call_id: None, + name: None, + } + } + pub fn user>(text: S) -> Self { + Self { + role: Role::User, + content: Some(text.into()), + tool_calls: None, + tool_call_id: None, + name: None, + } + } + pub fn assistant_text>(text: S) -> Self { + Self { + role: Role::Assistant, + content: Some(text.into()), + tool_calls: None, + tool_call_id: None, + name: None, + } + } + pub fn tool_result, S2: Into, S3: Into>( + call_id: S1, + name: S2, + content: S3, + ) -> Self { + Self { + role: Role::Tool, + content: Some(content.into()), + tool_calls: None, + tool_call_id: Some(call_id.into()), + name: Some(name.into()), + } + } +} + +/// Assistant-issued tool call. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolCall { + pub id: String, + /// OpenAI uses `"function"` as the discriminator. + #[serde(rename = "type", default = "default_tool_type")] + pub call_type: String, + pub function: ToolCallFunction, +} + +fn default_tool_type() -> String { + "function".to_string() +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct ToolCallFunction { + pub name: String, + /// JSON-encoded arguments. Always a string per the OpenAI spec, even + /// though providers sometimes return an object — we normalise below. + pub arguments: String, +} + +/// Strongly-typed LLM response suitable for driving the agent loop. +#[derive(Debug, Clone)] +pub struct LlmResponse { + pub content: Option, + pub tool_calls: Vec, + /// Provider-reported finish reason. Surfaced for future telemetry / + /// logging hooks; not currently consulted by the agent loop. 
+ #[allow(dead_code)] + pub finish_reason: Option, +} + +#[async_trait] +pub trait LlmProvider: Send + Sync { + async fn complete( + &self, + messages: &[ChatMessage], + tools: &[Value], + ) -> Result; + + #[allow(dead_code)] + fn provider(&self) -> ProviderKind; + #[allow(dead_code)] + fn model(&self) -> &str; +} + +/// Shared OpenAI-compatible client. +pub struct OpenAiCompatibleClient { + client: reqwest::Client, + config: AgentConfig, +} + +impl OpenAiCompatibleClient { + pub fn new(config: AgentConfig) -> Result { + let client = crate::client::shared_client() + .map_err(|e| GwsError::Other(anyhow::anyhow!("agent http client: {e}")))?; + Ok(Self { client, config }) + } + + fn endpoint(&self) -> String { + format!( + "{}/chat/completions", + self.config.base_url.trim_end_matches('/') + ) + } +} + +#[derive(Debug, Deserialize)] +struct RawChoice { + message: RawMessage, + finish_reason: Option, +} + +#[derive(Debug, Deserialize)] +struct RawMessage { + #[serde(default)] + content: Option, + #[serde(default)] + tool_calls: Option>, +} + +#[derive(Debug, Deserialize)] +struct RawToolCall { + #[serde(default)] + id: Option, + #[serde(rename = "type", default)] + call_type: Option, + function: RawToolCallFunction, +} + +#[derive(Debug, Deserialize)] +struct RawToolCallFunction { + name: String, + /// Some providers (Ollama) emit a JSON object here instead of a string; + /// accept either and normalise to a string downstream. + #[serde(default)] + arguments: Value, +} + +#[derive(Debug, Deserialize)] +struct RawResponse { + choices: Vec, +} + +/// Flatten content that may come back as either a string or the +/// OpenAI-style list of content parts. 
+/// Normalize an OpenAI-compatible `content` field into plain text.
+///
+/// Providers variously return `null`, `""`, a plain string, or an array of
+/// typed "content parts"; anything else is serialized verbatim. Empty
+/// results collapse to `None` so callers can tell "no answer" apart from
+/// an empty one.
+fn content_to_string(v: &Value) -> Option<String> {
+    match v {
+        Value::Null => None,
+        Value::String(s) if s.is_empty() => None,
+        Value::String(s) => Some(s.clone()),
+        Value::Array(parts) => {
+            // Concatenate every part that carries a "text" field.
+            let joined: String = parts
+                .iter()
+                .filter_map(|p| p.get("text").and_then(|t| t.as_str()))
+                .collect::<Vec<_>>()
+                .join("");
+            if joined.is_empty() {
+                None
+            } else {
+                Some(joined)
+            }
+        }
+        other => Some(other.to_string()),
+    }
+}
+
+/// Normalize a tool-call `arguments` field to a JSON string.
+///
+/// OpenAI sends a JSON-encoded string; Ollama sends a bare object; some
+/// models emit `null` or whitespace for no-arg tools — those map to `"{}"`.
+fn arguments_to_string(v: &Value) -> String {
+    match v {
+        Value::Null => "{}".to_string(),
+        Value::String(s) if s.trim().is_empty() => "{}".to_string(),
+        Value::String(s) => s.clone(),
+        other => other.to_string(),
+    }
+}
+
+#[async_trait]
+impl LlmProvider for OpenAiCompatibleClient {
+    async fn complete(
+        &self,
+        messages: &[ChatMessage],
+        tools: &[Value],
+    ) -> Result<LlmResponse, GwsError> {
+        let mut body = serde_json::json!({
+            "model": self.config.model,
+            "messages": messages,
+        });
+        if !tools.is_empty() {
+            body["tools"] = Value::Array(tools.to_vec());
+            body["tool_choice"] = Value::String("auto".to_string());
+        }
+        if let Some(t) = self.config.temperature {
+            body["temperature"] = serde_json::json!(t);
+        }
+        if let Some(m) = self.config.max_tokens {
+            body["max_tokens"] = serde_json::json!(m);
+        }
+
+        let mut req = self.client.post(self.endpoint()).json(&body);
+        if let Some(ref key) = self.config.api_key {
+            req = req.bearer_auth(key);
+        }
+        // OpenRouter uses these optional headers for app attribution.
+        if self.config.provider == ProviderKind::OpenRouter {
+            req = req
+                .header("HTTP-Referer", &self.config.app_referer)
+                .header("X-Title", &self.config.app_title);
+        }
+
+        let resp = req
+            .send()
+            .await
+            .map_err(|e| GwsError::Other(anyhow::anyhow!("LLM request failed: {e}")))?;
+        let status = resp.status();
+        let text = resp
+            .text()
+            .await
+            .map_err(|e| GwsError::Other(anyhow::anyhow!("LLM response read failed: {e}")))?;
+        if !status.is_success() {
+            return Err(GwsError::Other(anyhow::anyhow!(
+                "LLM HTTP {status}: {}",
+                truncate(&text, 500)
+            )));
+        }
+        let raw: RawResponse = serde_json::from_str(&text).map_err(|e| {
+            GwsError::Other(anyhow::anyhow!(
+                "LLM JSON parse failed: {e}; body: {}",
+                truncate(&text, 300)
+            ))
+        })?;
+
+        let choice = raw
+            .choices
+            .into_iter()
+            .next()
+            .ok_or_else(|| GwsError::Other(anyhow::anyhow!("LLM returned no choices")))?;
+
+        let content = choice.message.content.as_ref().and_then(content_to_string);
+        let tool_calls = choice
+            .message
+            .tool_calls
+            .unwrap_or_default()
+            .into_iter()
+            .enumerate()
+            // Some providers omit ids / call types; synthesize stable defaults.
+            .map(|(i, t)| ToolCall {
+                id: t.id.unwrap_or_else(|| format!("call_{i}")),
+                call_type: t.call_type.unwrap_or_else(|| "function".to_string()),
+                function: ToolCallFunction {
+                    name: t.function.name,
+                    arguments: arguments_to_string(&t.function.arguments),
+                },
+            })
+            .collect();
+
+        Ok(LlmResponse {
+            content,
+            tool_calls,
+            finish_reason: choice.finish_reason,
+        })
+    }
+
+    fn provider(&self) -> ProviderKind {
+        self.config.provider
+    }
+
+    fn model(&self) -> &str {
+        &self.config.model
+    }
+}
+
+/// Clamp `s` to at most `max` bytes for inclusion in error messages.
+///
+/// The cut point is backed up to the nearest UTF-8 char boundary so the
+/// slice can never panic on multi-byte text (HTTP bodies are arbitrary).
+fn truncate(s: &str, max: usize) -> String {
+    if s.len() <= max {
+        return s.to_string();
+    }
+    let mut cut = max;
+    while !s.is_char_boundary(cut) {
+        cut -= 1;
+    }
+    format!("{}…(truncated {} bytes)", &s[..cut], s.len() - cut)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use serde_json::json;
+
+    #[test]
+    fn role_as_str_roundtrip() {
+        assert_eq!(Role::System.as_str(), "system");
+        assert_eq!(Role::Tool.as_str(), "tool");
+    }
+
+    #[test]
+    fn chat_message_constructors_omit_empty_fields_in_json() {
+        let msg = ChatMessage::user("hi");
+        let v = serde_json::to_value(&msg).unwrap();
+        assert_eq!(v["role"], "user");
+        assert_eq!(v["content"], "hi");
+        assert!(v.get("tool_calls").is_none());
+        assert!(v.get("name").is_none());
+    }
+
+    #[test]
+    fn tool_result_shape() {
+        let m = ChatMessage::tool_result("abc", "notion_search", "[]");
+        let v = serde_json::to_value(&m).unwrap();
+        assert_eq!(v["role"], "tool");
+        assert_eq!(v["tool_call_id"], "abc");
+        assert_eq!(v["name"], "notion_search");
+        assert_eq!(v["content"], "[]");
+    }
+
+    #[test]
+    fn content_string_variants() {
+        assert_eq!(content_to_string(&json!("hi")).as_deref(), Some("hi"));
+        assert_eq!(content_to_string(&json!("")), None);
+        assert_eq!(content_to_string(&Value::Null), None);
+        assert_eq!(
+            content_to_string(&json!([{"type":"text","text":"a"},{"type":"text","text":"b"}]))
+                .as_deref(),
+            Some("ab")
+        );
+    }
+
+    #[test]
+    fn arguments_to_string_handles_object_or_string() {
+        assert_eq!(arguments_to_string(&json!("{\"x\":1}")), "{\"x\":1}");
+        assert_eq!(arguments_to_string(&json!({"x":1})), "{\"x\":1}");
+        assert_eq!(arguments_to_string(&Value::Null), "{}");
+        assert_eq!(arguments_to_string(&json!(" ")), "{}");
+    }
+
+    #[test]
+    fn truncate_short_and_long() {
+        assert_eq!(truncate("abc", 10), "abc");
+        let s = truncate("abcdef", 3);
+        assert!(s.starts_with("abc"));
+        assert!(s.contains("truncated"));
+        // Regression: cutting inside a multi-byte char must not panic.
+        let m = truncate("héllo", 2);
+        assert!(m.starts_with('h') && m.contains("truncated"));
+    }
+
+    #[test]
+    fn raw_response_parses_openai_shape() {
+        let body = json!({
+            "id": "x",
+            "choices": [
+                {
+                    "finish_reason": "tool_calls",
+                    "message": {
+                        "role": "assistant",
+                        "content": null,
+                        "tool_calls": [
+                            {
+                                "id": "call_1",
+                                "type": "function",
+                                "function": {"name": "notion_search", "arguments": "{\"q\":\"r\"}"}
+                            }
+                        ]
+                    }
+                }
+            ]
+        });
+        let raw: RawResponse = serde_json::from_value(body).unwrap();
+        let c = &raw.choices[0];
+        assert_eq!(c.finish_reason.as_deref(), Some("tool_calls"));
+        let tc = c.message.tool_calls.as_ref().unwrap();
+        assert_eq!(tc[0].function.name, "notion_search");
+    }
+
+    #[test]
+    fn raw_response_parses_ollama_object_arguments() {
+        let body = json!({
+            "choices": [{
+                "message": {
+                    "role": "assistant",
+                    "tool_calls": [{
+                        "function": {"name": "f", "arguments": {"a": 1}}
+                    }]
+                }
+            }]
+        });
+        let raw: RawResponse = serde_json::from_value(body).unwrap();
+        let args = &raw.choices[0].message.tool_calls.as_ref().unwrap()[0]
+            .function
+            .arguments;
+        assert_eq!(arguments_to_string(args), "{\"a\":1}");
+    }
+}
diff --git a/crates/google-workspace-cli/src/agent/memory.rs
b/crates/google-workspace-cli/src/agent/memory.rs new file mode 100644 index 00000000..a14a8113 --- /dev/null +++ b/crates/google-workspace-cli/src/agent/memory.rs @@ -0,0 +1,305 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! LangChain-style conversational memory. +//! +//! The agent reads any prior `user` / `assistant` turns for the current +//! `conversation_id` on start-up, then writes back the new user turn and +//! the final assistant answer after the loop completes. Tool traffic is +//! *not* persisted — it is recomputed per-turn to avoid bloating context +//! and because tool results often contain freshness-sensitive data. +//! +//! Two backends are supported: +//! * [`BufferMemory`] — in-process only, used as a fallback when Supabase +//! isn't configured and for tests. +//! * [`SupabaseMemory`] — persists to a Supabase table via the REST +//! (PostgREST) API. The table schema is: +//! +//! ```sql +//! create table gws_agent_memory ( +//! id bigserial primary key, +//! conversation_id text not null, +//! role text not null, +//! content text not null, +//! created_at timestamptz not null default now() +//! ); +//! create index on gws_agent_memory (conversation_id, created_at); +//! ``` + +use async_trait::async_trait; +use serde::{Deserialize, Serialize}; + +use crate::agent::config::AgentConfig; +use crate::agent::llm::{ChatMessage, Role}; +use crate::error::GwsError; + +/// A persistable conversation turn. 
Only `user` and `assistant` messages +/// survive round-trips — system prompts and tool messages are stripped. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct MemoryTurn { + pub role: String, + pub content: String, +} + +impl MemoryTurn { + pub fn from_message(m: &ChatMessage) -> Option { + let role = match m.role { + Role::User => "user", + Role::Assistant => "assistant", + _ => return None, + }; + let content = m.content.clone()?; + Some(Self { + role: role.to_string(), + content, + }) + } + + pub fn to_message(&self) -> Option { + match self.role.as_str() { + "user" => Some(ChatMessage::user(&self.content)), + "assistant" => Some(ChatMessage::assistant_text(&self.content)), + _ => None, + } + } +} + +#[async_trait] +pub trait Memory: Send + Sync { + async fn load(&self, conversation_id: &str) -> Result, GwsError>; + async fn append(&self, conversation_id: &str, turn: &MemoryTurn) -> Result<(), GwsError>; + fn kind(&self) -> &'static str; +} + +/// In-process ring buffer (per conversation_id). 
+#[derive(Default)] +pub struct BufferMemory { + inner: tokio::sync::Mutex>>, + capacity: usize, +} + +impl BufferMemory { + pub fn with_capacity(capacity: usize) -> Self { + Self { + inner: tokio::sync::Mutex::new(std::collections::HashMap::new()), + capacity, + } + } +} + +#[async_trait] +impl Memory for BufferMemory { + async fn load(&self, conversation_id: &str) -> Result, GwsError> { + let g = self.inner.lock().await; + Ok(g.get(conversation_id).cloned().unwrap_or_default()) + } + + async fn append(&self, conversation_id: &str, turn: &MemoryTurn) -> Result<(), GwsError> { + let mut g = self.inner.lock().await; + let entry = g.entry(conversation_id.to_string()).or_default(); + entry.push(turn.clone()); + if self.capacity > 0 && entry.len() > self.capacity { + let overflow = entry.len() - self.capacity; + entry.drain(0..overflow); + } + Ok(()) + } + + fn kind(&self) -> &'static str { + "buffer" + } +} + +/// Supabase-backed memory using the PostgREST HTTP API. +pub struct SupabaseMemory { + client: reqwest::Client, + base_url: String, + api_key: String, + table: String, +} + +impl SupabaseMemory { + pub fn from_config(config: &AgentConfig) -> Result, GwsError> { + let (url, key) = match (&config.supabase_url, &config.supabase_key) { + (Some(u), Some(k)) => (u.clone(), k.clone()), + _ => return Ok(None), + }; + let client = crate::client::shared_client() + .map_err(|e| GwsError::Other(anyhow::anyhow!("supabase http client: {e}")))?; + Ok(Some(Self { + client, + base_url: url.trim_end_matches('/').to_string(), + api_key: key, + table: config.supabase_memory_table.clone(), + })) + } + + fn endpoint(&self) -> String { + format!("{}/rest/v1/{}", self.base_url, self.table) + } +} + +#[async_trait] +impl Memory for SupabaseMemory { + async fn load(&self, conversation_id: &str) -> Result, GwsError> { + let url = format!( + "{}?conversation_id=eq.{}&order=created_at.asc&select=role,content", + self.endpoint(), + urlencoding(conversation_id) + ); + let resp = self + 
.client + .get(&url) + .header("apikey", &self.api_key) + .bearer_auth(&self.api_key) + .send() + .await + .map_err(|e| GwsError::Other(anyhow::anyhow!("supabase load: {e}")))?; + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + if !status.is_success() { + return Err(GwsError::Other(anyhow::anyhow!( + "supabase load HTTP {status}: {text}" + ))); + } + let rows: Vec = serde_json::from_str(&text) + .map_err(|e| GwsError::Other(anyhow::anyhow!("supabase load parse: {e}")))?; + Ok(rows) + } + + async fn append(&self, conversation_id: &str, turn: &MemoryTurn) -> Result<(), GwsError> { + let body = serde_json::json!([{ + "conversation_id": conversation_id, + "role": turn.role, + "content": turn.content, + }]); + let resp = self + .client + .post(self.endpoint()) + .header("apikey", &self.api_key) + .bearer_auth(&self.api_key) + .header("Content-Type", "application/json") + .header("Prefer", "return=minimal") + .json(&body) + .send() + .await + .map_err(|e| GwsError::Other(anyhow::anyhow!("supabase append: {e}")))?; + let status = resp.status(); + if !status.is_success() { + let text = resp.text().await.unwrap_or_default(); + return Err(GwsError::Other(anyhow::anyhow!( + "supabase append HTTP {status}: {text}" + ))); + } + Ok(()) + } + + fn kind(&self) -> &'static str { + "supabase" + } +} + +/// Minimal URL encoder for query-string values (alphanumerics, `-_.~` pass +/// through; everything else is percent-encoded). Good enough for arbitrary +/// conversation IDs without pulling `form_urlencoded`. +fn urlencoding(input: &str) -> String { + let mut out = String::with_capacity(input.len()); + for b in input.bytes() { + if b.is_ascii_alphanumeric() || matches!(b, b'-' | b'_' | b'.' 
| b'~') { + out.push(b as char); + } else { + out.push_str(&format!("%{b:02X}")); + } + } + out +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn memory_turn_round_trip() { + let m = ChatMessage::user("hello"); + let t = MemoryTurn::from_message(&m).unwrap(); + assert_eq!(t.role, "user"); + let back = t.to_message().unwrap(); + assert_eq!(back.role, Role::User); + assert_eq!(back.content.as_deref(), Some("hello")); + } + + #[test] + fn memory_turn_skips_system_and_tool() { + let s = ChatMessage::system("sys"); + assert!(MemoryTurn::from_message(&s).is_none()); + let t = ChatMessage::tool_result("id", "name", "result"); + assert!(MemoryTurn::from_message(&t).is_none()); + } + + #[tokio::test] + async fn buffer_memory_per_conversation() { + let mem = BufferMemory::with_capacity(0); + mem.append( + "a", + &MemoryTurn { + role: "user".into(), + content: "hi".into(), + }, + ) + .await + .unwrap(); + mem.append( + "b", + &MemoryTurn { + role: "user".into(), + content: "yo".into(), + }, + ) + .await + .unwrap(); + let a = mem.load("a").await.unwrap(); + assert_eq!(a.len(), 1); + assert_eq!(a[0].content, "hi"); + let b = mem.load("b").await.unwrap(); + assert_eq!(b[0].content, "yo"); + } + + #[tokio::test] + async fn buffer_memory_trims_to_capacity() { + let mem = BufferMemory::with_capacity(2); + for i in 0..5 { + mem.append( + "c", + &MemoryTurn { + role: "user".into(), + content: format!("m{i}"), + }, + ) + .await + .unwrap(); + } + let out = mem.load("c").await.unwrap(); + assert_eq!(out.len(), 2); + assert_eq!(out[0].content, "m3"); + assert_eq!(out[1].content, "m4"); + } + + #[test] + fn urlencoding_escapes_specials() { + assert_eq!(urlencoding("abc-_."), "abc-_."); + assert_eq!(urlencoding("a b"), "a%20b"); + assert_eq!(urlencoding("eq."), "eq."); + assert_eq!(urlencoding("a/b?c"), "a%2Fb%3Fc"); + } +} diff --git a/crates/google-workspace-cli/src/agent/mod.rs b/crates/google-workspace-cli/src/agent/mod.rs new file mode 100644 index 00000000..d1759a33 
--- /dev/null +++ b/crates/google-workspace-cli/src/agent/mod.rs @@ -0,0 +1,571 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! `gws agent` — a terminal AI agent that can call tools. +//! +//! This module wires together: +//! +//! * Config resolution ([`config`]). +//! * LLM provider (OpenRouter / Ollama via OpenAI-compatible REST, [`llm`]). +//! * Tool registry ([`tool`], [`tools`]). +//! * Conversation memory ([`memory`]). +//! * The agent loop itself ([`run_turn`]). +//! +//! The public entry point is [`handle_agent_command`], invoked from +//! `main.rs` when the user runs `gws agent ...`. + +pub mod config; +pub mod llm; +pub mod memory; +pub mod tool; +pub mod tools; + +use std::io::Write; +use std::sync::Arc; + +use serde_json::Value; + +use crate::error::GwsError; + +use config::{parse_flags, AgentConfig, AgentOutputFormat}; +use llm::{ChatMessage, LlmProvider, OpenAiCompatibleClient, Role, ToolCall}; +use memory::{BufferMemory, Memory, MemoryTurn, SupabaseMemory}; +use tool::{ToolError, ToolRegistry}; + +/// Summary of what a single turn produced; returned from [`run_turn`] and +/// used to drive the REPL / JSON output. 
+#[derive(Debug)] +pub struct TurnResult { + pub answer: String, + pub steps: u32, + pub tool_calls: Vec, +} + +#[derive(Debug, Clone)] +pub struct ExecutedTool { + pub name: String, + pub arguments: String, + pub result: Result, +} + +/// Entry point invoked from `main.rs` when the user runs `gws agent ...`. +pub async fn handle_agent_command(args: &[String]) -> Result<(), GwsError> { + let flags = parse_flags(args)?; + if flags.help { + print_help(); + return Ok(()); + } + let config = AgentConfig::resolve(flags)?; + let provider: Arc = Arc::new(OpenAiCompatibleClient::new(config.clone())?); + + let memory: Arc = match SupabaseMemory::from_config(&config)? { + Some(s) => Arc::new(s), + None => Arc::new(BufferMemory::with_capacity(200)), + }; + + let mut registry = tools::default_registry(&config); + registry.retain(&config.enabled_tools); + + if let Some(prompt) = config.one_shot.clone() { + let result = run_turn( + &config, + provider.as_ref(), + memory.as_ref(), + ®istry, + &prompt, + ) + .await?; + emit_result(&config, &result); + return Ok(()); + } + + run_repl(&config, provider, memory, registry).await +} + +/// Print `gws agent --help` text. +fn print_help() { + println!( + "gws agent — terminal AI agent with tool use + +USAGE: + gws agent [FLAGS] [PROMPT...] + +When a prompt is supplied, the agent answers once and exits. With no +prompt, it drops into a REPL; type `exit` or Ctrl-D to leave. + +FLAGS: + -h, --help Show this help. + -p, --prompt One-shot prompt (alternative to positional). + --provider openrouter (default) or ollama. + --model Model id. Default: anthropic/claude-opus-4.6 + (OpenRouter) or llama3.1 (Ollama). + --base-url Override the provider base URL. + --max-steps Max LLM/tool iterations per turn (default 8). + --max-tokens Cap on response tokens. + --temperature Sampling temperature. + --system Override the system prompt. + --approve-tools Ask for confirmation before each tool call. + --output text (default) or json. 
+ --conversation Reuse a persistent conversation id. + --tools Comma-separated allow-list of tool names. + --tool Repeatable tool allow-list entry. + +ENVIRONMENT: + OPENROUTER_API_KEY OpenRouter auth. + OLLAMA_BASE_URL Override local Ollama host. + NOTION_API_KEY Enables the `notion` tool. + ZAPIER_WEBHOOK_URL Enables the `zapier_webhook` tool. + ZAPIER_WEBHOOK__URL Adds named hooks. + ZAPIER_NLA_API_KEY Enables the `zapier_actions` tool. + SUPABASE_URL Enables Supabase tool + persistent memory + SUPABASE_SERVICE_ROLE_KEY (or SUPABASE_ANON_KEY). + GWS_AGENT_MEMORY_TABLE Override memory table (default gws_agent_memory). + +EXAMPLES: + gws agent 'Summarize my 5 most recent Gmail threads.' + gws agent --provider ollama --model llama3.1 --approve-tools + gws agent --conversation triage-2026 --prompt 'What did we decide?' +" + ); +} + +/// Pretty-print a [`TurnResult`] according to the configured output format. +fn emit_result(config: &AgentConfig, result: &TurnResult) { + match config.format() { + AgentOutputFormat::Text => { + println!("{}", result.answer); + } + AgentOutputFormat::Json => { + let tool_calls: Vec = result + .tool_calls + .iter() + .map(|t| { + let (ok, val) = match &t.result { + Ok(s) => (true, Value::String(s.clone())), + Err(e) => (false, Value::String(e.clone())), + }; + serde_json::json!({ + "tool": t.name, + "arguments": t.arguments, + "ok": ok, + "result": val, + }) + }) + .collect(); + let out = serde_json::json!({ + "answer": result.answer, + "steps": result.steps, + "tool_calls": tool_calls, + }); + println!( + "{}", + serde_json::to_string_pretty(&out).unwrap_or_else(|_| "{}".to_string()) + ); + } + } +} + +async fn run_repl( + config: &AgentConfig, + provider: Arc, + memory: Arc, + registry: ToolRegistry, +) -> Result<(), GwsError> { + let tools_line = if registry.is_empty() { + "(no tools enabled)".to_string() + } else { + registry.names().join(", ") + }; + eprintln!( + "gws agent — provider={} model={} memory={} tools={}", + 
config.provider.as_str(), + config.model, + memory.kind(), + tools_line + ); + eprintln!("Type 'exit' or press Ctrl-D to quit."); + + let stdin = std::io::stdin(); + loop { + eprint!("you> "); + std::io::stderr().flush().ok(); + let mut line = String::new(); + let n = stdin + .read_line(&mut line) + .map_err(|e| GwsError::Other(anyhow::anyhow!("stdin read failed: {e}")))?; + if n == 0 { + eprintln!(); + break; + } + let prompt = line.trim(); + if prompt.is_empty() { + continue; + } + if matches!(prompt, "exit" | "quit" | ":q") { + break; + } + match run_turn( + config, + provider.as_ref(), + memory.as_ref(), + ®istry, + prompt, + ) + .await + { + Ok(result) => emit_result(config, &result), + Err(e) => eprintln!("error: {e}"), + } + } + Ok(()) +} + +/// Execute a single user turn. Public for testing of the tool-dispatch +/// loop via a mock [`LlmProvider`]. +pub async fn run_turn( + config: &AgentConfig, + provider: &dyn LlmProvider, + memory: &dyn Memory, + registry: &ToolRegistry, + prompt: &str, +) -> Result { + let specs = registry.as_openai_specs(); + let mut messages: Vec = Vec::new(); + messages.push(ChatMessage::system(&config.system_prompt)); + + // Replay prior turns. + for t in memory.load(&config.conversation_id).await? { + if let Some(m) = t.to_message() { + messages.push(m); + } + } + let user_msg = ChatMessage::user(prompt); + messages.push(user_msg.clone()); + + let mut executed: Vec = Vec::new(); + let mut steps = 0u32; + let mut final_answer: Option = None; + + for _ in 0..config.max_steps { + steps += 1; + let resp = provider.complete(&messages, &specs).await?; + + if resp.tool_calls.is_empty() { + final_answer = resp.content.clone(); + if let Some(ref text) = resp.content { + messages.push(ChatMessage::assistant_text(text)); + } else { + messages.push(ChatMessage::assistant_text("")); + } + break; + } + + // Record assistant's tool_calls message so the tool responses can + // reference it by id. 
+ messages.push(ChatMessage { + role: Role::Assistant, + content: resp.content.clone(), + tool_calls: Some(resp.tool_calls.clone()), + tool_call_id: None, + name: None, + }); + + for call in &resp.tool_calls { + let (result, arg_str) = dispatch_tool(config, registry, call).await; + messages.push(ChatMessage::tool_result( + call.id.clone(), + call.function.name.clone(), + match &result { + Ok(s) => s.clone(), + Err(e) => format!("ERROR: {e}"), + }, + )); + executed.push(ExecutedTool { + name: call.function.name.clone(), + arguments: arg_str, + result: result.map_err(|e| e.to_string()), + }); + } + } + + let answer = final_answer.unwrap_or_else(|| { + "(agent stopped without producing a final answer — raise --max-steps?)".to_string() + }); + + // Persist this turn (user prompt + final answer only). + if let Some(turn) = MemoryTurn::from_message(&user_msg) { + memory.append(&config.conversation_id, &turn).await.ok(); + } + let assistant_turn = MemoryTurn { + role: "assistant".to_string(), + content: answer.clone(), + }; + memory + .append(&config.conversation_id, &assistant_turn) + .await + .ok(); + + Ok(TurnResult { + answer, + steps, + tool_calls: executed, + }) +} + +async fn dispatch_tool( + config: &AgentConfig, + registry: &ToolRegistry, + call: &ToolCall, +) -> (Result, String) { + let name = &call.function.name; + let arg_str = call.function.arguments.clone(); + let Some(tool) = registry.get(name) else { + return ( + Err(ToolError::runtime(format!("unknown tool '{name}'"))), + arg_str, + ); + }; + + let args_value: Value = serde_json::from_str(&arg_str).unwrap_or_else(|_| { + // Some models emit empty strings for no-arg tools; fall back to {}. 
+ if arg_str.trim().is_empty() { + Value::Object(Default::default()) + } else { + Value::String(arg_str.clone()) + } + }); + + if config.approve_tools && !approve_interactive(name, &args_value) { + return (Err(ToolError::Denied), arg_str); + } + + let result = tool.call(args_value).await; + (result, arg_str) +} + +/// Ask the user on stderr whether to run a tool. Returns `true` when the +/// user answers y/yes/. +fn approve_interactive(name: &str, args: &Value) -> bool { + let compact = serde_json::to_string(args).unwrap_or_else(|_| "{}".to_string()); + eprintln!("tool> {name}({compact})"); + eprint!("approve? [Y/n] "); + std::io::stderr().flush().ok(); + let mut line = String::new(); + if std::io::stdin().read_line(&mut line).is_err() { + return false; + } + let t = line.trim().to_ascii_lowercase(); + matches!(t.as_str(), "" | "y" | "yes") +} + +#[cfg(test)] +mod tests { + use super::*; + use async_trait::async_trait; + use std::sync::Mutex; + + /// Scripted provider that returns queued responses in order. 
+ struct ScriptedProvider { + script: Mutex>, + } + + impl ScriptedProvider { + fn new(script: Vec) -> Self { + Self { + script: Mutex::new(script), + } + } + } + + #[async_trait] + impl LlmProvider for ScriptedProvider { + async fn complete( + &self, + _messages: &[ChatMessage], + _tools: &[Value], + ) -> Result { + let mut g = self.script.lock().unwrap(); + if g.is_empty() { + return Err(GwsError::Other(anyhow::anyhow!("script exhausted"))); + } + Ok(g.remove(0)) + } + fn provider(&self) -> config::ProviderKind { + config::ProviderKind::OpenRouter + } + fn model(&self) -> &str { + "test-model" + } + } + + fn test_config() -> AgentConfig { + AgentConfig { + provider: config::ProviderKind::OpenRouter, + model: "test".to_string(), + base_url: "http://localhost".to_string(), + api_key: None, + app_title: "t".into(), + app_referer: "t".into(), + max_steps: 4, + max_tokens: None, + temperature: None, + system_prompt: "be helpful".into(), + approve_tools: false, + one_shot: None, + output_format: AgentOutputFormat::Text, + supabase_url: None, + supabase_key: None, + supabase_memory_table: "gws_agent_memory".into(), + conversation_id: "t1".into(), + enabled_tools: vec![], + } + } + + struct FixedEcho; + #[async_trait] + impl tool::Tool for FixedEcho { + fn name(&self) -> &str { + "echo" + } + fn description(&self) -> &str { + "echo" + } + fn parameters_schema(&self) -> Value { + serde_json::json!({"type": "object"}) + } + async fn call(&self, args: Value) -> Result { + Ok(args + .get("text") + .and_then(|v| v.as_str()) + .unwrap_or("") + .to_string()) + } + } + + #[tokio::test] + async fn single_turn_no_tools() { + let cfg = test_config(); + let mem = BufferMemory::with_capacity(100); + let reg = ToolRegistry::new(); + let provider = ScriptedProvider::new(vec![llm::LlmResponse { + content: Some("Hello there.".into()), + tool_calls: vec![], + finish_reason: Some("stop".into()), + }]); + let result = run_turn(&cfg, &provider, &mem, ®, "hi").await.unwrap(); + 
assert_eq!(result.answer, "Hello there."); + assert_eq!(result.steps, 1); + assert!(result.tool_calls.is_empty()); + + // Memory has the user turn and assistant turn. + let turns = mem.load(&cfg.conversation_id).await.unwrap(); + assert_eq!(turns.len(), 2); + assert_eq!(turns[0].role, "user"); + assert_eq!(turns[1].role, "assistant"); + assert_eq!(turns[1].content, "Hello there."); + } + + #[tokio::test] + async fn tool_call_then_answer() { + let cfg = test_config(); + let mem = BufferMemory::with_capacity(100); + let mut reg = ToolRegistry::new(); + reg.insert(Arc::new(FixedEcho)); + + let provider = ScriptedProvider::new(vec![ + llm::LlmResponse { + content: None, + tool_calls: vec![ToolCall { + id: "c1".into(), + call_type: "function".into(), + function: llm::ToolCallFunction { + name: "echo".into(), + arguments: "{\"text\":\"world\"}".into(), + }, + }], + finish_reason: Some("tool_calls".into()), + }, + llm::LlmResponse { + content: Some("Got: world".into()), + tool_calls: vec![], + finish_reason: Some("stop".into()), + }, + ]); + + let result = run_turn(&cfg, &provider, &mem, ®, "echo").await.unwrap(); + assert_eq!(result.answer, "Got: world"); + assert_eq!(result.steps, 2); + assert_eq!(result.tool_calls.len(), 1); + assert_eq!(result.tool_calls[0].name, "echo"); + assert!(result.tool_calls[0].result.is_ok()); + } + + #[tokio::test] + async fn unknown_tool_surfaces_error_but_continues() { + let cfg = test_config(); + let mem = BufferMemory::with_capacity(100); + let reg = ToolRegistry::new(); + + let provider = ScriptedProvider::new(vec![ + llm::LlmResponse { + content: None, + tool_calls: vec![ToolCall { + id: "c1".into(), + call_type: "function".into(), + function: llm::ToolCallFunction { + name: "nope".into(), + arguments: "{}".into(), + }, + }], + finish_reason: Some("tool_calls".into()), + }, + llm::LlmResponse { + content: Some("I can't do that.".into()), + tool_calls: vec![], + finish_reason: Some("stop".into()), + }, + ]); + let result = 
run_turn(&cfg, &provider, &mem, ®, "?").await.unwrap(); + assert_eq!(result.answer, "I can't do that."); + assert_eq!(result.tool_calls.len(), 1); + assert!(result.tool_calls[0].result.is_err()); + } + + #[tokio::test] + async fn exceeds_max_steps_returns_fallback_answer() { + let mut cfg = test_config(); + cfg.max_steps = 1; + let mem = BufferMemory::with_capacity(100); + let mut reg = ToolRegistry::new(); + reg.insert(Arc::new(FixedEcho)); + + // Only one response, which is a tool call → loop exits without + // producing an answer. + let provider = ScriptedProvider::new(vec![llm::LlmResponse { + content: None, + tool_calls: vec![ToolCall { + id: "c1".into(), + call_type: "function".into(), + function: llm::ToolCallFunction { + name: "echo".into(), + arguments: "{\"text\":\"x\"}".into(), + }, + }], + finish_reason: Some("tool_calls".into()), + }]); + let result = run_turn(&cfg, &provider, &mem, ®, "loop").await.unwrap(); + assert!(result.answer.contains("max-steps")); + assert_eq!(result.steps, 1); + } +} diff --git a/crates/google-workspace-cli/src/agent/tool.rs b/crates/google-workspace-cli/src/agent/tool.rs new file mode 100644 index 00000000..326b44a3 --- /dev/null +++ b/crates/google-workspace-cli/src/agent/tool.rs @@ -0,0 +1,213 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Tool abstraction for the agent loop. +//! +//! A `Tool` is anything the LLM can invoke through an OpenAI-style +//! 
`tool_calls` function. Each tool advertises a name, a natural-language +//! description and a JSON Schema describing its arguments; execution takes +//! a `serde_json::Value` of arguments and returns a `String` that is fed +//! back to the model as the `tool` role message. +//! +//! Tools are expected to be cheap to clone (typically they hold +//! `Arc`-wrapped HTTP clients + configuration). + +use async_trait::async_trait; +use serde_json::Value; +use std::collections::BTreeMap; +use std::sync::Arc; + +/// Error returned from tool execution. +/// +/// We never propagate raw network or deserialization errors to the model — +/// tool failures are serialized into the tool-call response so the model +/// can recover. +#[derive(Debug, thiserror::Error)] +pub enum ToolError { + /// User declined an interactive approval prompt. + #[error("user denied tool execution")] + Denied, + /// Any runtime failure (network, parse, remote API error) — already + /// formatted for presentation to the model. + #[error("{0}")] + Runtime(String), +} + +impl ToolError { + pub fn runtime>(msg: M) -> Self { + ToolError::Runtime(msg.into()) + } +} + +/// A single tool the agent can invoke. +#[async_trait] +pub trait Tool: Send + Sync { + /// Unique snake_case identifier sent to the model. + fn name(&self) -> &str; + /// Short human description shown to the model. + fn description(&self) -> &str; + /// JSON Schema (draft-07 style) for the arguments object. + fn parameters_schema(&self) -> Value; + /// Execute the tool and return a string result. + async fn call(&self, args: Value) -> Result; +} + +/// OpenAI-compatible tool specification shipped to the model. +pub fn openai_tool_spec(tool: &dyn Tool) -> Value { + serde_json::json!({ + "type": "function", + "function": { + "name": tool.name(), + "description": tool.description(), + "parameters": tool.parameters_schema(), + } + }) +} + +/// Collection of tools indexed by name. 
+#[derive(Clone, Default)] +pub struct ToolRegistry { + tools: BTreeMap>, +} + +impl ToolRegistry { + pub fn new() -> Self { + Self::default() + } + + pub fn insert(&mut self, tool: Arc) { + self.tools.insert(tool.name().to_string(), tool); + } + + pub fn get(&self, name: &str) -> Option<&Arc> { + self.tools.get(name) + } + + #[allow(dead_code)] + pub fn len(&self) -> usize { + self.tools.len() + } + + pub fn is_empty(&self) -> bool { + self.tools.is_empty() + } + + /// Names in insertion / alphabetical order (BTreeMap preserves order). + pub fn names(&self) -> Vec<&str> { + self.tools.keys().map(|s| s.as_str()).collect() + } + + /// Retain only tools whose names appear in `allow`. An empty allow-list + /// means "keep everything". + pub fn retain(&mut self, allow: &[String]) { + if allow.is_empty() { + return; + } + let allow: std::collections::HashSet<&str> = allow.iter().map(|s| s.as_str()).collect(); + self.tools.retain(|k, _| allow.contains(k.as_str())); + } + + /// OpenAI-style tool specs for every registered tool. + pub fn as_openai_specs(&self) -> Vec { + self.tools + .values() + .map(|t| openai_tool_spec(t.as_ref())) + .collect() + } +} + +#[cfg(test)] +mod tests { + use super::*; + use serde_json::json; + + struct Echo; + #[async_trait] + impl Tool for Echo { + fn name(&self) -> &str { + "echo" + } + fn description(&self) -> &str { + "Echo the input string back." 
+ } + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": {"text": {"type": "string"}}, + "required": ["text"], + }) + } + async fn call(&self, args: Value) -> Result { + let t = args + .get("text") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'text'"))?; + Ok(t.to_string()) + } + } + + #[test] + fn registry_retains_allowlist() { + let mut r = ToolRegistry::new(); + r.insert(Arc::new(Echo)); + r.insert(Arc::new(RenamedEcho("alpha"))); + r.insert(Arc::new(RenamedEcho("beta"))); + assert_eq!(r.len(), 3); + r.retain(&["alpha".to_string(), "echo".to_string()]); + assert_eq!(r.len(), 2); + assert!(r.get("alpha").is_some()); + assert!(r.get("echo").is_some()); + assert!(r.get("beta").is_none()); + } + + #[test] + fn empty_retain_is_noop() { + let mut r = ToolRegistry::new(); + r.insert(Arc::new(Echo)); + r.retain(&[]); + assert_eq!(r.len(), 1); + } + + #[test] + fn openai_spec_shape() { + let spec = openai_tool_spec(&Echo); + assert_eq!(spec["type"], "function"); + assert_eq!(spec["function"]["name"], "echo"); + assert!(spec["function"]["parameters"]["properties"]["text"].is_object()); + } + + #[tokio::test] + async fn echo_tool_runs() { + let tool = Echo; + let out = tool.call(json!({"text": "hi"})).await.unwrap(); + assert_eq!(out, "hi"); + } + + struct RenamedEcho(&'static str); + #[async_trait] + impl Tool for RenamedEcho { + fn name(&self) -> &str { + self.0 + } + fn description(&self) -> &str { + "renamed echo" + } + fn parameters_schema(&self) -> Value { + json!({"type": "object"}) + } + async fn call(&self, _args: Value) -> Result { + Ok(self.0.to_string()) + } + } +} diff --git a/crates/google-workspace-cli/src/agent/tools/browser.rs b/crates/google-workspace-cli/src/agent/tools/browser.rs new file mode 100644 index 00000000..729aba12 --- /dev/null +++ b/crates/google-workspace-cli/src/agent/tools/browser.rs @@ -0,0 +1,247 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, 
Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Browser-control tool (feature-gated on `browser-agent`). +//! +//! When the `browser-agent` feature is enabled (and a local Chrome/Chromium +//! binary is on PATH), this tool uses `headless_chrome` to drive a real +//! browser: navigate, click selectors, read text, and screenshot. When the +//! feature is disabled we still register a stub so the model receives an +//! informative error instead of an unexplained "no such tool" refusal. +//! +//! The browser instance is reused across tool calls within a single agent +//! session. + +use async_trait::async_trait; +use serde_json::{json, Value}; + +use crate::agent::tool::{Tool, ToolError}; + +#[cfg(feature = "browser-agent")] +mod backend { + use super::*; + use std::sync::Arc; + use tokio::sync::Mutex; + + pub struct BrowserBackend { + // `headless_chrome::Browser` is Send+Sync but not clonable; we wrap + // it behind a Mutex so concurrent tool calls serialise. + inner: Arc>>, + } + + impl BrowserBackend { + pub fn new() -> Self { + Self { + inner: Arc::new(Mutex::new(None)), + } + } + + async fn browser(&self) -> Result { + let mut guard = self.inner.lock().await; + if guard.is_none() { + let b = tokio::task::spawn_blocking(headless_chrome::Browser::default) + .await + .map_err(|e| ToolError::runtime(format!("browser spawn: {e}")))? 
+ .map_err(|e| ToolError::runtime(format!("browser launch: {e}")))?; + *guard = Some(b); + } + Ok(guard.as_ref().unwrap().clone()) + } + + pub async fn run(&self, action: &str, args: &Value) -> Result { + let browser = self.browser().await?; + let args = args.clone(); + let action = action.to_string(); + tokio::task::spawn_blocking(move || run_blocking(&browser, &action, &args)) + .await + .map_err(|e| ToolError::runtime(format!("browser task: {e}")))? + } + } + + fn run_blocking( + browser: &headless_chrome::Browser, + action: &str, + args: &Value, + ) -> Result { + let tab = browser + .new_tab() + .map_err(|e| ToolError::runtime(format!("new_tab: {e}")))?; + match action { + "navigate" => { + let url = args + .get("url") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'url'"))?; + tab.navigate_to(url) + .map_err(|e| ToolError::runtime(format!("navigate: {e}")))?; + tab.wait_until_navigated() + .map_err(|e| ToolError::runtime(format!("wait: {e}")))?; + Ok(format!("navigated to {url}")) + } + "click" => { + let url = args.get("url").and_then(|v| v.as_str()); + let selector = args + .get("selector") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'selector'"))?; + if let Some(u) = url { + tab.navigate_to(u) + .map_err(|e| ToolError::runtime(format!("navigate: {e}")))?; + tab.wait_until_navigated() + .map_err(|e| ToolError::runtime(format!("wait: {e}")))?; + } + let el = tab + .wait_for_element(selector) + .map_err(|e| ToolError::runtime(format!("element: {e}")))?; + el.click() + .map_err(|e| ToolError::runtime(format!("click: {e}")))?; + Ok(format!("clicked {selector}")) + } + "text" => { + let url = args + .get("url") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'url'"))?; + tab.navigate_to(url) + .map_err(|e| ToolError::runtime(format!("navigate: {e}")))?; + tab.wait_until_navigated() + .map_err(|e| ToolError::runtime(format!("wait: {e}")))?; + let body = tab + .wait_for_element("body") 
+ .map_err(|e| ToolError::runtime(format!("body: {e}")))?; + let text = body + .get_inner_text() + .map_err(|e| ToolError::runtime(format!("inner text: {e}")))?; + Ok(truncate(&text, 4000)) + } + other => Err(ToolError::runtime(format!( + "unknown browser action '{other}'" + ))), + } + } + + fn truncate(s: &str, max: usize) -> String { + if s.len() <= max { + s.to_string() + } else { + format!("{}…(truncated {} bytes)", &s[..max], s.len() - max) + } + } +} + +#[cfg(not(feature = "browser-agent"))] +mod backend { + use super::*; + + pub struct BrowserBackend; + + impl BrowserBackend { + pub fn new() -> Self { + Self + } + + pub async fn run(&self, _action: &str, _args: &Value) -> Result { + Err(ToolError::runtime( + "browser tool is not available: rebuild `gws` with --features browser-agent and \ + ensure a Chrome/Chromium binary is on PATH", + )) + } + } +} + +pub struct BrowserTool { + backend: backend::BrowserBackend, +} + +impl BrowserTool { + pub fn new() -> Self { + Self { + backend: backend::BrowserBackend::new(), + } + } +} + +impl Default for BrowserTool { + fn default() -> Self { + Self::new() + } +} + +#[async_trait] +impl Tool for BrowserTool { + fn name(&self) -> &str { + "browser" + } + + fn description(&self) -> &str { + "Drive a headless browser. Actions: `navigate` (open a URL), `click` \ + (optionally navigate, then click a CSS selector), `text` (navigate \ + and return the page body text)." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "action": {"type": "string", "enum": ["navigate", "click", "text"]}, + "url": {"type": "string", "description": "Target URL."}, + "selector": {"type": "string", "description": "CSS selector (for action=click)."} + }, + "required": ["action"] + }) + } + + async fn call(&self, args: Value) -> Result { + let action = args + .get("action") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'action'"))? 
+ .to_string(); + self.backend.run(&action, &args).await + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn schema_has_action() { + let t = BrowserTool::new(); + let s = t.parameters_schema(); + assert_eq!(s["required"][0], "action"); + } + + #[tokio::test] + async fn missing_action_errors() { + let t = BrowserTool::new(); + let err = t.call(json!({})).await.unwrap_err(); + assert!(err.to_string().contains("action")); + } + + // When the `browser-agent` feature is disabled (default in CI), the + // stub backend must return an informative error instead of silently + // succeeding. + #[cfg(not(feature = "browser-agent"))] + #[tokio::test] + async fn stub_is_helpful() { + let t = BrowserTool::new(); + let err = t + .call(json!({"action": "navigate", "url": "https://example.com"})) + .await + .unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("browser-agent")); + } +} diff --git a/crates/google-workspace-cli/src/agent/tools/gws.rs b/crates/google-workspace-cli/src/agent/tools/gws.rs new file mode 100644 index 00000000..1b3724a1 --- /dev/null +++ b/crates/google-workspace-cli/src/agent/tools/gws.rs @@ -0,0 +1,189 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Google Workspace tool. Lets the LLM call the `gws` binary itself +//! (self-hosted) to reach every Google Workspace API the CLI already +//! exposes. +//! +//! 
Arguments are validated to prevent shell injection (we never go through +//! a shell) and to keep the model from requesting interactive auth flows. + +use async_trait::async_trait; +use serde_json::{json, Value}; +use tokio::process::Command; + +use crate::agent::tool::{Tool, ToolError}; + +/// Arguments that must not appear verbatim — they can trigger interactive +/// flows or shell out further. +const DENIED_ARG_PREFIXES: &[&str] = &["--upload", "--output"]; + +pub struct GwsTool { + /// Path (or name) of the `gws` binary. Defaults to the running executable. + bin: String, +} + +impl GwsTool { + pub fn new() -> Self { + let bin = std::env::current_exe() + .ok() + .and_then(|p| p.to_str().map(String::from)) + .unwrap_or_else(|| "gws".to_string()); + Self { bin } + } +} + +impl Default for GwsTool { + fn default() -> Self { + Self::new() + } +} + +#[async_trait] +impl Tool for GwsTool { + fn name(&self) -> &str { + "google_workspace" + } + + fn description(&self) -> &str { + "Call the Google Workspace CLI (`gws`). Provide a list of CLI args, \ + e.g. ['drive', 'files', 'list', '--pageSize', '5']. Uses the \ + already-authenticated user; stdout JSON is returned verbatim. Do \ + not pass --upload or --output (agent sandbox)." 
+ } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "args": { + "type": "array", + "items": {"type": "string"}, + "description": "Argument vector passed to `gws` (no shell metacharacters).", + "minItems": 1 + } + }, + "required": ["args"] + }) + } + + async fn call(&self, args: Value) -> Result { + let list = args + .get("args") + .and_then(|v| v.as_array()) + .ok_or_else(|| ToolError::runtime("missing 'args' array"))?; + if list.is_empty() { + return Err(ToolError::runtime("'args' must not be empty")); + } + let mut argv: Vec = Vec::with_capacity(list.len()); + for a in list { + let s = a + .as_str() + .ok_or_else(|| ToolError::runtime("'args' entries must be strings"))?; + for deny in DENIED_ARG_PREFIXES { + if s == *deny || s.starts_with(&format!("{deny}=")) { + return Err(ToolError::runtime(format!( + "argument '{s}' is not allowed from agent tool calls" + ))); + } + } + argv.push(s.to_string()); + } + // Disallow recursive agent self-invocation to avoid runaway loops. 
+ if argv.first().map(|s| s.as_str()) == Some("agent") { + return Err(ToolError::runtime("recursive agent invocation is blocked")); + } + + let output = Command::new(&self.bin) + .args(&argv) + .env("NO_COLOR", "1") + .output() + .await + .map_err(|e| ToolError::runtime(format!("gws spawn failed: {e}")))?; + + let stdout = String::from_utf8_lossy(&output.stdout).into_owned(); + let stderr = String::from_utf8_lossy(&output.stderr).into_owned(); + let code = output.status.code().unwrap_or(-1); + if output.status.success() { + Ok(stdout) + } else { + Err(ToolError::runtime(format!( + "gws exit {code}\nstderr: {}\nstdout: {}", + truncate(&stderr, 800), + truncate(&stdout, 800) + ))) + } + } +} + +fn truncate(s: &str, max: usize) -> String { + if s.len() <= max { + s.to_string() + } else { + format!("{}…", &s[..max]) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[tokio::test] + async fn rejects_missing_args() { + let t = GwsTool::new(); + let err = t.call(json!({})).await.unwrap_err(); + assert!(err.to_string().contains("missing 'args'")); + } + + #[tokio::test] + async fn rejects_empty_args() { + let t = GwsTool::new(); + let err = t.call(json!({"args": []})).await.unwrap_err(); + assert!(err.to_string().contains("must not be empty")); + } + + #[tokio::test] + async fn rejects_non_string_args() { + let t = GwsTool::new(); + let err = t.call(json!({"args": [1, 2, 3]})).await.unwrap_err(); + assert!(err.to_string().contains("must be strings")); + } + + #[tokio::test] + async fn blocks_denied_prefixes() { + let t = GwsTool::new(); + let err = t + .call(json!({"args": ["drive", "files", "list", "--upload=secret"]})) + .await + .unwrap_err(); + assert!(err.to_string().contains("not allowed")); + } + + #[tokio::test] + async fn blocks_recursive_agent() { + let t = GwsTool::new(); + let err = t + .call(json!({"args": ["agent", "--prompt", "hi"]})) + .await + .unwrap_err(); + assert!(err.to_string().contains("recursive")); + } + + #[test] + fn schema_requires_args() 
{ + let t = GwsTool::new(); + let s = t.parameters_schema(); + assert_eq!(s["required"][0], "args"); + } +} diff --git a/crates/google-workspace-cli/src/agent/tools/mod.rs b/crates/google-workspace-cli/src/agent/tools/mod.rs new file mode 100644 index 00000000..7566e3c4 --- /dev/null +++ b/crates/google-workspace-cli/src/agent/tools/mod.rs @@ -0,0 +1,57 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Concrete tool implementations exposed to the agent. +//! +//! Each tool is self-contained and only registered when its required +//! credentials are present. This keeps the model's tool list small and +//! avoids offering capabilities that would predictably fail. + +pub mod browser; +pub mod gws; +pub mod notion; +pub mod supabase; +pub mod zapier; + +use std::sync::Arc; + +use crate::agent::config::AgentConfig; +use crate::agent::tool::{Tool, ToolRegistry}; + +/// Build the default registry by probing the environment for credentials +/// and registering the tools that can actually run. +pub fn default_registry(config: &AgentConfig) -> ToolRegistry { + let mut reg = ToolRegistry::new(); + + // Google Workspace: always available; the `gws` binary is us. 
+ reg.insert(Arc::new(gws::GwsTool::new()) as Arc); + + if let Some(t) = notion::NotionTool::from_env() { + reg.insert(Arc::new(t) as Arc); + } + if let Some(t) = zapier::ZapierWebhookTool::from_env() { + reg.insert(Arc::new(t) as Arc); + } + if let Some(t) = zapier::ZapierNlaTool::from_env() { + reg.insert(Arc::new(t) as Arc); + } + if let Some(t) = supabase::SupabaseTool::from_config(config) { + reg.insert(Arc::new(t) as Arc); + } + // Browser: registers a stub when the `browser-agent` feature is + // disabled so the model gets a clear error rather than silent absence. + reg.insert(Arc::new(browser::BrowserTool::new()) as Arc); + + reg +} diff --git a/crates/google-workspace-cli/src/agent/tools/notion.rs b/crates/google-workspace-cli/src/agent/tools/notion.rs new file mode 100644 index 00000000..529e1b5a --- /dev/null +++ b/crates/google-workspace-cli/src/agent/tools/notion.rs @@ -0,0 +1,259 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Notion tool. Wraps a small, frequently-used subset of the Notion REST +//! API: search, retrieve a page's properties, and append plain-text blocks +//! to a page. +//! +//! Authentication is a Notion integration token (`NOTION_API_KEY` or +//! `NOTION_TOKEN`). Users invite the integration to the pages/databases +//! they want the agent to touch. 
+ +use async_trait::async_trait; +use serde_json::{json, Value}; + +use crate::agent::tool::{Tool, ToolError}; + +const NOTION_API_VERSION: &str = "2022-06-28"; +const NOTION_BASE: &str = "https://api.notion.com/v1"; + +pub struct NotionTool { + client: reqwest::Client, + token: String, +} + +impl NotionTool { + pub fn from_env() -> Option { + let token = std::env::var("NOTION_API_KEY") + .ok() + .or_else(|| std::env::var("NOTION_TOKEN").ok())?; + if token.trim().is_empty() { + return None; + } + let client = crate::client::shared_client().ok()?; + Some(Self { + client, + token: token.trim().to_string(), + }) + } + + async fn search(&self, query: &str, page_size: u32) -> Result { + let body = json!({"query": query, "page_size": page_size.min(50)}); + self.post("/search", &body).await + } + + async fn retrieve_page(&self, page_id: &str) -> Result { + let path = format!("/pages/{}", percent_encode_path(page_id)); + self.get(&path).await + } + + async fn append_text(&self, page_id: &str, text: &str) -> Result { + let body = json!({ + "children": [{ + "object": "block", + "type": "paragraph", + "paragraph": { + "rich_text": [{ + "type": "text", + "text": {"content": text} + }] + } + }] + }); + let path = format!("/blocks/{}/children", percent_encode_path(page_id)); + self.patch(&path, &body).await + } + + async fn get(&self, path: &str) -> Result { + let url = format!("{NOTION_BASE}{path}"); + self.dispatch(self.client.get(url)).await + } + + async fn post(&self, path: &str, body: &Value) -> Result { + let url = format!("{NOTION_BASE}{path}"); + self.dispatch(self.client.post(url).json(body)).await + } + + async fn patch(&self, path: &str, body: &Value) -> Result { + let url = format!("{NOTION_BASE}{path}"); + self.dispatch(self.client.patch(url).json(body)).await + } + + async fn dispatch(&self, req: reqwest::RequestBuilder) -> Result { + let resp = req + .bearer_auth(&self.token) + .header("Notion-Version", NOTION_API_VERSION) + .send() + .await + .map_err(|e| 
ToolError::runtime(format!("notion request failed: {e}")))?; + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + if !status.is_success() { + return Err(ToolError::runtime(format!( + "notion HTTP {status}: {}", + truncate(&text, 500) + ))); + } + Ok(text) + } +} + +#[async_trait] +impl Tool for NotionTool { + fn name(&self) -> &str { + "notion" + } + + fn description(&self) -> &str { + "Search, read, and append to Notion pages. Supports actions: `search` (by text), \ + `retrieve_page` (by page id), and `append_text` (append a paragraph block to a page)." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "action": { + "type": "string", + "enum": ["search", "retrieve_page", "append_text"], + "description": "Which Notion operation to perform." + }, + "query": {"type": "string", "description": "Search query (for action=search)."}, + "page_id": {"type": "string", "description": "Notion page id (32-char UUID, dashes optional)."}, + "text": {"type": "string", "description": "Text to append as a paragraph (for action=append_text)."}, + "page_size": {"type": "integer", "description": "Max search results (1-50).", "minimum": 1, "maximum": 50} + }, + "required": ["action"] + }) + } + + async fn call(&self, args: Value) -> Result { + let action = args + .get("action") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing required 'action'"))?; + match action { + "search" => { + let q = args + .get("query") + .and_then(|v| v.as_str()) + .unwrap_or("") + .to_string(); + let ps = args.get("page_size").and_then(|v| v.as_u64()).unwrap_or(10) as u32; + self.search(&q, ps).await + } + "retrieve_page" => { + let id = args + .get("page_id") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'page_id'"))?; + self.retrieve_page(id).await + } + "append_text" => { + let id = args + .get("page_id") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 
'page_id'"))?; + let t = args + .get("text") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'text'"))?; + self.append_text(id, t).await + } + other => Err(ToolError::runtime(format!( + "unknown notion action '{other}' — valid: search, retrieve_page, append_text" + ))), + } + } +} + +/// Percent-encode a path segment, allowing unreserved characters + hyphens +/// (Notion page IDs are UUIDs, sometimes with dashes). +fn percent_encode_path(s: &str) -> String { + let mut out = String::with_capacity(s.len()); + for b in s.bytes() { + if b.is_ascii_alphanumeric() || matches!(b, b'-' | b'_' | b'.' | b'~') { + out.push(b as char); + } else { + out.push_str(&format!("%{b:02X}")); + } + } + out +} + +fn truncate(s: &str, max: usize) -> String { + if s.len() <= max { + s.to_string() + } else { + format!("{}…", &s[..max]) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn schema_has_required_action() { + let client = reqwest::Client::new(); + let t = NotionTool { + client, + token: "x".to_string(), + }; + let s = t.parameters_schema(); + assert_eq!(s["required"][0], "action"); + let actions = s["properties"]["action"]["enum"].as_array().unwrap(); + assert_eq!(actions.len(), 3); + } + + #[tokio::test] + async fn unknown_action_errors() { + let client = reqwest::Client::new(); + let t = NotionTool { + client, + token: "x".to_string(), + }; + let err = t.call(json!({"action": "delete"})).await.unwrap_err(); + assert!(err.to_string().contains("unknown notion action")); + } + + #[tokio::test] + async fn missing_action_errors() { + let client = reqwest::Client::new(); + let t = NotionTool { + client, + token: "x".to_string(), + }; + let err = t.call(json!({})).await.unwrap_err(); + assert!(err.to_string().contains("action")); + } + + #[test] + #[serial_test::serial] + fn from_env_needs_token() { + std::env::remove_var("NOTION_API_KEY"); + std::env::remove_var("NOTION_TOKEN"); + assert!(NotionTool::from_env().is_none()); + 
std::env::set_var("NOTION_API_KEY", "secret-1"); + let t = NotionTool::from_env().unwrap(); + assert_eq!(t.token, "secret-1"); + std::env::remove_var("NOTION_API_KEY"); + } + + #[test] + fn percent_encode_basic() { + assert_eq!(percent_encode_path("abc-123"), "abc-123"); + assert_eq!(percent_encode_path("a b"), "a%20b"); + } +} diff --git a/crates/google-workspace-cli/src/agent/tools/supabase.rs b/crates/google-workspace-cli/src/agent/tools/supabase.rs new file mode 100644 index 00000000..9f76f585 --- /dev/null +++ b/crates/google-workspace-cli/src/agent/tools/supabase.rs @@ -0,0 +1,431 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Supabase (PostgREST) tool. Exposes `select`, `insert`, `update`, and +//! `delete` against arbitrary tables, plus an `rpc` entry for calling +//! Postgres functions. +//! +//! The caller must supply a table name; we validate it against a strict +//! identifier pattern (`[a-zA-Z_][a-zA-Z0-9_]*`) to prevent URL injection +//! before embedding it into the REST path. + +use async_trait::async_trait; +use serde_json::{json, Value}; + +use crate::agent::config::AgentConfig; +use crate::agent::tool::{Tool, ToolError}; + +pub struct SupabaseTool { + client: reqwest::Client, + base_url: String, + api_key: String, +} + +impl SupabaseTool { + pub fn from_config(config: &AgentConfig) -> Option { + let base = config + .supabase_url + .as_ref()? 
+ .trim_end_matches('/') + .to_string(); + let key = config.supabase_key.as_ref()?.clone(); + if base.is_empty() || key.is_empty() { + return None; + } + let client = crate::client::shared_client().ok()?; + Some(Self { + client, + base_url: base, + api_key: key, + }) + } + + fn rest_url(&self, table: &str) -> String { + format!("{}/rest/v1/{}", self.base_url, table) + } + + fn rpc_url(&self, func: &str) -> String { + format!("{}/rest/v1/rpc/{}", self.base_url, func) + } + + async fn handle_resp(resp: reqwest::Response) -> Result { + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + if !status.is_success() { + return Err(ToolError::runtime(format!( + "supabase HTTP {status}: {text}" + ))); + } + Ok(text) + } +} + +/// Validate that a string is safe to embed in a PostgREST URL as a table +/// or column identifier. +fn validate_identifier(s: &str, label: &str) -> Result<(), ToolError> { + if s.is_empty() { + return Err(ToolError::runtime(format!("{label} must not be empty"))); + } + let mut chars = s.chars(); + let first = chars.next().unwrap(); + if !(first.is_ascii_alphabetic() || first == '_') { + return Err(ToolError::runtime(format!( + "{label} must start with an ASCII letter or underscore" + ))); + } + for c in chars { + if !(c.is_ascii_alphanumeric() || c == '_') { + return Err(ToolError::runtime(format!( + "{label} may only contain ASCII alphanumerics or underscores" + ))); + } + } + Ok(()) +} + +/// Build a PostgREST query string from a `filter` object of the form +/// `{"column": {"op": "eq", "value": "x"}}`. Falsy `op` defaults to `eq`. 
+fn build_query( + filter: Option<&Value>, + limit: Option, + order: Option<&str>, +) -> Result { + let mut parts: Vec = Vec::new(); + if let Some(Value::Object(map)) = filter { + for (col, spec) in map { + validate_identifier(col, "filter column")?; + let (op, value) = match spec { + Value::Object(m) => { + let op = m + .get("op") + .and_then(|v| v.as_str()) + .unwrap_or("eq") + .to_string(); + let v = m.get("value").cloned().unwrap_or(Value::Null); + (op, v) + } + other => ("eq".to_string(), other.clone()), + }; + if !matches!( + op.as_str(), + "eq" | "neq" | "gt" | "gte" | "lt" | "lte" | "like" | "ilike" | "is" | "in" + ) { + return Err(ToolError::runtime(format!( + "unsupported filter operator '{op}'" + ))); + } + let value_str = match &value { + Value::String(s) => s.clone(), + Value::Null => "null".to_string(), + other => other.to_string(), + }; + parts.push(format!("{col}={op}.{}", encode(&value_str))); + } + } + if let Some(n) = limit { + parts.push(format!("limit={n}")); + } + if let Some(o) = order { + // Simple allow-list: `col` or `col.asc` / `col.desc` + let mut split = o.splitn(2, '.'); + let col = split.next().unwrap_or(""); + validate_identifier(col, "order column")?; + if let Some(dir) = split.next() { + if !matches!(dir, "asc" | "desc") { + return Err(ToolError::runtime(format!( + "unsupported order direction '{dir}'" + ))); + } + } + parts.push(format!("order={}", encode(o))); + } + Ok(parts.join("&")) +} + +fn encode(s: &str) -> String { + let mut out = String::with_capacity(s.len()); + for b in s.bytes() { + if b.is_ascii_alphanumeric() || matches!(b, b'-' | b'_' | b'.' | b'~' | b',' | b':') { + out.push(b as char); + } else { + out.push_str(&format!("%{b:02X}")); + } + } + out +} + +#[async_trait] +impl Tool for SupabaseTool { + fn name(&self) -> &str { + "supabase" + } + + fn description(&self) -> &str { + "Read and write Supabase (Postgres) rows via PostgREST. Actions: \ + `select`, `insert`, `update`, `delete`, `rpc`. 
Identifiers are \ + validated; filters use {op, value} pairs." + } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "action": {"type": "string", "enum": ["select", "insert", "update", "delete", "rpc"]}, + "table": {"type": "string", "description": "Table name (identifier only)."}, + "function": {"type": "string", "description": "RPC function name (for action=rpc)."}, + "filter": { + "type": "object", + "description": "Column filters, e.g. {\"id\": {\"op\": \"eq\", \"value\": \"123\"}}.", + "additionalProperties": true + }, + "row": {"type": "object", "description": "Row body for insert/update.", "additionalProperties": true}, + "limit": {"type": "integer", "minimum": 1, "maximum": 1000}, + "order": {"type": "string", "description": "Order clause like 'created_at.desc'."}, + "params": {"type": "object", "description": "Params for action=rpc.", "additionalProperties": true} + }, + "required": ["action"] + }) + } + + async fn call(&self, args: Value) -> Result { + let action = args + .get("action") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'action'"))?; + let filter = args.get("filter"); + let limit = args.get("limit").and_then(|v| v.as_u64()); + let order = args.get("order").and_then(|v| v.as_str()); + let query = build_query(filter, limit, order)?; + + match action { + "select" => { + let table = args + .get("table") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'table'"))?; + validate_identifier(table, "table")?; + let url = if query.is_empty() { + format!("{}?select=*", self.rest_url(table)) + } else { + format!("{}?select=*&{}", self.rest_url(table), query) + }; + let resp = self + .client + .get(url) + .header("apikey", &self.api_key) + .bearer_auth(&self.api_key) + .send() + .await + .map_err(|e| ToolError::runtime(format!("supabase select: {e}")))?; + Self::handle_resp(resp).await + } + "insert" => { + let table = args + .get("table") + .and_then(|v| 
v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'table'"))?; + validate_identifier(table, "table")?; + let row = args + .get("row") + .cloned() + .ok_or_else(|| ToolError::runtime("missing 'row'"))?; + let body = match row { + Value::Array(_) => row, + other => Value::Array(vec![other]), + }; + let resp = self + .client + .post(self.rest_url(table)) + .header("apikey", &self.api_key) + .bearer_auth(&self.api_key) + .header("Content-Type", "application/json") + .header("Prefer", "return=representation") + .json(&body) + .send() + .await + .map_err(|e| ToolError::runtime(format!("supabase insert: {e}")))?; + Self::handle_resp(resp).await + } + "update" => { + let table = args + .get("table") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'table'"))?; + validate_identifier(table, "table")?; + if query.is_empty() { + return Err(ToolError::runtime( + "update requires a non-empty 'filter' (safety guard against full-table updates)", + )); + } + let row = args + .get("row") + .cloned() + .ok_or_else(|| ToolError::runtime("missing 'row'"))?; + let url = format!("{}?{}", self.rest_url(table), query); + let resp = self + .client + .patch(url) + .header("apikey", &self.api_key) + .bearer_auth(&self.api_key) + .header("Content-Type", "application/json") + .header("Prefer", "return=representation") + .json(&row) + .send() + .await + .map_err(|e| ToolError::runtime(format!("supabase update: {e}")))?; + Self::handle_resp(resp).await + } + "delete" => { + let table = args + .get("table") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'table'"))?; + validate_identifier(table, "table")?; + if query.is_empty() { + return Err(ToolError::runtime( + "delete requires a non-empty 'filter' (safety guard)", + )); + } + let url = format!("{}?{}", self.rest_url(table), query); + let resp = self + .client + .delete(url) + .header("apikey", &self.api_key) + .bearer_auth(&self.api_key) + .send() + .await + .map_err(|e| 
ToolError::runtime(format!("supabase delete: {e}")))?; + Self::handle_resp(resp).await + } + "rpc" => { + let func = args + .get("function") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'function'"))?; + validate_identifier(func, "function")?; + let params = args + .get("params") + .cloned() + .unwrap_or_else(|| Value::Object(Default::default())); + let resp = self + .client + .post(self.rpc_url(func)) + .header("apikey", &self.api_key) + .bearer_auth(&self.api_key) + .json(¶ms) + .send() + .await + .map_err(|e| ToolError::runtime(format!("supabase rpc: {e}")))?; + Self::handle_resp(resp).await + } + other => Err(ToolError::runtime(format!( + "unknown supabase action '{other}'" + ))), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn identifier_accepts_plain() { + assert!(validate_identifier("users", "table").is_ok()); + assert!(validate_identifier("_private_1", "x").is_ok()); + } + + #[test] + fn identifier_rejects_injection() { + assert!(validate_identifier("users;drop", "t").is_err()); + assert!(validate_identifier("users table", "t").is_err()); + assert!(validate_identifier("", "t").is_err()); + assert!(validate_identifier("1users", "t").is_err()); + } + + #[test] + fn build_query_default_op_is_eq() { + let q = build_query(Some(&json!({"id": "abc"})), None, None).unwrap(); + assert_eq!(q, "id=eq.abc"); + } + + #[test] + fn build_query_with_op_and_limit_order() { + let q = build_query( + Some(&json!({"age": {"op": "gte", "value": 18}})), + Some(25), + Some("created_at.desc"), + ) + .unwrap(); + assert!(q.contains("age=gte.18")); + assert!(q.contains("limit=25")); + assert!(q.contains("order=created_at.desc")); + } + + #[test] + fn build_query_rejects_bad_operator() { + let err = build_query( + Some(&json!({"x": {"op": "; drop", "value": 1}})), + None, + None, + ) + .unwrap_err(); + assert!(err.to_string().contains("unsupported filter operator")); + } + + #[test] + fn build_query_rejects_bad_order_direction() { + 
let err = build_query(None, None, Some("x.sideways")).unwrap_err(); + assert!(err.to_string().contains("order direction")); + } + + #[tokio::test] + async fn update_without_filter_is_refused() { + let tool = SupabaseTool { + client: reqwest::Client::new(), + base_url: "https://example.test".to_string(), + api_key: "x".to_string(), + }; + let err = tool + .call(json!({"action": "update", "table": "users", "row": {"a": 1}})) + .await + .unwrap_err(); + assert!(err.to_string().contains("non-empty 'filter'")); + } + + #[tokio::test] + async fn delete_without_filter_is_refused() { + let tool = SupabaseTool { + client: reqwest::Client::new(), + base_url: "https://example.test".to_string(), + api_key: "x".to_string(), + }; + let err = tool + .call(json!({"action": "delete", "table": "users"})) + .await + .unwrap_err(); + assert!(err.to_string().contains("safety guard")); + } + + #[tokio::test] + async fn unknown_action_errors() { + let tool = SupabaseTool { + client: reqwest::Client::new(), + base_url: "https://example.test".to_string(), + api_key: "x".to_string(), + }; + let err = tool.call(json!({"action": "truncate"})).await.unwrap_err(); + assert!(err.to_string().contains("unknown supabase action")); + } +} diff --git a/crates/google-workspace-cli/src/agent/tools/zapier.rs b/crates/google-workspace-cli/src/agent/tools/zapier.rs new file mode 100644 index 00000000..97f830b1 --- /dev/null +++ b/crates/google-workspace-cli/src/agent/tools/zapier.rs @@ -0,0 +1,301 @@ +// Copyright 2026 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +//! Zapier integration. Two tools: +//! +//! * [`ZapierWebhookTool`] — `POST`s JSON payloads to a "Catch Hook" Zap. +//! Registered when `ZAPIER_WEBHOOK_URL` is set; additional named hooks +//! can be configured by setting `ZAPIER_WEBHOOK__URL`. +//! +//! * [`ZapierNlaTool`] — Natural-Language Actions exposed through Zapier's +//! AI Actions API. Registered when `ZAPIER_NLA_API_KEY` is set. + +use async_trait::async_trait; +use serde_json::{json, Value}; +use std::collections::BTreeMap; + +use crate::agent::tool::{Tool, ToolError}; + +const ZAPIER_NLA_BASE: &str = "https://actions.zapier.com/api/v1"; + +pub struct ZapierWebhookTool { + client: reqwest::Client, + /// Map hook name → URL. The primary hook is under `default`. + hooks: BTreeMap, +} + +impl ZapierWebhookTool { + pub fn from_env() -> Option { + let mut hooks = BTreeMap::new(); + if let Ok(v) = std::env::var("ZAPIER_WEBHOOK_URL") { + let v = v.trim().to_string(); + if !v.is_empty() { + hooks.insert("default".to_string(), v); + } + } + // Discover ZAPIER_WEBHOOK__URL variants. + for (k, v) in std::env::vars() { + if let Some(rest) = k.strip_prefix("ZAPIER_WEBHOOK_") { + if let Some(name) = rest.strip_suffix("_URL") { + let name = name.to_ascii_lowercase(); + if name == "default" { + continue; + } + let v = v.trim().to_string(); + if !v.is_empty() { + hooks.insert(name, v); + } + } + } + } + if hooks.is_empty() { + return None; + } + let client = crate::client::shared_client().ok()?; + Some(Self { client, hooks }) + } +} + +#[async_trait] +impl Tool for ZapierWebhookTool { + fn name(&self) -> &str { + "zapier_webhook" + } + + fn description(&self) -> &str { + "Trigger a Zapier 'Catch Hook' by POSTing a JSON payload. Use `hook` to \ + select a named webhook (defaults to 'default'); put the payload in `data`." 
+ } + + fn parameters_schema(&self) -> Value { + let names: Vec<&str> = self.hooks.keys().map(|s| s.as_str()).collect(); + json!({ + "type": "object", + "properties": { + "hook": { + "type": "string", + "description": "Named hook to invoke.", + "enum": names, + "default": "default" + }, + "data": { + "type": "object", + "description": "Arbitrary JSON payload forwarded to Zapier.", + "additionalProperties": true + } + }, + "required": ["data"] + }) + } + + async fn call(&self, args: Value) -> Result { + let hook = args + .get("hook") + .and_then(|v| v.as_str()) + .unwrap_or("default"); + let url = self.hooks.get(hook).ok_or_else(|| { + ToolError::runtime(format!( + "unknown hook '{hook}' — available: {}", + self.hooks.keys().cloned().collect::>().join(", ") + )) + })?; + let data = args + .get("data") + .cloned() + .unwrap_or_else(|| Value::Object(Default::default())); + let resp = self + .client + .post(url) + .json(&data) + .send() + .await + .map_err(|e| ToolError::runtime(format!("zapier webhook: {e}")))?; + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + if !status.is_success() { + return Err(ToolError::runtime(format!( + "zapier webhook HTTP {status}: {text}" + ))); + } + Ok(format!("status={status} body={text}")) + } +} + +pub struct ZapierNlaTool { + client: reqwest::Client, + api_key: String, +} + +impl ZapierNlaTool { + pub fn from_env() -> Option { + let key = std::env::var("ZAPIER_NLA_API_KEY").ok()?; + if key.trim().is_empty() { + return None; + } + let client = crate::client::shared_client().ok()?; + Some(Self { + client, + api_key: key.trim().to_string(), + }) + } +} + +#[async_trait] +impl Tool for ZapierNlaTool { + fn name(&self) -> &str { + "zapier_actions" + } + + fn description(&self) -> &str { + "Invoke Zapier Natural-Language Actions. Use action='list' to discover \ + exposed actions, or action='run' with action_id plus a plain-English \ + instruction." 
+ } + + fn parameters_schema(&self) -> Value { + json!({ + "type": "object", + "properties": { + "action": {"type": "string", "enum": ["list", "run"]}, + "action_id": {"type": "string", "description": "Zapier exposed action id (for action=run)."}, + "instructions": {"type": "string", "description": "Natural-language instructions describing what the action should do."}, + "params": {"type": "object", "description": "Optional structured overrides."} + }, + "required": ["action"] + }) + } + + async fn call(&self, args: Value) -> Result { + let action = args + .get("action") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'action'"))?; + match action { + "list" => { + let resp = self + .client + .get(format!("{ZAPIER_NLA_BASE}/exposed/")) + .header("x-api-key", &self.api_key) + .send() + .await + .map_err(|e| ToolError::runtime(format!("zapier list: {e}")))?; + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + if !status.is_success() { + return Err(ToolError::runtime(format!( + "zapier list HTTP {status}: {text}" + ))); + } + Ok(text) + } + "run" => { + let id = args + .get("action_id") + .and_then(|v| v.as_str()) + .ok_or_else(|| ToolError::runtime("missing 'action_id'"))?; + let instructions = args + .get("instructions") + .and_then(|v| v.as_str()) + .unwrap_or(""); + let mut body = json!({"instructions": instructions}); + if let Some(params) = args.get("params") { + body["params"] = params.clone(); + } + let resp = self + .client + .post(format!("{ZAPIER_NLA_BASE}/exposed/{id}/execute/")) + .header("x-api-key", &self.api_key) + .json(&body) + .send() + .await + .map_err(|e| ToolError::runtime(format!("zapier run: {e}")))?; + let status = resp.status(); + let text = resp.text().await.unwrap_or_default(); + if !status.is_success() { + return Err(ToolError::runtime(format!( + "zapier run HTTP {status}: {text}" + ))); + } + Ok(text) + } + other => Err(ToolError::runtime(format!( + "unknown zapier action '{other}'" 
+ ))), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + #[serial_test::serial] + fn webhook_from_env_requires_any_url() { + std::env::remove_var("ZAPIER_WEBHOOK_URL"); + // Clear any test-local custom hooks. + let to_clear: Vec = std::env::vars() + .map(|(k, _)| k) + .filter(|k| k.starts_with("ZAPIER_WEBHOOK_")) + .collect(); + for k in &to_clear { + std::env::remove_var(k); + } + assert!(ZapierWebhookTool::from_env().is_none()); + + std::env::set_var( + "ZAPIER_WEBHOOK_URL", + "https://hooks.zapier.com/hooks/catch/1/abc/", + ); + std::env::set_var("ZAPIER_WEBHOOK_REMINDERS_URL", "https://hooks.zapier.com/x"); + let t = ZapierWebhookTool::from_env().unwrap(); + assert!(t.hooks.contains_key("default")); + assert!(t.hooks.contains_key("reminders")); + std::env::remove_var("ZAPIER_WEBHOOK_URL"); + std::env::remove_var("ZAPIER_WEBHOOK_REMINDERS_URL"); + } + + #[tokio::test] + async fn webhook_rejects_unknown_hook() { + let client = reqwest::Client::new(); + let mut hooks = BTreeMap::new(); + hooks.insert("default".to_string(), "https://example.test".to_string()); + let tool = ZapierWebhookTool { client, hooks }; + let err = tool + .call(json!({"hook": "ghost", "data": {}})) + .await + .unwrap_err(); + assert!(err.to_string().contains("unknown hook")); + } + + #[tokio::test] + async fn nla_rejects_unknown_action() { + let tool = ZapierNlaTool { + client: reqwest::Client::new(), + api_key: "x".to_string(), + }; + let err = tool.call(json!({"action": "bogus"})).await.unwrap_err(); + assert!(err.to_string().contains("unknown zapier action")); + } + + #[tokio::test] + async fn nla_run_requires_action_id() { + let tool = ZapierNlaTool { + client: reqwest::Client::new(), + api_key: "x".to_string(), + }; + let err = tool.call(json!({"action": "run"})).await.unwrap_err(); + assert!(err.to_string().contains("action_id")); + } +} diff --git a/crates/google-workspace-cli/src/credential_store.rs b/crates/google-workspace-cli/src/credential_store.rs index 
ffc6db25..00db2ec8 100644 --- a/crates/google-workspace-cli/src/credential_store.rs +++ b/crates/google-workspace-cli/src/credential_store.rs @@ -482,12 +482,15 @@ mod tests { PlatformError, } + /// Optional callback invoked by the mock keyring whenever a value is stored. + type OnSetCallback = RefCell>>; + /// Mock keyring for testing `resolve_key()` without OS dependencies. struct MockKeyring { get_state: MockState, set_succeeds: bool, last_set: RefCell>, - on_set: RefCell>>, + on_set: OnSetCallback, } impl MockKeyring { diff --git a/crates/google-workspace-cli/src/executor.rs b/crates/google-workspace-cli/src/executor.rs index 46f31ac4..b3451bc9 100644 --- a/crates/google-workspace-cli/src/executor.rs +++ b/crates/google-workspace-cli/src/executor.rs @@ -1197,7 +1197,7 @@ mod tests { #[test] fn test_pagination_config_default() { let config = PaginationConfig::default(); - assert_eq!(config.page_all, false); + assert!(!config.page_all); assert_eq!(config.page_limit, 10); assert_eq!(config.page_delay_ms, 100); } @@ -1533,7 +1533,8 @@ mod tests { let file_content = b"Hello stream"; std::fs::write(&file_path, file_content).unwrap(); - let metadata = Some(json!({ "name": "small.txt" })); + let metadata_value = json!({ "name": "small.txt" }); + let metadata = Some(metadata_value.clone()); let file_size = file_content.len() as u64; let (_body, content_type, declared_len) = build_multipart_stream( @@ -1550,7 +1551,7 @@ mod tests { // Manually compute expected content length: // preamble = "--{boundary}\r\nContent-Type: application/json; charset=UTF-8\r\n\r\n{json}\r\n--{boundary}\r\nContent-Type: text/plain\r\n\r\n" // postamble = "\r\n--{boundary}--\r\n" - let metadata_json = serde_json::to_string(&metadata.unwrap()).unwrap(); + let metadata_json = serde_json::to_string(&metadata_value).unwrap(); let preamble = format!( "--{boundary}\r\nContent-Type: application/json; charset=UTF-8\r\n\r\n{metadata_json}\r\n\ --{boundary}\r\nContent-Type: text/plain\r\n\r\n" diff --git 
a/crates/google-workspace-cli/src/helpers/chat.rs b/crates/google-workspace-cli/src/helpers/chat.rs index 676dc78b..5955c7e3 100644 --- a/crates/google-workspace-cli/src/helpers/chat.rs +++ b/crates/google-workspace-cli/src/helpers/chat.rs @@ -202,8 +202,10 @@ mod tests { }, ); - let mut messages_res = RestResource::default(); - messages_res.methods = methods; + let messages_res = RestResource { + methods, + ..Default::default() + }; let mut spaces_res = RestResource::default(); spaces_res diff --git a/crates/google-workspace-cli/src/helpers/docs.rs b/crates/google-workspace-cli/src/helpers/docs.rs index d3ef7fa2..0f896e8a 100644 --- a/crates/google-workspace-cli/src/helpers/docs.rs +++ b/crates/google-workspace-cli/src/helpers/docs.rs @@ -173,8 +173,10 @@ mod tests { }, ); - let mut documents_res = RestResource::default(); - documents_res.methods = methods; + let documents_res = RestResource { + methods, + ..Default::default() + }; let mut resources = HashMap::new(); resources.insert("documents".to_string(), documents_res); diff --git a/crates/google-workspace-cli/src/helpers/gmail/reply.rs b/crates/google-workspace-cli/src/helpers/gmail/reply.rs index 6e5b8c21..7b7a5e6f 100644 --- a/crates/google-workspace-cli/src/helpers/gmail/reply.rs +++ b/crates/google-workspace-cli/src/helpers/gmail/reply.rs @@ -787,7 +787,7 @@ mod tests { .action(ArgAction::Append), ); let matches = cmd - .try_get_matches_from(&["test", "--message-id", "abc", "--body", "hi"]) + .try_get_matches_from(["test", "--message-id", "abc", "--body", "hi"]) .unwrap(); let config = parse_reply_args(&matches).unwrap(); assert!(config.remove.is_none()); diff --git a/crates/google-workspace-cli/src/helpers/script.rs b/crates/google-workspace-cli/src/helpers/script.rs index 11bcdebe..4b31db62 100644 --- a/crates/google-workspace-cli/src/helpers/script.rs +++ b/crates/google-workspace-cli/src/helpers/script.rs @@ -169,13 +169,7 @@ fn process_file(path: &Path) -> Result, GwsError> { 
filename.trim_end_matches(".js").trim_end_matches(".gs"), ), "html" => ("HTML", filename.trim_end_matches(".html")), - "json" => { - if filename == "appsscript.json" { - ("JSON", "appsscript") - } else { - return Ok(None); - } - } + "json" if filename == "appsscript.json" => ("JSON", "appsscript"), _ => return Ok(None), }; diff --git a/crates/google-workspace-cli/src/helpers/sheets.rs b/crates/google-workspace-cli/src/helpers/sheets.rs index 4357edec..d785b4d5 100644 --- a/crates/google-workspace-cli/src/helpers/sheets.rs +++ b/crates/google-workspace-cli/src/helpers/sheets.rs @@ -346,8 +346,10 @@ mod tests { }, ); - let mut values_res = RestResource::default(); - values_res.methods = methods; + let values_res = RestResource { + methods, + ..Default::default() + }; let mut spreadsheets_res = RestResource::default(); spreadsheets_res diff --git a/crates/google-workspace-cli/src/main.rs b/crates/google-workspace-cli/src/main.rs index 41dcc1e1..e349bda5 100644 --- a/crates/google-workspace-cli/src/main.rs +++ b/crates/google-workspace-cli/src/main.rs @@ -19,6 +19,7 @@ //! It supports deep schema validation, OAuth / Service Account authentication, //! interactive prompts, and integration with Model Armor. 
+mod agent; mod auth; pub(crate) mod auth_commands; mod client; @@ -138,6 +139,12 @@ async fn run() -> Result<(), GwsError> { return auth_commands::handle_auth_command(&auth_args).await; } + // Handle the `agent` command (terminal AI agent with tool use) + if first_arg == "agent" { + let agent_args: Vec = args.iter().skip(2).cloned().collect(); + return agent::handle_agent_command(&agent_args).await; + } + // Parse service name and optional version override let (api_name, version) = parse_service_and_version(&args, &first_arg)?; @@ -443,6 +450,7 @@ fn print_usage() { println!("USAGE:"); println!(" gws [sub-resource] [flags]"); println!(" gws schema [--resolve-refs]"); + println!(" gws agent [--prompt TEXT] [--provider openrouter|ollama] [--model M]"); println!(); println!("EXAMPLES:"); println!(" gws drive files list --params '{{\"pageSize\": 10}}'"); @@ -546,7 +554,7 @@ mod tests { .get_matches_from(vec!["test"]); let config = parse_pagination_config(&matches); - assert_eq!(config.page_all, false); + assert!(!config.page_all); assert_eq!(config.page_limit, 10); assert_eq!(config.page_delay_ms, 100); } @@ -579,7 +587,7 @@ mod tests { ]); let config = parse_pagination_config(&matches); - assert_eq!(config.page_all, true); + assert!(config.page_all); assert_eq!(config.page_limit, 20); assert_eq!(config.page_delay_ms, 500); } diff --git a/crates/google-workspace-cli/src/setup.rs b/crates/google-workspace-cli/src/setup.rs index 9ebd19cb..1d4f78c1 100644 --- a/crates/google-workspace-cli/src/setup.rs +++ b/crates/google-workspace-cli/src/setup.rs @@ -1837,7 +1837,7 @@ mod tests { continue; } assert!( - api_ids.iter().any(|id| *id == expected_suffix), + api_ids.contains(&expected_suffix), "Missing API ID for service '{}' (expected {})", entry.api_name, expected_suffix