diff --git a/AGENTS.md b/AGENTS.md index 5ed062d..54c77d3 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -28,7 +28,8 @@ secrets/ search.rs # search 命令:多条件查询,展示 secrets 字段 schema(无需 master_key) delete.rs # delete 命令:事务化,CASCADE 删除 secrets,含历史快照 update.rs # update 命令:增量更新,secrets 行级 UPSERT/DELETE,CAS 并发保护 - rollback.rs # rollback / history 命令:按 entry_version 恢复 entry + secrets + rollback.rs # rollback 命令:按 entry_version 恢复 entry + secrets + history.rs # history 命令:查看 entry 变更历史列表 run.rs # inject / run 命令:逐字段解密 + key_ref 引用解析 upgrade.rs # upgrade 命令:检查、校验摘要并下载最新版本,自动替换二进制 export_cmd.rs # export 命令:批量导出记录,支持 JSON/TOML/YAML,含解密明文 diff --git a/Cargo.lock b/Cargo.lock index 8bb2037..fc9d1bf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1836,7 +1836,7 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "secrets" -version = "0.9.0" +version = "0.9.1" dependencies = [ "aes-gcm", "anyhow", @@ -2448,7 +2448,6 @@ dependencies = [ "bytes", "libc", "mio", - "parking_lot", "pin-project-lite", "signal-hook-registry", "socket2", diff --git a/Cargo.toml b/Cargo.toml index c4b5b1b..029d877 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,32 +1,32 @@ [package] name = "secrets" -version = "0.9.0" +version = "0.9.1" edition = "2024" [dependencies] -aes-gcm = "0.10.3" -anyhow = "1.0.102" -argon2 = { version = "0.5.3", features = ["std"] } -chrono = { version = "0.4.44", features = ["serde"] } -clap = { version = "4.6.0", features = ["derive"] } -dirs = "6.0.0" -flate2 = "1.1.9" -keyring = { version = "3.6.3", features = ["apple-native", "windows-native", "linux-native"] } -rand = "0.10.0" -reqwest = { version = "0.12", default-features = false, features = ["rustls-tls", "json"] } -rpassword = "7.4.0" -self-replace = "1.5.0" -semver = "1.0.27" -serde = { version = "1.0.228", features = ["derive"] } -serde_json = "1.0.149" -serde_yaml = "0.9" -sha2 = "0.10.9" -sqlx = { version = "0.8.6", features = ["runtime-tokio", "tls-rustls", "postgres", "uuid", 
"json", "chrono"] } -tar = "0.4.44" -tempfile = "3.19" -tokio = { version = "1.50.0", features = ["full"] } -toml = "1.0.7" -tracing = "0.1" -tracing-subscriber = { version = "0.3", features = ["env-filter"] } -uuid = { version = "1.22.0", features = ["serde"] } -zip = { version = "8.2.0", default-features = false, features = ["deflate"] } +aes-gcm = "^0.10.3" +anyhow = "^1.0.102" +argon2 = { version = "^0.5.3", features = ["std"] } +chrono = { version = "^0.4.44", features = ["serde"] } +clap = { version = "^4.6.0", features = ["derive"] } +dirs = "^6.0.0" +flate2 = "^1.1.9" +keyring = { version = "^3.6.3", features = ["apple-native", "windows-native", "linux-native"] } +rand = "^0.10.0" +reqwest = { version = "^0.12", default-features = false, features = ["rustls-tls", "json"] } +rpassword = "^7.4.0" +self-replace = "^1.5.0" +semver = "^1.0.27" +serde = { version = "^1.0.228", features = ["derive"] } +serde_json = "^1.0.149" +serde_yaml = "^0.9" +sha2 = "^0.10.9" +sqlx = { version = "^0.8.6", features = ["runtime-tokio", "tls-rustls", "postgres", "uuid", "json", "chrono"] } +tar = "^0.4.44" +tempfile = "^3.19" +tokio = { version = "^1.50.0", features = ["rt-multi-thread", "macros", "fs", "io-util", "process", "signal"] } +toml = "^1.0.7" +tracing = "^0.1" +tracing-subscriber = { version = "^0.3", features = ["env-filter"] } +uuid = { version = "^1.22.0", features = ["serde"] } +zip = { version = "^8.2.0", default-features = false, features = ["deflate"] } diff --git a/src/audit.rs b/src/audit.rs index 3ee9c75..dc97dfe 100644 --- a/src/audit.rs +++ b/src/audit.rs @@ -1,6 +1,11 @@ use serde_json::Value; use sqlx::{Postgres, Transaction}; +/// Return the current OS user as the audit actor (falls back to empty string). +pub fn current_actor() -> String { + std::env::var("USER").unwrap_or_default() +} + /// Write an audit entry within an existing transaction. 
pub async fn log_tx( tx: &mut Transaction<'_, Postgres>, @@ -10,7 +15,7 @@ pub async fn log_tx( name: &str, detail: Value, ) { - let actor = std::env::var("USER").unwrap_or_default(); + let actor = current_actor(); let result: Result<_, sqlx::Error> = sqlx::query( "INSERT INTO audit_log (action, namespace, kind, name, detail, actor) \ VALUES ($1, $2, $3, $4, $5, $6)", diff --git a/src/commands/add.rs b/src/commands/add.rs index 71a6ca5..c916323 100644 --- a/src/commands/add.rs +++ b/src/commands/add.rs @@ -5,7 +5,8 @@ use std::fs; use crate::crypto; use crate::db; -use crate::output::OutputMode; +use crate::models::EntryRow; +use crate::output::{OutputMode, print_json}; // ── Key/value parsing helpers (shared with update.rs) ─────────────────────── @@ -228,13 +229,6 @@ pub async fn run(pool: &PgPool, args: AddArgs<'_>, master_key: &[u8; 32]) -> Res let mut tx = pool.begin().await?; // Upsert the entry row (tags + metadata). - #[derive(sqlx::FromRow)] - struct EntryRow { - id: uuid::Uuid, - version: i64, - tags: Vec, - metadata: Value, - } let existing: Option = sqlx::query_as( "SELECT id, version, tags, metadata FROM entries \ WHERE namespace = $1 AND kind = $2 AND name = $3", @@ -383,11 +377,8 @@ pub async fn run(pool: &PgPool, args: AddArgs<'_>, master_key: &[u8; 32]) -> Res }); match args.output { - OutputMode::Json => { - println!("{}", serde_json::to_string_pretty(&result_json)?); - } - OutputMode::JsonCompact => { - println!("{}", serde_json::to_string(&result_json)?); + OutputMode::Json | OutputMode::JsonCompact => { + print_json(&result_json, &args.output)?; } _ => { println!("Added: [{}/{}] {}", args.namespace, args.kind, args.name); diff --git a/src/commands/config.rs b/src/commands/config.rs index d69479e..f696e74 100644 --- a/src/commands/config.rs +++ b/src/commands/config.rs @@ -15,7 +15,7 @@ pub async fn run(action: crate::ConfigAction) -> Result<()> { database_url: Some(url.clone()), }; config::save_config(&cfg)?; - println!("Database URL saved to: 
{}", config_path().display()); + println!("Database URL saved to: {}", config_path()?.display()); println!(" {}", mask_password(&url)); } crate::ConfigAction::Show => { @@ -23,7 +23,7 @@ pub async fn run(action: crate::ConfigAction) -> Result<()> { match cfg.database_url { Some(url) => { println!("database_url = {}", mask_password(&url)); - println!("config file: {}", config_path().display()); + println!("config file: {}", config_path()?.display()); } None => { println!("Database URL not configured."); @@ -32,7 +32,7 @@ pub async fn run(action: crate::ConfigAction) -> Result<()> { } } crate::ConfigAction::Path => { - println!("{}", config_path().display()); + println!("{}", config_path()?.display()); } } Ok(()) diff --git a/src/commands/delete.rs b/src/commands/delete.rs index 2d9c3f9..489d35c 100644 --- a/src/commands/delete.rs +++ b/src/commands/delete.rs @@ -1,35 +1,20 @@ use anyhow::Result; -use serde_json::{Value, json}; -use sqlx::{FromRow, PgPool}; -use uuid::Uuid; +use serde_json::json; +use sqlx::PgPool; use crate::db; -use crate::output::OutputMode; +use crate::models::{EntryRow, SecretFieldRow}; +use crate::output::{OutputMode, print_json}; -#[derive(FromRow)] -struct EntryRow { - id: Uuid, - version: i64, - tags: Vec, - metadata: Value, +pub struct DeleteArgs<'a> { + pub namespace: &'a str, + pub kind: &'a str, + pub name: &'a str, + pub output: OutputMode, } -#[derive(FromRow)] -struct SecretFieldRow { - id: Uuid, - field_name: String, - field_type: String, - value_len: i32, - encrypted: Vec, -} - -pub async fn run( - pool: &PgPool, - namespace: &str, - kind: &str, - name: &str, - output: OutputMode, -) -> Result<()> { +pub async fn run(pool: &PgPool, args: DeleteArgs<'_>) -> Result<()> { + let (namespace, kind, name) = (args.namespace, args.kind, args.name); tracing::debug!(namespace, kind, name, "deleting entry"); let mut tx = pool.begin().await?; @@ -48,20 +33,10 @@ pub async fn run( let Some(row) = row else { tx.rollback().await?; 
tracing::warn!(namespace, kind, name, "entry not found for deletion"); - match output { - OutputMode::Json => println!( - "{}", - serde_json::to_string_pretty( - &json!({"action":"not_found","namespace":namespace,"kind":kind,"name":name}) - )? - ), - OutputMode::JsonCompact => println!( - "{}", - serde_json::to_string( - &json!({"action":"not_found","namespace":namespace,"kind":kind,"name":name}) - )? - ), - _ => println!("Not found: [{}/{}] {}", namespace, kind, name), + let v = json!({"action":"not_found","namespace":namespace,"kind":kind,"name":name}); + match args.output { + OutputMode::Text => println!("Not found: [{}/{}] {}", namespace, kind, name), + ref mode => print_json(&v, mode)?, } return Ok(()); }; @@ -124,20 +99,10 @@ pub async fn run( tx.commit().await?; - match output { - OutputMode::Json => println!( - "{}", - serde_json::to_string_pretty( - &json!({"action":"deleted","namespace":namespace,"kind":kind,"name":name}) - )? - ), - OutputMode::JsonCompact => println!( - "{}", - serde_json::to_string( - &json!({"action":"deleted","namespace":namespace,"kind":kind,"name":name}) - )? - ), - _ => println!("Deleted: [{}/{}] {}", namespace, kind, name), + let v = json!({"action":"deleted","namespace":namespace,"kind":kind,"name":name}); + match args.output { + OutputMode::Text => println!("Deleted: [{}/{}] {}", namespace, kind, name), + ref mode => print_json(&v, mode)?, } Ok(()) diff --git a/src/commands/history.rs b/src/commands/history.rs new file mode 100644 index 0000000..46d9653 --- /dev/null +++ b/src/commands/history.rs @@ -0,0 +1,78 @@ +use anyhow::Result; +use serde_json::{Value, json}; +use sqlx::{FromRow, PgPool}; + +use crate::output::{OutputMode, format_local_time, print_json}; + +pub struct HistoryArgs<'a> { + pub namespace: &'a str, + pub kind: &'a str, + pub name: &'a str, + pub limit: u32, + pub output: OutputMode, +} + +/// List history entries for an entry. 
+pub async fn run(pool: &PgPool, args: HistoryArgs<'_>) -> Result<()> { + #[derive(FromRow)] + struct HistorySummary { + version: i64, + action: String, + actor: String, + created_at: chrono::DateTime<chrono::Utc>, + } + + let rows: Vec<HistorySummary> = sqlx::query_as( + "SELECT version, action, actor, created_at FROM entries_history \ + WHERE namespace = $1 AND kind = $2 AND name = $3 \ + ORDER BY id DESC LIMIT $4", + ) + .bind(args.namespace) + .bind(args.kind) + .bind(args.name) + .bind(args.limit as i64) + .fetch_all(pool) + .await?; + + match args.output { + OutputMode::Json | OutputMode::JsonCompact => { + let arr: Vec<Value> = rows + .iter() + .map(|r| { + json!({ + "version": r.version, + "action": r.action, + "actor": r.actor, + "created_at": r.created_at.format("%Y-%m-%dT%H:%M:%SZ").to_string(), + }) + }) + .collect(); + print_json(&Value::Array(arr), &args.output)?; + } + _ => { + if rows.is_empty() { + println!( + "No history found for [{}/{}] {}.", + args.namespace, args.kind, args.name + ); + return Ok(()); + } + println!( + "History for [{}/{}] {}:", + args.namespace, args.kind, args.name + ); + for r in &rows { + println!( + " v{:<4} {:8} {} {}", + r.version, + r.action, + r.actor, + format_local_time(r.created_at) + ); + } + println!(" (use `secrets rollback --to-version <N>` to restore)"); + } + } + + Ok(()) +} diff --git a/src/commands/import_cmd.rs b/src/commands/import_cmd.rs index 62a595f..845dd10 100644 --- a/src/commands/import_cmd.rs +++ b/src/commands/import_cmd.rs @@ -5,11 +5,13 @@ use std::collections::BTreeMap; use crate::commands::add::{self, AddArgs}; use crate::models::ExportFormat; -use crate::output::OutputMode; +use crate::output::{OutputMode, print_json}; pub struct ImportArgs<'a> { pub file: &'a str, /// Overwrite existing records when there is a conflict (upsert). + /// Without this flag, the import aborts on the first conflict. + /// A future `--skip` flag could allow silently skipping conflicts and continuing. 
pub force: bool, /// Check and preview operations without writing to the database. pub dry_run: bool, @@ -48,26 +50,29 @@ pub async fn run(pool: &PgPool, args: ImportArgs<'_>, master_key: &[u8; 32]) -> .unwrap_or(false); if exists && !args.force { - let msg = format!( - "[{}/{}/{}] conflict — record already exists (use --force to overwrite)", - entry.namespace, entry.kind, entry.name - ); + let v = serde_json::json!({ + "action": "conflict", + "namespace": entry.namespace, + "kind": entry.kind, + "name": entry.name, + }); match args.output { - OutputMode::Json | OutputMode::JsonCompact => { - let v = serde_json::json!({ - "action": "conflict", - "namespace": entry.namespace, - "kind": entry.kind, - "name": entry.name, - }); - let s = if args.output == OutputMode::Json { - serde_json::to_string_pretty(&v)? - } else { - serde_json::to_string(&v)? - }; - eprintln!("{}", s); + OutputMode::Text => eprintln!( + "[{}/{}/{}] conflict — record already exists (use --force to overwrite)", + entry.namespace, entry.kind, entry.name + ), + ref mode => { + // Write conflict notice to stderr so it does not mix with summary JSON. + eprint!( + "{}", + if *mode == OutputMode::Json { + serde_json::to_string_pretty(&v)? + } else { + serde_json::to_string(&v)? 
+ } + ); + eprintln!(); } - _ => eprintln!("{}", msg), } return Err(anyhow::anyhow!( "Import aborted: conflict on [{}/{}/{}]", @@ -80,26 +85,19 @@ pub async fn run(pool: &PgPool, args: ImportArgs<'_>, master_key: &[u8; 32]) -> let action = if exists { "upsert" } else { "insert" }; if args.dry_run { + let v = serde_json::json!({ + "action": action, + "namespace": entry.namespace, + "kind": entry.kind, + "name": entry.name, + "dry_run": true, + }); match args.output { - OutputMode::Json | OutputMode::JsonCompact => { - let v = serde_json::json!({ - "action": action, - "namespace": entry.namespace, - "kind": entry.kind, - "name": entry.name, - "dry_run": true, - }); - let s = if args.output == OutputMode::Json { - serde_json::to_string_pretty(&v)? - } else { - serde_json::to_string(&v)? - }; - println!("{}", s); - } - _ => println!( + OutputMode::Text => println!( "[dry-run] {} [{}/{}/{}]", action, entry.namespace, entry.kind, entry.name ), + ref mode => print_json(&v, mode)?, } if exists { skipped += 1; @@ -131,25 +129,18 @@ pub async fn run(pool: &PgPool, args: ImportArgs<'_>, master_key: &[u8; 32]) -> .await { Ok(()) => { + let v = serde_json::json!({ + "action": action, + "namespace": entry.namespace, + "kind": entry.kind, + "name": entry.name, + }); match args.output { - OutputMode::Json | OutputMode::JsonCompact => { - let v = serde_json::json!({ - "action": action, - "namespace": entry.namespace, - "kind": entry.kind, - "name": entry.name, - }); - let s = if args.output == OutputMode::Json { - serde_json::to_string_pretty(&v)? - } else { - serde_json::to_string(&v)? 
- }; - println!("{}", s); - } - _ => println!( + OutputMode::Text => println!( "Imported [{}/{}/{}]", entry.namespace, entry.kind, entry.name ), + ref mode => print_json(&v, mode)?, } inserted += 1; } @@ -163,23 +154,15 @@ pub async fn run(pool: &PgPool, args: ImportArgs<'_>, master_key: &[u8; 32]) -> } } + let summary = serde_json::json!({ + "total": total, + "inserted": inserted, + "skipped": skipped, + "failed": failed, + "dry_run": args.dry_run, + }); match args.output { - OutputMode::Json | OutputMode::JsonCompact => { - let v = serde_json::json!({ - "total": total, - "inserted": inserted, - "skipped": skipped, - "failed": failed, - "dry_run": args.dry_run, - }); - let s = if args.output == OutputMode::Json { - serde_json::to_string_pretty(&v)? - } else { - serde_json::to_string(&v)? - }; - println!("{}", s); - } - _ => { + OutputMode::Text => { if args.dry_run { println!( "\n[dry-run] {} total: {} would insert, {} would skip, {} would fail", @@ -192,6 +175,7 @@ pub async fn run(pool: &PgPool, args: ImportArgs<'_>, master_key: &[u8; 32]) -> ); } } + ref mode => print_json(&summary, mode)?, } if failed > 0 { diff --git a/src/commands/mod.rs b/src/commands/mod.rs index e3f5545..c0fa776 100644 --- a/src/commands/mod.rs +++ b/src/commands/mod.rs @@ -2,6 +2,7 @@ pub mod add; pub mod config; pub mod delete; pub mod export_cmd; +pub mod history; pub mod import_cmd; pub mod init; pub mod rollback; diff --git a/src/commands/rollback.rs b/src/commands/rollback.rs index 8ba3522..e55a2b1 100644 --- a/src/commands/rollback.rs +++ b/src/commands/rollback.rs @@ -5,7 +5,7 @@ use uuid::Uuid; use crate::crypto; use crate::db; -use crate::output::{OutputMode, format_local_time}; +use crate::output::{OutputMode, print_json}; pub struct RollbackArgs<'a> { pub namespace: &'a str, @@ -255,83 +255,11 @@ pub async fn run(pool: &PgPool, args: RollbackArgs<'_>, master_key: &[u8; 32]) - }); match args.output { - OutputMode::Json => println!("{}", 
serde_json::to_string_pretty(&result_json)?), - OutputMode::JsonCompact => println!("{}", serde_json::to_string(&result_json)?), - _ => println!( + OutputMode::Text => println!( "Rolled back: [{}/{}] {} → version {}", args.namespace, args.kind, args.name, snap.version ), - } - - Ok(()) -} - -/// List history entries for an entry. -pub async fn list_history( - pool: &PgPool, - namespace: &str, - kind: &str, - name: &str, - limit: u32, - output: OutputMode, -) -> Result<()> { - #[derive(FromRow)] - struct HistorySummary { - version: i64, - action: String, - actor: String, - created_at: chrono::DateTime, - } - - let rows: Vec = sqlx::query_as( - "SELECT version, action, actor, created_at FROM entries_history \ - WHERE namespace = $1 AND kind = $2 AND name = $3 \ - ORDER BY id DESC LIMIT $4", - ) - .bind(namespace) - .bind(kind) - .bind(name) - .bind(limit as i64) - .fetch_all(pool) - .await?; - - match output { - OutputMode::Json | OutputMode::JsonCompact => { - let arr: Vec = rows - .iter() - .map(|r| { - json!({ - "version": r.version, - "action": r.action, - "actor": r.actor, - "created_at": r.created_at.format("%Y-%m-%dT%H:%M:%SZ").to_string(), - }) - }) - .collect(); - let out = if output == OutputMode::Json { - serde_json::to_string_pretty(&arr)? - } else { - serde_json::to_string(&arr)? 
- }; - println!("{}", out); - } - _ => { - if rows.is_empty() { - println!("No history found for [{}/{}] {}.", namespace, kind, name); - return Ok(()); - } - println!("History for [{}/{}] {}:", namespace, kind, name); - for r in &rows { - println!( - " v{:<4} {:8} {} {}", - r.version, - r.action, - r.actor, - format_local_time(r.created_at) - ); - } - println!(" (use `secrets rollback --to-version ` to restore)"); - } + ref mode => print_json(&result_json, mode)?, } Ok(()) diff --git a/src/commands/search.rs b/src/commands/search.rs index 0a20da2..a2e7cc4 100644 --- a/src/commands/search.rs +++ b/src/commands/search.rs @@ -121,7 +121,12 @@ struct PagedFetchArgs<'a> { offset: u32, } +/// A very large limit used when callers need all matching records (export, inject, run). +/// Postgres will stop scanning when this many rows are found; adjust if needed. +pub const FETCH_ALL_LIMIT: u32 = 100_000; + /// Fetch entries matching the given filters (used by search, inject, run). +/// `limit` caps the result set; pass `FETCH_ALL_LIMIT` when you need all matching records. pub async fn fetch_entries( pool: &PgPool, namespace: Option<&str>, @@ -129,6 +134,19 @@ pub async fn fetch_entries( name: Option<&str>, tags: &[String], query: Option<&str>, +) -> Result> { + fetch_entries_with_limit(pool, namespace, kind, name, tags, query, FETCH_ALL_LIMIT).await +} + +/// Like `fetch_entries` but with an explicit limit. Used internally by `search`. 
+pub(crate) async fn fetch_entries_with_limit( + pool: &PgPool, + namespace: Option<&str>, + kind: Option<&str>, + name: Option<&str>, + tags: &[String], + query: Option<&str>, + limit: u32, ) -> Result> { fetch_entries_paged( pool, @@ -139,7 +157,7 @@ pub async fn fetch_entries( tags, query, sort: "name", - limit: 200, + limit, offset: 0, }, ) diff --git a/src/commands/update.rs b/src/commands/update.rs index cfa5c46..51b12d9 100644 --- a/src/commands/update.rs +++ b/src/commands/update.rs @@ -1,6 +1,6 @@ use anyhow::Result; use serde_json::{Map, Value, json}; -use sqlx::{FromRow, PgPool}; +use sqlx::PgPool; use uuid::Uuid; use super::add::{ @@ -9,15 +9,8 @@ use super::add::{ }; use crate::crypto; use crate::db; -use crate::output::OutputMode; - -#[derive(FromRow)] -struct EntryRow { - id: Uuid, - version: i64, - tags: Vec, - metadata: Value, -} +use crate::models::EntryRow; +use crate::output::{OutputMode, print_json}; pub struct UpdateArgs<'a> { pub namespace: &'a str, @@ -284,11 +277,8 @@ pub async fn run(pool: &PgPool, args: UpdateArgs<'_>, master_key: &[u8; 32]) -> }); match args.output { - OutputMode::Json => { - println!("{}", serde_json::to_string_pretty(&result_json)?); - } - OutputMode::JsonCompact => { - println!("{}", serde_json::to_string(&result_json)?); + OutputMode::Json | OutputMode::JsonCompact => { + print_json(&result_json, &args.output)?; } _ => { println!("Updated: [{}/{}] {}", args.namespace, args.kind, args.name); diff --git a/src/config.rs b/src/config.rs index 1ff003c..d54bda9 100644 --- a/src/config.rs +++ b/src/config.rs @@ -8,19 +8,23 @@ pub struct Config { pub database_url: Option, } -pub fn config_dir() -> PathBuf { - dirs::config_dir() +pub fn config_dir() -> Result { + let dir = dirs::config_dir() .or_else(|| dirs::home_dir().map(|h| h.join(".config"))) - .unwrap_or_else(|| PathBuf::from(".config")) - .join("secrets") + .context( + "Cannot determine config directory: \ + neither XDG_CONFIG_HOME nor HOME is set", + )? 
+ .join("secrets"); + Ok(dir) } -pub fn config_path() -> PathBuf { - config_dir().join("config.toml") +pub fn config_path() -> Result<PathBuf> { + Ok(config_dir()?.join("config.toml")) } pub fn load_config() -> Result<Config> { - let path = config_path(); + let path = config_path()?; if !path.exists() { return Ok(Config::default()); } @@ -32,11 +36,11 @@ pub fn load_config() -> Result<Config> { } pub fn save_config(config: &Config) -> Result<()> { - let dir = config_dir(); + let dir = config_dir()?; fs::create_dir_all(&dir) .with_context(|| format!("failed to create config dir: {}", dir.display()))?; - let path = config_path(); + let path = dir.join("config.toml"); let content = toml::to_string_pretty(config).context("failed to serialize config")?; fs::write(&path, &content) .with_context(|| format!("failed to write config file: {}", path.display()))?; diff --git a/src/crypto.rs b/src/crypto.rs index 0eb4583..171a4e4 100644 --- a/src/crypto.rs +++ b/src/crypto.rs @@ -10,12 +10,24 @@ const KEYRING_SERVICE: &str = "secrets-cli"; const KEYRING_USER: &str = "master-key"; const NONCE_LEN: usize = 12; +// Argon2id parameters — OWASP recommended (m=64 MiB, t=3 iterations, p=4 threads, key=32 B) +const ARGON2_M_COST: u32 = 65_536; +const ARGON2_T_COST: u32 = 3; +const ARGON2_P_COST: u32 = 4; +const ARGON2_KEY_LEN: usize = 32; + // ─── Argon2id key derivation ───────────────────────────────────────────────── /// Derive a 32-byte Master Key from a password and salt using Argon2id. /// Parameters: m=65536 KiB (64 MB), t=3, p=4 — OWASP recommended. 
pub fn derive_master_key(password: &str, salt: &[u8]) -> Result<[u8; 32]> { - let params = Params::new(65536, 3, 4, Some(32)).context("invalid Argon2id params")?; + let params = Params::new( + ARGON2_M_COST, + ARGON2_T_COST, + ARGON2_P_COST, + Some(ARGON2_KEY_LEN), + ) + .context("invalid Argon2id params")?; let argon2 = Argon2::new(argon2::Algorithm::Argon2id, Version::V0x13, params); let mut key = [0u8; 32]; argon2 diff --git a/src/db.rs b/src/db.rs index d57ff84..1f879a9 100644 --- a/src/db.rs +++ b/src/db.rs @@ -3,6 +3,8 @@ use serde_json::Value; use sqlx::PgPool; use sqlx::postgres::PgPoolOptions; +use crate::audit::current_actor; + pub async fn create_pool(database_url: &str) -> Result { tracing::debug!("connecting to database"); let pool = PgPoolOptions::new() @@ -139,7 +141,7 @@ pub async fn snapshot_entry_history( tx: &mut sqlx::Transaction<'_, sqlx::Postgres>, p: EntrySnapshotParams<'_>, ) -> Result<()> { - let actor = std::env::var("USER").unwrap_or_default(); + let actor = current_actor(); sqlx::query( "INSERT INTO entries_history \ (entry_id, namespace, kind, name, version, action, tags, metadata, actor) \ @@ -177,7 +179,7 @@ pub async fn snapshot_secret_history( tx: &mut sqlx::Transaction<'_, sqlx::Postgres>, p: SecretSnapshotParams<'_>, ) -> Result<()> { - let actor = std::env::var("USER").unwrap_or_default(); + let actor = current_actor(); sqlx::query( "INSERT INTO secrets_history \ (entry_id, secret_id, entry_version, field_name, field_type, value_len, encrypted, action, actor) \ diff --git a/src/main.rs b/src/main.rs index 398119f..ac92040 100644 --- a/src/main.rs +++ b/src/main.rs @@ -639,7 +639,16 @@ async fn main() -> Result<()> { let _span = tracing::info_span!("cmd", command = "delete", %namespace, %kind, %name).entered(); let out = resolve_output_mode(output.as_deref())?; - commands::delete::run(&pool, &namespace, &kind, &name, out).await?; + commands::delete::run( + &pool, + commands::delete::DeleteArgs { + namespace: &namespace, + kind: 
&kind, + name: &name, + output: out, + }, + ) + .await?; } Commands::Update { @@ -685,7 +694,17 @@ output, } => { let out = resolve_output_mode(output.as_deref())?; - commands::rollback::list_history(&pool, &namespace, &kind, &name, limit, out).await?; + commands::history::run( + &pool, + commands::history::HistoryArgs { + namespace: &namespace, + kind: &kind, + name: &name, + limit, + output: out, + }, + ) + .await?; } Commands::Rollback { diff --git a/src/models.rs b/src/models.rs index 469b397..28658c3 100644 --- a/src/models.rs +++ b/src/models.rs @@ -38,6 +38,27 @@ pub struct SecretField { pub updated_at: DateTime<Utc>, } +// ── Internal query row types (shared across commands) ───────────────────────── + +/// Minimal entry row fetched for write operations (add / update / delete / rollback). +#[derive(Debug, sqlx::FromRow)] +pub struct EntryRow { + pub id: Uuid, + pub version: i64, + pub tags: Vec<String>, + pub metadata: Value, +} + +/// Minimal secret field row fetched before snapshots or cascade deletes. +#[derive(Debug, sqlx::FromRow)] +pub struct SecretFieldRow { + pub id: Uuid, + pub field_name: String, + pub field_type: String, + pub value_len: i32, + pub encrypted: Vec<u8>, +} + // ── Export / Import types ────────────────────────────────────────────────────── /// Supported file formats for export/import. @@ -52,15 +73,12 @@ impl ExportFormat { /// Infer format from file extension (.json / .toml / .yaml / .yml). pub fn from_extension(path: &str) -> anyhow::Result<Self> { let ext = path.rsplit('.').next().unwrap_or("").to_lowercase(); - match ext.as_str() { - "json" => Ok(Self::Json), - "toml" => Ok(Self::Toml), - "yaml" | "yml" => Ok(Self::Yaml), - other => anyhow::bail!( + Self::from_str(&ext).map_err(|_| { + anyhow::anyhow!( "Cannot infer format from extension '.{}'. Use --format json|toml|yaml", - other - ), - } + ext + ) + }) } /// Parse from --format CLI value. 
@@ -146,16 +164,12 @@ } Value::String(s) => Ok(toml::Value::String(s.clone())), Value::Array(arr) => { - // Check for uniform scalar type (TOML requires homogeneous arrays at the value level, - // though arrays of tables are handled separately via TOML's [[table]] syntax). - // For simplicity we convert each element; if types are mixed, toml crate will - // handle it gracefully or we fall back to a JSON string. let items: anyhow::Result<Vec<toml::Value>> = arr.iter().map(json_to_toml_value).collect(); match items { Ok(vals) => Ok(toml::Value::Array(vals)), - Err(_) => { - // Fallback: serialise as JSON string + Err(e) => { + tracing::debug!(error = %e, "mixed-type array; falling back to JSON string"); Ok(toml::Value::String(serde_json::to_string(v)?)) } } @@ -171,8 +185,8 @@ Ok(tv) => { toml_map.insert(k.clone(), tv); } - Err(_) => { - // Fallback: serialise as JSON string + Err(e) => { + tracing::debug!(key = %k, error = %e, "field not representable in TOML; falling back to JSON string"); toml_map .insert(k.clone(), toml::Value::String(serde_json::to_string(val)?)); } diff --git a/src/output.rs b/src/output.rs index d9b690d..deb3878 100644 --- a/src/output.rs +++ b/src/output.rs @@ -50,3 +50,16 @@ pub fn format_local_time(dt: DateTime<Utc>) -> String { .format("%Y-%m-%d %H:%M:%S %:z") .to_string() } + +/// Print a JSON value to stdout in the requested output mode. +/// - `Json` → pretty-printed +/// - `JsonCompact` → single line +/// - `Text` → no-op (caller is responsible for the text branch) +pub fn print_json(value: &serde_json::Value, mode: &OutputMode) -> anyhow::Result<()> { + match mode { + OutputMode::Json => println!("{}", serde_json::to_string_pretty(value)?), + OutputMode::JsonCompact => println!("{}", serde_json::to_string(value)?), + OutputMode::Text => {} + } + Ok(()) +}