diff --git a/.envrc b/.envrc index 3550a30..9312441 100644 --- a/.envrc +++ b/.envrc @@ -1 +1,2 @@ use flake +unset TMPDIR diff --git a/src/import/csv_parser.rs b/src/import/csv_parser.rs index d0ab33e..2b4c37a 100644 --- a/src/import/csv_parser.rs +++ b/src/import/csv_parser.rs @@ -56,6 +56,28 @@ pub fn parse_csv(path: &Path) -> Result<Vec<Value>> { Ok(records) } +/// Parse multiple CSV files and merge into a single JSON array. +/// Each record gets a "File" field set to the filename stem (e.g., "sales" from "sales.csv"). +pub fn merge_csvs(paths: &[impl AsRef<Path>]) -> Result<Vec<Value>> { + let mut all_records = Vec::new(); + for path in paths { + let path = path.as_ref(); + let stem = path + .file_stem() + .and_then(|s| s.to_str()) + .unwrap_or("unknown") + .to_string(); + let records = parse_csv(path)?; + for mut record in records { + if let Value::Object(ref mut map) = record { + map.insert("File".to_string(), Value::String(stem.clone())); + } + all_records.push(record); + } + } + Ok(all_records) +} + fn parse_csv_field(field: &str) -> Value { if field.is_empty() { return Value::Null; @@ -158,6 +180,40 @@ mod tests { assert_eq!(records[0]["Active"], Value::String("true".to_string())); } + #[test] + fn merge_csvs_adds_file_field_from_stem() { + let dir = tempdir().unwrap(); + let sales = dir.path().join("sales.csv"); + let expenses = dir.path().join("expenses.csv"); + fs::write(&sales, "Region,Revenue\nEast,100\nWest,200").unwrap(); + fs::write(&expenses, "Region,Revenue\nEast,50\nWest,75").unwrap(); + + let records = merge_csvs(&[sales, expenses]).unwrap(); + assert_eq!(records.len(), 4); + assert_eq!(records[0]["File"], Value::String("sales".to_string())); + assert_eq!(records[1]["File"], Value::String("sales".to_string())); + assert_eq!(records[2]["File"], Value::String("expenses".to_string())); + assert_eq!(records[3]["File"], Value::String("expenses".to_string())); + // Original fields preserved + assert_eq!(records[0]["Region"], Value::String("East".to_string())); + assert_eq!( +
records[2]["Revenue"], + Value::Number(serde_json::Number::from(50)) + ); + } + + #[test] + fn merge_csvs_single_file_works() { + let dir = tempdir().unwrap(); + let path = dir.path().join("data.csv"); + fs::write(&path, "Name,Value\nA,1").unwrap(); + + let records = merge_csvs(&[path]).unwrap(); + assert_eq!(records.len(), 1); + assert_eq!(records[0]["File"], Value::String("data".to_string())); + assert_eq!(records[0]["Name"], Value::String("A".to_string())); + } + #[test] fn parse_checking_csv_format() { // Simulates the format of /Users/edwlan/Downloads/Checking1.csv diff --git a/src/main.rs b/src/main.rs index ac3355d..7ebf733 100644 --- a/src/main.rs +++ b/src/main.rs @@ -29,7 +29,7 @@ trait Runnable { struct CmdLineArgs { file_path: Option<PathBuf>, - import_path: Option<PathBuf>, + import_paths: Vec<PathBuf>, } impl Runnable for CmdLineArgs { @@ -38,36 +38,55 @@ impl Runnable for CmdLineArgs { let model = get_initial_model(&self.file_path)?; // Pre-TUI import: parse JSON or CSV and open wizard - let import_value = self.import_path.and_then(get_import_data); + let import_value = if self.import_paths.is_empty() { + None + } else { + get_import_data(&self.import_paths) + }; run_tui(model, self.file_path, import_value) } } -fn get_import_data(path: PathBuf) -> Option<Value> { - match std::fs::read_to_string(&path) { - Err(e) => { - eprintln!("Cannot read '{}': {e}", path.display()); - None +fn get_import_data(paths: &[PathBuf]) -> Option<Value> { + let all_csv = paths.iter().all(|p| csv_path_p(p)); + + if paths.len() > 1 { + if !all_csv { + eprintln!("Multi-file import only supports CSV files"); + return None; } - Ok(content) => { - if csv_path_p(&path) { - // Parse CSV and wrap as JSON array - match crate::import::csv_parser::parse_csv(&path) { - Ok(records) => Some(serde_json::Value::Array(records)), - Err(e) => { - eprintln!("CSV parse error: {e}"); - None + match crate::import::csv_parser::merge_csvs(paths) { + Ok(records) => Some(Value::Array(records)), + Err(e) => { + eprintln!("CSV merge error: 
{e}"); + None + } + } + } else { + let path = &paths[0]; + match std::fs::read_to_string(path) { + Err(e) => { + eprintln!("Cannot read '{}': {e}", path.display()); + None + } + Ok(content) => { + if csv_path_p(path) { + match crate::import::csv_parser::parse_csv(path) { + Ok(records) => Some(Value::Array(records)), + Err(e) => { + eprintln!("CSV parse error: {e}"); + None + } + } - } - } else { - // Parse JSON - match serde_json::from_str::<Value>(&content) { - Err(e) => { - eprintln!("JSON parse error: {e}"); - None + } else { + match serde_json::from_str::<Value>(&content) { + Err(e) => { + eprintln!("JSON parse error: {e}"); + None + } + Ok(json) => Some(json), } - Ok(json) => Some(json), } } } @@ -127,7 +146,8 @@ impl Runnable for HelpArgs { println!("improvise — multi-dimensional data modeling TUI\n"); println!("USAGE:"); println!(" improvise [file.improv] Open or create a model"); - println!(" improvise --import data.json Import JSON (or CSV) then open TUI"); + println!(" improvise --import data.json Import JSON or CSV then open TUI"); + println!(" improvise --import a.csv b.csv Import multiple CSVs (filenames become a category)"); println!(" improvise --cmd '{{...}}' Run a JSON command (headless, repeatable)"); println!(" improvise --script cmds.jsonl Run commands from file (headless)"); println!("\nTUI KEYS (vim-style):"); @@ -154,7 +174,7 @@ fn parse_args(args: Vec<String>) -> Box<dyn Runnable> { let mut file_path: Option<PathBuf> = None; let mut headless_cmds: Vec<String> = Vec::new(); let mut headless_script: Option<PathBuf> = None; - let mut import_path: Option<PathBuf> = None; + let mut import_paths: Vec<PathBuf> = Vec::new(); let mut i = 1; while i < args.len() { @@ -171,7 +191,11 @@ fn parse_args(args: Vec<String>) -> Box<dyn Runnable> { } "--import" => { i += 1; - import_path = args.get(i).map(PathBuf::from); + while i < args.len() && !args[i].starts_with('-') { + import_paths.push(PathBuf::from(&args[i])); + i += 1; + } + continue; // skip the i += 1 at the bottom } "--help" | "-h" => { return Box::new(HelpArgs); } @@ -193,7 +217,7 @@ fn 
parse_args(args: Vec<String>) -> Box<dyn Runnable> { } else { Box::new(CmdLineArgs { file_path, - import_path, + import_paths, }) } }