//! improvise — entry point for the multi-dimensional data modeling TUI.
mod command;
|
|
mod draw;
|
|
mod formula;
|
|
mod import;
|
|
mod model;
|
|
mod persistence;
|
|
mod ui;
|
|
mod view;
|
|
|
|
use crate::import::csv_parser::csv_path_p;
|
|
|
|
use std::path::PathBuf;
|
|
|
|
use anyhow::{Context, Result};
|
|
use clap::{Parser, Subcommand};
|
|
|
|
use command::CommandResult;
|
|
use draw::run_tui;
|
|
use model::Model;
|
|
use serde_json::Value;
|
|
|
|
// Top-level CLI definition, parsed via clap's derive API.
// NOTE: the `///` doc comments below double as `--help` text at runtime,
// so changing them changes user-visible output.
#[derive(Parser)]
#[command(name = "improvise", about = "Multi-dimensional data modeling TUI")]
struct Cli {
    /// Model file to open or create
    // Optional: with no file at all, a fresh "New Model" is created
    // (see get_initial_model).
    file: Option<PathBuf>,

    // Optional subcommand; when absent, the TUI opens directly on `file`.
    #[command(subcommand)]
    command: Option<Commands>,
}
|
|
|
|
// Subcommands for import and headless (scripted) operation.
// NOTE: every `///` doc comment below is emitted verbatim by clap as
// `--help` text, so edits here change user-visible output.
#[derive(Subcommand)]
enum Commands {
    /// Import JSON or CSV data, then open TUI (or save with --output)
    // All of the repeatable flags below are collected into an ImportConfig
    // in main(); the ":"-pair flags (--extract, --axis) are split by
    // parse_colon_pairs.
    Import {
        /// Files to import (multiple CSVs merge with a "File" category)
        files: Vec<PathBuf>,

        /// Mark field as category dimension (repeatable)
        #[arg(long)]
        category: Vec<String>,

        /// Mark field as numeric measure (repeatable)
        #[arg(long)]
        measure: Vec<String>,

        /// Mark field as time/date category (repeatable)
        #[arg(long)]
        time: Vec<String>,

        /// Skip/exclude a field from import (repeatable)
        #[arg(long)]
        skip: Vec<String>,

        /// Extract date component, e.g. "Date:Month" (repeatable)
        #[arg(long)]
        extract: Vec<String>,

        /// Set category axis, e.g. "Payee:row" (repeatable)
        #[arg(long)]
        axis: Vec<String>,

        /// Add formula, e.g. "Profit = Revenue - Cost" (repeatable)
        #[arg(long)]
        formula: Vec<String>,

        /// Model name (default: "Imported Model")
        #[arg(long)]
        name: Option<String>,

        /// Skip the interactive wizard
        // true -> run_headless_import; false -> run_wizard_import (TUI).
        #[arg(long)]
        no_wizard: bool,

        /// Save to file instead of opening TUI
        #[arg(short, long)]
        output: Option<PathBuf>,
    },

    /// Run a JSON command headless (repeatable)
    // Dispatched by run_headless_commands; one JSON result line per command.
    Cmd {
        /// JSON command strings
        json: Vec<String>,

        /// Model file to load/save
        #[arg(short, long)]
        file: Option<PathBuf>,
    },

    /// Run commands from a script file headless
    // Dispatched by run_headless_script.
    Script {
        /// Script file (one JSON command per line, # comments)
        path: PathBuf,

        /// Model file to load/save
        #[arg(short, long)]
        file: Option<PathBuf>,
    },
}
|
|
|
|
// Entry point: parses the CLI and dispatches to the TUI or one of the
// headless modes.
fn main() -> Result<()> {
    let cli = Cli::parse();

    match cli.command {
        // No subcommand: open (or create) the model file and start the TUI.
        None => {
            let model = get_initial_model(&cli.file)?;
            run_tui(model, cli.file, None)
        }

        Some(Commands::Import {
            files,
            category,
            measure,
            time,
            skip,
            extract,
            axis,
            formula,
            name,
            no_wizard,
            output,
        }) => {
            // Load and parse the input files up front; bail early when the
            // user supplied no files at all.
            let import_value = if files.is_empty() {
                anyhow::bail!("No files specified for import");
            } else {
                get_import_data(&files)
                    .ok_or_else(|| anyhow::anyhow!("Failed to parse import files"))?
            };

            // Collect the CLI flags into one config value; the ":"-separated
            // pairs (e.g. "Date:Month", "Payee:row") are split here.
            let config = ImportConfig {
                categories: category,
                measures: measure,
                time_fields: time,
                skip_fields: skip,
                extractions: parse_colon_pairs(&extract),
                axes: parse_colon_pairs(&axis),
                formulas: formula,
                name,
            };

            if no_wizard {
                // Fully scripted import: build the model, save, and exit.
                run_headless_import(import_value, &config, output, cli.file)
            } else {
                // Interactive import: hand the parsed data to the TUI wizard.
                run_wizard_import(import_value, &config, cli.file)
            }
        }

        Some(Commands::Cmd { json, file }) => run_headless_commands(&json, &file),

        Some(Commands::Script { path, file }) => run_headless_script(&path, &file),
    }
}
|
|
|
|
// ── Import config ────────────────────────────────────────────────────────────
|
|
|
|
/// Import options collected from the `import` subcommand's CLI flags,
/// decoupled from clap so the pipeline code does not depend on argument
/// parsing.
struct ImportConfig {
    /// Fields forced to the Category kind (`--category`).
    categories: Vec<String>,
    /// Fields forced to the numeric Measure kind (`--measure`).
    measures: Vec<String>,
    /// Fields forced to the TimeCategory kind (`--time`).
    time_fields: Vec<String>,
    /// Fields excluded from the import (`--skip`).
    skip_fields: Vec<String>,
    /// `(field, component)` pairs from `--extract`, e.g. ("Date", "Month").
    extractions: Vec<(String, String)>,
    /// `(category, axis)` pairs from `--axis`, e.g. ("Payee", "row").
    axes: Vec<(String, String)>,
    /// Formula definitions from `--formula`, e.g. "Profit = Revenue - Cost".
    formulas: Vec<String>,
    /// Model name override (`--name`).
    name: Option<String>,
}
|
|
|
|
/// Splits each `"key:value"` argument at its first `:` into an owned pair.
/// Arguments without a colon are silently dropped.
fn parse_colon_pairs(args: &[String]) -> Vec<(String, String)> {
    let mut pairs = Vec::with_capacity(args.len());
    for arg in args {
        if let Some((key, value)) = arg.split_once(':') {
            pairs.push((key.to_string(), value.to_string()));
        }
    }
    pairs
}
|
|
|
|
fn apply_config_to_pipeline(pipeline: &mut import::wizard::ImportPipeline, config: &ImportConfig) {
|
|
use import::analyzer::{DateComponent, FieldKind};
|
|
|
|
// Override field kinds
|
|
for p in &mut pipeline.proposals {
|
|
if config.categories.contains(&p.field) {
|
|
p.kind = FieldKind::Category;
|
|
p.accepted = true;
|
|
} else if config.measures.contains(&p.field) {
|
|
p.kind = FieldKind::Measure;
|
|
p.accepted = true;
|
|
} else if config.time_fields.contains(&p.field) {
|
|
p.kind = FieldKind::TimeCategory;
|
|
p.accepted = true;
|
|
} else if config.skip_fields.contains(&p.field) {
|
|
p.accepted = false;
|
|
}
|
|
}
|
|
|
|
// Apply date component extractions
|
|
for (field, comp_str) in &config.extractions {
|
|
let component = match comp_str.to_lowercase().as_str() {
|
|
"year" => DateComponent::Year,
|
|
"month" => DateComponent::Month,
|
|
"quarter" => DateComponent::Quarter,
|
|
_ => continue,
|
|
};
|
|
for p in &mut pipeline.proposals {
|
|
if p.field == *field && !p.date_components.contains(&component) {
|
|
p.date_components.push(component);
|
|
}
|
|
}
|
|
}
|
|
|
|
// Set formulas
|
|
pipeline.formulas = config.formulas.clone();
|
|
|
|
// Set model name
|
|
if let Some(ref name) = config.name {
|
|
pipeline.model_name = name.clone();
|
|
}
|
|
}
|
|
|
|
fn apply_axis_overrides(model: &mut Model, axes: &[(String, String)]) {
|
|
use view::Axis;
|
|
let view = model.active_view_mut();
|
|
for (cat, axis_str) in axes {
|
|
let axis = match axis_str.to_lowercase().as_str() {
|
|
"row" => Axis::Row,
|
|
"column" | "col" => Axis::Column,
|
|
"page" => Axis::Page,
|
|
"none" => Axis::None,
|
|
_ => continue,
|
|
};
|
|
view.set_axis(cat, axis);
|
|
}
|
|
}
|
|
|
|
fn run_headless_import(
|
|
import_value: Value,
|
|
config: &ImportConfig,
|
|
output: Option<PathBuf>,
|
|
model_file: Option<PathBuf>,
|
|
) -> Result<()> {
|
|
let mut pipeline = import::wizard::ImportPipeline::new(import_value);
|
|
apply_config_to_pipeline(&mut pipeline, config);
|
|
let mut model = pipeline.build_model()?;
|
|
model.normalize_view_state();
|
|
apply_axis_overrides(&mut model, &config.axes);
|
|
|
|
if let Some(path) = output.or(model_file) {
|
|
persistence::save(&model, &path)?;
|
|
eprintln!("Saved to {}", path.display());
|
|
} else {
|
|
eprintln!("No output path specified; use -o <path> or provide a model file");
|
|
}
|
|
Ok(())
|
|
}
|
|
|
|
fn run_wizard_import(
|
|
import_value: Value,
|
|
_config: &ImportConfig,
|
|
model_file: Option<PathBuf>,
|
|
) -> Result<()> {
|
|
let model = get_initial_model(&model_file)?;
|
|
// Pre-configure will happen inside the TUI via the wizard
|
|
// For now, pass import_value and let the wizard handle it
|
|
// TODO: pass config to wizard for pre-population
|
|
run_tui(model, model_file, Some(import_value))
|
|
}
|
|
|
|
// ── Import data loading ──────────────────────────────────────────────────────
|
|
|
|
fn get_import_data(paths: &[PathBuf]) -> Option<Value> {
|
|
let all_csv = paths.iter().all(|p| csv_path_p(p));
|
|
|
|
if paths.len() > 1 {
|
|
if !all_csv {
|
|
eprintln!("Multi-file import only supports CSV files");
|
|
return None;
|
|
}
|
|
match crate::import::csv_parser::merge_csvs(paths) {
|
|
Ok(records) => Some(Value::Array(records)),
|
|
Err(e) => {
|
|
eprintln!("CSV merge error: {e}");
|
|
None
|
|
}
|
|
}
|
|
} else {
|
|
let path = &paths[0];
|
|
match std::fs::read_to_string(path) {
|
|
Err(e) => {
|
|
eprintln!("Cannot read '{}': {e}", path.display());
|
|
None
|
|
}
|
|
Ok(content) => {
|
|
if csv_path_p(path) {
|
|
match crate::import::csv_parser::parse_csv(path) {
|
|
Ok(records) => Some(Value::Array(records)),
|
|
Err(e) => {
|
|
eprintln!("CSV parse error: {e}");
|
|
None
|
|
}
|
|
}
|
|
} else {
|
|
match serde_json::from_str::<Value>(&content) {
|
|
Err(e) => {
|
|
eprintln!("JSON parse error: {e}");
|
|
None
|
|
}
|
|
Ok(json) => Some(json),
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
// ── Headless command execution ───────────────────────────────────────────────
|
|
|
|
fn run_headless_commands(cmds: &[String], file: &Option<PathBuf>) -> Result<()> {
|
|
let mut model = get_initial_model(file)?;
|
|
let mut exit_code = 0;
|
|
|
|
for line in cmds {
|
|
let parsed = match command::parse_line(line) {
|
|
Ok(cmds) => cmds,
|
|
Err(e) => {
|
|
let r = CommandResult::err(format!("Parse error: {e}"));
|
|
println!("{}", serde_json::to_string(&r)?);
|
|
exit_code = 1;
|
|
continue;
|
|
}
|
|
};
|
|
for cmd in &parsed {
|
|
let result = command::dispatch(&mut model, cmd);
|
|
if !result.ok {
|
|
exit_code = 1;
|
|
}
|
|
println!("{}", serde_json::to_string(&result)?);
|
|
}
|
|
}
|
|
|
|
if let Some(path) = file {
|
|
persistence::save(&model, path)?;
|
|
}
|
|
|
|
std::process::exit(exit_code);
|
|
}
|
|
|
|
fn run_headless_script(script_path: &PathBuf, file: &Option<PathBuf>) -> Result<()> {
|
|
let content = std::fs::read_to_string(script_path)?;
|
|
let lines: Vec<String> = content.lines().map(String::from).collect();
|
|
run_headless_commands(&lines, file)
|
|
}
|
|
|
|
// ── Helpers ──────────────────────────────────────────────────────────────────
|
|
|
|
fn get_initial_model(file_path: &Option<PathBuf>) -> Result<Model> {
|
|
if let Some(ref path) = file_path {
|
|
if path.exists() {
|
|
let mut m = persistence::load(path)
|
|
.with_context(|| format!("Failed to load {}", path.display()))?;
|
|
m.normalize_view_state();
|
|
Ok(m)
|
|
} else {
|
|
let name = path
|
|
.file_stem()
|
|
.and_then(|s| s.to_str())
|
|
.unwrap_or("New Model")
|
|
.to_string();
|
|
Ok(Model::new(name))
|
|
}
|
|
} else {
|
|
Ok(Model::new("New Model"))
|
|
}
|
|
}
|