feat(ui): add LoadModel and ImportJsonHeadless effects

Add two new effects for headless model operations:

- LoadModel: Loads a model from a file path, replacing the current one
- ImportJsonHeadless: Imports JSON/CSV files via the analyzer, builds
  a new model from detected fields, and replaces the current model

These effects enable headless mode to load and import data without
interactive prompts. ImportJsonHeadless handles both CSV and JSON
files, auto-detects array paths, and uses the existing import pipeline.

Co-Authored-By: fiddlerwoaroof/git-smart-commit (unsloth/Qwen3.5-35B-A3B-GGUF:Q5_K_M)
This commit is contained in:
Edward Langley
2026-04-04 10:56:35 -07:00
parent 64ab352490
commit 00c62d85b7

View File

@ -510,6 +510,137 @@ impl Effect for ExportCsv {
}
}
/// Load a model from a file, replacing the current one.
///
/// The single public field is the filesystem path of the persisted model
/// to load; it is passed straight to `crate::persistence::load`.
#[derive(Debug)]
pub struct LoadModel(pub PathBuf);
impl Effect for LoadModel {
    /// Attempt to load a model from `self.0`. On success the loaded model
    /// (with its view state normalized) replaces `app.model`; on failure the
    /// current model is left untouched. Either way a status message is set.
    fn apply(&self, app: &mut App) {
        app.status_msg = match crate::persistence::load(&self.0) {
            Ok(mut model) => {
                model.normalize_view_state();
                app.model = model;
                format!("Loaded from {}", self.0.display())
            }
            Err(err) => format!("Load error: {err}"),
        };
    }
}
/// Headless JSON/CSV import: read file, analyze, build model, replace current.
#[derive(Debug)]
pub struct ImportJsonHeadless {
    /// File to import. A (case-insensitive) `.csv` extension selects the CSV
    /// parser; any other extension is treated as JSON.
    pub path: PathBuf,
    /// Name for the imported model; falls back to "Imported Model" when `None`.
    pub model_name: Option<String>,
    /// Path to the records array inside a JSON document. When `None` or
    /// empty, a top-level array is used, or failing that the first
    /// auto-detected array path.
    pub array_path: Option<String>,
}
impl Effect for ImportJsonHeadless {
    /// Run the headless import: read the file at `self.path`, extract its
    /// record array, analyze the records into field proposals, build a model
    /// via the existing import pipeline, and replace `app.model`.
    ///
    /// Every failure path reports through `app.status_msg` and returns early,
    /// leaving the current model untouched.
    fn apply(&self, app: &mut App) {
        use crate::import::analyzer::{
            analyze_records, extract_array_at_path, find_array_paths, FieldKind,
        };
        use crate::import::wizard::ImportPipeline;

        // Format is decided purely by file extension (case-insensitive).
        let is_csv = self
            .path
            .extension()
            .is_some_and(|ext| ext.eq_ignore_ascii_case("csv"));

        // Produce both the record array and the raw document handed to the
        // pipeline in one pass. For JSON the file is read and parsed exactly
        // once and the parsed document is reused as `raw` — the previous
        // implementation re-read and re-parsed the file a second time, with
        // errors silently swallowed by `unwrap_or_default()`.
        let (records, raw) = if is_csv {
            match crate::import::csv_parser::parse_csv(&self.path) {
                Ok(recs) => {
                    // CSV has no underlying JSON document; synthesize one
                    // from the parsed records (matches prior behavior).
                    let raw = serde_json::Value::Array(recs.clone());
                    (recs, raw)
                }
                Err(e) => {
                    app.status_msg = format!("CSV error: {e}");
                    return;
                }
            }
        } else {
            let content = match std::fs::read_to_string(&self.path) {
                Ok(c) => c,
                Err(e) => {
                    app.status_msg = format!("Cannot read '{}': {e}", self.path.display());
                    return;
                }
            };
            let value: serde_json::Value = match serde_json::from_str(&content) {
                Ok(v) => v,
                Err(e) => {
                    app.status_msg = format!("JSON parse error: {e}");
                    return;
                }
            };
            // Locate the records array, in priority order: explicit
            // non-empty `array_path` > top-level array > first
            // auto-detected array path.
            let records = if let Some(ap) = self.array_path.as_deref().filter(|s| !s.is_empty()) {
                match extract_array_at_path(&value, ap) {
                    Some(arr) => arr.clone(),
                    None => {
                        app.status_msg = format!("No array at path '{ap}'");
                        return;
                    }
                }
            } else if let Some(arr) = value.as_array() {
                arr.clone()
            } else {
                let paths = find_array_paths(&value);
                if let Some(first) = paths.first() {
                    match extract_array_at_path(&value, first) {
                        Some(arr) => arr.clone(),
                        None => {
                            app.status_msg = "Could not extract records array".to_string();
                            return;
                        }
                    }
                } else {
                    app.status_msg = "No array found in JSON".to_string();
                    return;
                }
            };
            (records, value)
        };

        let proposals = analyze_records(&records);
        let pipeline = ImportPipeline {
            raw,
            array_paths: vec![],
            selected_path: self.array_path.as_deref().unwrap_or("").to_string(),
            records,
            // Auto-accept every detected field except pure labels.
            proposals: proposals
                .into_iter()
                .map(|mut p| {
                    p.accepted = p.kind != FieldKind::Label;
                    p
                })
                .collect(),
            model_name: self
                .model_name
                .as_deref()
                .unwrap_or("Imported Model")
                .to_string(),
            formulas: vec![],
        };
        match pipeline.build_model() {
            Ok(new_model) => {
                app.model = new_model;
                app.status_msg = "Imported successfully".to_string();
            }
            Err(e) => {
                app.status_msg = format!("Import error: {e}");
            }
        }
    }
}
#[derive(Debug)]
pub struct SetPanelOpen {
pub panel: Panel,