feat(crustex): initial commit

This commit is contained in:
2026-01-14 15:13:51 -05:00
commit 7a73fe9752
8 changed files with 1081 additions and 0 deletions

6
.gitignore vendored Normal file
View File

@@ -0,0 +1,6 @@
/target
Cargo.lock
*.pdf
*.tex
config.toml
.idea/

17
Cargo.toml Normal file
View File

@@ -0,0 +1,17 @@
[package]
name = "crustex"
version = "0.1.0"
edition = "2024"
publish = ["gitea"]
[dependencies]
toml = "0.9.8"
serde = { version = "1.0", features = ["derive"] }
which = "8.0.0"
clap = { version = "4.5.54", features = ["derive"] }
sha2 = "0.10.9"
hex-literal = "1.1.0"
git2 = "0.20.3"
dirs = "6.0.0"
chrono = "0.4.42"
ssh2-config = "0.6.5"

12
readme.md Normal file
View File

@@ -0,0 +1,12 @@
# crustex
A simple, Rust-based LaTeX meta-build system. crustex is in very early development.
## Commands
- init
- setup
- compile
- reconfigure
- clear
- describe
- publish
- template
- version

142
src/compile/mod.rs Normal file
View File

@@ -0,0 +1,142 @@
use crate::graph::TeXFile;
use crate::config::Template;
use crate::config::Stages;
use which::which;
use std::collections::HashMap;
use std::path::PathBuf;
use std::process::Command;
/// Return `true` when `compiler` resolves to an executable on the PATH.
///
/// Takes `&str` rather than `&String` per Rust API convention; existing
/// call sites passing `&String` continue to work via deref coercion.
fn check_for_compiler(compiler: &str) -> bool {
    which(compiler).is_ok()
}
/// Validate the configured compilers and run every configured stage in order.
///
/// Skips the build entirely when the output PDF is already newer than every
/// tracked source. Exits the process with status 1 when a required compiler
/// is missing from PATH or when any stage's command fails.
///
/// # Panics
/// Panics if the `[compile]` section, its `stages` field, or a compiler
/// needed by a listed stage is absent from the configuration.
pub fn compile_dependency_graph(
    dep_graph: &HashMap<TeXFile, Vec<TeXFile>>,
    template: &Template
) {
    let compile = template.compile.as_ref().expect("missing [compile] section in configuration");
    let stages = compile.stages.as_ref().expect("missing 'stages' in [compile] section");
    // Only require the compilers for stages that will actually run.
    // Previously both compilers were unwrapped unconditionally, so e.g. a
    // pure-LaTeX config without a bibtex_compiler would panic.
    if stages.contains(&Stages::Latex) {
        let latex_compiler = compile.latex_compiler.as_ref().expect("missing 'latex_compiler' in [compile] section");
        if !check_for_compiler(latex_compiler) {
            eprintln!("LaTeX compiler '{}' not found in PATH.", latex_compiler);
            std::process::exit(1);
        }
    }
    if stages.contains(&Stages::Bibtex) {
        let bibtex_compiler = compile.bibtex_compiler.as_ref().expect("missing 'bibtex_compiler' in [compile] section");
        if !check_for_compiler(bibtex_compiler) {
            eprintln!("BibTeX compiler '{}' not found in PATH.", bibtex_compiler);
            std::process::exit(1);
        }
    }
    if !needs_recompile(dep_graph, template) {
        return;
    }
    // Materialize each stage into an argv before running anything.
    let stage_commands: Vec<Vec<String>> = stages.iter().map(|stage| {
        match stage {
            Stages::Latex => format_latex_command(template),
            Stages::Bibtex => format_bibtex_command(template),
        }
    }).collect();
    if cfg!(target_os = "windows") {
        panic!("Windows is not supported yet.");
    }
    for command_args in stage_commands {
        let mut cmd = Command::new(&command_args[0]);
        // An empty slice of extra args is a no-op, so no length guard needed.
        cmd.args(&command_args[1..]);
        let output = cmd.output().expect("Failed to execute command");
        if !output.status.success() {
            eprintln!("Command '{:?}' failed with status: {}", command_args, output.status);
            eprintln!("Stdout: {}", String::from_utf8_lossy(&output.stdout));
            eprintln!("Stderr: {}", String::from_utf8_lossy(&output.stderr));
            std::process::exit(1);
        }
    }
}
/// Decide whether any tracked source file is newer than the output PDF.
///
/// Returns `true` when the output PDF is missing/unreadable or is older
/// than any node or edge file in the dependency graph.
///
/// # Panics
/// Panics if a tracked source file's mtime cannot be read.
fn needs_recompile(dep_graph: &HashMap<TeXFile, Vec<TeXFile>>, template: &Template) -> bool {
    // BUG FIX: the output name was previously built with `{:?}` on the
    // Option<String> job name, yielding names like `Some("paper").pdf` or
    // `None.pdf`, so the staleness check never looked at the real output.
    // Fall back to the main file's stem, matching LaTeX's default job name.
    let job_name = template.config.job_name.clone().unwrap_or_else(|| {
        std::path::Path::new(&template.config.main_file)
            .file_stem()
            .and_then(|s| s.to_str())
            .unwrap_or(&template.config.main_file)
            .to_string()
    });
    let output_pdf = format!("{}.pdf", job_name);
    // A missing or unreadable output always forces a build. (The old code
    // skipped all comparisons — and thus skipped the rebuild — when the
    // output existed but its metadata could not be read.)
    let last_modified_output = match std::fs::metadata(&output_pdf).and_then(|meta| meta.modified()) {
        Ok(time) => time,
        Err(_) => return true,
    };
    let newer_than_output = |file: &TeXFile| {
        std::fs::metadata(&file.filename)
            .and_then(|meta| meta.modified())
            .expect("Failed to get last-modified time for TeX file")
            > last_modified_output
    };
    dep_graph
        .iter()
        .any(|(node, edges)| newer_than_output(node) || edges.iter().any(|sub| newer_than_output(sub)))
}
/// Build the argv for one LaTeX pass: compiler, configured flags, an
/// optional `-jobname <name>` pair, then the main file.
///
/// # Panics
/// Panics if the `[compile]` section or its `latex_compiler` is absent.
fn format_latex_command(template: &Template) -> Vec<String> {
    let compile = template.compile.as_ref();
    let mut argv = vec![compile.and_then(|c| c.latex_compiler.as_ref()).unwrap().clone()];
    if let Some(flags) = compile.and_then(|c| c.compiler_flags.clone()) {
        argv.extend(flags);
    }
    if let Some(job_name) = template.config.job_name.as_ref() {
        argv.push("-jobname".to_string());
        argv.push(job_name.clone());
    }
    argv.push(template.config.main_file.clone());
    argv
}
/// Build the argv for the BibTeX pass.
///
/// BibTeX must be given the aux-file base name. Previously a missing
/// `job_name` produced a bare `bibtex` invocation with no argument; we now
/// fall back to the main file's stem, which is the job name LaTeX uses when
/// `-jobname` is not passed (mirrors the Publish command's fallback).
///
/// # Panics
/// Panics if the `[compile]` section or its `bibtex_compiler` is absent.
fn format_bibtex_command(template: &Template) -> Vec<String> {
    let bibtex_compiler = template.compile.as_ref().and_then(|c| c.bibtex_compiler.as_ref()).unwrap();
    let mut command = vec![bibtex_compiler.clone()];
    let job_name = template.config.job_name.clone().unwrap_or_else(|| {
        std::path::Path::new(&template.config.main_file)
            .file_stem()
            .and_then(|s| s.to_str())
            .unwrap_or(&template.config.main_file)
            .to_string()
    });
    command.push(job_name);
    command
}

64
src/config/mod.rs Normal file
View File

@@ -0,0 +1,64 @@
use serde::Deserialize;
use std::fs;
use std::path::Path;
/// How an `[extra]` resource is fetched. Deserialized from lowercase TOML
/// strings ("git", "ssh", "local").
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Mode {
    Git,
    SSH,
    Local
}
/// One compilation pass kind; the `stages` list gives the execution order
/// (e.g. ["latex", "bibtex", "latex", "latex"]).
#[derive(Debug, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Stages {
    Latex,
    Bibtex
}
/// The required `[config]` table of crustex.toml.
#[derive(Debug, Deserialize)]
pub struct Config {
    // Path of the root .tex file.
    pub main_file: String,
    // Output job name; callers fall back to the main file's stem when absent.
    pub job_name: Option<String>,
    // Build directory; callers default to "build" when absent.
    pub build_dir: Option<String>,
    // Where `publish` copies the final PDF; callers default to "." when absent.
    pub results_dir: Option<String>,
}
/// The optional `[compile]` table: which binaries to run, with what flags,
/// and in what stage order.
#[derive(Debug, Deserialize)]
pub struct Compile {
    pub latex_compiler: Option<String>,
    pub bibtex_compiler: Option<String>,
    pub compiler_flags: Option<Vec<String>>,
    pub stages: Option<Vec<Stages>>
}
/// Conventional defaults: pdflatex/bibtex with the classic
/// latex -> bibtex -> latex -> latex pass sequence.
/// NOTE(review): nothing in the visible code consults this Default yet —
/// confirm it is wired in (e.g. via `unwrap_or_default`) where `[compile]`
/// is missing.
impl Default for Compile {
    fn default() -> Self {
        Compile {
            latex_compiler: Some("pdflatex".to_string()),
            bibtex_compiler: Some("bibtex".to_string()),
            compiler_flags: Some(Vec::new()),
            stages: Some(vec![Stages::Latex, Stages::Bibtex, Stages::Latex, Stages::Latex])
        }
    }
}
/// The optional `[extra]` table: an additional resource given by fetch
/// method and URI.
#[derive(Debug, Deserialize)]
pub struct Extra {
    pub method: Mode,
    pub uri: String
}
/// Root of a parsed crustex.toml file.
#[derive(Debug, Deserialize)]
pub struct Template {
    pub config: Config,
    pub compile: Option<Compile>,
    pub extra: Option<Extra>
}
/// Read and deserialize a crustex TOML configuration file into a [`Template`].
///
/// # Errors
/// Returns an error if the file cannot be read or the TOML fails to parse.
pub fn load_config<P: AsRef<Path>>(path: P) -> Result<Template, Box<dyn std::error::Error>> {
    let raw = fs::read_to_string(path)?;
    Ok(toml::from_str::<Template>(&raw)?)
}

256
src/graph/mod.rs Normal file
View File

@@ -0,0 +1,256 @@
use std::alloc::System;
use std::iter::Peekable;
use std::str::Chars;
use std::path::{Path,PathBuf};
use std::collections::HashMap;
use std::time::SystemTime;
/// Lexical token of a (very small subset of) LaTeX source.
#[derive(Debug, PartialEq, Clone)]
pub enum Token {
    // A control sequence: the name after a backslash (single non-letter
    // characters are their own one-char command).
    Command(String),
    LBrace,
    RBrace,
    LBracket,
    RBracket,
    // A run of ordinary characters, up to the next special character.
    Text(String),
    // A `%` comment; the lexer has already discarded the commented text.
    Comment
}
/// One file-inclusion command found in a LaTeX source file.
#[derive(Debug)]
pub struct Include{
    // The command name that referenced the file (e.g. "input", "includegraphics").
    pub command: String,
    // The argument interpreted as a path, exactly as written in the source.
    pub filename: PathBuf,
    // Whether that path existed on disk at extraction time.
    pub exists: bool,
}
/// A tracked source file: path plus the mtime observed when the dependency
/// graph was built. Hash/Eq include the timestamp, so two snapshots of the
/// same path at different mtimes are distinct keys.
#[derive(Eq, Hash, PartialEq, Debug, Clone)]
pub struct TeXFile {
    pub filename: PathBuf,
    pub last_modified: SystemTime,
}
/// Streaming lexer over LaTeX source; yields [`Token`]s via its `Iterator` impl.
pub struct LatexLexer<'a> {
    // One-character lookahead over the input.
    chars: Peekable<Chars<'a>>,
}
impl<'a> LatexLexer<'a> {
pub fn new(input: &'a str) -> Self {
Self {
chars: input.chars().peekable(),
}
}
fn read_command(&mut self) -> String {
let mut name = String::new();
if let Some(&c) = self.chars.peek() {
if !c.is_alphabetic() {
self.chars.next();
name.push(c);
return name;
}
}
while let Some(&c) = self.chars.peek() {
if c.is_alphabetic() {
name.push(c);
self.chars.next();
} else {
break;
}
}
name
}
fn read_text(&mut self) -> String {
let mut text = String::new();
while let Some(&c) = self.chars.peek() {
match c {
'\\' | '{' | '}' | '[' | ']' | '%' => break,
_ => {
text.push(c);
self.chars.next();
}
}
}
text
}
}
impl<'a> Iterator for LatexLexer<'a> {
    type Item = Token;
    /// Produce the next token, or `None` at end of input.
    fn next(&mut self) -> Option<Self::Item> {
        let first = self.chars.next()?;
        let token = match first {
            '\\' => Token::Command(self.read_command()),
            '{' => Token::LBrace,
            '}' => Token::RBrace,
            '[' => Token::LBracket,
            ']' => Token::RBracket,
            '%' => {
                // Swallow the comment body up to (not including) the newline.
                while self.chars.peek().map_or(false, |&c| c != '\n') {
                    self.chars.next();
                }
                Token::Comment
            }
            other => {
                let mut text = String::from(other);
                text.push_str(&self.read_text());
                Token::Text(text)
            }
        };
        Some(token)
    }
}
/// True for commands whose argument names a file this project depends on.
/// NOTE(review): `build_dependency_map` also matches "import"/"subimport",
/// which this predicate never lets through — confirm whether those should
/// be recognized here as well.
fn is_include_command(cmd: &str) -> bool {
    match cmd {
        "input" | "include" | "subfile" | "includegraphics" => true,
        "bibliography" | "addbibresource" | "documentclass" => true,
        _ => false,
    }
}
/// Concatenate token text until the brace that closes the current group.
///
/// The opening `{` is assumed to have been consumed already; nested groups
/// are tracked by depth and their braces are reproduced in the output.
/// Returns the trimmed contents, or `None` if the stream ends before the
/// group is closed. Bracket and comment tokens are dropped.
fn collect_text_until_brace<I>(tokens: &mut I) -> Option<String>
where
    I: Iterator<Item = Token>
{
    let mut collected = String::new();
    let mut depth = 1;
    for token in tokens {
        match token {
            Token::LBrace => {
                depth += 1;
                collected.push('{');
            }
            Token::RBrace => {
                depth -= 1;
                if depth == 0 {
                    return Some(collected.trim().to_string());
                }
                collected.push('}');
            }
            Token::Text(t) => collected.push_str(&t),
            Token::Command(c) => {
                collected.push('\\');
                collected.push_str(&c);
            }
            _ => {}
        }
    }
    None
}
/// Discard tokens until one equal to `target` has been consumed (or the
/// stream ends). The matching token itself is consumed.
fn skip_until_match<I>(tokens: &mut I, target: &Token)
where
    I: Iterator<Item = Token>
{
    while let Some(token) = tokens.next() {
        if token == *target {
            return;
        }
    }
}
/// Extract the mandatory braced argument of an include-like command.
///
/// Skips one or more optional `[...]` arguments and whitespace-only text,
/// then returns the contents of the first `{...}` group. Any other token —
/// or end of input — yields `None`.
fn parse_arguments<I>(tokens: &mut I) -> Option<String>
where
    I: Iterator<Item = Token>
{
    loop {
        match tokens.next()? {
            Token::LBrace => return collect_text_until_brace(tokens),
            Token::LBracket => skip_until_match(tokens, &Token::RBracket),
            Token::Text(t) if t.trim().is_empty() => {}
            _ => return None,
        }
    }
}
/// Scan LaTeX source and collect every file-inclusion command together with
/// its (first) braced argument and whether that path exists on disk.
pub fn extract_includes(input: &str) -> Vec<Include> {
    let mut includes = Vec::new();
    // Comments carry no dependency information; drop them up front.
    let mut tokens = LatexLexer::new(input).filter(|t| !matches!(t, Token::Comment));
    while let Some(token) = tokens.next() {
        let Token::Command(cmd_name) = token else { continue };
        if !is_include_command(&cmd_name) {
            continue;
        }
        if let Some(filename) = parse_arguments(&mut tokens) {
            let path = PathBuf::from(&filename);
            includes.push(Include {
                command: cmd_name,
                exists: path.exists(),
                filename: path,
            });
        }
    }
    includes
}
/// Recursively build a map from each TeX file to the files it directly includes.
///
/// The map always contains `filepath` as a key; every TeX-source include is
/// recursed into and its entries merged in, while leaf assets (graphics,
/// bibliographies) are recorded as direct dependencies only. A missing
/// include aborts the process, except for `documentclass`, whose argument is
/// assumed to be a system class when no local file matches.
///
/// NOTE(review): include paths are resolved against the current working
/// directory, not against the including file's directory — confirm intended.
///
/// # Panics
/// Panics when the main file cannot be read or a file's mtime is unreadable.
pub fn build_dependency_map(filepath: &PathBuf) -> HashMap<TeXFile, Vec<TeXFile>> {
    // Build a TeXFile (path + mtime) for an on-disk file. BUG FIX: the old
    // code passed a format template straight to `expect`, so messages like
    // "{include.filename:?}" were printed literally; panic with a properly
    // formatted message instead.
    fn stat_tex_file(path: &PathBuf) -> TeXFile {
        let last_modified = std::fs::metadata(path)
            .and_then(|meta| meta.modified())
            .unwrap_or_else(|e| panic!("Failed to get last-modified time for file {:?}: {}", path, e));
        TeXFile {
            filename: path.clone(),
            last_modified,
        }
    }
    let input = std::fs::read_to_string(filepath).expect("Failed to read LaTeX main file");
    let main_tex_file = stat_tex_file(filepath);
    let includes = extract_includes(&input);
    let mut dep_map: HashMap<TeXFile, Vec<TeXFile>> = HashMap::new();
    dep_map.insert(main_tex_file.clone(), Vec::new());
    for include in includes {
        // A documentclass with no matching local file is a system class
        // (e.g. `article`) and not part of this project's graph.
        if !include.exists && include.command == "documentclass" {
            continue;
        }
        let include_path = &include.filename;
        if !include_path.exists() {
            eprintln!("Error: Included file {:?} does not exist, halting...", include_path);
            std::process::exit(1);
        }
        let sub_file = stat_tex_file(include_path);
        match include.command.as_str() {
            "input" | "include" | "subfile" | "import" | "subimport" | "documentclass" => {
                // Recurse into TeX-source includes and merge their graphs.
                let sub_dep_map = build_dependency_map(include_path);
                for (key, value) in sub_dep_map {
                    dep_map.entry(key).or_insert_with(Vec::new).extend(value);
                }
                dep_map.entry(main_tex_file.clone()).and_modify(|v| v.push(sub_file));
            }
            _ => {
                // Leaf asset: record as a direct dependency only.
                dep_map.entry(main_tex_file.clone()).and_modify(|v| v.push(sub_file));
            }
        }
    }
    dep_map
}

484
src/main.rs Normal file
View File

@@ -0,0 +1,484 @@
mod config;
mod utils;
mod graph;
mod compile;
use std::path::PathBuf;
use crate::config::load_config;
use crate::graph::build_dependency_map;
use crate::compile::compile_dependency_graph;
use crate::utils::{hash_file, verify_against_binlock, clone_dep_graph_structure, get_host_from_url, get_ssh_key_for_host};
use git2::{Repository, Cred, RemoteCallbacks, Error};
use clap::Parser;
use clap::Subcommand;
// Top-level CLI parser. CONSISTENCY FIX: the version was hard-coded as
// "1.0" while Cargo.toml says 0.1.0 and the `version` subcommand prints
// CARGO_PKG_VERSION — derive it from the crate metadata so `--version`
// and `crustex version` always agree.
#[derive(Parser)]
#[command(name = "crustex LaTeX Build Tool", version = env!("CARGO_PKG_VERSION"), author = "Emily M. Boudreaux", about = "Simplified LaTeX builds.")]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}
// All top-level subcommands. (Plain `//` comments are used deliberately:
// `///` doc comments on clap items would become user-visible help text.)
#[derive(Subcommand)]
enum Commands {
    // Create the build directory from a config file and write a hash lock.
    Setup {
        config_file: PathBuf,
        #[arg(long, short)]
        overwrite: bool
    },
    // Verify the lock, mirror sources into the build dir, and run the build.
    Compile {
        build_dir: PathBuf,
    },
    // Re-copy the config into an existing build dir and refresh the lock.
    Reconfigure {
        config_file: PathBuf,
    },
    // Delete everything in the build dir except crustex.toml and the lock.
    Clear {
        build_dir: PathBuf,
    },
    // Pretty-print the parsed configuration found in a build dir.
    Describe {
        build_dir: PathBuf,
    },
    // Copy the built PDF from the build dir to the configured results dir.
    Publish {
        build_dir: PathBuf,
    },
    // Create a new project, optionally cloned from a registered or remote template.
    Init {
        project_name: String,
        #[arg(long, short = 'd')]
        project_dir: Option<PathBuf>,
        #[arg(long, short = 'u')]
        template_url: Option<String>,
        #[arg(long, short = 't')]
        template_name: Option<String>,
    },
    // Manage the user-level template registry (~/.config/crustex/templates.toml).
    Template {
        #[command(subcommand)]
        command: TemplateCommands
    },
    // Print the crate version.
    Version {}
}
// Subcommands for the template registry stored in
// ~/.config/crustex/templates.toml. (Plain `//` comments so clap help
// output is unchanged.)
#[derive(Subcommand)]
enum TemplateCommands {
    // Append a named template with its clone URL to the registry.
    Register {
        template_name: String,
        template_url: String,
    },
    // Print every registered template with URL and registration date.
    List,
    // Dump the raw registry entry for one template.
    Inspect {
        template_name: String,
    },
    // Delete a template from the registry.
    Remove {
        template_name: String,
    },
    // Print only the URL of one template.
    GetUrl {
        template_name: String,
    },
    // Overwrite the URL of an existing template.
    SetUrl {
        template_name: String,
        template_url: String,
    },
}
/// Entry point: parse the CLI, prepare git fetch options whose credential
/// callback prefers a host-specific SSH key from ~/.ssh/config, then
/// dispatch on the chosen subcommand.
fn main() {
    let cli = Cli::parse();
    // Credential callback used when cloning templates: try the SSH key
    // configured for the remote's host, otherwise fall back to git2's
    // default credential resolution.
    let mut callbacks = RemoteCallbacks::new();
    callbacks.credentials(|_url, username_from_url, _allowed_types| {
        let username = username_from_url.unwrap_or("git");
        let host = get_host_from_url(_url).unwrap_or_default();
        if let Some(ssh_key_path) = get_ssh_key_for_host(&host) {
            println!("Using credentials from url: {} (key path: {})", host, ssh_key_path.display());
            return Cred::ssh_key(
                username,
                None,
                &ssh_key_path,
                None
            );
        }
        Cred::default()
    });
    let mut fo = git2::FetchOptions::new();
    fo.remote_callbacks(callbacks);
    match &cli.command {
        // setup: create (or overwrite) the build directory, copy the config
        // into it, and record the config's SHA-256 in .crustex.lock.
        Commands::Setup { config_file, overwrite } => {
            let template = match load_config(config_file) {
                Ok(cfg) => cfg,
                Err(e) => {
                    eprintln!("Failed to load config: {}", e);
                    return;
                }
            };
            // Build directory from config, defaulting to "build".
            let mut build_path = PathBuf::new();
            if template.config.build_dir.is_some() {
                let build_dir = template.config.build_dir.unwrap();
                build_path = PathBuf::from(build_dir);
            } else {
                build_path = PathBuf::from("build");
            }
            if build_path.exists() && overwrite != &true {
                eprintln!("Build directory {:?} already exists. Use --overwrite to overwrite.", build_path);
                std::process::exit(1);
            } else if build_path.exists() && overwrite == &true {
                // Overwrite = wipe and recreate the whole build tree.
                std::fs::remove_dir_all(&build_path).unwrap();
                std::fs::create_dir_all(&build_path).unwrap();
            } else {
                std::fs::create_dir_all(&build_path).unwrap();
            }
            let dest_config_path = build_path.join("crustex.toml");
            std::fs::copy(config_file, dest_config_path).unwrap();
            // The lock file holds the raw 32-byte SHA-256 of the config so a
            // later `compile` can detect out-of-band edits.
            let file_hash = hash_file(config_file).unwrap();
            let hash_path = build_path.join(".crustex.lock");
            std::fs::write(hash_path, file_hash).unwrap();
        }
        // compile: refuse to build if the config changed since setup, mirror
        // the source tree into the build dir, then compile inside it.
        Commands::Compile { build_dir } => {
            let verified = verify_against_binlock(
                &build_dir.join("crustex.toml"),
                &build_dir.join(".crustex.lock")
            ).and_then(|v| Ok(v)).unwrap();
            if !verified {
                eprintln!("Configuration file has been modified since setup. Please reconfigure.");
                std::process::exit(1);
            }
            let template = match load_config(build_dir.join("crustex.toml")) {
                Ok(cfg) => cfg,
                Err(e) => {
                    eprintln!("Failed to load config: {}", e);
                    return;
                }
            };
            let dep_graph = build_dependency_map(&PathBuf::from(&template.config.main_file));
            clone_dep_graph_structure(&dep_graph, &build_dir);
            // Change the current working directory to the build directory
            // so relative paths inside the LaTeX sources keep resolving.
            std::env::set_current_dir(&build_dir).unwrap();
            compile_dependency_graph(&dep_graph, &template);
        }
        // reconfigure: like setup, but requires the build directory to
        // already exist; re-copies the config and refreshes the lock.
        Commands::Reconfigure { config_file } => {
            let template = match load_config(config_file) {
                Ok(cfg) => cfg,
                Err(e) => {
                    eprintln!("Failed to load config: {}", e);
                    return;
                }
            };
            let mut build_path = PathBuf::new();
            if template.config.build_dir.is_some() {
                let build_dir = template.config.build_dir.unwrap();
                build_path = PathBuf::from(build_dir);
            } else {
                build_path = PathBuf::from("build");
            }
            if !build_path.exists() {
                eprintln!("Build directory {:?} does not exist. Please run setup first.", build_path);
                std::process::exit(1);
            }
            let dest_config_path = build_path.join("crustex.toml");
            std::fs::copy(config_file, dest_config_path).unwrap();
            let file_hash = hash_file(config_file).unwrap();
            let hash_path = build_path.join(".crustex.lock");
            std::fs::write(hash_path, file_hash).unwrap();
        }
        // clear: remove everything in the build dir except the config copy
        // and its lock file.
        Commands::Clear { build_dir } => {
            let entries = std::fs::read_dir(build_dir).unwrap();
            for entry in entries {
                let entry = entry.unwrap();
                let path = entry.path();
                if path.file_name().unwrap() != "crustex.toml" && path.file_name().unwrap() != ".crustex.lock" {
                    if path.is_dir() {
                        std::fs::remove_dir_all(path).unwrap();
                    } else {
                        std::fs::remove_file(path).unwrap();
                    }
                }
            }
        }
        // describe: pretty-print the parsed configuration of a build dir.
        Commands::Describe { build_dir } => {
            let template = match load_config(build_dir.join("crustex.toml")) {
                Ok(cfg) => cfg,
                Err(e) => {
                    eprintln!("Failed to load config: {}", e);
                    return;
                }
            };
            println!("{:#?}", template);
        }
        // publish: copy the built PDF out of the build dir into results_dir
        // (defaulting the job name to the main file's stem, and results_dir
        // to the current directory).
        Commands::Publish { build_dir } => {
            let template = match load_config(build_dir.join("crustex.toml")) {
                Ok(cfg) => cfg,
                Err(e) => {
                    eprintln!("Failed to load config: {}", e);
                    return;
                }
            };
            let main_file_path = PathBuf::from(&template.config.main_file);
            let main_file_stem = main_file_path.file_stem().and_then(|s| s.to_str()).unwrap();
            let job_name = &template.config.job_name.unwrap_or(main_file_stem.to_string());
            let output_pdf = format!("{}.pdf", job_name);
            let results_dir = if template.config.results_dir.is_some() {
                PathBuf::from(template.config.results_dir.unwrap())
            } else {
                PathBuf::from(".")
            };
            if !results_dir.exists() {
                std::fs::create_dir_all(&results_dir).unwrap();
            }
            let dest_path = results_dir.join(&output_pdf);
            let src_path = build_dir.join(&output_pdf);
            std::fs::copy(&src_path, &dest_path).unwrap();
        }
        // init: create a project directory (optionally cloned from a
        // registered template or a URL), write default config + main file,
        // and make an initial git commit.
        Commands::Init { project_name, project_dir, template_url, template_name } => {
            let dir_path = if let Some(dir) = project_dir {
                dir.clone()
            } else {
                PathBuf::from(".").join(project_name)
            };
            if dir_path.exists() {
                eprintln!("Directory {:?} already exists. Aborting.", dir_path);
                std::process::exit(1);
            }
            std::fs::create_dir_all(&dir_path).unwrap();
            // A template can come from the registry OR a URL, not both.
            if template_name.is_some() && template_url.is_some() {
                eprintln!("Please provide either a template name or a template URL, not both.");
                std::process::exit(1);
            }
            if (template_name.is_some() || template_url.is_some()) {
                let mut builder = git2::build::RepoBuilder::new();
                builder.fetch_options(fo);
                // Registered template: look its URL up in templates.toml.
                if let Some(template_name) = template_name {
                    let home_dir = dirs::home_dir().unwrap();
                    let config_dir = home_dir.join(".config").join("crustex");
                    if !config_dir.exists() {
                        eprintln!("Configuration directory / crustex.toml {:?} does not exist. Have you registered any templates? Aborting.", config_dir);
                        std::process::exit(1);
                    }
                    let templates_config_path = config_dir.join("templates.toml");
                    let templates_content = std::fs::read_to_string(&templates_config_path).unwrap();
                    let templates: toml::Value = toml::from_str(&templates_content).unwrap_or(toml::Value::Table(toml::map::Map::new()));
                    if let toml::Value::Table(table) = templates {
                        if let Some(details) = table.get(template_name) {
                            match builder.clone(
                                details.get("url").and_then(|v| v.as_str()).unwrap_or(""),
                                &dir_path
                            ) {
                                Ok(_) => {
                                    println!("Cloned template '{}' into {:?}", template_name, dir_path);
                                }
                                Err(E) => {
                                    eprintln!("Failed to clone template: {}", E);
                                    std::process::exit(1);
                                }
                            };
                        } else {
                            println!("Template '{}' not found.", template_name);
                        }
                    } else {
                        println!("No templates registered.");
                    }
                }
                // Direct URL: clone as-is.
                if let Some(template_url) = template_url {
                    match builder.clone(template_url, &dir_path) {
                        Ok(_) => {
                            println!("Cloned template from {} into {:?}", template_url, dir_path);
                        }
                        Err(E) => {
                            eprintln!("Failed to clone template: {}", E);
                            std::process::exit(1);
                        }
                    }
                }
                // Drop the template's history; the project gets a fresh repo.
                let git_dir = dir_path.join(".git");
                if git_dir.exists() {
                    std::fs::remove_dir_all(git_dir).unwrap();
                }
            }
            match Repository::init(&dir_path) {
                Ok(_) => {
                    println!("Initialized Git repository in {:?}", dir_path);
                }
                Err(e) => {
                    eprintln!("Failed to initialize Git repository: {}", e);
                    std::process::exit(1);
                }
            }
            // Scaffold a default config and a minimal main .tex file. The
            // raw-string contents are emitted verbatim into the new files.
            let main_tex_filename = format!("{}.tex", project_name);
            let default_config = format!(r#"[config]
main_file = "{}"
job_name = "{}"
build_dir = "build"
results_dir = "."
[compile]
latex_compiler = "pdflatex"
bibtex_compiler = "bibtex"
compiler_flags = ["-interaction=nonstopmode", "-halt-on-error"]
stages = ["latex", "bibtex", "latex", "latex"]
"#, main_tex_filename, project_name);
            let default_main_tex = r#"% Crustex LaTeX Project Main File
\documentclass{article}
\begin{document}
Hello, Crustex!
\end{document}
"#;
            std::fs::write(dir_path.join("crustex.toml"), default_config).unwrap();
            std::fs::write(dir_path.join(&main_tex_filename), default_main_tex).unwrap();
            println!("Initialized new Crustex project in {:?}", dir_path);
            // Stage the scaffolding and create the initial commit.
            let repo = Repository::open(&dir_path).unwrap();
            let mut index = repo.index().unwrap();
            index.add_path(std::path::Path::new("crustex.toml")).unwrap();
            index.add_path(std::path::Path::new(&main_tex_filename)).unwrap();
            index.write().unwrap();
            let oid = index.write_tree().unwrap();
            let signature = repo.signature().unwrap();
            let tree = repo.find_tree(oid).unwrap();
            repo.commit(Some("HEAD"), &signature, &signature, "Initial commit", &tree, &[]).unwrap();
        }
        // template: manage the user-level registry; entries are appended as
        // TOML tables and read back via the toml crate.
        Commands::Template { command } => {
            let home_dir = dirs::home_dir().unwrap();
            let config_dir = home_dir.join(".config").join("crustex");
            if !config_dir.exists() {
                std::fs::create_dir_all(&config_dir).unwrap();
            }
            // check if templates.toml exists, if not create it
            let templates_config_path = config_dir.join("templates.toml");
            if !templates_config_path.exists() {
                std::fs::write(&templates_config_path, "").unwrap();
            }
            match command {
                // Append a new [name] table with url + registration timestamp.
                TemplateCommands::Register { template_name, template_url } => {
                    let mut templates_content = std::fs::read_to_string(&templates_config_path).unwrap();
                    let new_entry = format!(r#"[{}]
url = "{}"
added = "{}"
"#, template_name, template_url, chrono::Utc::now().to_rfc3339());
                    templates_content.push_str(&new_entry);
                    std::fs::write(&templates_config_path, templates_content).unwrap();
                    println!("Registered template '{}' with URL '{}'.", template_name, template_url);
                }
                TemplateCommands::List => {
                    let templates_content = std::fs::read_to_string(&templates_config_path).unwrap();
                    let templates: toml::Value = toml::from_str(&templates_content).unwrap_or(toml::Value::Table(toml::map::Map::new()));
                    if let toml::Value::Table(table) = templates {
                        for (name, details) in table {
                            if let toml::Value::Table(detail_table) = details {
                                let url = detail_table.get("url").and_then(|v| v.as_str()).unwrap_or("N/A");
                                let added = detail_table.get("added").and_then(|v| v.as_str()).unwrap_or("N/A");
                                println!("Template: {}\n URL: {}\n Added: {}\n", name, url, added);
                            }
                        }
                    } else {
                        println!("No templates registered.");
                    }
                }
                TemplateCommands::Inspect { template_name } => {
                    let templates_content = std::fs::read_to_string(&templates_config_path).unwrap();
                    let templates: toml::Value = toml::from_str(&templates_content).unwrap_or(toml::Value::Table(toml::map::Map::new()));
                    if let toml::Value::Table(table) = templates {
                        if let Some(details) = table.get(template_name) {
                            println!("Details for template '{}':\n{:#?}", template_name, details);
                        } else {
                            println!("Template '{}' not found.", template_name);
                        }
                    } else {
                        println!("No templates registered.");
                    }
                }
                // Remove rewrites the whole file from the parsed table.
                TemplateCommands::Remove { template_name } => {
                    let templates_content = std::fs::read_to_string(&templates_config_path).unwrap();
                    let mut templates: toml::Value = toml::from_str(&templates_content).unwrap_or(toml::Value::Table(toml::map::Map::new()));
                    if let toml::Value::Table(ref mut table) = templates {
                        if table.remove(template_name).is_some() {
                            let updated_content = toml::to_string(&templates).unwrap();
                            std::fs::write(&templates_config_path, updated_content).unwrap();
                            println!("Removed template '{}'.", template_name);
                        } else {
                            println!("Template '{}' not found.", template_name);
                        }
                    } else {
                        println!("No templates registered.");
                    }
                }
                TemplateCommands::GetUrl { template_name } => {
                    let templates_content = std::fs::read_to_string(&templates_config_path).unwrap();
                    let templates: toml::Value = toml::from_str(&templates_content).unwrap_or(toml::Value::Table(toml::map::Map::new()));
                    if let toml::Value::Table(table) = templates {
                        if let Some(details) = table.get(template_name) {
                            if let toml::Value::Table(detail_table) = details {
                                let url = detail_table.get("url").and_then(|v| v.as_str()).unwrap_or("N/A");
                                println!("URL for template '{}': {}", template_name, url);
                            } else {
                                println!("Template '{}' details are malformed.", template_name);
                            }
                        } else {
                            println!("Template '{}' not found.", template_name);
                        }
                    } else {
                        println!("No templates registered.");
                    }
                }
                // SetUrl mutates the parsed table in place, then rewrites the file.
                TemplateCommands::SetUrl { template_name, template_url } => {
                    let templates_content = std::fs::read_to_string(&templates_config_path).unwrap();
                    let mut templates: toml::Value = toml::from_str(&templates_content).unwrap_or(toml::Value::Table(toml::map::Map::new()));
                    if let toml::Value::Table(ref mut table) = templates {
                        if let Some(details) = table.get_mut(template_name) {
                            if let toml::Value::Table(detail_table) = details {
                                detail_table.insert("url".to_string(), toml::Value::String(template_url.clone()));
                                let updated_content = toml::to_string(&templates).unwrap();
                                std::fs::write(&templates_config_path, updated_content).unwrap();
                                println!("Updated URL for template '{}'.", template_name);
                            } else {
                                println!("Template '{}' details are malformed.", template_name);
                            }
                        } else {
                            println!("Template '{}' not found.", template_name);
                        }
                    } else {
                        println!("No templates registered.");
                    }
                }
            }
        }
        Commands::Version{} => {
            println!("crustex version: {}", env!("CARGO_PKG_VERSION"));
        }
    }
}

100
src/utils/mod.rs Normal file
View File

@@ -0,0 +1,100 @@
use hex_literal::hex;
use sha2::{Sha256, Digest};
use std::path::PathBuf;
use std::collections::HashMap;
use std::fs::File;
use std::io::BufReader;
use ssh2_config::{ParseRule, SshConfig};
use crate::graph::TeXFile;
/// Compute the SHA-256 digest of the file at `path`.
///
/// Reads the file as raw bytes rather than UTF-8 text, so non-UTF-8 files
/// can be hashed too; for valid UTF-8 files the digest is unchanged from
/// the previous behavior. I/O errors are propagated to the caller.
pub fn hash_file(path: &PathBuf) -> Result<[u8; 32], Box<dyn std::error::Error>> {
    let content = std::fs::read(path)?;
    let mut hasher = Sha256::new();
    hasher.update(&content);
    let result = hasher.finalize();
    let hash_array: [u8; 32] = result.into();
    Ok(hash_array)
}
/// Check whether the file at `path` currently hashes to `expected_hash`.
///
/// # Errors
/// Propagates any error from reading/hashing the file.
pub fn verify_file_hash(path: &PathBuf, expected_hash: &[u8; 32]) -> Result<bool, Box<dyn std::error::Error>> {
    Ok(hash_file(path)? == *expected_hash)
}
/// Compare the file at `path` against the 32 raw SHA-256 bytes stored in
/// the lock file at `lock_path`.
///
/// Returns `Ok(true)` when the hashes match, `Ok(false)` when they differ.
///
/// # Errors
/// Fails when either file cannot be read or the lock file is not exactly
/// 32 bytes long.
pub fn verify_against_binlock(path: &PathBuf, lock_path: &PathBuf) -> Result<bool, Box<dyn std::error::Error>> {
    let stored = std::fs::read(lock_path)?;
    if stored.len() != 32 {
        return Err("Invalid lock file length".into());
    }
    let current = hash_file(path)?;
    Ok(stored == current)
}
/// Mirror every file of the dependency graph into `build_dir`, recreating
/// each file's parent-directory structure underneath it.
///
/// # Panics
/// Panics on any directory-creation or copy failure.
pub fn clone_dep_graph_structure(dep_graph: &HashMap<TeXFile, Vec<TeXFile>>, build_dir: &PathBuf) {
    // Copy a single source file into the build tree, creating its parent
    // directories first.
    fn mirror(build_dir: &PathBuf, source: &PathBuf) {
        let parent = source.parent().map(|p| p.to_path_buf()).unwrap_or_else(|| PathBuf::from("."));
        let target_dir = build_dir.join(&parent);
        if !target_dir.exists() {
            std::fs::create_dir_all(&target_dir).unwrap();
        }
        std::fs::copy(source, build_dir.join(source)).unwrap();
    }
    for (node, edges) in dep_graph {
        println!("Cloning file: {:?}", node.filename);
        mirror(build_dir, &node.filename);
        for sub_node in edges {
            mirror(build_dir, &sub_node.filename);
        }
    }
}
/// Look up the IdentityFile configured for `host` in ~/.ssh/config.
///
/// Returns `None` when the home directory cannot be determined, the config
/// file is missing or unreadable, parsing fails, or no identity file is
/// configured for the host.
pub fn get_ssh_key_for_host(host: &str) -> Option<PathBuf> {
    let home = dirs::home_dir()?;
    let config_path = home.join(".ssh").join("config");
    if !config_path.exists() {
        return None;
    }
    let mut reader = BufReader::new(File::open(config_path).ok()?);
    // Tolerate unknown fields so newer OpenSSH options don't break parsing.
    let config = SshConfig::default().parse(&mut reader, ParseRule::ALLOW_UNKNOWN_FIELDS).ok()?;
    let params = config.query(host);
    // First configured identity file wins.
    params.identity_file?.first().map(PathBuf::from)
}
/// Extract the host name from a git remote URL.
///
/// Supports `ssh://[user@]host[:port]/path`, `https://host[:port]/path`,
/// and scp-like `user@host:path` syntax. Returns `None` for anything else
/// (e.g. a plain local path).
///
/// BUG FIX: the ssh:// branch previously kept an explicit port, so
/// `ssh://git@host:2222/repo` yielded `"host:2222"`; the port is now
/// stripped, consistent with the https and scp-like branches.
pub fn get_host_from_url(url: &str) -> Option<String> {
    if let Some(rest) = url.strip_prefix("ssh://") {
        // authority = [user@]host[:port]
        let authority = rest.split('/').next().unwrap_or(rest);
        let host_part = authority.split('@').last().unwrap_or(authority);
        let host = host_part.split(':').next().unwrap_or(host_part);
        Some(host.to_string())
    } else if let Some(rest) = url.strip_prefix("https://") {
        let authority = rest.split('/').next().unwrap_or(rest);
        let host = authority.split(':').next().unwrap_or(authority);
        Some(host.to_string())
    } else if url.contains('@') {
        // scp-like syntax: user@host:path
        let host_part = url.split('@').nth(1)?;
        let host = host_part.split(':').next().unwrap_or(host_part);
        Some(host.to_string())
    } else {
        None
    }
}