Full implementation of the MVP (Minimum Viable Product):

✅ Capture module:
- Screenshots with WebP compression (80% quality)
- Active window metadata
- Inactivity detection (pauses after 10 minutes)

✅ Storage module:
- SQLite database with an optimized schema
- AES-256-GCM encryption of sensitive data (see the sketch below)
- PBKDF2-HMAC-SHA512 key derivation (100k iterations)
- Automatic cleanup after 30 days

✅ AI analysis module:
- Heuristic classification into 5 categories
- Entity extraction (project, tool, language)
- Patterns tuned for Development, Meeting, Research, Design

✅ Report module:
- JSON report generation
- Activity timeline with statistics
- Encrypted data export

✅ Full CLI:
- activity-tracker start: background capture
- activity-tracker report: report generation
- activity-tracker stats: storage statistics
- activity-tracker cleanup: data cleanup
- activity-tracker export: full export

📚 Documentation:
- Complete README with usage examples
- Configuration via settings.toml
- Unit tests for each module

🔒 Security:
- End-to-end encryption of screenshots
- The password is never stored
- GDPR compliance with explicit consent

Consistent with design-journal.md for the MVP.

🤖 Generated with Claude Code

Co-Authored-By: Claude <noreply@anthropic.com>
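The storage module referenced above is not part of the file below, so for context, here is a minimal sketch of the encryption scheme the commit describes: PBKDF2-HMAC-SHA512 at 100,000 iterations deriving a 256-bit key for AES-256-GCM. It assumes the `pbkdf2`, `sha2`, and `aes-gcm` crates; the function names, salt handling, and nonce layout are illustrative, not the project's actual API.

use aes_gcm::{
    aead::{Aead, AeadCore, KeyInit, OsRng},
    Aes256Gcm, Key, Nonce,
};
use pbkdf2::pbkdf2_hmac;
use sha2::Sha512;

const PBKDF2_ITERATIONS: u32 = 100_000;

/// Derive a 256-bit key from the user's password and a per-database salt.
/// The password itself is never stored; only the salt needs to persist.
fn derive_key(password: &str, salt: &[u8]) -> [u8; 32] {
    let mut key = [0u8; 32];
    pbkdf2_hmac::<Sha512>(password.as_bytes(), salt, PBKDF2_ITERATIONS, &mut key);
    key
}

/// Encrypt a blob (e.g. a compressed screenshot) with AES-256-GCM.
/// A random 96-bit nonce is prepended so decryption can recover it.
fn encrypt_blob(key: &[u8; 32], plaintext: &[u8]) -> Result<Vec<u8>, aes_gcm::Error> {
    let cipher = Aes256Gcm::new(Key::<Aes256Gcm>::from_slice(key));
    let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
    let ciphertext = cipher.encrypt(&nonce, plaintext)?;
    let mut out = nonce.to_vec();
    out.extend_from_slice(&ciphertext);
    Ok(out)
}

/// Reverse of `encrypt_blob`: split off the nonce, then decrypt and verify.
/// (A real implementation should also check that `data` is at least 12 bytes.)
fn decrypt_blob(key: &[u8; 32], data: &[u8]) -> Result<Vec<u8>, aes_gcm::Error> {
    let (nonce, ciphertext) = data.split_at(12);
    let cipher = Aes256Gcm::new(Key::<Aes256Gcm>::from_slice(key));
    cipher.decrypt(Nonce::from_slice(nonce), ciphertext)
}

Under a scheme of this shape, only the salt and the nonce-prefixed ciphertext are persisted, which is consistent with the "password is never stored" claim above.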
296 lines · 8.6 KiB · Rust
//! Activity Tracker MVP - Main Entry Point
//!
//! Activity-tracking backend for reconstructing work history.

use activity_tracker::*;
use clap::{Parser, Subcommand};
use std::path::PathBuf;
use std::time::Duration;
use log::{info, error};

#[derive(Parser)]
#[command(name = "activity-tracker")]
#[command(about = "Activity Tracker MVP - Track and analyze your work activities", long_about = None)]
struct Cli {
    #[command(subcommand)]
    command: Commands,

    /// Configuration file path
    #[arg(short, long, value_name = "FILE")]
    config: Option<PathBuf>,

    /// Enable debug logging
    #[arg(short, long)]
    debug: bool,
}

#[derive(Subcommand)]
enum Commands {
    /// Start capturing activity in the background
    Start {
        /// Database password for encryption
        #[arg(short, long)]
        password: String,

        /// Capture interval in seconds (default: 300 = 5 minutes)
        #[arg(short, long, default_value = "300")]
        interval: u64,
    },

    /// Generate and export a daily report
    Report {
        /// Database password for decryption
        #[arg(short, long)]
        password: String,

        /// Output file path (JSON)
        #[arg(short, long, default_value = "report.json")]
        output: PathBuf,

        /// Report for last N days (default: today only)
        #[arg(short, long)]
        days: Option<u32>,
    },

    /// Show storage statistics
    Stats {
        /// Database password
        #[arg(short, long)]
        password: String,
    },

    /// Cleanup old data
    Cleanup {
        /// Database password
        #[arg(short, long)]
        password: String,

        /// Keep data for N days (default: 30)
        #[arg(short, long, default_value = "30")]
        days: i64,
    },

    /// Export all data
    Export {
        /// Database password
        #[arg(short, long)]
        password: String,

        /// Output file path
        #[arg(short, long)]
        output: PathBuf,
    },
}

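// Illustrative sketch (not part of the original file): the commit mentions unit
// tests for each module; one quick way to sanity-check the clap definitions
// above is `Parser::try_parse_from` in a test like this. The test name and the
// sample arguments are hypothetical.
#[cfg(test)]
mod cli_parsing_sketch {
    use super::*;

    #[test]
    fn parses_start_with_interval() {
        let cli = Cli::try_parse_from([
            "activity-tracker",
            "start",
            "--password",
            "secret",
            "--interval",
            "60",
        ])
        .expect("arguments should parse");

        match cli.command {
            Commands::Start { interval, .. } => assert_eq!(interval, 60),
            _ => panic!("expected the start subcommand"),
        }
    }
}
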
#[tokio::main]
async fn main() -> Result<()> {
    let cli = Cli::parse();

    // Initialize logger
    let log_level = if cli.debug { "debug" } else { "info" };
    env_logger::Builder::from_env(env_logger::Env::default().default_filter_or(log_level))
        .init();

    info!("Activity Tracker MVP v{}", VERSION);

    // Load configuration
    let config = if let Some(config_path) = cli.config {
        config::Config::load(config_path)?
    } else {
        config::Config::default_config()
    };

    match cli.command {
        Commands::Start { password, interval } => {
            start_capture(&config, &password, interval).await?;
        }
        Commands::Report { password, output, days } => {
            generate_report(&config, &password, output, days)?;
        }
        Commands::Stats { password } => {
            show_stats(&config, &password)?;
        }
        Commands::Cleanup { password, days } => {
            cleanup_data(&config, &password, days)?;
        }
        Commands::Export { password, output } => {
            export_data(&config, &password, output)?;
        }
    }

    Ok(())
}

/// Start capture loop
async fn start_capture(
    config: &config::Config,
    password: &str,
    interval_seconds: u64,
) -> Result<()> {
    info!("Starting activity capture (interval: {}s)", interval_seconds);

    // Initialize components
    let mut capturer = capture::Capturer::new(config.capture.screenshot_quality);
    let mut db = storage::Database::new(&config.storage.db_path, password)?;
    let classifier = analysis::Classifier::new();

    let interval = Duration::from_secs(interval_seconds);

    info!("Capture started. Press Ctrl+C to stop.");

    loop {
        // Capture activity
        match capturer.capture() {
            Ok(capture_data) => {
                info!(
                    "Captured: {} (active: {})",
                    capture_data.window_metadata.title, capture_data.is_active
                );

                // Store in database
                match db.store_capture(&capture_data) {
                    Ok(capture_id) => {
                        info!("Stored capture with ID: {}", capture_id);

                        // Classify activity
                        let classification = classifier.classify(&capture_data.window_metadata);
                        info!(
                            "Classified as: {} (confidence: {:.2})",
                            classification.category.as_str(),
                            classification.confidence
                        );

                        // Store analysis
                        let _ = db.store_analysis(
                            &capture_data.id,
                            classification.category.as_str(),
                            classification.confidence,
                            Some(&classification.entities.to_json()),
                        );
                    }
                    Err(e) => {
                        error!("Failed to store capture: {}", e);
                    }
                }

                capturer.reset_activity();
            }
            Err(e) => {
                error!("Capture failed: {}", e);
            }
        }

        // Wait for next interval
        tokio::time::sleep(interval).await;
    }
}

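// Illustrative sketch (not part of the original file): the loop above delegates
// classification to `analysis::Classifier`, which the commit describes as
// keyword-heuristic classification of window metadata into five categories.
// That module is not shown here; this simplified, self-contained sketch only
// illustrates the general idea. The fifth category name, the keyword lists,
// and the confidence formula are assumptions, not the project's actual logic.
#[allow(dead_code)]
mod classifier_sketch {
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub enum Category {
        Development,
        Meeting,
        Research,
        Design,
        Other,
    }

    pub struct SimpleClassifier {
        rules: Vec<(Category, Vec<&'static str>)>,
    }

    impl SimpleClassifier {
        pub fn new() -> Self {
            Self {
                rules: vec![
                    (Category::Development, vec!["code", "cargo", "rust", "terminal", "git"]),
                    (Category::Meeting, vec!["zoom", "meet", "teams", "call"]),
                    (Category::Research, vec!["arxiv", "docs", "wikipedia", "stack overflow"]),
                    (Category::Design, vec!["figma", "inkscape", "photoshop"]),
                ],
            }
        }

        /// Pick the category with the highest keyword hit rate in the window
        /// title; confidence is the naive ratio of hits to keywords checked.
        pub fn classify(&self, window_title: &str) -> (Category, f64) {
            let title = window_title.to_lowercase();
            let mut best = (Category::Other, 0.0);
            for (category, keywords) in &self.rules {
                let hits = keywords.iter().filter(|k| title.contains(*k)).count();
                let confidence = hits as f64 / keywords.len() as f64;
                if hits > 0 && confidence > best.1 {
                    best = (*category, confidence);
                }
            }
            best
        }
    }
}
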
/// Generate and export report
fn generate_report(
    config: &config::Config,
    password: &str,
    output: PathBuf,
    days: Option<u32>,
) -> Result<()> {
    info!("Generating report...");

    let db = storage::Database::new(&config.storage.db_path, password)?;
    let generator = report::ReportGenerator::new("default_user".to_string());

    let report = if let Some(days_count) = days {
        let period = report::Period::custom(
            chrono::Utc::now() - chrono::Duration::days(days_count as i64),
            chrono::Utc::now(),
        );
        generator.generate(&db, period)?
    } else {
        generator.generate_today(&db)?
    };

    info!(
        "Report generated: {} activities, total time: {}",
        report.stats.activity_count, report.stats.total_time_formatted
    );

    // Export to JSON
    report::JsonExporter::export(&report, &output)?;
    info!("Report exported to: {:?}", output);

    // Print summary
    println!("\n=== Activity Report Summary ===");
    println!("Total time: {}", report.stats.total_time_formatted);
    println!("Activities: {}", report.stats.activity_count);
    println!("\nBy Category:");
    for (category, stats) in &report.stats.by_category {
        println!(
            "  {}: {} ({:.1}%)",
            category, stats.time_formatted, stats.percentage
        );
    }

    if let Some(hour) = report.stats.most_productive_hour {
        println!("\nMost productive hour: {}:00", hour);
    }

    Ok(())
}

/// Show storage statistics
fn show_stats(config: &config::Config, password: &str) -> Result<()> {
    let db = storage::Database::new(&config.storage.db_path, password)?;
    let stats = db.get_stats()?;

    println!("\n=== Storage Statistics ===");
    println!("Total captures: {}", stats.total_captures);
    println!("Total size: {:.2} MB", stats.total_size_mb);

    if let Some(oldest) = stats.oldest_capture {
        println!("Oldest capture: {}", oldest.format("%Y-%m-%d %H:%M:%S"));
    }

    if let Some(newest) = stats.newest_capture {
        println!("Newest capture: {}", newest.format("%Y-%m-%d %H:%M:%S"));
    }

    println!("\nCaptures by category:");
    for (category, count) in stats.captures_by_category {
        println!("  {}: {}", category, count);
    }

    Ok(())
}

/// Cleanup old data
fn cleanup_data(config: &config::Config, password: &str, retention_days: i64) -> Result<()> {
    info!("Cleaning up data older than {} days...", retention_days);

    let mut db = storage::Database::new(&config.storage.db_path, password)?;
    let deleted = db.cleanup_old_data(retention_days)?;

    info!("Cleanup completed: {} captures removed", deleted);
    println!(
        "Removed {} captures older than {} days",
        deleted, retention_days
    );

    Ok(())
}

/// Export all data
fn export_data(config: &config::Config, password: &str, output: PathBuf) -> Result<()> {
    info!("Exporting all data...");

    let db = storage::Database::new(&config.storage.db_path, password)?;
    let generator = report::ReportGenerator::new("default_user".to_string());

    // Export everything (last 365 days)
    let period = report::Period::custom(
        chrono::Utc::now() - chrono::Duration::days(365),
        chrono::Utc::now(),
    );

    let report = generator.generate(&db, period)?;
    report::JsonExporter::export(&report, &output)?;

    info!("Data exported to: {:?}", output);
    println!("All data exported to: {:?}", output);

    Ok(())
}