Skip to content

Commit

Permalink
Added database and scanner integration; first performance tests
Browse files Browse the repository at this point in the history
  • Loading branch information
okynos committed Dec 17, 2024
1 parent 749cde5 commit 8d3728f
Show file tree
Hide file tree
Showing 9 changed files with 177 additions and 80 deletions.
2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ itertools = "0.13.0"
hex = "0.4.3"
futures = "0.3.21"
sha3 = { version = "0.10.0", default-features = false }
sha2 = { version = "0.10.8" }
log = { version = "0.4.11", default-features = false }
gethostname = { version = "0.5.0", default-features = false }
uuid = { version = "1.0.0", default-features = false, features = ["v4"] }
Expand All @@ -25,6 +26,7 @@ time = { version = "0.3.17", default-features = false }
ctrlc = { version = "3.3.1", default-features = false, features = ["termination"] }
log-panics = { version = "2.1.0", features = ["with-backtrace"]}
rusqlite = { version = "0.32.1", features = ["bundled"]}
walkdir = "2.5.0"

[dependencies.regex]
version = "1.3"
Expand Down
3 changes: 2 additions & 1 deletion src/auditevent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ use serde_json::{json, to_string};
use reqwest::Client;
use std::collections::HashMap;
use std::path::PathBuf;
use sha3::{Digest, Sha3_512};

use crate::appconfig;
use crate::appconfig::*;
Expand Down Expand Up @@ -145,7 +146,7 @@ impl Event {
size: utils::get_file_size(path["name"].clone().as_str()),
checksum: hash::get_checksum(format!("{}/{}",
parent["name"].clone(), path["name"].clone()),
cfg.events_max_file_checksum),
cfg.events_max_file_checksum, Sha3_512::new()),
fpid: utils::get_pid(),
system: String::from(utils::get_os()),

Expand Down
9 changes: 5 additions & 4 deletions src/db.rs
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ pub struct DBFile {
pub timestamp: String,
pub hash: String,
pub path: String,
pub size: u32
pub size: u64
}

pub struct DB {
Expand Down Expand Up @@ -69,8 +69,8 @@ impl DB {
(),
);
match result {
Ok(_v) => println!("GOOD"),
Err(e) => println!("ERROR: {:?}", e)
Ok(_v) => info!("Database successfully created."),
Err(e) => error!("Error creating database, Error: '{}'", e)
}
self.close(connection);
}
Expand Down Expand Up @@ -104,7 +104,7 @@ impl DB {
path: row.get(3).unwrap(),
size: row.get(4).unwrap()
})
);
).unwrap();

self.close(connection);
data
Expand Down Expand Up @@ -137,6 +137,7 @@ impl DB {
impl fmt::Debug for DBFile {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result{
f.debug_tuple("")
.field(&self.id)
.field(&self.timestamp)
.field(&self.hash)
.field(&self.path)
Expand Down
107 changes: 83 additions & 24 deletions src/hash.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ const READ_CAPACITY: usize = 1024 * 1024 * 8; // Read file in chunks of 8MB

// To get file checksums
use hex::{encode, decode};
use sha3::{Sha3_512, Digest};
use sha3::Digest;
// To log the program process
use log::*;
// To manage hex to ascii conversion
Expand All @@ -16,44 +16,103 @@ use std::path::Path;
// To read file content
use std::io::{BufRead, BufReader};

// To calculate file content hash in sha512 format (SHA3 implementation)
pub fn get_checksum(filename: String, read_limit: usize) -> String {
let mut hasher = Sha3_512::new();
// To calculate file content hash (SHA3 implementation)
pub fn get_checksum<T: Digest>(filename: String, read_limit: usize, mut hasher: T) -> String {
//let mut hasher = Sha3_512::new();
let mut length = 1;
let mut iteration = 0;
let mut data_read = 0;
let limit: u64 = (read_limit * 1024 * 1024).try_into().unwrap();

if Path::new(&filename).is_file() {
debug!("Getting hash of file: {}", filename);
match File::open(filename.clone()){
Ok(file) => {
let size = file.metadata().unwrap().len();
let mut reader = BufReader::with_capacity(READ_CAPACITY, file);

while length > 0 && data_read <= read_limit {
if iteration == 2 {
debug!("Big file detected, the hash will take a while");
}

length = {
match reader.fill_buf(){
Ok(buffer) =>{
hasher.update(buffer);
buffer.len()
},
Err(e) => {
debug!("Cannot read file. Checksum set to 'UNKNOWN', error: {}", e);
0
}

if size > limit {
info!("File '{}' checksum skipped. File size is above limit.", filename);
String::from("UNKNOWN")
}else{
while length > 0 && data_read <= read_limit {
if iteration == 2 {
debug!("Big file detected, the hash will take a while");
}

length = {
match reader.fill_buf(){
Ok(buffer) =>{
hasher.update(buffer);
buffer.len()
},
Err(e) => {
debug!("Cannot read file. Checksum set to 'UNKNOWN', error: {}", e);
0
}
}
};
reader.consume(length);
data_read += length / (1024 * 1024);
iteration += 1;
};
reader.consume(length);
data_read += length / (1024 * 1024);
iteration += 1;
};
if data_read > read_limit {
encode(hasher.finalize())
}
},
Err(e) => {
debug!("Cannot open file to get checksum, error: {:?}", e);
String::from("UNKNOWN")
}
}
}else{
debug!("Cannot produce checksum of a removed file or directory.");
String::from("UNKNOWN")
}
}

// ----------------------------------------------------------------------------

pub fn get_checksumv2<T: Digest>(filename: String, read_limit: usize, mut hasher: T) -> String {
//let mut hasher = Sha3_512::new();
let mut length = 1;
let mut iteration = 0;
let mut data_read = 0;
let limit: usize = read_limit * 1024 * 1024;

if Path::new(&filename).is_file() {
debug!("Getting hash of file: {}", filename);
match File::open(filename.clone()){
Ok(file) => {
let size: usize = file.metadata().unwrap().len() as usize;
let mut reader = BufReader::with_capacity(READ_CAPACITY, file);


if size > limit {
info!("File '{}' checksum skipped. File size is above limit.", filename);
String::from("UNKNOWN")
}else{
while length > 0 && data_read <= limit {
if iteration == 2 {
debug!("Big file detected, the hash will take a while");
}

length = {
match reader.fill_buf(){
Ok(buffer) =>{
hasher.update(buffer);
buffer.len()
},
Err(e) => {
debug!("Cannot read file. Checksum set to 'UNKNOWN', error: {}", e);
0
}
}
};
reader.consume(length);
data_read += length;
iteration += 1;
};
encode(hasher.finalize())
}
},
Expand Down
74 changes: 40 additions & 34 deletions src/init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,41 +3,47 @@
use crate::ruleset::Ruleset;
use crate::appconfig::*;
use crate::utils;
use crate::db;


pub fn init() -> (AppConfig, Ruleset) {
use std::path::Path;
use simplelog::WriteLogger;
use simplelog::Config;
use std::fs;

println!("[INFO] Achiefs File Integrity Monitoring software starting!");
println!("[INFO] Reading config...");
let cfg = AppConfig::new(utils::get_os(), None);

// Create folders to store logs based on config.yml
fs::create_dir_all(
Path::new( &cfg.clone().log_file
).parent().unwrap().to_str().unwrap()
).unwrap();

// Create logger output to write generated logs.
WriteLogger::init(
cfg.clone().get_level_filter(),
Config::default(),
fs::OpenOptions::new()
.create(true)
.append(true)
.open(cfg.clone().log_file)
.expect("Unable to open log file")
).unwrap();

println!("[INFO] Configuration successfully read, forwarding output to log file.");
println!("[INFO] Log file: '{}'", cfg.clone().log_file);
println!("[INFO] Log level: '{}'", cfg.clone().log_level);

let ruleset = Ruleset::new(utils::get_os(), None);

log_panics::init();
(cfg, ruleset)
use std::path::Path;
use simplelog::WriteLogger;
use simplelog::Config;
use std::fs;

println!("[INFO] Achiefs File Integrity Monitoring software starting!");
println!("[INFO] Reading config...");
let cfg = AppConfig::new(utils::get_os(), None);

// Create folders to store logs based on config.yml
fs::create_dir_all(
Path::new( &cfg.clone().log_file
).parent().unwrap().to_str().unwrap()
).unwrap();

// Create logger output to write generated logs.
WriteLogger::init(
cfg.clone().get_level_filter(),
Config::default(),
fs::OpenOptions::new()
.create(true)
.append(true)
.open(cfg.clone().log_file)
.expect("Unable to open log file")
).unwrap();

println!("[INFO] Configuration successfully read, forwarding output to log file.");
println!("[INFO] Log file: '{}'", cfg.clone().log_file);
println!("[INFO] Log level: '{}'", cfg.clone().log_level);

let ruleset = Ruleset::new(utils::get_os(), None);

let db = db::DB::new();
db.create_table();
println!("[INFO] Database created.");

println!("[INFO] Any error from this point will be logged in the log file.");
log_panics::init();
(cfg, ruleset)
}
13 changes: 1 addition & 12 deletions src/main.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@ use std::sync::mpsc;
use std::thread;
use log::{error, info};
use crate::init::init;
use crate::db::DBFile;

// Utils functions
mod utils;
Expand Down Expand Up @@ -38,6 +37,7 @@ mod multiwatcher;
mod rotator;
mod init;
mod db;
mod scanner;

// ----------------------------------------------------------------------------

Expand Down Expand Up @@ -77,17 +77,6 @@ async fn main() -> windows_service::Result<()> {
Ok(_v) => info!("FIM rotator thread started."),
Err(e) => error!("Could not start FIM rotator thread, error: {}", e)
};
let db = db::DB::new();
db.create_table();
db.insert_file(DBFile {
id: 0, // Not used for insert auto-increment
timestamp: String::from("Test"),
hash: String::from("Test"),
path: String::from("/test3"),
size: 0
});
db.get_file(String::from("/test3"));
db.print();
monitor::monitor(tx, rx, cfg, ruleset).await;
Ok(())
},
Expand Down
15 changes: 10 additions & 5 deletions src/monitor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,13 +9,12 @@ use std::sync::mpsc;
use log::{info, error, debug, warn};
// To manage paths
use std::path::Path;
// To manage date and time
use std::time::{SystemTime, UNIX_EPOCH};
use time::OffsetDateTime;
// To use intersperse()
use itertools::Itertools;
// Event handling
use notify::event::{EventKind, AccessKind};
use sha3::{Digest, Sha3_512};


// Utils functions
Expand All @@ -36,6 +35,7 @@ use crate::logreader;
// integrations checker
use crate::launcher;
use crate::multiwatcher::MultiWatcher;
use crate::scanner;

// ----------------------------------------------------------------------------

Expand Down Expand Up @@ -130,7 +130,12 @@ pub async fn monitor(
}

match watcher.watch(Path::new(path), RecursiveMode::Recursive) {
Ok(_d) => debug!("Monitoring '{}' path.", path),
Ok(_d) => {
debug!("Monitoring '{}' path.", path);
debug!("Starting file scan to create hash database.");
scanner::scan_path(cfg.clone(), String::from(path));
debug!("Path '{}' scanned all files are hashed in DB.", path);
},
Err(e) => warn!("Could not monitor given path '{}', description: {}", path, e)
};
}
Expand Down Expand Up @@ -206,7 +211,7 @@ pub async fn monitor(

let current_date = OffsetDateTime::now_utc();
let index_name = format!("fim-{}-{}-{}", current_date.year(), current_date.month() as u8, current_date.day() );
let current_timestamp = format!("{:?}", SystemTime::now().duration_since(UNIX_EPOCH).expect("Time went backwards").as_millis());
let current_timestamp = utils::get_current_time_millis();
let kind: notify::EventKind = event.kind;
let path = event.paths[0].clone();

Expand Down Expand Up @@ -284,7 +289,7 @@ pub async fn monitor(
labels,
operation: event::get_operation(kind),
detailed_operation: event::get_detailed_operation(kind),
checksum: hash::get_checksum( String::from(path.to_str().unwrap()), cfg.clone().events_max_file_checksum ),
checksum: hash::get_checksum( String::from(path.to_str().unwrap()), cfg.clone().events_max_file_checksum, Sha3_512::new()),
fpid: utils::get_pid(),
system: cfg.clone().system
};
Expand Down
Loading

0 comments on commit 8d3728f

Please sign in to comment.