separated terminal functionality into a dedicated file
JKomieter committed Jun 7, 2024
1 parent: b2bf942 · commit: f0eccc1
Showing 4 changed files with 58 additions and 54 deletions.
smartshreds/src/main.rs: 4 changes (2 additions, 2 deletions)
@@ -1,7 +1,7 @@
 mod shred;
 use std::path::PathBuf;
 
-use shred::{dir_search, error::SmartShredsError, hashing};
+use shred::{debug::display_duplicate_files, dir_search, error::SmartShredsError, hashing};
 use structopt::StructOpt;
 
 #[derive(Debug, StructOpt)]
@@ -24,7 +24,7 @@ fn main() -> Result<(), SmartShredsError> {
     let args: CommandLine = CommandLine::from_args();
     let directory_path = &args.directory_path;
     let dup_files = dir_search::search_files_with_similar_names_in_dir(directory_path)?;
-    let _ = dir_search::display_duplicate_files(&dup_files);
+    let _ = display_duplicate_files(&dup_files);
     let hashes = hashing::hash_duplicate_file(&dup_files)?;
     println!("{:?}", hashes);
     Ok(())
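A note on the call site: `display_duplicate_files` returns `Result<(), SmartShredsError>`, and the `let _ =` binding silently discards that result. Because `main` already returns the same error type, the call could just as well propagate failures with `?`; a one-line sketch of that alternative (an observation about the visible signatures, not something this commit changes):

    display_duplicate_files(&dup_files)?;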
smartshreds/src/shred/debug.rs: 54 changes (54 additions, 0 deletions)
@@ -0,0 +1,54 @@
+use std::io::{StdoutLock, Write};
+
+use termion::{color, input::MouseTerminal, style};
+
+use super::{dir_search::DuplicateFile, error::SmartShredsError};
+
+pub fn display_duplicate_files(files: &Vec<DuplicateFile>) -> Result<(), SmartShredsError> {
+    let stdout = MouseTerminal::from(std::io::stdout());
+    let mut stdout = stdout.lock();
+    if files.is_empty() {
+        print_no_duplicates(&mut stdout)?;
+    } else {
+        print_duplicates(&mut stdout, &files)?;
+    }
+    Ok(())
+}
+
+fn print_no_duplicates(stdout: &mut StdoutLock) -> Result<(), SmartShredsError> {
+    writeln!(
+        stdout,
+        "{}{}No duplicate files found in the directory.{}",
+        style::Bold,
+        color::Fg(color::Green),
+        style::Reset
+    )?;
+    Ok(())
+}
+
+fn print_duplicates(
+    stdout: &mut StdoutLock,
+    files: &Vec<DuplicateFile>,
+) -> Result<(), SmartShredsError> {
+    writeln!(
+        stdout,
+        "{}{}Duplicate files found in the directory:{}",
+        style::Bold,
+        color::Fg(color::Yellow),
+        style::Reset
+    )?;
+
+    for (index, file) in files.iter().enumerate() {
+        writeln!(
+            stdout,
+            "{}{}{} - {}{}",
+            style::Bold,
+            color::Fg(color::Red),
+            index + 1,
+            file,
+            style::Reset
+        )?;
+    }
+
+    Ok(())
+}
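`print_duplicates` formats each entry with `{}`, so this only compiles if `DuplicateFile` implements `std::fmt::Display`. That impl lives in `dir_search.rs` and is not shown in this diff; a minimal sketch of what it could look like, with hypothetical `file_name` and `paths` fields standing in for the real struct:

    use std::fmt;
    use std::path::PathBuf;

    // Hypothetical stand-in: the real DuplicateFile is defined in
    // dir_search.rs and its fields are not visible in this commit.
    pub struct DuplicateFile {
        pub file_name: String,
        pub paths: Vec<PathBuf>,
    }

    impl fmt::Display for DuplicateFile {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            // Print the shared file name, then every location it was found at.
            let locations = self
                .paths
                .iter()
                .map(|p| p.display().to_string())
                .collect::<Vec<_>>()
                .join(", ");
            write!(f, "{} ({})", self.file_name, locations)
        }
    }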
smartshreds/src/shred/dir_search.rs: 51 changes (0 additions, 51 deletions)
@@ -2,10 +2,7 @@ use regex::Regex;
 use std::collections::HashMap;
 use std::ffi::OsStr;
 use std::fs::read_dir;
-use std::io::{StdoutLock, Write};
 use std::path::PathBuf;
-use termion::input::MouseTerminal;
-use termion::{color, style};
 
 use super::error::SmartShredsError;
 
@@ -78,51 +75,3 @@ pub fn search_files_with_similar_names_in_dir(
     Ok(duplicate_files)
 }
-
-fn print_no_duplicates(stdout: &mut StdoutLock) -> Result<(), SmartShredsError> {
-    writeln!(
-        stdout,
-        "{}{}No duplicate files found in the directory.{}",
-        style::Bold,
-        color::Fg(color::Green),
-        style::Reset
-    )?;
-    Ok(())
-}
-
-fn print_duplicates(
-    stdout: &mut StdoutLock,
-    files: &Vec<DuplicateFile>,
-) -> Result<(), SmartShredsError> {
-    writeln!(
-        stdout,
-        "{}{}Duplicate files found in the directory:{}",
-        style::Bold,
-        color::Fg(color::Yellow),
-        style::Reset
-    )?;
-
-    for (index, file) in files.iter().enumerate() {
-        writeln!(
-            stdout,
-            "{}{}{} - {}{}",
-            style::Bold,
-            color::Fg(color::Red),
-            index + 1,
-            file,
-            style::Reset
-        )?;
-    }
-
-    Ok(())
-}
-
-pub fn display_duplicate_files(files: &Vec<DuplicateFile>) -> Result<(), SmartShredsError> {
-    let stdout = MouseTerminal::from(std::io::stdout());
-    let mut stdout = stdout.lock();
-    if files.is_empty() {
-        print_no_duplicates(&mut stdout)?;
-    } else {
-        print_duplicates(&mut stdout, &files)?;
-    }
-    Ok(())
-}
smartshreds/src/shred/mod.rs: 3 changes (2 additions, 1 deletion)
@@ -1,3 +1,4 @@
 pub mod error;
 pub mod dir_search;
-pub mod hashing;
\ No newline at end of file
+pub mod hashing;
+pub mod debug;
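Taken together, the commit leaves the source tree laid out as follows (module roles as they appear in the diffs above):

    smartshreds/src/
    ├── main.rs           // CLI entry point; now imports shred::debug::display_duplicate_files
    └── shred/
        ├── mod.rs        // declares error, dir_search, hashing, debug
        ├── error.rs
        ├── dir_search.rs // search logic only, after this commit
        ├── hashing.rs
        └── debug.rs      // new: terminal display helpers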
