Add progress bar #106

Merged: 1 commit, merged on Dec 2, 2020
167 changes: 87 additions & 80 deletions Cargo.lock

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion Cargo.toml
@@ -4,4 +4,6 @@ members = [
"czkawka_cli",
"czkawka_gui",
"czkawka_gui_orbtk",
]
]
#[profile.release]
#lto = true
16 changes: 8 additions & 8 deletions czkawka_cli/src/main.rs
@@ -52,7 +52,7 @@ fn main() {
df.set_delete_method(delete_method);
df.set_recursive_search(!not_recursive.not_recursive);

df.find_duplicates(None);
df.find_duplicates(None, None);

if let Some(file_name) = file_to_save.file_name() {
if !df.save_results_to_file(file_name) {
@@ -79,7 +79,7 @@ fn main() {
ef.set_excluded_items(path_list_to_str(excluded_items.excluded_items));
ef.set_delete_folder(delete_folders);

ef.find_empty_folders(None);
ef.find_empty_folders(None, None);

if let Some(file_name) = file_to_save.file_name() {
if !ef.save_results_to_file(file_name) {
@@ -114,7 +114,7 @@ fn main() {
bf.set_delete_method(big_file::DeleteMethod::Delete);
}

bf.find_big_files(None);
bf.find_big_files(None, None);

if let Some(file_name) = file_to_save.file_name() {
if !bf.save_results_to_file(file_name) {
@@ -148,7 +148,7 @@ fn main() {
ef.set_delete_method(empty_files::DeleteMethod::Delete);
}

ef.find_empty_files(None);
ef.find_empty_files(None, None);

if let Some(file_name) = file_to_save.file_name() {
if !ef.save_results_to_file(file_name) {
@@ -180,7 +180,7 @@ fn main() {
tf.set_delete_method(temporary::DeleteMethod::Delete);
}

tf.find_temporary_files(None);
tf.find_temporary_files(None, None);

if let Some(file_name) = file_to_save.file_name() {
if !tf.save_results_to_file(file_name) {
@@ -211,7 +211,7 @@ fn main() {
sf.set_recursive_search(!not_recursive.not_recursive);
sf.set_similarity(similarity);

sf.find_similar_images(None);
sf.find_similar_images(None, None);

if let Some(file_name) = file_to_save.file_name() {
if !sf.save_results_to_file(file_name) {
@@ -247,7 +247,7 @@ fn main() {
zf.set_delete_method(zeroed::DeleteMethod::Delete);
}

zf.find_zeroed_files(None);
zf.find_zeroed_files(None, None);

if let Some(file_name) = file_to_save.file_name() {
if !zf.save_results_to_file(file_name) {
@@ -283,7 +283,7 @@ fn main() {
// // TODO mf.set_delete_method(same_music::DeleteMethod::Delete);
// }

mf.find_same_music(None);
mf.find_same_music(None, None);

if let Some(file_name) = file_to_save.file_name() {
if !mf.save_results_to_file(file_name) {
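The second argument added to every find_* call above is an optional progress sender; the CLI keeps passing None, while a GUI caller can pass a channel sender to receive progress updates. A minimal sketch of such a call, assuming BigFile::new() as the constructor and an arbitrary channel capacity of 1000 (both illustrative, not taken from this PR):

use czkawka_core::big_file::{BigFile, ProgressData};

// Bounded futures channel; the search sends a ProgressData message through it
// roughly every 200 ms while scanning (see the big_file.rs diff below).
let (progress_sender, progress_receiver) = futures::channel::mpsc::channel::<ProgressData>(1000);

let mut bf = BigFile::new();
bf.find_big_files(None, Some(&progress_sender));

// progress_receiver can then be drained on the UI side to update a progress bar
// (see the sketch after the big_file.rs diff below).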
3 changes: 3 additions & 0 deletions czkawka_core/Cargo.toml
@@ -25,3 +25,6 @@ hamming = "0.1"
# Needed by same music
bitflags = "1.2.1"
audiotags = "0.2.7182"

# Futures - needed by async progress sender
futures = "0.3.8"
55 changes: 51 additions & 4 deletions czkawka_core/src/big_file.rs
@@ -8,11 +8,21 @@ use crossbeam_channel::Receiver;
use humansize::{file_size_opts as options, FileSize};
use std::collections::BTreeMap;
use std::ffi::OsStr;
use std::fs;
use std::fs::{File, Metadata};
use std::io::Write;
use std::path::PathBuf;
use std::sync::atomic::Ordering;
use std::sync::atomic::{AtomicBool, AtomicU64};
use std::sync::Arc;
use std::thread::sleep;
use std::time::Duration;
use std::time::{SystemTime, UNIX_EPOCH};
use std::{fs, thread};

#[derive(Debug)]
pub struct ProgressData {
pub files_checked: usize,
}

#[derive(Clone)]
pub struct FileEntry {
@@ -73,9 +83,9 @@ impl BigFile {
}
}

pub fn find_big_files(&mut self, stop_receiver: Option<&Receiver<()>>) {
pub fn find_big_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::Sender<ProgressData>>) {
self.optimize_directories();
if !self.look_for_big_files(stop_receiver) {
if !self.look_for_big_files(stop_receiver, progress_sender) {
self.stopped_search = true;
return;
}
@@ -111,7 +121,7 @@ impl BigFile {
self.allowed_extensions.set_allowed_extensions(allowed_extensions, &mut self.text_messages);
}

fn look_for_big_files(&mut self, stop_receiver: Option<&Receiver<()>>) -> bool {
fn look_for_big_files(&mut self, stop_receiver: Option<&Receiver<()>>, progress_sender: Option<&futures::channel::mpsc::Sender<ProgressData>>) -> bool {
let start_time: SystemTime = SystemTime::now();
let mut folders_to_check: Vec<PathBuf> = Vec::with_capacity(1024 * 2); // This should be small enough not to make too big a difference and big enough to store most paths without needing to resize the vector

@@ -121,10 +131,42 @@
}
self.information.number_of_checked_folders += folders_to_check.len();

//// PROGRESS THREAD START
const LOOP_DURATION: u32 = 200; //in ms
let progress_thread_run = Arc::new(AtomicBool::new(true));

let atomic_file_counter = Arc::new(AtomicU64::new(0));

let progress_thread_handle;
if let Some(progress_sender) = progress_sender {
let mut progress_send = progress_sender.clone();
let progress_thread_run = progress_thread_run.clone();
let atomic_file_counter = atomic_file_counter.clone();
progress_thread_handle = thread::spawn(move || loop {
progress_send
.try_send(ProgressData {
files_checked: atomic_file_counter.load(Ordering::Relaxed) as usize,
})
.unwrap();
if !progress_thread_run.load(Ordering::Relaxed) {
break;
}
sleep(Duration::from_millis(LOOP_DURATION as u64));
});
} else {
progress_thread_handle = thread::spawn(|| {});
}

//// PROGRESS THREAD END

while !folders_to_check.is_empty() {
if stop_receiver.is_some() && stop_receiver.unwrap().try_recv().is_ok() {
// Be sure that every thread is closed
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();
return false;
}

let current_folder = folders_to_check.pop().unwrap();
let read_dir = match fs::read_dir(&current_folder) {
Ok(t) => t,
@@ -162,6 +204,7 @@ impl BigFile {

folders_to_check.push(next_folder);
} else if metadata.is_file() {
atomic_file_counter.fetch_add(1, Ordering::Relaxed);
// Extracting file extension
let file_extension = entry_data.path().extension().and_then(OsStr::to_str).map(str::to_lowercase);

@@ -211,6 +254,10 @@ impl BigFile {
}
}

// End the thread which sends info to the GUI
progress_thread_run.store(false, Ordering::Relaxed);
progress_thread_handle.join().unwrap();

// Extract n biggest files to new TreeMap
let mut new_map: BTreeMap<u64, Vec<FileEntry>> = Default::default();

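The diff above covers only the sending side; the comment "End the thread which sends info to the GUI" implies the receiver is drained elsewhere, in the GUI. A minimal sketch of a non-blocking drain that keeps only the latest ProgressData, assuming it runs from a periodic UI callback; the helper name poll_progress is hypothetical:

use czkawka_core::big_file::ProgressData;
use futures::channel::mpsc::Receiver;

fn poll_progress(receiver: &mut Receiver<ProgressData>) -> Option<ProgressData> {
    let mut latest = None;
    // try_next() returns Err when no message is queued right now and
    // Ok(None) once the sending side has been dropped.
    while let Ok(Some(progress)) = receiver.try_next() {
        latest = Some(progress);
    }
    latest
}

// For example, from a 200 ms UI timer:
// if let Some(progress) = poll_progress(&mut progress_receiver) {
//     println!("Files checked so far: {}", progress.files_checked);
// }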