From 0d128cb8aa3822b2b3b7cdae5781f15da688d80b Mon Sep 17 00:00:00 2001 From: Drew Short Date: Mon, 4 Jan 2016 13:35:57 -0600 Subject: [PATCH] Style changes Changed the repo to ignore the rs.bk files produced by rustfmt Ran RustFmt on the repository to cleanup the code --- .gitignore | 3 + src/cache.rs | 74 +++++----- src/hash.rs | 203 ++++++++++++++------------- src/lib.rs | 387 ++++++++++++++++++++++++++++----------------------- src/main.rs | 33 +++-- 5 files changed, 384 insertions(+), 316 deletions(-) diff --git a/.gitignore b/.gitignore index 9369b2a..fe4d3be 100644 --- a/.gitignore +++ b/.gitignore @@ -22,3 +22,6 @@ Cargo.lock # Ignore sublime workspace files *.sublime-workspace + +#Rustfmt backup files +*.rs.bk diff --git a/src/cache.rs b/src/cache.rs index 7808d5c..43c4133 100644 --- a/src/cache.rs +++ b/src/cache.rs @@ -17,7 +17,7 @@ use std::result::Result; const CACHE_DIR: &'static str = "./.hash_cache"; const CACHE_FILE_EXT: &'static str = "png"; -// Creates the required directories +// Creates the required directories pub fn prep_cache() -> Result<(), Error> { create_dir_all(CACHE_DIR) } @@ -42,78 +42,92 @@ fn get_file_hash(path: &Path) -> Result { /** * Put an image buffer in the cache */ -pub fn put_image_in_cache(path: &Path, size: u32, image: &ImageBuffer, Vec>) { +pub fn put_image_in_cache(path: &Path, size: u32, image: &ImageBuffer, Vec>) { let hash = get_file_hash(&path); match hash { Ok(sha1) => { - let cache_path_str = format!("{}/{}x{}_{}.{}",CACHE_DIR, size, size, sha1, CACHE_FILE_EXT); + let cache_path_str = format!("{}/{}x{}_{}.{}", + CACHE_DIR, + size, + size, + sha1, + CACHE_FILE_EXT); let cached_path = Path::new(&cache_path_str); // Save the file into the cache match image.save(cached_path) { - Ok(_) => {}, + Ok(_) => {} Err(e) => println!("Error: {}", e), } - }, - Err(e) => println!("Error: {}", e), + } + Err(e) => println!("Error: {}", e), } } /** * Expects a slice of slices that represents lines in the file */ -pub fn put_matrix_in_cache(path: &Path, size: u32, extension: &str, file_contents: &Vec>) { +pub fn put_matrix_in_cache(path: &Path, + size: u32, + extension: &str, + file_contents: &Vec>) { let hash = get_file_hash(&path); match hash { Ok(sha1) => { - let cache_path_str = format!("{}/{}x{}_{}.{}",CACHE_DIR, size, size, sha1, extension); + let cache_path_str = format!("{}/{}x{}_{}.{}", CACHE_DIR, size, size, sha1, extension); let cached_path = Path::new(&cache_path_str); // Save the file into the cache match File::create(&cached_path) { Ok(mut file) => { for row in file_contents { - let mut row_str = row.iter().fold(String::new(), |acc, &item| acc + &format!("{},",item)); - //remove the last comma - let desire_len = row_str.len()-1; + let mut row_str = row.iter().fold(String::new(), + |acc, &item| acc + &format!("{},", item)); + // remove the last comma + let desire_len = row_str.len() - 1; row_str.truncate(desire_len); row_str.push_str("\n"); file.write(&row_str.into_bytes()); } file.flush(); - }, - Err(_) => {}, + } + Err(_) => {} } - }, - Err(e) => println!("Error: {}", e), + } + Err(e) => println!("Error: {}", e), } } /** * Get an image buffer out of the cache */ -pub fn get_image_from_cache(path: &Path, size: u32) -> Option, Vec>> { +pub fn get_image_from_cache(path: &Path, + size: u32) + -> Option, Vec>> { let hash = get_file_hash(&path); match hash { Ok(sha1) => { // Check if the file exists in the cache - let cache_path_str = format!("{}/{}x{}_{}.{}",CACHE_DIR, size, size, sha1, CACHE_FILE_EXT); + let cache_path_str = format!("{}/{}x{}_{}.{}", 
+ CACHE_DIR, + size, + size, + sha1, + CACHE_FILE_EXT); let cached_path = Path::new(&cache_path_str); // Try to open, if it does, then we can read the image in match File::open(&cached_path) { Ok(_) => { let image = image::open(&cached_path).unwrap(); Some(image.to_luma()) - }, + } // Don't really care here, it just means an existing cached // file doesn't exist, or can't be read. - Err(_) => { - None - }, + Err(_) => None, } - }, + } Err(e) => { println!("Error: {}", e); None - }, + } } } @@ -125,7 +139,7 @@ pub fn get_matrix_from_cache(path: &Path, size: u32, extension: &str) -> Option< match hash { Ok(sha1) => { // Check if the file exists in the cache - let cache_path_str = format!("{}/{}x{}_{}.{}",CACHE_DIR, size, size, sha1, extension); + let cache_path_str = format!("{}/{}x{}_{}.{}", CACHE_DIR, size, size, sha1, extension); let cached_path = Path::new(&cache_path_str); // Try to open, if it does, then we can read the image in match File::open(&cached_path) { @@ -134,20 +148,18 @@ pub fn get_matrix_from_cache(path: &Path, size: u32, extension: &str) -> Option< let mut matrix_data: Vec = Vec::new(); file.read_to_end(&mut matrix_data); let matrix_data_str = String::from_utf8(matrix_data); - //convert the matrix + // convert the matrix Some(matrix) - }, + } // Don't really care here, it just means an existing cached // file doesn't exist, or can't be read. - Err(_) => { - None - }, + Err(_) => None, } - }, + } Err(e) => { println!("Error: {}", e); None - }, + } } } diff --git a/src/hash.rs b/src/hash.rs index 7ab739c..a69d58f 100644 --- a/src/hash.rs +++ b/src/hash.rs @@ -10,11 +10,7 @@ extern crate complex; use std::path::Path; use std::f64; -use self::image::{ - GenericImage, - Pixel, - FilterType -}; +use self::image::{GenericImage, Pixel, FilterType}; use self::dft::Transform; use cache; @@ -40,7 +36,7 @@ const FLOAT_PRECISION_MIN_5: f64 = f64::MIN / 100000_f64; */ pub struct PreparedImage<'a> { orig_path: &'a str, - image: image::ImageBuffer,Vec> + image: image::ImageBuffer, Vec>, } /** @@ -50,7 +46,7 @@ pub struct PerceptualHashes<'a> { pub orig_path: &'a str, pub ahash: u64, pub dhash: u64, - pub phash: u64 + pub phash: u64, } /** @@ -61,14 +57,13 @@ pub struct PerceptualHashes<'a> { * High aims for 128 bit precision */ pub enum Precision { - Low, + Low, Medium, High, } -/* - * Get the size of the required image - */ +// Get the size of the required image +// impl Precision { fn get_size(&self) -> u32 { match *self { @@ -85,7 +80,7 @@ impl Precision { pub enum HashType { Ahash, Dhash, - Phash + Phash, } /** @@ -102,25 +97,34 @@ pub enum HashType { * A PreparedImage struct with the required information for performing hashing * */ -pub fn prepare_image<'a>(path: &'a Path, hash_type: &HashType, precision: &Precision) -> PreparedImage<'a> { +pub fn prepare_image<'a>(path: &'a Path, + hash_type: &HashType, + precision: &Precision) + -> PreparedImage<'a> { let image_path = path.to_str().unwrap(); let size: u32 = match *hash_type { HashType::Phash => precision.get_size() * 4, - _ => precision.get_size() + _ => precision.get_size(), }; // Check if we have the already converted image in a cache and use that if possible. match cache::get_image_from_cache(&path, size) { Some(image) => { - PreparedImage { orig_path: &*image_path, image: image } - }, + PreparedImage { + orig_path: &*image_path, + image: image, + } + } None => { // Otherwise let's do that work now and store it. 
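            // [Annotation, not part of this commit] For reference, cache entries
            // written by cache::put_image_in_cache below use keys of the form
            //     ./.hash_cache/{size}x{size}_{sha1 of source file}.png
            // (CACHE_DIR / CACHE_FILE_EXT in src/cache.rs), and the pHash DFT matrix
            // is stored under the same stem with a "dft" extension. `size` is
            // precision.get_size(), multiplied by 4 for HashType::Phash (see above).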
let image = image::open(path).unwrap(); let small_image = image.resize_exact(size, size, FilterType::Lanczos3); let grey_image = small_image.to_luma(); cache::put_image_in_cache(&path, size, &grey_image); - PreparedImage { orig_path: &*image_path, image: grey_image } - }, + PreparedImage { + orig_path: &*image_path, + image: grey_image, + } + } } } @@ -132,7 +136,12 @@ pub fn get_perceptual_hashes<'a>(path: &'a Path, precision: &Precision) -> Perce let ahash = AHash::new(&path, &precision).get_hash(); let dhash = DHash::new(&path, &precision).get_hash(); let phash = PHash::new(&path, &precision).get_hash(); - PerceptualHashes { orig_path: &*image_path, ahash: ahash, dhash: dhash, phash: phash } + PerceptualHashes { + orig_path: &*image_path, + ahash: ahash, + dhash: dhash, + phash: phash, + } } /** @@ -147,8 +156,8 @@ pub fn calculate_hamming_distance(hash1: u64, hash2: u64) -> u64 { let mut hamming = 0u64; for bit in bin_diff_str.chars() { match bit { - '1' => hamming+=1, - _ => continue + '1' => hamming += 1, + _ => continue, } } hamming @@ -170,12 +179,12 @@ impl<'a> AHash<'a> { impl<'a> PerceptualHash for AHash<'a> { /** - * Calculate the ahash of the provided prepared image. - * - * # Returns - * - * A u64 representing the value of the hash - */ + * Calculate the ahash of the provided prepared image. + * + * # Returns + * + * A u64 representing the value of the hash + */ fn get_hash(&self) -> u64 { let (width, height) = self.prepared_image.image.dimensions(); @@ -183,11 +192,11 @@ impl<'a> PerceptualHash for AHash<'a> { let mut total = 0u64; for pixel in self.prepared_image.image.pixels() { let channels = pixel.channels(); - //println!("Pixel is: {}", channels[0]); + // println!("Pixel is: {}", channels[0]); total += channels[0] as u64; } - let mean = total / (width*height) as u64; - //println!("Mean for {} is {}", prepared_image.orig_path, mean); + let mean = total / (width * height) as u64; + // println!("Mean for {} is {}", prepared_image.orig_path, mean); // Calculating a hash based on the mean let mut hash = 0u64; @@ -196,14 +205,14 @@ impl<'a> PerceptualHash for AHash<'a> { let pixel_sum = channels[0] as u64; if pixel_sum >= mean { hash |= 1; - //println!("Pixel {} is >= {} therefore {:b}", pixel_sum, mean, hash); + // println!("Pixel {} is >= {} therefore {:b}", pixel_sum, mean, hash); } else { hash |= 0; - //println!("Pixel {} is < {} therefore {:b}", pixel_sum, mean, hash); + // println!("Pixel {} is < {} therefore {:b}", pixel_sum, mean, hash); } hash <<= 1; } - //println!("Hash for {} is {}", prepared_image.orig_path, hash); + // println!("Hash for {} is {}", prepared_image.orig_path, hash); hash } @@ -255,7 +264,7 @@ impl<'a> PerceptualHash for DHash<'a> { hash |= 1; } else { hash |= 0; - } + } hash } @@ -281,9 +290,9 @@ impl<'a> PerceptualHash for PHash<'a> { */ fn get_hash(&self) -> u64 { // Get the image data into a vector to perform the DFT on. 
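        // [Annotation, not part of this commit] Summary of the pHash steps below:
        // 1) copy the greyscale pixel values into a width x height f64 matrix,
        // 2) run the in-place 2D DFT (calculate_2d_dft) and cache the matrix as "dft",
        // 3) take the mean of the top-left (width/4 x height/4) block of coefficients,
        // 4) emit one hash bit per coefficient in that block, set when the
        //    coefficient is >= that mean.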
- let width = self.prepared_image.image.width() as usize; - let height = self.prepared_image.image.height() as usize; - + let width = self.prepared_image.image.width() as usize; + let height = self.prepared_image.image.height() as usize; + // Get 2d data to 2d FFT/DFT let mut data_matrix: Vec> = Vec::new(); for x in 0..width { @@ -291,67 +300,70 @@ impl<'a> PerceptualHash for PHash<'a> { for y in 0..height { let pos_x = x as u32; let pos_y = y as u32; - data_matrix[x].push(self.prepared_image.image.get_pixel(pos_x,pos_y).channels()[0] as f64); + data_matrix[x] + .push(self.prepared_image.image.get_pixel(pos_x, pos_y).channels()[0] as f64); } } // Perform the 2D DFT operation on our matrix calculate_2d_dft(&mut data_matrix); // Store this DFT in the cache - cache::put_matrix_in_cache(&Path::new(self.prepared_image.orig_path),width as u32,&"dft",&data_matrix); - + cache::put_matrix_in_cache(&Path::new(self.prepared_image.orig_path), + width as u32, + &"dft", + &data_matrix); + // Only need the top left quadrant let target_width = (width / 4) as usize; let target_height = (height / 4) as usize; let dft_width = (width / 4) as f64; let dft_height = (height / 4) as f64; - //Calculate the mean + // Calculate the mean let mut total = 0f64; for x in 0..target_width { for y in 0..target_height { total += data_matrix[x][y]; } } - let mean = total / (dft_width * dft_height); + let mean = total / (dft_width * dft_height); // Calculating a hash based on the mean let mut hash = 0u64; for x in 0..target_width { - // println!("Mean: {} Values: {:?}",mean,data_matrix[x]); + // println!("Mean: {} Values: {:?}",mean,data_matrix[x]); for y in 0..target_height { if data_matrix[x][y] >= mean { hash |= 1; - //println!("Pixel {} is >= {} therefore {:b}", pixel_sum, mean, hash); + // println!("Pixel {} is >= {} therefore {:b}", pixel_sum, mean, hash); } else { hash |= 0; - //println!("Pixel {} is < {} therefore {:b}", pixel_sum, mean, hash); + // println!("Pixel {} is < {} therefore {:b}", pixel_sum, mean, hash); } hash <<= 1; } } - //println!("Hash for {} is {}", prepared_image.orig_path, hash); + // println!("Hash for {} is {}", prepared_image.orig_path, hash); hash } } -/* - * Use a 1D DFT to cacluate the 2D DFT. - * - * This is achieved by calculating the DFT for each row, then calculating the - * DFT for each column of DFT row data. This means that a 32x32 image with have - * 1024 1D DFT operations performed on it. (Slightly caclulation intensive) - * - * This operation is in place on the data in the provided vector - * - * Inspired by: - * http://www.inf.ufsc.br/~visao/khoros/html-dip/c5/s2/front-page.html - * - * Checked with: - * http://calculator.vhex.net/post/calculator-result/2d-discrete-fourier-transform - */ -fn calculate_2d_dft(data_matrix: &mut Vec>){ - //println!("{:?}", data_matrix); +// Use a 1D DFT to cacluate the 2D DFT. +// +// This is achieved by calculating the DFT for each row, then calculating the +// DFT for each column of DFT row data. This means that a 32x32 image with have +// 1024 1D DFT operations performed on it. 
(Slightly caclulation intensive) +// +// This operation is in place on the data in the provided vector +// +// Inspired by: +// http://www.inf.ufsc.br/~visao/khoros/html-dip/c5/s2/front-page.html +// +// Checked with: +// http://calculator.vhex.net/post/calculator-result/2d-discrete-fourier-transform +// +fn calculate_2d_dft(data_matrix: &mut Vec>) { + // println!("{:?}", data_matrix); let width = data_matrix.len(); let height = data_matrix[0].len(); @@ -363,13 +375,13 @@ fn calculate_2d_dft(data_matrix: &mut Vec>){ for y in 0..height { column.push(data_matrix[x][y]); } - + // Perform the DCT on this column - //println!("column[{}] before: {:?}", x, column); + // println!("column[{}] before: {:?}", x, column); let forward_plan = dft::Plan::new(dft::Operation::Forward, column.len()); column.transform(&forward_plan); let complex_column = dft::unpack(&column); - //println!("column[{}] after: {:?}", x, complex_column); + // println!("column[{}] after: {:?}", x, complex_column); complex_data_matrix.push(complex_column); } @@ -380,10 +392,10 @@ fn calculate_2d_dft(data_matrix: &mut Vec>){ row.push(complex_data_matrix[x][y]); } // Perform DCT on the row - //println!("row[{}] before: {:?}", y, row); + // println!("row[{}] before: {:?}", y, row); let forward_plan = dft::Plan::new(dft::Operation::Forward, row.len()); row.transform(&forward_plan); - //println!("row[{}] after: {:?}", y, row); + // println!("row[{}] after: {:?}", y, row); // Put the row values back for x in 0..width { @@ -394,45 +406,40 @@ fn calculate_2d_dft(data_matrix: &mut Vec>){ fn round_float(f: f64) -> f64 { if f >= FLOAT_PRECISION_MAX_1 || f <= FLOAT_PRECISION_MIN_1 { - f - } - else if f >= FLOAT_PRECISION_MAX_2 || f <= FLOAT_PRECISION_MIN_2 { - (f * 10_f64).round() / 10_f64 - } - else if f >= FLOAT_PRECISION_MAX_3 || f <= FLOAT_PRECISION_MIN_3 { - (f * 100_f64).round() / 100_f64 - } - else if f >= FLOAT_PRECISION_MAX_4 || f <= FLOAT_PRECISION_MIN_4 { - (f * 1000_f64).round() / 1000_f64 - } - else if f >= FLOAT_PRECISION_MAX_5 || f <= FLOAT_PRECISION_MIN_5 { - (f * 10000_f64).round() / 10000_f64 - } - else { - (f * 100000_f64).round() / 100000_f64 + f + } else if f >= FLOAT_PRECISION_MAX_2 || f <= FLOAT_PRECISION_MIN_2 { + (f * 10_f64).round() / 10_f64 + } else if f >= FLOAT_PRECISION_MAX_3 || f <= FLOAT_PRECISION_MIN_3 { + (f * 100_f64).round() / 100_f64 + } else if f >= FLOAT_PRECISION_MAX_4 || f <= FLOAT_PRECISION_MIN_4 { + (f * 1000_f64).round() / 1000_f64 + } else if f >= FLOAT_PRECISION_MAX_5 || f <= FLOAT_PRECISION_MIN_5 { + (f * 10000_f64).round() / 10000_f64 + } else { + (f * 100000_f64).round() / 100000_f64 } } #[test] fn test_2d_dft() { let mut test_matrix: Vec> = Vec::new(); - test_matrix.push(vec![1f64,1f64,1f64,3f64]); - test_matrix.push(vec![1f64,2f64,2f64,1f64]); - test_matrix.push(vec![1f64,2f64,2f64,1f64]); - test_matrix.push(vec![3f64,1f64,1f64,1f64]); - - println!("{:?}",test_matrix[0]); - println!("{:?}",test_matrix[1]); - println!("{:?}",test_matrix[2]); - println!("{:?}",test_matrix[3]); - + test_matrix.push(vec![1f64, 1f64, 1f64, 3f64]); + test_matrix.push(vec![1f64, 2f64, 2f64, 1f64]); + test_matrix.push(vec![1f64, 2f64, 2f64, 1f64]); + test_matrix.push(vec![3f64, 1f64, 1f64, 1f64]); + + println!("{:?}", test_matrix[0]); + println!("{:?}", test_matrix[1]); + println!("{:?}", test_matrix[2]); + println!("{:?}", test_matrix[3]); + println!("Performing 2d DFT"); calculate_2d_dft(&mut test_matrix); - println!("{:?}",test_matrix[0]); - println!("{:?}",test_matrix[1]); - println!("{:?}",test_matrix[2]); - 
println!("{:?}",test_matrix[3]); + println!("{:?}", test_matrix[0]); + println!("{:?}", test_matrix[1]); + println!("{:?}", test_matrix[2]); + println!("{:?}", test_matrix[3]); assert!(test_matrix[0][0] == 24_f64); assert!(test_matrix[0][1] == 0_f64); diff --git a/src/lib.rs b/src/lib.rs index c61e768..f935301 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -16,7 +16,7 @@ use hash::PerceptualHash; */ pub fn init() { match cache::prep_cache() { - Ok(_) => {}, + Ok(_) => {} Err(e) => println!("Error: {}", e), } } @@ -26,7 +26,7 @@ pub fn init() { */ pub fn teardown() { match cache::clear_cache() { - Ok(_) => {}, + Ok(_) => {} Err(e) => println!("Error: {}", e), } } @@ -51,16 +51,15 @@ pub fn get_hamming_distance(hash1: u64, hash2: u64) -> u64 { hash::calculate_hamming_distance(hash1, hash2) } -/* - * Module for the tests - */ +// Module for the tests +// #[cfg(test)] mod tests { use super::*; use std::fs; use std::path; - use hash; + use hash; #[test] fn test_can_get_test_images() { @@ -70,18 +69,18 @@ mod tests { let orig_path = path.unwrap().path(); let ext = path::Path::new(&orig_path).extension(); match ext { - Some(_) => { + Some(_) => { if ext.unwrap() == "jpg" { num_paths += 1; println!("Is a image {}: {:?}", num_paths, orig_path) ; } - }, + } _ => { println!("Not an image: {:?}", orig_path) ; - continue + continue; } } - //println!("Name: {}", path.unwrap().path().display()) + // println!("Name: {}", path.unwrap().path().display()) } // Currently 12 images in the test imaages directory assert!(num_paths == 12); @@ -89,34 +88,48 @@ mod tests { // Simple function for the unit tests to succinctly test a set of images // that are organized in the fashion of large->medium->small - fn test_imageset_hash( - large_phash: &hash::PerceptualHash, - medium_phash: &hash::PerceptualHash, - small_phash: &hash::PerceptualHash, - expected_large_hash: u64, - expected_medium_hash: u64, - expected_small_hash: u64, - expected_large_medium_hamming: u64, - expected_large_small_hamming: u64, - expected_medium_small_hamming: u64) { - + fn test_imageset_hash(large_phash: &hash::PerceptualHash, + medium_phash: &hash::PerceptualHash, + small_phash: &hash::PerceptualHash, + expected_large_hash: u64, + expected_medium_hash: u64, + expected_small_hash: u64, + expected_large_medium_hamming: u64, + expected_large_small_hamming: u64, + expected_medium_small_hamming: u64) { + let actual_large_hash = large_phash.get_hash(); let actual_medium_hash = medium_phash.get_hash(); let actual_small_hash = small_phash.get_hash(); // println for the purpose of debugging - println!("Large Image: expected: {} actual: {}", expected_large_hash, actual_large_hash); - println!("Medium Image: expected: {} actual: {}", expected_medium_hash, actual_medium_hash); - println!("Small Image: expected: {} actual: {}", expected_small_hash, actual_small_hash); - - let actual_large_medium_hamming = hash::calculate_hamming_distance(actual_large_hash, actual_medium_hash); - let actual_large_small_hamming = hash::calculate_hamming_distance(actual_large_hash, actual_small_hash); - let actual_medium_small_hamming = hash::calculate_hamming_distance(actual_medium_hash, actual_small_hash); - - println!("Large-Medium Hamming Distance: expected: {} actual: {}", expected_large_medium_hamming, actual_large_medium_hamming); - println!("Large-Small Hamming Distance: expected: {} actual: {}", expected_large_small_hamming, actual_large_small_hamming); - println!("Medium-Small Hamming Distance: expected: {} actual: {}", expected_medium_small_hamming, 
actual_medium_small_hamming); - + println!("Large Image: expected: {} actual: {}", + expected_large_hash, + actual_large_hash); + println!("Medium Image: expected: {} actual: {}", + expected_medium_hash, + actual_medium_hash); + println!("Small Image: expected: {} actual: {}", + expected_small_hash, + actual_small_hash); + + let actual_large_medium_hamming = hash::calculate_hamming_distance(actual_large_hash, + actual_medium_hash); + let actual_large_small_hamming = hash::calculate_hamming_distance(actual_large_hash, + actual_small_hash); + let actual_medium_small_hamming = hash::calculate_hamming_distance(actual_medium_hash, + actual_small_hash); + + println!("Large-Medium Hamming Distance: expected: {} actual: {}", + expected_large_medium_hamming, + actual_large_medium_hamming); + println!("Large-Small Hamming Distance: expected: {} actual: {}", + expected_large_small_hamming, + actual_large_small_hamming); + println!("Medium-Small Hamming Distance: expected: {} actual: {}", + expected_medium_small_hamming, + actual_medium_small_hamming); + // Doing that asserts assert!(actual_large_hash == expected_large_hash); assert!(actual_medium_hash == expected_medium_hash); @@ -134,58 +147,66 @@ mod tests { super::init(); // Sample_01 tests - test_imageset_hash( - &hash::AHash::new(path::Path::new("./test_images/sample_01_large.jpg"), &hash::Precision::Medium), - &hash::AHash::new(path::Path::new("./test_images/sample_01_medium.jpg"), &hash::Precision::Medium), - &hash::AHash::new(path::Path::new("./test_images/sample_01_small.jpg"), &hash::Precision::Medium), - 857051991849750, - 857051991849750, - 857051991849750, - 0u64, - 0u64, - 0u64 - ); - + test_imageset_hash(&hash::AHash::new(path::Path::new("./test_images/sample_01_large.jpg"), + &hash::Precision::Medium), + &hash::AHash::new(path::Path::new("./test_images/sample_01_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::AHash::new(path::Path::new("./test_images/sample_01_small.jpg"), + &hash::Precision::Medium), + 857051991849750, + 857051991849750, + 857051991849750, + 0u64, + 0u64, + 0u64); + // Sample_02 tests - test_imageset_hash( - &hash::AHash::new(path::Path::new("./test_images/sample_02_large.jpg"), &hash::Precision::Medium), - &hash::AHash::new(path::Path::new("./test_images/sample_02_medium.jpg"), &hash::Precision::Medium), - &hash::AHash::new(path::Path::new("./test_images/sample_02_small.jpg"), &hash::Precision::Medium), - 18446744073441116160, - 18446744073441116160, - 18446744073441116160, - 0u64, - 0u64, - 0u64 - ); + test_imageset_hash(&hash::AHash::new(path::Path::new("./test_images/sample_02_large.jpg"), + &hash::Precision::Medium), + &hash::AHash::new(path::Path::new("./test_images/sample_02_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::AHash::new(path::Path::new("./test_images/sample_02_small.jpg"), + &hash::Precision::Medium), + 18446744073441116160, + 18446744073441116160, + 18446744073441116160, + 0u64, + 0u64, + 0u64); // Sample_03 tests - test_imageset_hash( - &hash::AHash::new(path::Path::new("./test_images/sample_03_large.jpg"), &hash::Precision::Medium), - &hash::AHash::new(path::Path::new("./test_images/sample_03_medium.jpg"), &hash::Precision::Medium), - &hash::AHash::new(path::Path::new("./test_images/sample_03_small.jpg"), &hash::Precision::Medium), - 135670932300497406, - 135670932300497406, - 135670932300497406, - 0u64, - 0u64, - 0u64 - ); - + test_imageset_hash(&hash::AHash::new(path::Path::new("./test_images/sample_03_large.jpg"), + &hash::Precision::Medium), + 
&hash::AHash::new(path::Path::new("./test_images/sample_03_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::AHash::new(path::Path::new("./test_images/sample_03_small.jpg"), + &hash::Precision::Medium), + 135670932300497406, + 135670932300497406, + 135670932300497406, + 0u64, + 0u64, + 0u64); + // Sample_04 tests - test_imageset_hash( - &hash::AHash::new(path::Path::new("./test_images/sample_04_large.jpg"), &hash::Precision::Medium), - &hash::AHash::new(path::Path::new("./test_images/sample_04_medium.jpg"), &hash::Precision::Medium), - &hash::AHash::new(path::Path::new("./test_images/sample_04_small.jpg"), &hash::Precision::Medium), - 18446460933225054208, - 18446460933090836480, - 18446460933090836480, - 1u64, - 1u64, - 0u64 - ); + test_imageset_hash(&hash::AHash::new(path::Path::new("./test_images/sample_04_large.jpg"), + &hash::Precision::Medium), + &hash::AHash::new(path::Path::new("./test_images/sample_04_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::AHash::new(path::Path::new("./test_images/sample_04_small.jpg"), + &hash::Precision::Medium), + 18446460933225054208, + 18446460933090836480, + 18446460933090836480, + 1u64, + 1u64, + 0u64); // Clean_Cache - //super::teardown(); + // super::teardown(); } #[test] @@ -194,117 +215,133 @@ mod tests { super::init(); // Sample_01 tests - test_imageset_hash( - &hash::DHash::new(path::Path::new("./test_images/sample_01_large.jpg"), &hash::Precision::Medium), - &hash::DHash::new(path::Path::new("./test_images/sample_01_medium.jpg"), &hash::Precision::Medium), - &hash::DHash::new(path::Path::new("./test_images/sample_01_small.jpg"), &hash::Precision::Medium), - 7937395827556495926, - 7937395827556495926, - 7939647627370181174, - 0u64, - 1u64, - 1u64 - ); - + test_imageset_hash(&hash::DHash::new(path::Path::new("./test_images/sample_01_large.jpg"), + &hash::Precision::Medium), + &hash::DHash::new(path::Path::new("./test_images/sample_01_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::DHash::new(path::Path::new("./test_images/sample_01_small.jpg"), + &hash::Precision::Medium), + 7937395827556495926, + 7937395827556495926, + 7939647627370181174, + 0u64, + 1u64, + 1u64); + // Sample_02 tests - test_imageset_hash( - &hash::DHash::new(path::Path::new("./test_images/sample_02_large.jpg"), &hash::Precision::Medium), - &hash::DHash::new(path::Path::new("./test_images/sample_02_medium.jpg"), &hash::Precision::Medium), - &hash::DHash::new(path::Path::new("./test_images/sample_02_small.jpg"), &hash::Precision::Medium), - 11009829669713008949, - 11009829670249879861, - 11009829669713008949, - 1u64, - 0u64, - 1u64 - ); + test_imageset_hash(&hash::DHash::new(path::Path::new("./test_images/sample_02_large.jpg"), + &hash::Precision::Medium), + &hash::DHash::new(path::Path::new("./test_images/sample_02_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::DHash::new(path::Path::new("./test_images/sample_02_small.jpg"), + &hash::Precision::Medium), + 11009829669713008949, + 11009829670249879861, + 11009829669713008949, + 1u64, + 0u64, + 1u64); // Sample_03 tests - test_imageset_hash( - &hash::DHash::new(path::Path::new("./test_images/sample_03_large.jpg"), &hash::Precision::Medium), - &hash::DHash::new(path::Path::new("./test_images/sample_03_medium.jpg"), &hash::Precision::Medium), - &hash::DHash::new(path::Path::new("./test_images/sample_03_small.jpg"), &hash::Precision::Medium), - 225528496439353286, - 225528496439353286, - 226654396346195908, - 0u64, - 2u64, - 2u64 - ); - + 
test_imageset_hash(&hash::DHash::new(path::Path::new("./test_images/sample_03_large.jpg"), + &hash::Precision::Medium), + &hash::DHash::new(path::Path::new("./test_images/sample_03_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::DHash::new(path::Path::new("./test_images/sample_03_small.jpg"), + &hash::Precision::Medium), + 225528496439353286, + 225528496439353286, + 226654396346195908, + 0u64, + 2u64, + 2u64); + // Sample_04 tests - test_imageset_hash( - &hash::DHash::new(path::Path::new("./test_images/sample_04_large.jpg"), &hash::Precision::Medium), - &hash::DHash::new(path::Path::new("./test_images/sample_04_medium.jpg"), &hash::Precision::Medium), - &hash::DHash::new(path::Path::new("./test_images/sample_04_small.jpg"), &hash::Precision::Medium), - 14620651386429567209, - 14620651386429567209, - 14620651386429567209, - 0u64, - 0u64, - 0u64 - ); + test_imageset_hash(&hash::DHash::new(path::Path::new("./test_images/sample_04_large.jpg"), + &hash::Precision::Medium), + &hash::DHash::new(path::Path::new("./test_images/sample_04_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::DHash::new(path::Path::new("./test_images/sample_04_small.jpg"), + &hash::Precision::Medium), + 14620651386429567209, + 14620651386429567209, + 14620651386429567209, + 0u64, + 0u64, + 0u64); // Clean_Cache - //super::teardown(); + // super::teardown(); } - + #[test] fn test_confirm_phash_results() { // Prep_Cache super::init(); // Sample_01 tests - test_imageset_hash( - &hash::PHash::new(path::Path::new("./test_images/sample_01_large.jpg"), &hash::Precision::Medium), - &hash::PHash::new(path::Path::new("./test_images/sample_01_medium.jpg"), &hash::Precision::Medium), - &hash::PHash::new(path::Path::new("./test_images/sample_01_small.jpg"), &hash::Precision::Medium), - 72357778504597504, - 72357778504597504, - 72357778504597504, - 0u64, - 0u64, - 0u64 - ); - + test_imageset_hash(&hash::PHash::new(path::Path::new("./test_images/sample_01_large.jpg"), + &hash::Precision::Medium), + &hash::PHash::new(path::Path::new("./test_images/sample_01_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::PHash::new(path::Path::new("./test_images/sample_01_small.jpg"), + &hash::Precision::Medium), + 72357778504597504, + 72357778504597504, + 72357778504597504, + 0u64, + 0u64, + 0u64); + // Sample_02 tests - test_imageset_hash( - &hash::PHash::new(path::Path::new("./test_images/sample_02_large.jpg"), &hash::Precision::Medium), - &hash::PHash::new(path::Path::new("./test_images/sample_02_medium.jpg"), &hash::Precision::Medium), - &hash::PHash::new(path::Path::new("./test_images/sample_02_small.jpg"), &hash::Precision::Medium), - 5332332327550844928, - 5332332327550844928, - 5332332327550844928, - 0u64, - 0u64, - 0u64 - ); + test_imageset_hash(&hash::PHash::new(path::Path::new("./test_images/sample_02_large.jpg"), + &hash::Precision::Medium), + &hash::PHash::new(path::Path::new("./test_images/sample_02_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::PHash::new(path::Path::new("./test_images/sample_02_small.jpg"), + &hash::Precision::Medium), + 5332332327550844928, + 5332332327550844928, + 5332332327550844928, + 0u64, + 0u64, + 0u64); // Sample_03 tests - test_imageset_hash( - &hash::PHash::new(path::Path::new("./test_images/sample_03_large.jpg"), &hash::Precision::Medium), - &hash::PHash::new(path::Path::new("./test_images/sample_03_medium.jpg"), &hash::Precision::Medium), - &hash::PHash::new(path::Path::new("./test_images/sample_03_small.jpg"), &hash::Precision::Medium), - 6917529027641081856, - 
6917529027641081856, - 6917529027641081856, - 0u64, - 0u64, - 0u64 - ); - + test_imageset_hash(&hash::PHash::new(path::Path::new("./test_images/sample_03_large.jpg"), + &hash::Precision::Medium), + &hash::PHash::new(path::Path::new("./test_images/sample_03_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::PHash::new(path::Path::new("./test_images/sample_03_small.jpg"), + &hash::Precision::Medium), + 6917529027641081856, + 6917529027641081856, + 6917529027641081856, + 0u64, + 0u64, + 0u64); + // Sample_04 tests - test_imageset_hash( - &hash::PHash::new(path::Path::new("./test_images/sample_04_large.jpg"), &hash::Precision::Medium), - &hash::PHash::new(path::Path::new("./test_images/sample_04_medium.jpg"), &hash::Precision::Medium), - &hash::PHash::new(path::Path::new("./test_images/sample_04_small.jpg"), &hash::Precision::Medium), - 10997931646002397184, - 10997931646002397184, - 11142046834078253056, - 0u64, - 1u64, - 1u64 - ); + test_imageset_hash(&hash::PHash::new(path::Path::new("./test_images/sample_04_large.jpg"), + &hash::Precision::Medium), + &hash::PHash::new(path::Path::new("./test_images/sample_04_medium.\ + jpg"), + &hash::Precision::Medium), + &hash::PHash::new(path::Path::new("./test_images/sample_04_small.jpg"), + &hash::Precision::Medium), + 10997931646002397184, + 10997931646002397184, + 11142046834078253056, + 0u64, + 1u64, + 1u64); // Clean_Cache - //super::teardown(); + // super::teardown(); } } diff --git a/src/main.rs b/src/main.rs index 68d0ff0..6d306d4 100644 --- a/src/main.rs +++ b/src/main.rs @@ -15,15 +15,20 @@ const USAGE: &'static str = " Perceptual Image Hashing (pihash) Usage: - pihash [options] ... + pihash [options] \ + ... pihash (--help | --version) Options: - -h, --help Show this screen. + -h, --help \ + Show this screen. -V, --version Print version. - -a, --ahash Include an ahash calculation. - -d, --dhash Include an dhash calculation. - -p, --phash Include an phash calculation. + -a, \ + --ahash Include an ahash calculation. + -d, --dhash \ + Include an dhash calculation. + -p, --phash Include an phash \ + calculation. "; #[derive(Debug, RustcDecodable)] @@ -36,15 +41,15 @@ struct Args { fn main() { let args: Args = Docopt::new(USAGE) - .and_then(|d| d.decode()) - .unwrap_or_else(|e| e.exit()); + .and_then(|d| d.decode()) + .unwrap_or_else(|e| e.exit()); // Init the hashing library pihash::init(); - //println!("{:?}", args); + // println!("{:?}", args); // All flags set or, no flags set - if (args.flag_ahash && args.flag_dhash && args.flag_phash) - || (!args.flag_ahash && !args.flag_dhash && !args.flag_phash) { + if (args.flag_ahash && args.flag_dhash && args.flag_phash) || + (!args.flag_ahash && !args.flag_dhash && !args.flag_phash) { for path in args.arg_path { let image_path = Path::new(&path); let hashes = pihash::get_phashes(&image_path); @@ -53,10 +58,14 @@ fn main() { ahash: {} dhash: {} phash: {} - "#, hashes.orig_path, hashes.ahash, hashes.dhash, hashes.phash); + "#, + hashes.orig_path, + hashes.ahash, + hashes.dhash, + hashes.phash); println!("{}", hash_result); } - //Otherwise process only specific hashes + // Otherwise process only specific hashes } else { for path in args.arg_path { println!("file: {}", path);
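            // [Annotation, not part of this commit] A minimal sketch of comparing two
            // images with the public API touched by this diff (the file names and the
            // hashes_a/hashes_b bindings are placeholders, not code from this repo):
            //
            //     let hashes_a = pihash::get_phashes(&Path::new("a.jpg"));
            //     let hashes_b = pihash::get_phashes(&Path::new("b.jpg"));
            //     let distance = pihash::get_hamming_distance(hashes_a.phash, hashes_b.phash);
            //     println!("phash hamming distance: {}", distance);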