
Commented out a number of debug logging statements that aren't needed right now. Updated the Hibernate config files. Added logger overrides to the logback config to suppress debug output from Hibernate and EHCache, since it isn't relevant to the current debugging. Updated the todo... sort of...

master · Drew Short, 10 years ago · parent commit dc643512b7
10 changed files:
 1. pom.xml (5 lines changed)
 2. src/includes/logback.xml (2 lines changed)
 3. src/main/resources/hibernate.cfg.xml (25 lines changed)
 4. src/main/scala/com/sothr/imagetools/hash/AHash.scala (4 lines changed)
 5. src/main/scala/com/sothr/imagetools/hash/DHash.scala (4 lines changed)
 6. src/main/scala/com/sothr/imagetools/hash/HashService.scala (8 lines changed)
 7. src/main/scala/com/sothr/imagetools/hash/PHash.scala (8 lines changed)
 8. src/main/scala/com/sothr/imagetools/image/ImageService.scala (21 lines changed)
 9. src/test/resources/hibernate.cfg.xml (19 lines changed)
10. todo (23 lines changed)

pom.xml (5 lines changed)

@@ -150,6 +150,11 @@
<artifactId>hibernate-ehcache</artifactId>
<version>${lib.hibernate.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-c3p0</artifactId>
<version>${lib.hibernate.version}</version>
</dependency>
</dependencies>
<build>
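
The new hibernate-c3p0 dependency only takes effect once a SessionFactory is built from the configuration. A minimal sketch of that bootstrap, assuming hibernate.cfg.xml is on the classpath; the SessionFactoryBootstrap object and the no-argument buildSessionFactory call are illustrative, not code from this repository:

    import org.hibernate.SessionFactory
    import org.hibernate.cfg.Configuration

    object SessionFactoryBootstrap {
      // Builds the factory from hibernate.cfg.xml on the classpath. The c3p0.*
      // properties in that file are only honoured because hibernate-c3p0 is now
      // on the classpath; without it Hibernate falls back to its built-in,
      // non-pooling connection provider. The exact bootstrap API varies across 4.x.
      lazy val sessionFactory: SessionFactory =
        new Configuration().configure().buildSessionFactory()
    }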

src/includes/logback.xml (2 lines changed)

@@ -1,5 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
<logger name="org.hibernate" level="WARN"/>
<logger name="net.sf.ehcache" level="WARN"/>
<appender name="C" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<!-- Sorry Windows Users -->
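
The two new logger elements raise the threshold for everything logged under the org.hibernate and net.sf.ehcache names while leaving the application's own loggers at the root level. A small sketch of how the effect can be confirmed through SLF4J; the specific logger names are only examples:

    import org.slf4j.LoggerFactory

    object LogbackLevelCheck {
      def main(args: Array[String]): Unit = {
        // With <logger name="org.hibernate" level="WARN"/> every logger under
        // that prefix inherits WARN, so its debug output is dropped before it
        // reaches the console appender.
        val hibernateLog = LoggerFactory.getLogger("org.hibernate.SQL")
        println(s"hibernate debug enabled: ${hibernateLog.isDebugEnabled}") // false

        // Application loggers are untouched and keep the root level.
        val appLog = LoggerFactory.getLogger("com.sothr.imagetools")
        println(s"app debug enabled: ${appLog.isDebugEnabled}")
      }
    }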

src/main/resources/hibernate.cfg.xml (25 lines changed)

@@ -7,10 +7,12 @@
<session-factory>
<!-- properties -->
<property name="connection.driver_class">org.h2.Driver</property>
<!--<property name="connection.connection.url">jdbc:h2:imageTools.db</property>-->
<property name="dialect">org.hibernate.dialect.H2Dialect</property>
<property name="show_sql">false</property>
<property name="hibernate.connection.driver_class">org.h2.Driver</property>
<!--<property name="hibernate.connection.connection.url">jdbc:h2:imageTools.db</property>-->
<property name="hibernate.dialect">org.hibernate.dialect.H2Dialect</property>
<property name="hibernate.show_sql">false</property>
<property name="hibernate.generate_statistics"></property>
<property name="hibernate.use_sql_comments"></property>
<!--<property name="transaction.factory_class">
org.hibernate.transaction.JTATransactionFactory
</property>-->
@@ -19,10 +21,19 @@
<property name="hibernate.hbm2ddl.auto">update</property>
<!-- Enable Hibernate's automatic session context management -->
<property name="current_session_context_class">thread</property>
<property name="hibernate.current_session_context_class">thread</property>
<!-- Disable the second-level cache -->
<!--<property name="cache.provider_class">org.hibernate.cache.NoCacheProvider</property>-->
<!-- Enable the second-level cache -->
<property name="hibernate.cache.region.factory_class">org.hibernate.cache.ehcache.EhCacheRegionFactory</property>
<property name="hibernate.cache.region.factory_class">org.hibernate.cache.ehcache.SingletonEhCacheRegionFactory</property>
<property name="hibernate.cache.use_second_level_cache">true</property>
<property name="c3p0.acquire_increment">1</property>
<property name="c3p0.idle_test_period">100</property> <!-- seconds -->
<property name="c3p0.max_size">50</property>
<property name="c3p0.max_statements">0</property>
<property name="c3p0.min_size">5</property>
<property name="c3p0.timeout">100</property> <!-- seconds -->
<!-- mapping files -->
<mapping resource="hibernate/Image.hbm.xml"/>
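
Whether the SingletonEhCacheRegionFactory and the new c3p0 pool settings are actually in effect can be spot-checked through Hibernate's statistics API. A hedged sketch, reusing the hypothetical SessionFactoryBootstrap object from the pom.xml note above:

    import org.hibernate.stat.Statistics

    object CacheStatsCheck {
      def main(args: Array[String]): Unit = {
        // SessionFactoryBootstrap is the illustrative helper sketched earlier,
        // not something that exists in this repository.
        val stats: Statistics = SessionFactoryBootstrap.sessionFactory.getStatistics
        stats.setStatisticsEnabled(true)

        // After a few entity loads, these counters show whether the
        // EHCache-backed second-level cache is actually being hit.
        println(s"second-level cache hits:   ${stats.getSecondLevelCacheHitCount}")
        println(s"second-level cache misses: ${stats.getSecondLevelCacheMissCount}")
      }
    }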

src/main/scala/com/sothr/imagetools/hash/AHash.scala (4 lines changed)

@@ -7,10 +7,10 @@ import grizzled.slf4j.Logging
*/
object AHash extends PerceptualHasher with Logging {
def getHash(imageData: Array[Array[Int]]): Long = {
debug("Generating AHash")
//debug("Generating AHash")
val width = imageData.length
val height = imageData(0).length
debug(s"Image data size: ${width}x${height}")
//debug(s"Image data size: ${width}x${height}")
//calculate average pixel
var total = 0
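
The hunk cuts off just before the averaging loop. For context, a hedged sketch of the standard average-hash algorithm these commented-out debug statements were tracing; this is the general idea, not this file's exact implementation:

    object AHashSketch {
      // Assumes imageData is already a small (e.g. 8x8) grayscale grid so the
      // result fits in 64 bits; indexing follows the width/height convention above.
      def hash(imageData: Array[Array[Int]]): Long = {
        val width  = imageData.length
        val height = imageData(0).length

        // 1. average pixel value over the whole grid
        var total = 0L
        for (x <- 0 until width; y <- 0 until height) total += imageData(x)(y)
        val mean = total / (width * height)

        // 2. one bit per pixel, set when the pixel is at or above the mean
        var result = 0L
        for (x <- 0 until width; y <- 0 until height) {
          result <<= 1
          if (imageData(x)(y) >= mean) result |= 1L
        }
        result
      }
    }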

src/main/scala/com/sothr/imagetools/hash/DHash.scala (4 lines changed)

@@ -7,10 +7,10 @@ import grizzled.slf4j.Logging
*/
object DHash extends PerceptualHasher with Logging {
def getHash(imageData: Array[Array[Int]]): Long = {
debug("Generating DHash")
//debug("Generating DHash")
val width = imageData.length
val height = imageData(0).length
debug(s"Image data size: ${width}x${height}")
//debug(s"Image data size: ${width}x${height}")
//calculate dhash
var hash = 0L
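
Likewise, a hedged sketch of the standard difference-hash idea behind this file; the traversal order and bit layout are assumptions, not necessarily what DHash.scala does:

    object DHashSketch {
      // Each bit records whether a pixel is brighter than its left-hand
      // neighbour; a 9x8 (width x height) grid yields exactly 64 bits.
      def hash(imageData: Array[Array[Int]]): Long = {
        val width  = imageData.length
        val height = imageData(0).length
        var result = 0L
        for (y <- 0 until height; x <- 1 until width) {
          result <<= 1
          if (imageData(x)(y) > imageData(x - 1)(y)) result |= 1L
        }
        result
      }
    }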

src/main/scala/com/sothr/imagetools/hash/HashService.scala (8 lines changed)

@@ -23,7 +23,7 @@ object HashService extends Logging {
}
def getImageHashes(image:BufferedImage, imagePath:String):ImageHashDTO = {
debug("Creating hashes for an image")
//debug("Creating hashes for an image")
var ahash:Long = 0L
var dhash:Long = 0L
@@ -50,7 +50,7 @@ object HashService extends Logging {
}
def getAhash(image:BufferedImage, alreadyGray:Boolean = false):Long = {
debug("Started generating an AHash")
//debug("Started generating an AHash")
var grayImage:BufferedImage = null
if (alreadyGray) {
grayImage = image
@@ -63,7 +63,7 @@ object HashService extends Logging {
}
def getDhash(image:BufferedImage, alreadyGray:Boolean = false):Long = {
debug("Started generating an DHash")
//debug("Started generating an DHash")
var grayImage:BufferedImage = null
if (alreadyGray) {
grayImage = image
@@ -76,7 +76,7 @@ object HashService extends Logging {
}
def getPhash(image:BufferedImage, alreadyGray:Boolean = false):Long = {
debug("Started generating an PHash")
//debug("Started generating an PHash")
var grayImage:BufferedImage = null
if (alreadyGray) {
grayImage = image
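
All three getters follow the same shape: convert to grayscale unless alreadyGray is set, then delegate to the matching hasher. A small usage sketch of the public surface shown in these hunks; the sample.jpg path is hypothetical:

    import java.io.File
    import javax.imageio.ImageIO

    import com.sothr.imagetools.hash.HashService

    object HashServiceUsage {
      def main(args: Array[String]): Unit = {
        val file  = new File("sample.jpg") // hypothetical test image
        val image = ImageIO.read(file)

        // alreadyGray defaults to false, so each call converts to grayscale first
        val ahash = HashService.getAhash(image)
        val dhash = HashService.getDhash(image)
        val phash = HashService.getPhash(image)
        println(f"ahash=$ahash%016x dhash=$dhash%016x phash=$phash%016x")

        // or compute all of them (plus the MD5) in one pass
        val hashes = HashService.getImageHashes(image, file.getAbsolutePath)
      }
    }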

src/main/scala/com/sothr/imagetools/hash/PHash.scala (8 lines changed)

@@ -11,7 +11,7 @@ object PHash extends PerceptualHasher with Logging {
//convert the imageData into a FloatArray
val width = imageData.length
val height = imageData(0).length
debug(s"Starting with image of ${height}x${width} for PHash")
//debug(s"Starting with image of ${height}x${width} for PHash")
val imageDataFloat:Array[Array[Float]] = Array.ofDim[Float](height, width)
for (row <- 0 until height) {
@@ -19,13 +19,13 @@ object PHash extends PerceptualHasher with Logging {
imageDataFloat(row)(col) = imageData(row)(col).toFloat
}
}
debug("Copied image data to float array for transform")
//debug("Copied image data to float array for transform")
//debug(s"\n${imageDataFloat.deep.mkString("\n")}")
//perform transform on the data
val dct:FloatDCT_2D = new FloatDCT_2D(height,width)
dct.forward(imageDataFloat, true)
debug("Converted image data into DCT")
//debug("Converted image data into DCT")
//debug(s"\n${imageDataFloat.deep.mkString("\n")}")
//extract the DCT data
@@ -40,7 +40,7 @@ object PHash extends PerceptualHasher with Logging {
}
}
val mean = total / (dctDataHeight * dctDataWidth)
debug(s"Calculated mean as $mean from ${total}/${dctDataHeight * dctDataWidth}")
//debug(s"Calculated mean as $mean from ${total}/${dctDataHeight * dctDataWidth}")
//calculate the hash
var hash = 0L
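
After the forward DCT only the low-frequency corner of the coefficient matrix matters for the hash. A hedged sketch of that extraction step; the 8x8 block size, the bit order, and the JTransforms import path are assumptions, not this file's exact code:

    import edu.emory.mathcs.jtransforms.dct.FloatDCT_2D // package name differs in newer JTransforms releases

    object PHashSketch {
      def hash(imageDataFloat: Array[Array[Float]]): Long = {
        val height = imageDataFloat.length
        val width  = imageDataFloat(0).length
        new FloatDCT_2D(height, width).forward(imageDataFloat, true) // transforms in place

        // average the low-frequency block, conventionally skipping the DC term
        val block = 8
        var total = 0.0f
        for (r <- 0 until block; c <- 0 until block if !(r == 0 && c == 0))
          total += imageDataFloat(r)(c)
        val mean = total / (block * block - 1)

        // one bit per coefficient in the block
        var result = 0L
        for (r <- 0 until block; c <- 0 until block) {
          result <<= 1
          if (imageDataFloat(r)(c) > mean) result |= 1L
        }
        result
      }
    }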

src/main/scala/com/sothr/imagetools/image/ImageService.scala (21 lines changed)

@@ -60,6 +60,7 @@ object ImageService extends Logging {
debug(s"${file.getAbsolutePath} was already processed")
return image
} else {
debug(s"Processing image: ${file.getAbsolutePath}")
val bufferedImage = ImageIO.read(file)
val hashes = HashService.getImageHashes(bufferedImage, file.getAbsolutePath)
var thumbnailPath = lookupThumbnailPath(hashes.md5)
@@ -134,7 +135,7 @@ object ImageService extends Logging {
* @return
*/
def convertToGray(image:BufferedImage):BufferedImage = {
debug("Converting an image to grayscale")
//debug("Converting an image to grayscale")
val grayImage = new BufferedImage(image.getWidth, image.getHeight, BufferedImage.TYPE_BYTE_GRAY)
//create a color conversion operation
@@ -152,7 +153,7 @@ object ImageService extends Logging {
}
def resize(image:BufferedImage, size:Int, forced:Boolean=false):BufferedImage = {
debug(s"Resizing an image to size: ${size}x${size} forced: $forced")
//debug(s"Resizing an image to size: ${size}x${size} forced: $forced")
if (forced) {
Thumbnails.of(image).forceSize(size,size).asBufferedImage
} else {
@@ -174,15 +175,15 @@ object ImageService extends Logging {
val height = image.getHeight
val isSingleChannel = if(numPixels == (width * height)) true else false
val hasAlphaChannel = image.getAlphaRaster != null
debug(s"Converting image to 2d. width:$width height:$height")
//debug(s"Converting image to 2d. width:$width height:$height")
val result = Array.ofDim[Int](height,width)
if (isSingleChannel) {
debug(s"Processing Single Channel Image")
//debug(s"Processing Single Channel Image")
val pixelLength = 1
var row = 0
var col = 0
debug(s"Processing pixels 0 until $numPixels by $pixelLength")
//debug(s"Processing pixels 0 until $numPixels by $pixelLength")
for (pixel <- 0 until numPixels by pixelLength) {
//debug(s"Processing pixel: $pixel/${numPixels - 1}")
val argb:Int = pixels(pixel).toInt //singleChannel
@@ -196,11 +197,11 @@ object ImageService extends Logging {
}
}
else if (hasAlphaChannel) {
debug(s"Processing Four Channel Image")
//debug(s"Processing Four Channel Image")
val pixelLength = 4
var row = 0
var col = 0
debug(s"Processing pixels 0 until $numPixels by $pixelLength")
//debug(s"Processing pixels 0 until $numPixels by $pixelLength")
for (pixel <- 0 until numPixels by pixelLength) {
//debug(s"Processing pixel: $pixel/${numPixels - 1}")
var argb:Int = 0
@@ -216,11 +217,11 @@ object ImageService extends Logging {
}
}
} else {
debug(s"Processing Three Channel Image")
//debug(s"Processing Three Channel Image")
val pixelLength = 3
var row = 0
var col = 0
debug(s"Processing pixels 0 until $numPixels by $pixelLength")
//debug(s"Processing pixels 0 until $numPixels by $pixelLength")
for (pixel <- 0 until numPixels by pixelLength) {
//debug(s"Processing pixel: $pixel/${numPixels - 1}")
var argb:Int = 0
@@ -236,8 +237,6 @@ object ImageService extends Logging {
}
}
}
result
}
}
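
The three branches differ only in how many raster bytes each pixel occupies and how they are packed back into an ARGB int; the loop bodies are elided in this view. A hedged sketch of the packing for the four-channel case; the byte order is an assumption about the underlying raster layout, not taken from this file:

    object PixelPackingSketch {
      // Packs the next four raster bytes (stored A, B, G, R in a typical
      // ABGR DataBufferByte) into a single ARGB int.
      def packArgb(pixels: Array[Byte], pixel: Int): Int = {
        var argb = 0
        argb += (pixels(pixel).toInt & 0xff) << 24     // alpha
        argb += (pixels(pixel + 1).toInt & 0xff)       // blue
        argb += (pixels(pixel + 2).toInt & 0xff) << 8  // green
        argb += (pixels(pixel + 3).toInt & 0xff) << 16 // red
        argb
      }
    }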

src/test/resources/hibernate.cfg.xml (19 lines changed)

@@ -7,9 +7,9 @@
<session-factory>
<!-- properties -->
<property name="connection.driver_class">org.h2.Driver</property>
<property name="dialect">org.hibernate.dialect.H2Dialect</property>
<property name="show_sql">true</property>
<property name="hibernate.connection.driver_class">org.h2.Driver</property>
<property name="hibernate.dialect">org.hibernate.dialect.H2Dialect</property>
<property name="hibernate.show_sql">true</property>
<!--<property name="transaction.factory_class">
org.hibernate.transaction.JTATransactionFactory
</property>-->
@@ -20,8 +20,17 @@
<!-- Enable Hibernate's automatic session context management -->
<property name="current_session_context_class">thread</property>
<!-- Disable the second-level cache -->
<!--<property name="cache.provider_class">org.hibernate.cache.NoCacheProvider</property>-->
<!-- Enable the second-level cache -->
<property name="hibernate.cache.region.factory_class">org.hibernate.cache.ehcache.EhCacheRegionFactory</property>
<property name="hibernate.cache.region.factory_class">org.hibernate.cache.ehcache.SingletonEhCacheRegionFactory</property>
<property name="hibernate.cache.use_second_level_cache">true</property>
<property name="c3p0.acquire_increment">1</property>
<property name="c3p0.idle_test_period">100</property> <!-- seconds -->
<property name="c3p0.max_size">50</property>
<property name="c3p0.max_statements">0</property>
<property name="c3p0.min_size">5</property>
<property name="c3p0.timeout">100</property> <!-- seconds -->
<!-- mapping files -->
<mapping resource="hibernate/Image.hbm.xml"/>

todo (23 lines changed)

@@ -1,16 +1,13 @@
Add functionality to both engines
-Move files to new locations
-Rename new files based on their MD5
Add functionality to ImageService
-Cache thumbnails
-Generate thumbnails
-Move files to new locations
-Rename new files based on their MD5 - abandoned as bad idea
Improve functionality of the PropertiesService
-Getters and Setters for specific values with proper cascading and type awareness
-Nicer debugging?
-Getters and Setters for specific values with proper cascading and type awareness
-Nicer debugging?
Hibernate - H2 integration
- setup hibernate
- setup h2
- setup C3P0 connection pool
- transfer configuration files
- autoload configuration files?
- map DTO's
- autoload configuration files?
Database - How to handle directories to reduce database storage?
- recursive database construction
-Factory to handle generating directory information
-Directory1 -> Directory2 -> Directory3
-result = Directory/Directory2/Directory3