diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..7a5faf4 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2020 The Naval Postgraduate School + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/extract_detect/Hadoop/.goutputstream-MTYAO0 b/extract_detect/Hadoop/.goutputstream-MTYAO0 deleted file mode 100644 index 810a8bd..0000000 --- a/extract_detect/Hadoop/.goutputstream-MTYAO0 +++ /dev/null @@ -1,8 +0,0 @@ -#!/bin/bash - -javac -cp /usr/lib/hadoop/*:/usr/lib/hadoop/client-0.20/* -d ~/Documents/shared/MineProcess ~/Documents/shared/MineProcess/Util.java ~/Documents/shared/MineProcess/LightPoint.java ~/Documents/shared/MineProcess/Point.java ~/Documents/shared/MineProcess/Centroid.java ~/Documents/shared/MineProcess/Box.java ~/Documents/shared/MineProcess/Cluster.java ~/Documents/shared/MineProcess/Master.java ~/Documents/shared/MineProcess/Polish.java ~/Documents/shared/MineProcess/Scan.java ~/Documents/shared/MineProcess/Sift.java ~/Documents/shared/MineProcess/Calc.java ~/Documents/shared/MineProcess/ColorEdit.java ~/Documents/shared/MineProcess/Constants.java ~/Documents/shared/MineProcess/Converter.java ~/Documents/shared/MineProcess/Format.java ~/Documents/shared/MineProcess/Radius.java ~/Documents/shared/MineProcess/Slide.java ~/Documents/shared/MineProcess/PictureFrame.java ~/Documents/shared/MineProcess/Picture.java ~/Documents/shared/MineProcess/SimplePicture.java ~/Documents/shared/MineProcess/Convert.java ~/Documents/shared/MineProcess/DigitalPicture.java ~/Documents/shared/MineProcess/ImageArrEdit.java ~/Documents/shared/MineProcess/Pixel.java ~/Documents/shared/MineProcess/MacroMaster.java ~/Documents/shared/MineProcess/Runner.java -echo "COMPILED!" -jar -cvf Runner.jar -C ~/Documents/shared/MineProcess . -echo "STARTING HADOOP..." -hadoop jar Runner.jar Runner /user/cloudera/MineProcess/input/test0.png-m-00000 /user/cloudera/MineProcess/output/mine$1 -echo "FINISHED!" 
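For orientation before the file-by-file changes: the deleted helper script above compiles the MineProcess sources, packs them into Runner.jar, and launches a map-only Hadoop job over MSTIFF sonar files. The driver such a script invokes, as this patch reshapes it in SonarImage.java below, follows the standard whole-file, MultipleOutputs MapReduce pattern. What follows is a minimal sketch of that pattern, not the committed code: the job name, named outputs, and nested classes mirror the patch, while the class name DriverSketch and everything else is illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

// Sketch of the driver shape this patch converges on; assumes the patch's
// SonarImage classes are on the classpath. Not the committed code.
public class DriverSketch {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "SonarImage");
        // lets Hadoop locate the jar that contains this class
        job.setJarByClass(DriverSketch.class);
        // each map() call receives one whole MSTIFF file (no input splitting)
        job.setInputFormatClass(SonarImage.CombineDocumentFileFormat.class);
        job.setMapperClass(SonarImage.Sonar_Mapper.class);
        // map-only job: the mapper writes everything through MultipleOutputs
        job.setNumReduceTasks(0);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        // one named output per artifact: metadata CSV, tagged image, mine report
        MultipleOutputs.addNamedOutput(job, "metadata", TextOutputFormat.class, Text.class, Text.class);
        MultipleOutputs.addNamedOutput(job, "image", SequenceFileOutputFormat.class, Text.class, BytesWritable.class);
        MultipleOutputs.addNamedOutput(job, "mines", TextOutputFormat.class, Text.class, Text.class);
        FileInputFormat.setInputPaths(job, new Path(args[0]));  // HDFS input dir
        FileOutputFormat.setOutputPath(job, new Path(args[1])); // must not already exist
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

One thing worth flagging: the deleted Runner.java below passes SequenceFileOutputFormat.class to job.setOutputValueClass, which takes the job's value type rather than an output format; the sketch uses Text.class instead.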
diff --git a/extract_detect/Hadoop/Box.class b/extract_detect/Hadoop/Box.class deleted file mode 100644 index 6a576b6..0000000 Binary files a/extract_detect/Hadoop/Box.class and /dev/null differ diff --git a/extract_detect/Hadoop/Calc.class b/extract_detect/Hadoop/Calc.class deleted file mode 100644 index 3ddb236..0000000 Binary files a/extract_detect/Hadoop/Calc.class and /dev/null differ diff --git a/extract_detect/Hadoop/Centroid.class b/extract_detect/Hadoop/Centroid.class deleted file mode 100644 index 983ec16..0000000 Binary files a/extract_detect/Hadoop/Centroid.class and /dev/null differ diff --git a/extract_detect/Hadoop/Cluster.class b/extract_detect/Hadoop/Cluster.class deleted file mode 100644 index 2c48aeb..0000000 Binary files a/extract_detect/Hadoop/Cluster.class and /dev/null differ diff --git a/extract_detect/Hadoop/ColorEdit.class b/extract_detect/Hadoop/ColorEdit.class deleted file mode 100644 index a72f519..0000000 Binary files a/extract_detect/Hadoop/ColorEdit.class and /dev/null differ diff --git a/extract_detect/Hadoop/Constants$Stage.class b/extract_detect/Hadoop/Constants$Stage.class deleted file mode 100644 index 0a5dac4..0000000 Binary files a/extract_detect/Hadoop/Constants$Stage.class and /dev/null differ diff --git a/extract_detect/Hadoop/Constants.class b/extract_detect/Hadoop/Constants.class deleted file mode 100644 index 6e59b3d..0000000 Binary files a/extract_detect/Hadoop/Constants.class and /dev/null differ diff --git a/extract_detect/Hadoop/Convert.class b/extract_detect/Hadoop/Convert.class deleted file mode 100644 index 403d075..0000000 Binary files a/extract_detect/Hadoop/Convert.class and /dev/null differ diff --git a/extract_detect/Hadoop/Convert.java b/extract_detect/Hadoop/Convert.java index 50807e0..271a34b 100644 --- a/extract_detect/Hadoop/Convert.java +++ b/extract_detect/Hadoop/Convert.java @@ -51,6 +51,12 @@ public static double[][] convertImageCompress(String name, int scale) throws IOE return out; } + /** + * Returns a double array of an image with each cell representing a pixel + * + * @param image the image as a 2D byte array, one pixel per cell + * @return the 2D double array representing the image + */ public static double[][] convertByte(byte[][] image) { double[][] out = new double[image.length][image[0].length]; for (int x = 0; x < image.length; x++) { diff --git a/extract_detect/Hadoop/Converter.class b/extract_detect/Hadoop/Converter.class deleted file mode 100644 index ab30627..0000000 Binary files a/extract_detect/Hadoop/Converter.class and /dev/null differ diff --git a/extract_detect/Hadoop/DigitalPicture.class b/extract_detect/Hadoop/DigitalPicture.class deleted file mode 100644 index b7f3f37..0000000 Binary files a/extract_detect/Hadoop/DigitalPicture.class and /dev/null differ diff --git a/extract_detect/Hadoop/Extract.class b/extract_detect/Hadoop/Extract.class deleted file mode 100644 index 92c02e6..0000000 Binary files a/extract_detect/Hadoop/Extract.class and /dev/null differ diff --git a/extract_detect/Hadoop/Extract.java b/extract_detect/Hadoop/Extract.java index 9df22aa..23628ba 100644 --- a/extract_detect/Hadoop/Extract.java +++ b/extract_detect/Hadoop/Extract.java @@ -6,7 +6,7 @@ import java.util.Arrays; /** - * Hadoop with MapReduce, image only version + * Extracts image and metadata from MSTIFF * */ public class Extract { @@ -63,7 +63,8 @@ public class Extract { private static long[] Fatho_timestamp_sec; // Save - public static byte[][] imTemp; + public static byte[][] imTempR; + public static byte[][] imTempL; public static String[][] metaTemp; private static
String lastFile; @@ -98,7 +99,8 @@ public static boolean init() { fileHeader[0][6] = "WaterDepth"; fileHeader[0][7] = "TowfishDepth"; - imTemp = new byte[0][0]; + imTempR = new byte[0][0]; + imTempL = new byte[0][0]; metaTemp = new String[0][0]; lastFile = "empty"; count = 0; @@ -225,7 +227,7 @@ public static boolean fileProcess(String x) throws IOException { Extract.metaInit(); // Check if first - if (imTemp == null) + if (imTempR == null) init(); // Save if last of mission @@ -254,14 +256,15 @@ public static boolean fileProcess(String x) throws IOException { } // Combine left and right channels, previous image - if (imTemp.length != 0) { - imTemp = Util.combineVertically(imTemp, Util.combineHorizontally(left, right)); + if (imTempR.length != 0) { + imTempR = Util.combineVertically(imTempR, right); + imTempL = Util.combineVertically(imTempL, left); saveIm(); - System.out.println("saveIm should have ran"); - imTemp = Util.combineHorizontally(left, right); + imTempR = right; + imTempL = left; } else { - System.out.println("saveIm should have ran"); - imTemp = Util.combineHorizontally(left, right); + imTempR = right; + imTempL = left; } // Add metadata to growing table @@ -278,8 +281,7 @@ public static boolean fileProcess(String x) throws IOException { * Save image */ public static void saveIm() { - Util.saveIm(imTemp, "im" + count, false); - System.out.println("saveIm" + count); + Util.saveIm(Util.combineHorizontally(imTempL, imTempR), "im" + count, false); count++; } @@ -287,8 +289,9 @@ public static void saveIm() { * Save metadata as CSV */ public static void saveMeta() throws IOException { -// Util.saveIm(imTemp, "im" + count); - imTemp = new byte[0][0]; +// Util.saveIm(imTempR, "im" + count); + imTempR = new byte[0][0]; + imTempL = new byte[0][0]; String tempLabel = Util.remPath(lastFile); Util.save(metaTemp, tempLabel + "META"); metaTemp = new String[0][0]; diff --git a/extract_detect/Hadoop/Format.class b/extract_detect/Hadoop/Format.class deleted file mode 100644 index 9d75abc..0000000 Binary files a/extract_detect/Hadoop/Format.class and /dev/null differ diff --git a/extract_detect/Hadoop/ImageArrEdit.class b/extract_detect/Hadoop/ImageArrEdit.class deleted file mode 100644 index 4121a6a..0000000 Binary files a/extract_detect/Hadoop/ImageArrEdit.class and /dev/null differ diff --git a/extract_detect/Hadoop/LightPoint.class b/extract_detect/Hadoop/LightPoint.class deleted file mode 100644 index 1819704..0000000 Binary files a/extract_detect/Hadoop/LightPoint.class and /dev/null differ diff --git a/extract_detect/Hadoop/MacroMaster.class b/extract_detect/Hadoop/MacroMaster.class deleted file mode 100644 index 9358feb..0000000 Binary files a/extract_detect/Hadoop/MacroMaster.class and /dev/null differ diff --git a/extract_detect/Hadoop/MacroMaster.java b/extract_detect/Hadoop/MacroMaster.java index d22fe87..ebd012f 100644 --- a/extract_detect/Hadoop/MacroMaster.java +++ b/extract_detect/Hadoop/MacroMaster.java @@ -5,56 +5,51 @@ * */ public class MacroMaster { - - static byte[][] im; - static String name; - static int dim = 200; - static boolean lastVal = false; - - public static byte[][] findMine(byte[][] imTemp, String nameSet) throws IOException { - init(imTemp, nameSet); + static byte[][] imL; + static byte[][] imR; + static byte[][] im; + static int dim = 150; + public static boolean findMine(byte[][] imTemp, char side) throws IOException { + im = imTemp; boolean[][][] layers; boolean[][] bmap; + // find dark spots Slide.init(im, dim, true); layers = new 
boolean[2][Slide.bmap.length][Slide.bmap[0].length]; - Slide.process(); layers[0] = Slide.bmap; + // find light spots Slide.init(im, dim, false); Slide.process(); layers[1] = Slide.bmap; byte[][] imOut = new byte[imTemp.length][imTemp[0].length]; + // find overlapping areas bmap = new boolean[layers[0].length][layers[0][0].length]; for (int r = 0; r < bmap.length; r++) { for (int c = 0; c < bmap[0].length; c++) { bmap[r][c] = layers[0][r][c] && layers[1][r][c]; + if(bmap[r][c]) System.out.println(r + ", " + c); + + // identify bounds of frame on original image and highlight if needed int[] temp = Util.refitRect(r, c, bmap[0].length, bmap.length, imTemp[0].length, imTemp.length); for (int x = temp[1]; x < (temp[3] < imTemp.length ? temp[3] : imTemp.length); x++) { for (int y = temp[0]; y < (temp[2] < imTemp[0].length ? temp[2] : imTemp[0].length); y++) { - if (bmap[r][c]) imOut[x][y] = (byte)(Util.getByteVal(imTemp[x][y]) > 150 ? 255 : Util.getByteVal(imTemp[x][y]) + 100); + if (bmap[r][c]) { imOut[x][y] = (byte)(Util.getByteVal(imTemp[x][y]) > 150 ? 255 : Util.getByteVal(imTemp[x][y]) + 100); } else imOut[x][y] = imTemp[x][y]; -// System.out.println(x + " " + y); - } + } } } } - Util.saveIm(imOut, nameSet, false); + if (side == 'L') imL = imOut; + else imR = imOut; - lastVal = Calc.containsTrue(bmap); - - return imOut; - } - - public static void init(byte[][] imSet, String nameSet) { - - im = imSet; - name = nameSet; + return Calc.containsTrue(bmap); } } diff --git a/extract_detect/Hadoop/Master.class b/extract_detect/Hadoop/Master.class deleted file mode 100644 index 4633a96..0000000 Binary files a/extract_detect/Hadoop/Master.class and /dev/null differ diff --git a/extract_detect/Hadoop/Master.java b/extract_detect/Hadoop/Master.java index 17979a4..9023725 100644 --- a/extract_detect/Hadoop/Master.java +++ b/extract_detect/Hadoop/Master.java @@ -1,5 +1,3 @@ -import java.io.FileNotFoundException; - import java.io.IOException; /** @@ -49,7 +47,7 @@ public static boolean process(double[][] vals) { int cenN; tmap = Converter.convert(vals); - double minP = dark ? 28.3 : 20; + double minP = dark ? 
28.1 : 22.5; rmap = Scan(tmap, 3, minP); rmap = ColorReduce(rmap, Scan.cenN); bmap = Converter.convert(rmap); diff --git a/extract_detect/Hadoop/Picture.class b/extract_detect/Hadoop/Picture.class deleted file mode 100644 index 3182f7b..0000000 Binary files a/extract_detect/Hadoop/Picture.class and /dev/null differ diff --git a/extract_detect/Hadoop/PictureFrame.class b/extract_detect/Hadoop/PictureFrame.class deleted file mode 100644 index 6fd81c2..0000000 Binary files a/extract_detect/Hadoop/PictureFrame.class and /dev/null differ diff --git a/extract_detect/Hadoop/Pixel.class b/extract_detect/Hadoop/Pixel.class deleted file mode 100644 index f6dfe7a..0000000 Binary files a/extract_detect/Hadoop/Pixel.class and /dev/null differ diff --git a/extract_detect/Hadoop/Point.class b/extract_detect/Hadoop/Point.class deleted file mode 100644 index 19c6c17..0000000 Binary files a/extract_detect/Hadoop/Point.class and /dev/null differ diff --git a/extract_detect/Hadoop/Polish.class b/extract_detect/Hadoop/Polish.class deleted file mode 100644 index 580ad3a..0000000 Binary files a/extract_detect/Hadoop/Polish.class and /dev/null differ diff --git a/extract_detect/Hadoop/Radius.class b/extract_detect/Hadoop/Radius.class deleted file mode 100644 index c110964..0000000 Binary files a/extract_detect/Hadoop/Radius.class and /dev/null differ diff --git a/extract_detect/Hadoop/Runner$CombineDocumentFileFormat.class b/extract_detect/Hadoop/Runner$CombineDocumentFileFormat.class deleted file mode 100644 index 13d0797..0000000 Binary files a/extract_detect/Hadoop/Runner$CombineDocumentFileFormat.class and /dev/null differ diff --git a/extract_detect/Hadoop/Runner$Sonar_Mapper$CombineDocumentFileFormat.class b/extract_detect/Hadoop/Runner$Sonar_Mapper$CombineDocumentFileFormat.class deleted file mode 100644 index 95f504a..0000000 Binary files a/extract_detect/Hadoop/Runner$Sonar_Mapper$CombineDocumentFileFormat.class and /dev/null differ diff --git a/extract_detect/Hadoop/Runner$Sonar_Mapper$Sonar_Reducer.class b/extract_detect/Hadoop/Runner$Sonar_Mapper$Sonar_Reducer.class deleted file mode 100644 index 3742c6b..0000000 Binary files a/extract_detect/Hadoop/Runner$Sonar_Mapper$Sonar_Reducer.class and /dev/null differ diff --git a/extract_detect/Hadoop/Runner$Sonar_Mapper$WholeFileRecordReader.class b/extract_detect/Hadoop/Runner$Sonar_Mapper$WholeFileRecordReader.class deleted file mode 100644 index 506b2d4..0000000 Binary files a/extract_detect/Hadoop/Runner$Sonar_Mapper$WholeFileRecordReader.class and /dev/null differ diff --git a/extract_detect/Hadoop/Runner$Sonar_Mapper.class b/extract_detect/Hadoop/Runner$Sonar_Mapper.class deleted file mode 100644 index 0ed233c..0000000 Binary files a/extract_detect/Hadoop/Runner$Sonar_Mapper.class and /dev/null differ diff --git a/extract_detect/Hadoop/Runner$Sonar_Reducer.class b/extract_detect/Hadoop/Runner$Sonar_Reducer.class deleted file mode 100644 index 349360a..0000000 Binary files a/extract_detect/Hadoop/Runner$Sonar_Reducer.class and /dev/null differ diff --git a/extract_detect/Hadoop/Runner$WholeFileRecordReader.class b/extract_detect/Hadoop/Runner$WholeFileRecordReader.class deleted file mode 100644 index c99cfd5..0000000 Binary files a/extract_detect/Hadoop/Runner$WholeFileRecordReader.class and /dev/null differ diff --git a/extract_detect/Hadoop/Runner.class b/extract_detect/Hadoop/Runner.class deleted file mode 100644 index f50d1a4..0000000 Binary files a/extract_detect/Hadoop/Runner.class and /dev/null differ diff --git a/extract_detect/Hadoop/Runner.jar 
b/extract_detect/Hadoop/Runner.jar deleted file mode 100644 index eb1b5d2..0000000 Binary files a/extract_detect/Hadoop/Runner.jar and /dev/null differ diff --git a/extract_detect/Hadoop/Runner.java b/extract_detect/Hadoop/Runner.java deleted file mode 100644 index 82ebb08..0000000 --- a/extract_detect/Hadoop/Runner.java +++ /dev/null @@ -1,301 +0,0 @@ - -import java.io.File; - -import java.net.URI; -import java.awt.Color; -import java.awt.image.BufferedImage; -import java.io.BufferedWriter; -import java.io.FileWriter; - -import javax.imageio.ImageIO; - -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.io.PrintWriter; -import java.io.OutputStream; -import java.io.ByteArrayOutputStream; -//import java.io.BufferedOutputStream; -import java.io.FileOutputStream; -import java.nio.ByteBuffer; -import java.nio.ByteOrder; -import java.util.ArrayList; -import java.util.Arrays; - -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.*; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.io.SequenceFile; -import org.apache.hadoop.io.SequenceFile.Reader; -import org.apache.hadoop.io.*; -import org.apache.hadoop.mapreduce.Job; -import org.apache.hadoop.mapreduce.Mapper; -import org.apache.hadoop.mapreduce.Reducer; -import org.apache.hadoop.mapreduce.OutputFormat; -import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; -import org.apache.hadoop.mapreduce.lib.input.TextInputFormat; -import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; -import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; -import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -//import org.apache.hadoop.mapred.TextOutputFormat; -import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; -import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs; -//import org.w3c.dom.Text; -import org.apache.hadoop.mapreduce.InputSplit; -import org.apache.hadoop.mapreduce.JobContext; -import org.apache.hadoop.mapreduce.RecordReader; -import org.apache.hadoop.mapreduce.TaskAttemptContext; -import org.apache.hadoop.mapreduce.lib.input.*; -import org.apache.hadoop.mapred.KeyValueTextInputFormat; -import org.apache.hadoop.util.ReflectionUtils; - -public class Runner { - // later change this to work with any input/output directory that is inputed - // through the Linux command line - // also have the file be placed in the output folder - public static String basePath = "/home/cloudera/Documents/shared/output/"; - - // file count is attached to name of png to see if multiple pngs are being - // printed - public static int count = 0; - - // these paths don't seem to work. 
It seems like after the main function, the - // variables here are reset - public static Path inPath; - public static Path outPath; - // conf seems to be fine but that's mainly because we don't need its value from - // the main function - public static Configuration conf; - - public static byte[] bytes; - public static byte[][] all_bytes = null; - public static int bytes_length; - public static String currentkey; - private static Job job; - // public static SequenceFile.Writer writer = SequenceFile.createWriter(conf, - // Writer.file(outPath), Writer.keyClass(Sonar_Mapper.class), - // Writer.valueClass(IntWritable.class)); - - /** - * Mapper with: input key = file name (Text.class) input value = MSTIFF binary - * data (BytesWritable.class) output key = file name (Text.class) output value = - * image in byte form (BytesWritable.class) - */ - public static class Sonar_Mapper extends Mapper { - public void map(Text key, BytesWritable value, Context context) throws IOException, InterruptedException { - - // process the files -// currentkey = key.toString(); -// System.out.println(currentkey); - -// Configuration conf = new Configuration(); -// SequenceFile.Reader reader = null; -// try { -// Path seqFilePath = new Path(currentkey); -// reader = new SequenceFile.Reader(conf, Reader.file(seqFilePath)); -// Writable myKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf); -// Writable myValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf); -// while (reader.next(key, value)) { -// String syncSeen = reader.syncSeen() ? "*" : ""; -//// System.out.println(myValue); -// System.out.printf("[%s]\t%s\t%s\n", syncSeen, key, value); -// } -// } finally { -// IOUtils.closeStream(reader); -// } - -// System.out.println("new file, new day!"); -// bytes = value.getBytes(); -// bytes_length = (int) value.getLength(); -// -// // process -// System.out.println("Finished with all"); -// -// Configuration conf = new Configuration(); -// count++; // because count is updated before imgPath, count starts at 1 - - // https://stackoverflow.com/questions/17488534/create-a-file-from-a-bytearrayoutputstream - // http://codeinventions.blogspot.com/2014/08/creating-file-from-bytearrayoutputstrea.html - // https://stackoverflow.com/questions/16546040/store-images-videos-into-hadoop-hdfs - // https://stackoverflow.com/questions/15414259/java-bufferedimage-to-byte-array-and-back - // ByteArrayOutputStream baos = null; - // FSDataInputStream in = null; - // FileSystem fs = FileSystem.get(conf); - // SequenceFile.Writer writer = null; - // try { - // // convert Util.img into a byte array - // baos = new ByteArrayOutputStream(); - // ImageIO.write(Util.img, "png", baos); - // byte[] imBytes = baos.toByteArray(); - // baos.write(imBytes); - // - // // writes image to vm folder - // System.out.println(localPath); - // File outfile = new File(localPath); - // ImageIO.write(Util.img, "png", outfile); - // - // // writes image as byte array as SequenceFile to hdfs - // writer = SequenceFile.createWriter(conf, writer.file(new Path(imgPath)), - // writer.keyClass(Text.class), writer.valueClass(BytesWritable.class)); - // writer.append(new Text(imgPath), new BytesWritable(imBytes)); - // - // } catch (IOException e) { - // e.printStackTrace(); - // } finally { - // IOUtils.closeStream(writer); - // } - - } - } - - /** - * Currently disabled (due to job.setNumReduceTasks(0); in main function) - * Reducer with: input key = file name (Text.class) input value = whatever the - * output value of the 
Mapper is (class is same as Map output class) output key - * = file name (Text.class) output value = Sonar image data in byte form - * (BytesWritable.class) - */ - public static class Sonar_Reducer extends Reducer { - public void reduce(Text key, BytesWritable value, Context context) throws IOException, InterruptedException { - } - } - - /** - * Hadoop by default splits files into smaller chunks which are fed to the - * mapper I think that this makes sure that the files aren't split when fed into - * the mapper Not sure though, this was written by @author Pyojeong Kim - */ - public static class CombineDocumentFileFormat extends CombineFileInputFormat { - @Override - protected boolean isSplitable(JobContext context, Path file) { - return false; - } - - @Override - public RecordReader createRecordReader(InputSplit split, TaskAttemptContext context) - throws IOException { - return new CombineFileRecordReader((CombineFileSplit) split, context, - WholeFileRecordReader.class); - } - } - - public static class WholeFileRecordReader extends RecordReader { - private CombineFileSplit inputSplit; - private Integer idx; - private Text path; - private Configuration conf; - private BytesWritable document = new BytesWritable(); - private boolean read; - - public WholeFileRecordReader(CombineFileSplit inputSplit, TaskAttemptContext context, Integer idx) { - this.inputSplit = inputSplit; - this.idx = idx; - this.conf = context.getConfiguration(); - this.read = false; - } - - @Override - public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { - } - - @Override - public boolean nextKeyValue() throws IOException, InterruptedException { - if (!read) { - Path file = inputSplit.getPath(idx); - int offset = (int) inputSplit.getOffset(idx); - int length = (int) inputSplit.getLength(idx); - System.out.println(offset + " " + length); - byte[] bytes = new byte[length]; - SequenceFile.Reader reader = null; - try { - reader = new SequenceFile.Reader(conf, Reader.file(file)); - Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf); - Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf); - while (reader.next(key, value)) { - String syncSeen = reader.syncSeen() ? "*" : ""; -// System.out.println(myValue); -// System.out.printf("[%s]\t%s\t%s\n", syncSeen, key, value); - } - } finally { - IOUtils.closeStream(reader); - } - -// IOUtils.readFully(input, bytes, offset, length); -// document = new BytesWritable(); -// document.set(bytes, offset, length); -// path = new Text(file.toString()); - read = true; - return true; - } else { - return false; - } - } - - @Override - public Text getCurrentKey() throws IOException, InterruptedException { - return path; - } - - @Override - public BytesWritable getCurrentValue() throws IOException, InterruptedException { - return document; - } - - @Override - public float getProgress() throws IOException { - return read ? 
1.0f : 0.0f; - } - - @Override - public void close() throws IOException { - // Don't need to do anything // - } - - } - - public static void main(String[] args) throws Exception { - conf = new Configuration(); - job = Job.getInstance(conf, "Runner"); - // I think this allows for Hadoop to find the jar file - // by looking at each jar file and seeing if it has the following class in it - job.setJarByClass(Runner.class); - System.out.println("output main" + args[1]); - // the following doesn't really seem to help because - // I think the variables are reset after the main function executes. Not sure. - inPath = new Path(args[0]); - outPath = new Path(args[1]); - - System.out.println("Input: " + inPath); - -// Configuration conf = new Configuration(); -// SequenceFile.Reader reader = null; -// try { -// reader = new SequenceFile.Reader(conf, Reader.file(inPath)); -// Writable myKey = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf); -// Writable myValue = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf); -// while (reader.next(myKey, myValue)) { -// String syncSeen = reader.syncSeen() ? "*" : ""; -//// System.out.println(myValue); -// System.out.printf("[%s]\t%s\t%s\n", syncSeen, myKey, myValue); -// } -// } finally { -// IOUtils.closeStream(reader); -// } - - job.setMapperClass(Sonar_Mapper.class); - job.setReducerClass(Sonar_Reducer.class); - - // makes sure that each input to the mapper is an entire file - // since Hadoop typically breaks up files by default - job.setInputFormatClass(CombineDocumentFileFormat.class); - // disables reducer essentially - job.setNumReduceTasks(0); - - job.setOutputKeyClass(Text.class); - job.setOutputValueClass(SequenceFileOutputFormat.class); - FileInputFormat.setInputPaths(job, new Path(args[0])); - FileOutputFormat.setOutputPath(job, new Path(args[1])); - System.exit(job.waitForCompletion(true) ? 
0 : 1); - - } -} \ No newline at end of file diff --git a/extract_detect/Hadoop/Scan.class b/extract_detect/Hadoop/Scan.class deleted file mode 100644 index fec8be6..0000000 Binary files a/extract_detect/Hadoop/Scan.class and /dev/null differ diff --git a/extract_detect/Hadoop/Sift.class b/extract_detect/Hadoop/Sift.class deleted file mode 100644 index ade0e7d..0000000 Binary files a/extract_detect/Hadoop/Sift.class and /dev/null differ diff --git a/extract_detect/Hadoop/SimplePicture.class b/extract_detect/Hadoop/SimplePicture.class deleted file mode 100644 index 9284b95..0000000 Binary files a/extract_detect/Hadoop/SimplePicture.class and /dev/null differ diff --git a/extract_detect/Hadoop/Slide.class b/extract_detect/Hadoop/Slide.class deleted file mode 100644 index aee07fa..0000000 Binary files a/extract_detect/Hadoop/Slide.class and /dev/null differ diff --git a/extract_detect/Hadoop/Slide.java b/extract_detect/Hadoop/Slide.java index ebc911c..1288b01 100644 --- a/extract_detect/Hadoop/Slide.java +++ b/extract_detect/Hadoop/Slide.java @@ -31,9 +31,10 @@ public static void init(byte[][] im, int dimSet, boolean darkSet) throws IOExcep dim = dimSet; dark = darkSet; - bmap = new boolean[(int) Math.ceil(1.0 * vals.length / dim)][(int) Math.ceil(1.0 * vals[0].length / dim)]; + bmap = new boolean[(int) Math.ceil(1.0 * vals.length / dim) * 2 + - 1][(int) Math.ceil(1.0 * vals[0].length / dim) * 2 - 1]; } - + /** * Initialize slide * @@ -46,7 +47,7 @@ public static void init(byte[][] im, int dimSet, boolean darkSet) throws IOExcep * @throws IOException */ public static void init(double[][] valSet, int dimSet, boolean darkSet) { - + dim = dimSet; dark = darkSet; vals = valSet; @@ -59,9 +60,12 @@ public static void init(double[][] valSet, int dimSet, boolean darkSet) { */ public static void process() { - for (int r = 0; r < bmap.length; r++) - for (int c = 0; c < bmap[0].length; c++) - bmap[r][c] = Master.findMine(findSector(r, c), dark); + for (int r = 0; r < bmap.length; r++) { + for (int c = 0; c < bmap[0].length; c++) { + if (c == 0) bmap[r][c] = false; + else bmap[r][c] = Master.findMine(findSector(r / 2.0, c / 2.0), dark); + } + } } /** @@ -73,13 +77,13 @@ public static void process() { * column * @return array with values from sector */ - public static double[][] findSector(int r, int c) { + public static double[][] findSector(double r, double c) { double[][] send = new double[refit(r, true)][refit(c, false)]; for (int i = 0; i < send.length; i++) for (int j = 0; j < send[0].length; j++) - send[i][j] = vals[r * dim + i][c * dim + j]; + send[i][j] = vals[(int) Math.round(r * dim + i)][(int) Math.round(c * dim + j)]; return send; } @@ -92,10 +96,10 @@ public static double[][] findSector(int r, int c) { * @param row * @return */ - public static int refit(int n, boolean row) { + public static int refit(double n, boolean row) { int limit = row ? vals.length : vals[0].length; - return (n + 1) * dim > limit ? limit - n * dim : dim; + return (int) Math.round(n + 1) * dim > limit ? 
(int) Math.round(limit - n * dim) : dim; } } diff --git a/extract_detect/Hadoop/SonarImage$CombineDocumentFileFormat.class b/extract_detect/Hadoop/SonarImage$CombineDocumentFileFormat.class deleted file mode 100644 index 2846d7c..0000000 Binary files a/extract_detect/Hadoop/SonarImage$CombineDocumentFileFormat.class and /dev/null differ diff --git a/extract_detect/Hadoop/SonarImage$Sonar_Mapper.class b/extract_detect/Hadoop/SonarImage$Sonar_Mapper.class deleted file mode 100644 index 14424db..0000000 Binary files a/extract_detect/Hadoop/SonarImage$Sonar_Mapper.class and /dev/null differ diff --git a/extract_detect/Hadoop/SonarImage$Sonar_Reducer.class b/extract_detect/Hadoop/SonarImage$Sonar_Reducer.class deleted file mode 100644 index 181d566..0000000 Binary files a/extract_detect/Hadoop/SonarImage$Sonar_Reducer.class and /dev/null differ diff --git a/extract_detect/Hadoop/SonarImage$WholeFileRecordReader.class b/extract_detect/Hadoop/SonarImage$WholeFileRecordReader.class deleted file mode 100644 index f315210..0000000 Binary files a/extract_detect/Hadoop/SonarImage$WholeFileRecordReader.class and /dev/null differ diff --git a/extract_detect/Hadoop/SonarImage.class b/extract_detect/Hadoop/SonarImage.class deleted file mode 100644 index 00566d0..0000000 Binary files a/extract_detect/Hadoop/SonarImage.class and /dev/null differ diff --git a/extract_detect/Hadoop/SonarImage.jar b/extract_detect/Hadoop/SonarImage.jar deleted file mode 100644 index ff98f36..0000000 Binary files a/extract_detect/Hadoop/SonarImage.jar and /dev/null differ diff --git a/extract_detect/Hadoop/SonarImage.java b/extract_detect/Hadoop/SonarImage.java index ae0a807..59b347c 100644 --- a/extract_detect/Hadoop/SonarImage.java +++ b/extract_detect/Hadoop/SonarImage.java @@ -15,7 +15,6 @@ import java.io.PrintWriter; import java.io.OutputStream; import java.io.ByteArrayOutputStream; -//import java.io.BufferedOutputStream; import java.io.FileOutputStream; import java.nio.ByteBuffer; import java.nio.ByteOrder; @@ -35,30 +34,18 @@ import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat; import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; -//import org.apache.hadoop.mapred.TextOutputFormat; import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs; -//import org.w3c.dom.Text; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.lib.input.*; -//import org.apache.hadoop.mapred.KeyValueTextInputFormat; import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat; /** - * Hadoop with Multiple Outputs, both image and metadata version - * - * Linux commands: $ javac -cp /usr/lib/hadoop/*:/usr/lib/hadoop/client-0.20/* - * -d Host_Exchange/HadoopSonar/SonarImage - * Host_Exchange/HadoopSonar/SonarImage/SonarImage.java - * Host_Exchange/HadoopSonar/SonarImage/Extract.java - * Host_Exchange/HadoopSonar/SonarImage/Util.java $ jar -cvf SonarImage.jar -C - * Host_Exchange/HadoopSonar/SonarImage . 
$ hadoop jar SonarImage.jar SonarImage - * /user/cloudera/Host_Exchange/remusfiles/florida - * /user/cloudera/Host_Exchange/HadoopSonar/SonarImage/output ^ change name of - * output folder in third line accordingly + * Hadoop with Multiple Outputs: mine-tagged image and metadata + * @author Kaylin Li, Pyojeong Kim, Mitali Chowdhury, Seth Knoop, Nancy Daoud */ public class SonarImage { @@ -67,29 +54,33 @@ public class SonarImage { // also have the file be placed in the output folder public static String basePath = "/home/cloudera/Documents/shared/MineProcess/output/"; - // file count is attached to name of png to see if multiple pngs are being - // printed + // file count is attached to name of png public static int count = 0; // these paths don't seem to work. It seems like after the main function, the // variables here are reset public static Path inPath; public static Path outPath; - // conf seems to be fine but that's mainly because we don't need its value from - // the main function + public static Configuration conf; + private static Job job; + // variables for storage of image data public static byte[] bytes; - public static byte[][] all_bytes = null; + public static byte[][] right_bytes = null; + public static byte[][] left_bytes = null; public static byte[][] image = null; public static int bytes_length; + public static String lastkey; public static String currentkey; - private static Job job; /** - * Mapper with: input key = file name (Text.class) input value = MSTIFF binary - * data (BytesWritable.class) output key = file name (Text.class) output value = - * image in byte form (BytesWritable.class) + * Mapper with: input key = file name (Text.class) + * input value = MSTIFF binary data (BytesWritable.class) + * + * output key = file name (Text.class) + * output value = image with mine tagged in byte form (BytesWritable.class) + * output also includes CSV files for metadata and image information */ public static class Sonar_Mapper extends Mapper { @@ -97,13 +88,12 @@ public static class Sonar_Mapper extends Mapper public void setup(Context context) throws IOException, InterruptedException { mos = new MultipleOutputs(context); - } @Override public void map(Text key, BytesWritable value, Context context) throws IOException, InterruptedException { - // process the files + // extract data from file currentkey = key.toString(); System.out.println(currentkey); System.out.println("new file, new day!"); @@ -111,40 +101,44 @@ public void map(Text key, BytesWritable value, Context context) throws IOExcepti bytes_length = (int) value.getLength(); Extract.fileProcess(key.toString()); - System.out.println("not sure if this is working"); - System.out.println("First bytes" + bytes[1]); - // output the metadata StringBuilder sb = new StringBuilder(); String[][] arr = Extract.metaTemp; - for (int j = 0; j < arr.length; j++) { + for (int j = 1; j < arr.length; j++) { + if (arr[j][0] == null) + break; for (int k = 0; k < arr[0].length; k++) { - sb.append(arr[j][k] + ","); + + sb.append(arr[j][k] + ","); } - sb.append('\n'); + sb.append(j + "\n"); } - mos.write("metadata", key, new Text(sb.toString())); + mos.write("metadata", key + "\n", new Text(sb.toString())); // process the image bytes - if (all_bytes == null) - all_bytes = Extract.imTemp; - else { - byte[][] temp = Util.combineVertically(all_bytes, Extract.imTemp); -// Util.saveIm(temp, "im" + Extract.count, false); - image = MacroMaster.findMine(temp, "im" + Extract.count); - all_bytes = Extract.imTemp; - - System.out.println("Finished with 
all"); - - System.out.println("img label " + Util.imgLabel); + if (right_bytes == null) { + right_bytes = Extract.imTempR; + left_bytes = Extract.imTempL; + } else { + // find mine + byte[][] temp = Util.combineVertically(right_bytes, Extract.imTempR); + boolean r = MacroMaster.findMine(temp, 'R'); + right_bytes = Extract.imTempR; + + temp = Util.combineVertically(left_bytes, Extract.imTempL); + boolean l = MacroMaster.findMine(temp, 'L'); + left_bytes = Extract.imTempL; Configuration conf = new Configuration(); count++; // because count is updated before imgPath, count starts at 1 - String imgPath = Util.imgLabel + count + ".png"; - - String localPath = basePath + Util.imgLabel + count + ".png"; - + System.out.println("img number " + count); + + String imgPath = "im" + count + ".png"; + String localPath = basePath + imgPath; + // writes image name, associated mstiff files, and whether the image contains a mine + mos.write("mines", imgPath, new Text(lastkey + ", " + currentkey + ", " + (r || l))); + // https://stackoverflow.com/questions/17488534/create-a-file-from-a-bytearrayoutputstream // http://codeinventions.blogspot.com/2014/08/creating-file-from-bytearrayoutputstrea.html // https://stackoverflow.com/questions/16546040/store-images-videos-into-hadoop-hdfs @@ -154,16 +148,16 @@ public void map(Text key, BytesWritable value, Context context) throws IOExcepti FileSystem fs = FileSystem.get(conf); SequenceFile.Writer writer = null; try { - // convert Util.img into a byte array + // convert image into a byte array baos = new ByteArrayOutputStream(); - ImageIO.write(Util.img, "png", baos); + ImageIO.write(Util.getImage(Util.combineHorizontally(MacroMaster.imL, MacroMaster.imR), false), "png", baos); byte[] imBytes = baos.toByteArray(); baos.write(imBytes); // writes image to vm folder System.out.println(localPath); File outfile = new File(localPath); - ImageIO.write(Util.img, "png", outfile); + ImageIO.write(Util.getImage(Util.combineHorizontally(MacroMaster.imL, MacroMaster.imR), false), "png", outfile); // writes image as byte array as SequenceFile to hdfs mos.write("image", key, new BytesWritable(imBytes)); @@ -176,8 +170,8 @@ public void map(Text key, BytesWritable value, Context context) throws IOExcepti // wipes imTemp Extract.saveMeta(); - // context.write(new Text(key), new Text("das good")); } + lastkey = currentkey; } public void cleanup(Context context) throws IOException, InterruptedException { @@ -187,10 +181,11 @@ public void cleanup(Context context) throws IOException, InterruptedException { } /** - * Reducer with: input key = file name (Text.class) input value = whatever the - * output value of the Mapper is (class is same as Map output class) output key - * = file name (Text.class) output value = Sonar image data in byte form - * (BytesWritable.class) + * Reducer with: input key = file name (Text.class) + * input value = whatever the output value of the Mapper is (class is same as Map output class) + * + * output key = file name (Text.class) + * output value = Sonar image data in byte form (BytesWritable.class) */ public static class Sonar_Reducer extends Reducer { @@ -200,9 +195,7 @@ public void reduce(Text key, Text value, Context context) throws IOException, In } /** - * Hadoop by default splits files into smaller chunks which are fed to the - * mapper I think that this makes sure that the files aren't split when fed into - * the mapper Not sure though, this was written by @author Pyojeong Kim + * Keeps files from being split (?) 
*/ public static class CombineDocumentFileFormat extends CombineFileInputFormat { @Override @@ -237,6 +230,9 @@ public WholeFileRecordReader(CombineFileSplit inputSplit, TaskAttemptContext con public void initialize(InputSplit split, TaskAttemptContext context) throws IOException, InterruptedException { } + /** + * reads next mstiff file + */ @Override public boolean nextKeyValue() throws IOException, InterruptedException { if (!read) { @@ -281,6 +277,7 @@ public void close() throws IOException { public static void main(String[] args) throws Exception { conf = new Configuration(); job = Job.getInstance(conf, "SonarImage"); + // allows for Hadoop to find the jar file // by looking at each jar file and seeing if it has the following class in it job.setJarByClass(SonarImage.class); @@ -292,13 +289,16 @@ public static void main(String[] args) throws Exception { // makes sure that each input to the mapper is an entire file // since Hadoop typically breaks up files by default job.setInputFormatClass(CombineDocumentFileFormat.class); - // disables reducer essentially + + // disables reducer job.setNumReduceTasks(0); job.setOutputKeyClass(Text.class); - // set up 2 Hadoop jobs, one for metadata and one for sonar image + + // set up 3 outputs, one for metadata, one for sonar image, and one for mine information MultipleOutputs.addNamedOutput(job, "metadata", TextOutputFormat.class, Text.class, Text.class); MultipleOutputs.addNamedOutput(job, "image", SequenceFileOutputFormat.class, Text.class, BytesWritable.class); + MultipleOutputs.addNamedOutput(job, "mines", TextOutputFormat.class, Text.class, Text.class); job.setOutputKeyClass(Text.class); FileInputFormat.setInputPaths(job, new Path(args[0])); diff --git a/extract_detect/Hadoop/Util.class b/extract_detect/Hadoop/Util.class deleted file mode 100644 index 10ab99a..0000000 Binary files a/extract_detect/Hadoop/Util.class and /dev/null differ diff --git a/extract_detect/Hadoop/Util.java b/extract_detect/Hadoop/Util.java index 48a639d..ecb25bd 100644 --- a/extract_detect/Hadoop/Util.java +++ b/extract_detect/Hadoop/Util.java @@ -160,34 +160,6 @@ public static byte[][] combineVertically(byte[][] top, byte[][] bottom) { return hold; } - /** - * Save a 2D array to a CSV - * - * @param arr - * array to save - * @param label - * name to give file - */ -// public static void save(byte[][] arr, String label) { -// // Write metadata to CSV file -// try (PrintWriter writer = new PrintWriter(new File(Constants.out_path + "data" + label + ".csv"))) { -// StringBuilder sb = new StringBuilder(); -// -// for (int j = 0; j < arr.length; j++) { -// for (int k = 0; k < arr[0].length; k++) { -// sb.append(arr[j][k] + ","); -// } -// sb.append('\n'); -// } -// writer.write(sb.toString()); -// -// System.out.println("Saved " + label); -// -// } catch (FileNotFoundException e) { -// System.out.println(e.getMessage()); -// } -// } - /** * Combines two string arrays * @@ -228,15 +200,9 @@ public static int getByteVal(byte z) { } /** - * Write image to png file - * - * @param arr - * byte array for image - * @param label - * label to save as + * Transform byte array to BufferedImage */ - public static String saveIm(byte[][] arr, String label, boolean sepia) { -// String save = Constants.out_path + "\\" + label + ".png"; + public static BufferedImage getImage(byte[][] arr, boolean sepia) { img = new BufferedImage(arr.length, arr[0].length, BufferedImage.TYPE_INT_RGB); for (int x = 0; x < arr.length; x++) { for (int y = 0; y < arr[0].length; y++) { @@ -255,20 +221,31 @@ public 
static String saveIm(byte[][] arr, String label, boolean sepia) { } } } - + // just info that's good to know width = img.getWidth(); height = img.getHeight(); System.out.println("BufferedImage width: " + width); System.out.println("BufferedImage height: " + height); + return img; + } + + /** + * Write image to png file + * + * @param arr + * byte array for image + * @param label + * label to save as + */ + public static String saveIm(byte[][] arr, String label, boolean sepia) { +// String save = Constants.out_path + "\\" + label + ".png"; + getImage(arr, sepia); + // copy label to public static variable imgLabel - imgLabel=label; - try { - System.out.println("Image saved in Util.saveIm!"); - } catch (Exception e) { - e.printStackTrace(); - } + imgLabel = label; + System.out.println("Image saved in Util.saveIm!"); return imgLabel; } @@ -281,6 +258,7 @@ public static String saveIm(byte[][] arr, String label, boolean sepia) { * @param label * label to save as */ + //TODO add getImage call public static String saveIm(int[][] arr, String label, boolean sepia) { // String save = Constants.out_path + "\\" + label + ".png"; img = new BufferedImage(arr.length, arr[0].length, BufferedImage.TYPE_INT_RGB); @@ -329,22 +307,7 @@ public static String saveIm(int[][] arr, String label, boolean sepia) { * name to give */ public static void save(String[][] arr, String label) { - // Write image or metadata to CSV file -// PrintWriter writer = new PrintWriter(new File( label + ".csv")); -// -// StringBuilder sb = new StringBuilder(); -// -// for (int j = 0; j < arr.length; j++) { -// if (arr[j][0] != (null)) { -// for (int k = 0; k < arr[0].length; k++) { -// sb.append(arr[j][k] + ","); -// } -// sb.append('\n'); -// } -// } -// writer.write(sb.toString()); -// -// System.out.println("Saved " + label); + // unused function } /** @@ -365,13 +328,13 @@ public static int[] refitRect(int r, int c, double mapW, double mapH, double imW int tempY = refit(r, heightR); int dX = refit(c + 1, widthR) - refit(c, widthR); int dY = refit(r + 1, heightR) - refit(r, heightR); -// System.out.println(tempX + " " + tempY + " " + dX + " " + dY); - return new int[] { tempX, tempY, tempX + dX, tempY + dY }; } + /** + * Returns scaled point based on ratio + */ public static int refit(int pos, double ratio) { - return (int) ((pos * ratio) + 0.5); } diff --git a/extract_detect/Hadoop/images/imageim152.png b/extract_detect/Hadoop/images/imageim152.png deleted file mode 100644 index 927a8b5..0000000 Binary files a/extract_detect/Hadoop/images/imageim152.png and /dev/null differ diff --git a/extract_detect/Hadoop/images/imageim3.png b/extract_detect/Hadoop/images/imageim3.png deleted file mode 100644 index 558a80f..0000000 Binary files a/extract_detect/Hadoop/images/imageim3.png and /dev/null differ diff --git a/extract_detect/Hadoop/input/test0.png-m-00000 b/extract_detect/Hadoop/input/test0.png-m-00000 deleted file mode 100644 index 598b428..0000000 Binary files a/extract_detect/Hadoop/input/test0.png-m-00000 and /dev/null differ diff --git a/extract_detect/Hadoop/input/test1.png-m-00000 b/extract_detect/Hadoop/input/test1.png-m-00000 deleted file mode 100644 index 598b428..0000000 Binary files a/extract_detect/Hadoop/input/test1.png-m-00000 and /dev/null differ diff --git a/extract_detect/Hadoop/output/im01.png b/extract_detect/Hadoop/output/im01.png deleted file mode 100644 index bf8e676..0000000 Binary files a/extract_detect/Hadoop/output/im01.png and /dev/null differ diff --git a/extract_detect/Hadoop/output/im02.png 
b/extract_detect/Hadoop/output/im02.png deleted file mode 100644 index 93d91ee..0000000 Binary files a/extract_detect/Hadoop/output/im02.png and /dev/null differ diff --git a/extract_detect/Hadoop/output/im03.png b/extract_detect/Hadoop/output/im03.png deleted file mode 100644 index 627603a..0000000 Binary files a/extract_detect/Hadoop/output/im03.png and /dev/null differ diff --git a/extract_detect/Hadoop/output/im1.png b/extract_detect/Hadoop/output/im1.png new file mode 100644 index 0000000..2d6512f Binary files /dev/null and b/extract_detect/Hadoop/output/im1.png differ diff --git a/extract_detect/Hadoop/output/im10.png b/extract_detect/Hadoop/output/im10.png new file mode 100644 index 0000000..7797481 Binary files /dev/null and b/extract_detect/Hadoop/output/im10.png differ diff --git a/extract_detect/Hadoop/output/im11.png b/extract_detect/Hadoop/output/im11.png new file mode 100644 index 0000000..396fd4a Binary files /dev/null and b/extract_detect/Hadoop/output/im11.png differ diff --git a/extract_detect/Hadoop/output/im12.png b/extract_detect/Hadoop/output/im12.png new file mode 100644 index 0000000..871137a Binary files /dev/null and b/extract_detect/Hadoop/output/im12.png differ diff --git a/extract_detect/Hadoop/output/im13.png b/extract_detect/Hadoop/output/im13.png new file mode 100644 index 0000000..a9206be Binary files /dev/null and b/extract_detect/Hadoop/output/im13.png differ diff --git a/extract_detect/Hadoop/output/im14.png b/extract_detect/Hadoop/output/im14.png new file mode 100644 index 0000000..d8a5b9b Binary files /dev/null and b/extract_detect/Hadoop/output/im14.png differ diff --git a/extract_detect/Hadoop/output/im2.png b/extract_detect/Hadoop/output/im2.png new file mode 100644 index 0000000..5b67c92 Binary files /dev/null and b/extract_detect/Hadoop/output/im2.png differ diff --git a/extract_detect/Hadoop/output/im3.png b/extract_detect/Hadoop/output/im3.png new file mode 100644 index 0000000..dba3826 Binary files /dev/null and b/extract_detect/Hadoop/output/im3.png differ diff --git a/extract_detect/Hadoop/output/im4.png b/extract_detect/Hadoop/output/im4.png new file mode 100644 index 0000000..16d6a42 Binary files /dev/null and b/extract_detect/Hadoop/output/im4.png differ diff --git a/extract_detect/Hadoop/output/im5.png b/extract_detect/Hadoop/output/im5.png new file mode 100644 index 0000000..1bccf48 Binary files /dev/null and b/extract_detect/Hadoop/output/im5.png differ diff --git a/extract_detect/Hadoop/output/im6.png b/extract_detect/Hadoop/output/im6.png new file mode 100644 index 0000000..3362211 Binary files /dev/null and b/extract_detect/Hadoop/output/im6.png differ diff --git a/extract_detect/Hadoop/output/im7.png b/extract_detect/Hadoop/output/im7.png new file mode 100644 index 0000000..084ceea Binary files /dev/null and b/extract_detect/Hadoop/output/im7.png differ diff --git a/extract_detect/Hadoop/output/im8.png b/extract_detect/Hadoop/output/im8.png new file mode 100644 index 0000000..486d742 Binary files /dev/null and b/extract_detect/Hadoop/output/im8.png differ diff --git a/extract_detect/Hadoop/output/im9.png b/extract_detect/Hadoop/output/im9.png new file mode 100644 index 0000000..21c88e0 Binary files /dev/null and b/extract_detect/Hadoop/output/im9.png differ diff --git a/extract_detect/Hadoop/outputim01.png b/extract_detect/Hadoop/outputim01.png deleted file mode 100644 index c8a1847..0000000 Binary files a/extract_detect/Hadoop/outputim01.png and /dev/null differ diff --git a/extract_detect/Hadoop/outputim02.png 
b/extract_detect/Hadoop/outputim02.png deleted file mode 100644 index 5946cfd..0000000 Binary files a/extract_detect/Hadoop/outputim02.png and /dev/null differ diff --git a/extract_detect/Hadoop/run_mstiff_input.sh b/extract_detect/Hadoop/run.sh similarity index 100% rename from extract_detect/Hadoop/run_mstiff_input.sh rename to extract_detect/Hadoop/run.sh diff --git a/extract_detect/Hadoop/run_seq_input.sh b/extract_detect/Hadoop/run_seq_input.sh deleted file mode 100644 index db11d73..0000000 --- a/extract_detect/Hadoop/run_seq_input.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/bin/bash - -javac -cp /usr/lib/hadoop/*:/usr/lib/hadoop/client-0.20/* -d ~/Documents/shared/MineProcess ~/Documents/shared/MineProcess/Util.java -~/Documents/shared/MineProcess/Extract.java ~/Documents/shared/MineProcess/LightPoint.java ~/Documents/shared/MineProcess/Point.java ~/Documents/shared/MineProcess/Centroid.java ~/Documents/shared/MineProcess/Box.java ~/Documents/shared/MineProcess/Cluster.java ~/Documents/shared/MineProcess/Master.java ~/Documents/shared/MineProcess/Polish.java ~/Documents/shared/MineProcess/Scan.java ~/Documents/shared/MineProcess/Sift.java ~/Documents/shared/MineProcess/Calc.java ~/Documents/shared/MineProcess/ColorEdit.java ~/Documents/shared/MineProcess/Constants.java ~/Documents/shared/MineProcess/Converter.java ~/Documents/shared/MineProcess/Format.java ~/Documents/shared/MineProcess/Radius.java ~/Documents/shared/MineProcess/Slide.java ~/Documents/shared/MineProcess/PictureFrame.java ~/Documents/shared/MineProcess/Picture.java ~/Documents/shared/MineProcess/SimplePicture.java ~/Documents/shared/MineProcess/Convert.java ~/Documents/shared/MineProcess/DigitalPicture.java ~/Documents/shared/MineProcess/ImageArrEdit.java ~/Documents/shared/MineProcess/Pixel.java ~/Documents/shared/MineProcess/MacroMaster.java ~/Documents/shared/MineProcess/Runner.java -echo "COMPILED!" -jar -cvf Runner.jar -C ~/Documents/shared/MineProcess . -echo "STARTING HADOOP..." -hadoop jar Runner.jar Runner /user/cloudera/MineProcess/input/ /user/cloudera/MineProcess/output/mine$1 -echo "FINISHED!"
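A closing note on the Slide.java hunk above: bmap now has 2 * ceil(n / dim) - 1 rows and columns instead of ceil(n / dim), so the detection windows advance by half of dim (75 pixels, with dim lowered to 150 in MacroMaster) and each window overlaps its neighbors by 50%. That keeps an object that straddles one of the old non-overlapping tile boundaries fully inside at least one window. Below is a self-contained sketch of just that window arithmetic; the 500 x 900 image size is made up for illustration and the names are illustrative, not the committed API.

// Illustrates the half-step window indexing introduced in Slide.java.
// A sketch under assumed inputs, not the committed code.
public class SlideWindowSketch {
    public static void main(String[] args) {
        int rows = 500, cols = 900;  // assumed image size for illustration
        int dim = 150;               // window size, matching the new MacroMaster.dim
        int bRows = (int) Math.ceil(1.0 * rows / dim) * 2 - 1;  // ceil(500/150)=4 -> 7
        int bCols = (int) Math.ceil(1.0 * cols / dim) * 2 - 1;  // ceil(900/150)=6 -> 11
        for (int r = 0; r < bRows; r++) {
            for (int c = 0; c < bCols; c++) {
                // origins advance in half-dim steps, like findSector(r / 2.0, c / 2.0)
                int y0 = (int) Math.round(r / 2.0 * dim);
                int x0 = (int) Math.round(c / 2.0 * dim);
                // clip the window at the image edges, as refit() does
                int h = Math.min(dim, rows - y0);
                int w = Math.min(dim, cols - x0);
                System.out.printf("window[%d][%d] origin=(%d,%d) size=%dx%d%n", r, c, y0, x0, h, w);
            }
        }
    }
}

For the assumed 500 x 900 image this prints 7 x 11 windows, with the last row clipped to 50 rows tall, which matches what the patched refit() returns for the same inputs.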