Skip to content

Commit

Permalink
made changes based on review comments
Browse files Browse the repository at this point in the history
  • Loading branch information
NoTrace7 committed Mar 13, 2015
1 parent 0f1bb3e commit 4b4b210
Show file tree
Hide file tree
Showing 2 changed files with 34 additions and 26 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.Charset;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
Expand All @@ -22,15 +23,16 @@

import com.google.common.io.Closer;

import gobblin.configuration.ConfigurationKeys;


/**
* A utility class for generating a script that moves heap dump .hprof files to HDFS for Hadoop tasks when a Java heap out-of-memory error is thrown.
*
*/
public class HeapDumpForTaskUtils {

private static final Logger LOG = LoggerFactory.getLogger(HeapDumpForTaskUtils.class);
private static final String DUMP_FOLDER = "/dumps/";
private static final String DUMP_FOLDER = "dumps";

/**
* Generates the dump script, which is used when an OOM error is thrown during task execution.
Expand All @@ -49,27 +51,30 @@ public class HeapDumpForTaskUtils {
*/
public static void generateDumpScript(Path dumpScript, FileSystem fs, String heapFileName, String chmod)
throws IOException {
BufferedWriter scriptWriter = null;
if (fs.exists(dumpScript)) {
LOG.info("Heap dump script already exists: " + dumpScript);
return;
}

Closer closer = Closer.create();
try {
if (fs.exists(dumpScript)) {
LOG.info("Heap dump script already exists: " + dumpScript);
} else {
String dumpDir = dumpScript.getParent() + DUMP_FOLDER;
scriptWriter = closer.register(new BufferedWriter(new OutputStreamWriter(fs.create(dumpScript))));
scriptWriter.write("#!/bin/sh\n" + "hadoop dfs -put " + heapFileName + " " + dumpDir + "${PWD//\\//_}.hprof");
scriptWriter.flush();
Runtime.getRuntime().exec(chmod + " " + dumpScript);

if (!fs.exists(new Path(dumpScript.getParent() + DUMP_FOLDER))) {
fs.mkdirs(new Path(dumpDir));
}
Path dumpDir = new Path(dumpScript.getParent(), DUMP_FOLDER);
if (!fs.exists(dumpDir)) {
fs.mkdirs(dumpDir);
}
BufferedWriter scriptWriter =
closer.register(new BufferedWriter(new OutputStreamWriter(fs.create(dumpScript), Charset
.forName(ConfigurationKeys.DEFAULT_CHARSET_ENCODING))));
scriptWriter.write("#!/bin/sh\n" + "hadoop dfs -put " + heapFileName + " " + dumpDir + "/${PWD//\\//_}.hprof");
scriptWriter.flush();
Runtime.getRuntime().exec(chmod + " " + dumpScript);
} catch (IOException e) {
LOG.error("Heap dump script is not generated successfully.");
if (fs.exists(dumpScript)) {
fs.delete(dumpScript, true);
}
} catch (Throwable t) {
closer.rethrow(t);
} finally {
closer.close();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,30 +30,33 @@
public class HeapDumpForTaskUtilsTest {

private FileSystem fs;
private Configuration conf;

private static final String TEST_DIR = "./dumpScript/";
private static final String TEST_DIR = "dumpScript";
private static final String SCRIPT_NAME = "dump.sh";

@BeforeClass
public void setUp() throws IOException {
this.conf = new Configuration();
this.conf.set("fs.default.name", "file:///");
this.conf.set("mapred.job.tracker", "local");
this.fs = FileSystem.getLocal(this.conf);
this.fs = FileSystem.getLocal(new Configuration());
this.fs.mkdirs(new Path(TEST_DIR));
}

@Test
public void testGenerateDumpScript() throws IOException {
Path dumpScript = new Path(TEST_DIR + SCRIPT_NAME);
Path dumpScript = new Path(TEST_DIR, SCRIPT_NAME);
HeapDumpForTaskUtils.generateDumpScript(dumpScript, this.fs, "test.hprof", "chmod 777 ");
Assert.assertEquals(true, this.fs.exists(dumpScript));
Assert.assertEquals(true, this.fs.exists(new Path(dumpScript.getParent() + "/dumps/")));
Assert.assertEquals(true, this.fs.exists(new Path(dumpScript.getParent(), "dumps")));
Closer closer = Closer.create();
BufferedReader scriptReader = closer.register(new BufferedReader(new InputStreamReader(this.fs.open(dumpScript))));
Assert.assertEquals("#!/bin/sh", scriptReader.readLine());
Assert.assertEquals("hadoop dfs -put test.hprof dumpScript/dumps/${PWD//\\//_}.hprof", scriptReader.readLine());
try {
BufferedReader scriptReader =
closer.register(new BufferedReader(new InputStreamReader(this.fs.open(dumpScript))));
Assert.assertEquals("#!/bin/sh", scriptReader.readLine());
Assert.assertEquals("hadoop dfs -put test.hprof dumpScript/dumps/${PWD//\\//_}.hprof", scriptReader.readLine());
} catch (Throwable t) {
closer.rethrow(t);
} finally {
closer.close();
}
}

@AfterClass
Expand Down

0 comments on commit 4b4b210

Please sign in to comment.