Glacier updates for 3.1.
3.1 Final release
opendedup committed Mar 1, 2016
1 parent 665f93f commit 646d017
Showing 9 changed files with 40 additions and 12 deletions.
Binary file modified install-packages/deb/usr/share/sdfs/lib/sdfs.jar
Binary file not shown.
7 changes: 5 additions & 2 deletions install-packages/deb/usr/share/sdfs/mount.sdfs
@@ -1,6 +1,7 @@
 #!/bin/bash
 modprobe fuse > /dev/null
-MEMORY="1000"
+BASEMEMORY="1000"
+CHUNKSZ="80"
 CFG=""
 MPTG=4
 MU="M"
@@ -30,12 +31,14 @@ fi
 if [ ! -n "$MEM" ]; then
 if [ -n "$CFG" ] && [ -f "$CFG" ]; then
 ac=$(echo 'cat //subsystem-config/local-chunkstore/@allocation-size' | xmllint --shell "$CFG" | grep -v ">" | cut -f 2 -d "=" | tr -d - | tr -d \");
-MEMORY=$(((ac/10737418240*MPTG)+1000))
+bac=$(echo 'cat //subsystem-config/local-chunkstore/extended-config/@io-threads' | xmllint --shell "$CFG" | grep -v ">" | cut -f 2 -d "=" | tr -d - | tr -d \");
+MEMORY=$(((ac/10737418240*MPTG)+BASEMEMORY+(bac*CHUNKSZ)))
 fi
 else
 MEMORY=$MEM
 fi
 
+
 LD_PRELOAD="/usr/share/sdfs/bin/libfuse.so.2" $EXEC -server -outfile '&1' -errfile '&2' -Djava.library.path=/usr/share/sdfs/bin/ -home /usr/share/sdfs/bin/jre -Dorg.apache.commons.logging.Log=fuse.logging.FuseLog\
 -Dfuse.logging.level=INFO -Xmx$MEMORY$MU -Xms$MEMORY$MU \
 -XX:+DisableExplicitGC -pidfile /var/run/$PF -XX:+UseG1GC -Djava.awt.headless=true \
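The rewritten MEMORY arithmetic sizes the JVM heap from two values in the volume's XML config: allocation-size (the chunk store capacity in bytes, charged at MPTG = 4 MB per 10 GiB) and the new io-threads term (charged at CHUNKSZ = 80 MB per thread), on top of the BASEMEMORY floor of 1000 MB. A minimal sketch of the same integer arithmetic in Java, assuming megabyte units for the result; the constant names mirror the script and the 1 TiB / 16-thread inputs are illustrative:

public class HeapSizeSketch {
    static final long BASEMEMORY_MB = 1000;   // fixed baseline (BASEMEMORY)
    static final long MB_PER_IO_THREAD = 80;  // per-thread buffer cost (CHUNKSZ)
    static final long MB_PER_10GIB = 4;       // per-capacity cost (MPTG)

    static long heapMb(long allocationSizeBytes, long ioThreads) {
        // 10737418240 bytes = 10 GiB; integer division, as in the shell arithmetic
        return (allocationSizeBytes / 10_737_418_240L) * MB_PER_10GIB
                + BASEMEMORY_MB
                + ioThreads * MB_PER_IO_THREAD;
    }

    public static void main(String[] args) {
        // 1 TiB chunk store, 16 io-threads:
        // (1099511627776 / 10737418240) * 4 + 1000 + 16 * 80
        //   = 102 * 4 + 1000 + 1280 = 2688
        System.out.println(heapMb(1L << 40, 16) + " MB"); // prints "2688 MB"
    }
}

The computed value feeds straight into -Xmx$MEMORY$MU / -Xms$MEMORY$MU below, so the heap now grows with the configured io-thread count rather than with capacity alone.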
1 change: 1 addition & 0 deletions src/log4j.properties
@@ -0,0 +1 @@
+log4j.logger.com.amazonaws=WARN
21 changes: 18 additions & 3 deletions src/org/opendedup/collections/ProgressiveFileBasedCSMap.java
@@ -138,9 +138,10 @@ private ProgressiveFileByteArrayLongMap createWriteMap() throws IOException {
 File f = new File(fileName + "-" + guid + ".keys");
 if (!f.exists()) {
 activeWMap = new ProgressiveFileByteArrayLongMap(fileName + "-" + guid, this.hashTblSz);
-activeWMap.activate();
 activeWMap.setUp();
 this.maps.add(activeWMap);
+activeWMap.activate();
+
 written = true;
 }
 }
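The reordering in this hunk is deliberate: setUp() now runs before the map is added to this.maps, and activate() only after, so a write map is never visible as active while its backing key file is still uninitialized. A minimal sketch of the corrected sequence with hypothetical simplified types (the real class is ProgressiveFileByteArrayLongMap; activate() is assumed to flip a flag that concurrent readers test):

import java.util.Set;
import java.util.concurrent.CopyOnWriteArraySet;

class WriteMapOrderSketch {
    static class WriteMap {
        private volatile boolean active = false;
        void setUp() { /* open the backing .keys file, build the index */ }
        void activate() { active = true; }
        boolean isActive() { return active; }
    }

    static final Set<WriteMap> maps = new CopyOnWriteArraySet<>();

    static WriteMap createWriteMap() {
        WriteMap m = new WriteMap();
        m.setUp();      // initialize first
        maps.add(m);    // then publish to the shared set
        m.activate();   // only now may other threads select it for writes
        return m;
    }
}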
@@ -297,7 +298,13 @@ public synchronized long claimRecords(SDFSEvent evt, LargeBloomFilter bf) throws
 p = m.nextKeyValue();
 }
 int mapsz = maps.size();
+l = this.gcLock.writeLock();
+l.lock();
+try{
 maps.remove(m);
+}finally {
+l.unlock();
+}
 mapsz = mapsz - maps.size();
 SDFSLogger.getLog()
 .info("removing map " + m.toString() + " sz=" + maps.size() + " rm=" + mapsz);
@@ -320,7 +327,13 @@ public synchronized long claimRecords(SDFSEvent evt, LargeBloomFilter bf) throws
 p = m.nextKeyValue();
 }
 int mapsz = maps.size();
+l = this.gcLock.writeLock();
+l.lock();
+try{
 maps.remove(m);
+}finally {
+l.unlock();
+}
 mapsz = mapsz - maps.size();
 SDFSLogger.getLog()
 .info("removing map " + m.toString() + " sz=" + maps.size() + " rm=" + mapsz);
@@ -495,10 +508,12 @@ public long setUp() throws Exception {
 if (!f.exists()) {
 ProgressiveFileByteArrayLongMap activeWMap = new ProgressiveFileByteArrayLongMap(
 fileName + "-" + guid, this.hashTblSz);
-activeWMap.activate();
 activeWMap.setUp();
+
 this.maps.add(activeWMap);
 written = true;
+activeWMap.activate();
+
 this.activeWriteMap = activeWMap;
 }
 }
@@ -619,7 +634,7 @@ public boolean put(ChunkData cm, boolean persist) throws IOException, HashtableF
 } finally {
 try {
 if (bm != null) {
-bm.inActive();
+bm.activate();
 }
 } catch (Exception e) {
 
7 changes: 6 additions & 1 deletion src/org/opendedup/logging/SDFSLogger.java
@@ -6,6 +6,7 @@
 
 
+
 import org.apache.log4j.BasicConfigurator;
 import org.apache.log4j.ConsoleAppender;
 import org.apache.log4j.Level;
@@ -17,6 +18,7 @@
 public class SDFSLogger {
 
 private static Logger log = Logger.getLogger("sdfs");
+private static Logger awslog = Logger.getLogger("com.amazonaws");
 private static Logger fslog = Logger.getLogger("fs");
 private static Logger basicLog = Logger.getLogger("bsdfs");
 private static boolean debug = false;
@@ -25,7 +27,7 @@ public class SDFSLogger {
 ConsoleAppender bapp = new ConsoleAppender(new PatternLayout("%m%n"));
 basicLog.addAppender(bapp);
 
-basicLog.setLevel(Level.INFO);
+basicLog.setLevel(Level.WARN);
 RollingFileAppender app = null;
 try {
 
@@ -36,6 +38,9 @@
 } catch (IOException e) {
 log.debug("unable to change appender", e);
 }
+awslog.setLevel(Level.WARN);
+awslog.removeAllAppenders();
+awslog.addAppender(app);
 log.setLevel(Level.INFO);
 fsdebug = true;
 fslog.setLevel(Level.DEBUG);
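Together with the log4j.properties entry added earlier in this commit, these lines rein in the AWS SDK's logging: log4j loggers form a dot-separated hierarchy, so configuring the "com.amazonaws" parent governs every logger the SDK creates beneath it, and reattaching the rolling file appender keeps the surviving WARN+ messages in the SDFS log file instead of the console. A minimal self-contained sketch; the file path and pattern here are illustrative:

import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;
import org.apache.log4j.RollingFileAppender;

public class AwsLogSketch {
    public static void main(String[] args) throws Exception {
        Logger awslog = Logger.getLogger("com.amazonaws");
        RollingFileAppender app = new RollingFileAppender(
                new PatternLayout("%d %p %c - %m%n"), "/tmp/sdfs-aws.log");
        awslog.setLevel(Level.WARN);   // drop the SDK's chatty INFO output
        awslog.removeAllAppenders();   // detach any appenders it inherited
        awslog.addAppender(app);       // route WARN+ to the rolling file
        awslog.info("suppressed");     // below WARN: not written
        awslog.warn("kept");           // WARN and above: lands in the file
    }
}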
4 changes: 2 additions & 2 deletions src/org/opendedup/sdfs/Main.java
@@ -24,8 +24,8 @@ public class Main {
 public static boolean checkArchiveOnOpen= false;
 public static boolean checkArchiveOnRead = false;
 
-public static int writeTimeoutSeconds = 300;
-public static int readTimeoutSeconds = 300;
+public static int writeTimeoutSeconds = -1; //1 hour timeout
+public static int readTimeoutSeconds = -1; //1 hour timeout
 // public static VolumeConfigWriterThread wth = null;
 public static boolean runConsistancyCheck = false;
 
9 changes: 6 additions & 3 deletions src/org/opendedup/sdfs/filestore/HashBlobArchive.java
@@ -447,7 +447,7 @@ public void onRemoval(RemovalNotification<Long, HashBlobArchive> removal) {
 removal.getValue().removeCache();
 }
 }).build(new CacheLoader<Long, HashBlobArchive>() {
-public HashBlobArchive load(Long hashid) throws IOException {
+public HashBlobArchive load(Long hashid) throws Exception {
 try {
 HashBlobArchive har = null;
 File f = getPath(hashid);
@@ -458,9 +458,12 @@ public HashBlobArchive load(Long hashid) throws Exception {
 har = new HashBlobArchive(f, hashid);
 har.cached = true;
 return har;
-} catch (Exception e) {
+}catch (DataArchivedException e) {
+throw e;
+}
+catch (Exception e) {
 SDFSLogger.getLog().error("unable to fetch block [" + hashid + "]", e);
-throw new IOException("unable to read " + hashid);
+throw e;
 }
 }
 });
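The substance of this hunk is exception transparency: DataArchivedException, raised when a block's backing object has been moved to Glacier, now propagates out of the cache loader intact instead of being flattened into a generic IOException, so callers can tell "archived, needs restore" apart from an ordinary read failure. A minimal sketch of the pattern with Guava's LoadingCache; the DataArchivedException below is a hypothetical stand-in for the real org.opendedup class:

import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
import java.util.concurrent.ExecutionException;

public class ArchiveCacheSketch {
    static class DataArchivedException extends Exception {}

    static final LoadingCache<Long, byte[]> cache = CacheBuilder.newBuilder()
            .maximumSize(100)
            .build(new CacheLoader<Long, byte[]>() {
                @Override
                public byte[] load(Long id) throws Exception {
                    try {
                        return fetchBlock(id);
                    } catch (DataArchivedException e) {
                        throw e;  // pass the archived marker through untouched
                    } catch (Exception e) {
                        throw e;  // the real code logs here before rethrowing
                    }
                }
            });

    static byte[] fetchBlock(Long id) throws DataArchivedException {
        throw new DataArchivedException();  // pretend the block is in Glacier
    }

    public static void main(String[] args) {
        try {
            cache.get(1L);
        } catch (ExecutionException e) {
            // Guava wraps checked causes; unwrap to react (e.g. start a restore)
            System.out.println(e.getCause().getClass().getSimpleName());
        }
    }
}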
@@ -1169,7 +1169,7 @@ else if (lz4compress) {
 
 String hast = new String(data);
 SDFSLogger.getLog()
-.info("reading hashes " + (String) mp.get("objects")
+.debug("reading hashes " + (String) mp.get("objects")
 + " from " + hid + " encn " + sobj.getKey().substring(5));
 StringTokenizer ht = new StringTokenizer(hast, ",");
 StringResult st = new StringResult();
1 change: 1 addition & 0 deletions src/org/opendedup/sdfs/io/WritableCacheBuffer.java
@@ -665,6 +665,7 @@ public void commandArchiveException(DataArchivedException e) {
 l.setMaxSize(fs.size());
 Finger.FingerPersister fp = new Finger.FingerPersister();
 fp.l = l;
+fp.fingers = fs;
 fp.dedup = df.mf.isDedup();
 lexecutor.execute(fp);
 int wl = 0;
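The single added assignment is a plain bug fix: FingerPersister runs on a background executor, and any field the task reads must be populated before execute(), or the worker thread dereferences null. A minimal sketch of the shape of the fix, with simplified stand-in types:

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

class FingerPersisterSketch implements Runnable {
    List<String> fingers;  // the field this commit starts populating
    boolean dedup;

    @Override
    public void run() {
        // without the fp.fingers assignment this loop throws NullPointerException
        for (String f : fingers) {
            System.out.println("persist " + f + " dedup=" + dedup);
        }
    }
}

class SubmitSketch {
    public static void main(String[] args) {
        ExecutorService lexecutor = Executors.newSingleThreadExecutor();
        FingerPersisterSketch fp = new FingerPersisterSketch();
        fp.fingers = List.of("f1", "f2");  // assign every field the task reads...
        fp.dedup = true;
        lexecutor.execute(fp);             // ...before handing it to the executor
        lexecutor.shutdown();
    }
}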
