Skip to content

Commit

Permalink
delta-based index cache: added compacting of cache store
Browse files Browse the repository at this point in the history
  • Loading branch information
martinlippert committed Dec 13, 2024
1 parent 2e7db46 commit 7d52eab
Show file tree
Hide file tree
Showing 2 changed files with 92 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -74,6 +74,10 @@ public class IndexCacheOnDiscDeltaBased implements IndexCache {

private final File cacheDirectory;
private final Map<IndexCacheKey, ConcurrentMap<InternalFileIdentifier, Long>> timestamps;
private final Map<IndexCacheKey, Integer> compactingCounter;
private final int compactingCounterBoundary;

private static final int DEFAULT_COMPACTING_TRIGGER = 20;

private static final Logger log = LoggerFactory.getLogger(IndexCacheOnDiscDeltaBased.class);

Expand All @@ -89,6 +93,8 @@ public IndexCacheOnDiscDeltaBased(File cacheDirectory) {
}

this.timestamps = new ConcurrentHashMap<>();
this.compactingCounter = new ConcurrentHashMap<>();
this.compactingCounterBoundary = DEFAULT_COMPACTING_TRIGGER;
}

@Override
Expand Down Expand Up @@ -116,6 +122,9 @@ public <T extends IndexCacheable> void store(IndexCacheKey cacheKey, String[] fi
ConcurrentMap<InternalFileIdentifier, Long> timestampMap = timestampedFiles.entrySet().stream()
.collect(Collectors.toConcurrentMap(e -> InternalFileIdentifier.fromPath(e.getKey()), e -> e.getValue()));
this.timestamps.put(cacheKey, timestampMap);

this.compactingCounter.put(cacheKey, 0);
deleteOutdatedCacheFiles(cacheKey);
}

@SuppressWarnings("unchecked")
Expand All @@ -124,7 +133,8 @@ public <T extends IndexCacheable> Pair<T[], Multimap<String, String>> retrieve(I
File cacheStore = new File(cacheDirectory, cacheKey.toString() + ".json");
if (cacheStore.exists()) {

IndexCacheStore<T> store = retrieveStoreFromIncrementalStorage(cacheKey, type);
Pair<IndexCacheStore<T>, Integer> result = retrieveStoreFromIncrementalStorage(cacheKey, type);
IndexCacheStore<T> store = result.getLeft();

SortedMap<String, Long> timestampedFiles = Arrays.stream(files)
.filter(file -> new File(file).exists())
Expand Down Expand Up @@ -153,6 +163,8 @@ public <T extends IndexCacheable> Pair<T[], Multimap<String, String>> retrieve(I
ConcurrentMap<InternalFileIdentifier, Long> timestampMap = timestampedFiles.entrySet().stream()
.collect(Collectors.toConcurrentMap(e -> InternalFileIdentifier.fromPath(e.getKey()), e -> e.getValue()));
this.timestamps.put(cacheKey, timestampMap);
this.compactingCounter.put(cacheKey, result.getRight());
compact(cacheKey, type);

return Pair.of(
(T[]) symbols.toArray((T[]) Array.newInstance(type, symbols.size())),
Expand All @@ -179,6 +191,9 @@ public <T extends IndexCacheable> void removeFiles(IndexCacheKey cacheKey, Strin
timestampsMap.remove(InternalFileIdentifier.fromPath(file));
}
}

this.compactingCounter.merge(cacheKey, 1, Integer::sum);
compact(cacheKey, type);
}

@Override
Expand All @@ -190,6 +205,7 @@ public void remove(IndexCacheKey cacheKey) {

// update local timestamp cache
this.timestamps.remove(cacheKey);
this.compactingCounter.remove(cacheKey);
}

@Override
Expand All @@ -212,6 +228,8 @@ public <T extends IndexCacheable> void update(IndexCacheKey cacheKey, String fil
// update local timestamp cache
Map<InternalFileIdentifier, Long> timestampsMap = this.timestamps.computeIfAbsent(cacheKey, (s) -> new ConcurrentHashMap<>());
timestampsMap.put(InternalFileIdentifier.fromPath(file), lastModified);
this.compactingCounter.merge(cacheKey, 1, Integer::sum);
compact(cacheKey, type);
}

@Override
Expand All @@ -238,6 +256,8 @@ public <T extends IndexCacheable> void update(IndexCacheKey cacheKey, String[] f
for (int i = 0; i < files.length; i++) {
timestampsMap.put(InternalFileIdentifier.fromPath(files[i]), lastModified[i]);
}
this.compactingCounter.merge(cacheKey, 1, Integer::sum);
compact(cacheKey, type);
}

@Override
Expand All @@ -254,6 +274,10 @@ public long getModificationTimestamp(IndexCacheKey cacheKey, String file) {

return 0;
}

/**
 * Returns the boundary for the compacting counter: once the number of
 * incremental deltas recorded for a cache key exceeds this value, the
 * on-disc storage for that key is compacted into a single snapshot.
 *
 * @return the configured compacting trigger boundary
 */
public int getCompactingCounterBoundary() {
	return this.compactingCounterBoundary;
}

private boolean isFileMatch(SortedMap<String, Long> files1, SortedMap<String, Long> files2) {
if (files1.size() != files2.size()) return false;
Expand All @@ -265,8 +289,18 @@ private boolean isFileMatch(SortedMap<String, Long> files1, SortedMap<String, Lo

return true;
}

/**
 * Compacts the delta-based on-disc storage for the given cache key once the
 * number of accumulated deltas exceeds the configured boundary.
 * <p>
 * Compacting replays all stored deltas into a single consolidated
 * {@code IndexCacheStore}, persists that store as one snapshot delta, resets
 * the per-key delta counter, and removes outdated cache files.
 *
 * @param cacheKey the cache key whose storage should be checked for compacting
 * @param type     the concrete element type stored in the cache
 */
private <T extends IndexCacheable> void compact(IndexCacheKey cacheKey, Class<T> type) {
	// getOrDefault avoids an auto-unboxing NPE when compact() is invoked for a
	// cache key that was never registered via store/retrieve/update before
	if (this.compactingCounter.getOrDefault(cacheKey, 0) > this.compactingCounterBoundary) {
		// replay every persisted delta into one consolidated store
		IndexCacheStore<T> compactedData = retrieveStoreFromIncrementalStorage(cacheKey, type).getLeft();
		// NOTE(review): third argument 'false' presumably means "overwrite
		// instead of append" so the snapshot replaces the delta chain — confirm
		// against persist(...)'s signature
		persist(cacheKey, new DeltaSnapshot<T>(compactedData), false);
		this.compactingCounter.put(cacheKey, 0);

		deleteOutdatedCacheFiles(cacheKey);
	}
}

private void cleanupCache(IndexCacheKey cacheKey) {
private void deleteOutdatedCacheFiles(IndexCacheKey cacheKey) {
File[] cacheFiles = this.cacheDirectory.listFiles();

for (int i = 0; i < cacheFiles.length; i++) {
Expand Down Expand Up @@ -298,16 +332,15 @@ private <T extends IndexCacheable> void persist(IndexCacheKey cacheKey, DeltaEle
gson.toJson(deltaStorage, writer);

writer.write("\n");

cleanupCache(cacheKey);
}
catch (Exception e) {
log.error("cannot write symbol cache", e);
}
}

private <T extends IndexCacheable> IndexCacheStore<T> retrieveStoreFromIncrementalStorage(IndexCacheKey cacheKey, Class<T> type) {
private <T extends IndexCacheable> Pair<IndexCacheStore<T>, Integer> retrieveStoreFromIncrementalStorage(IndexCacheKey cacheKey, Class<T> type) {
IndexCacheStore<T> store = new IndexCacheStore<>(new TreeMap<>(), new ArrayList<T>(), new HashMap<>(), type);
int deltaCounter = 0;

File cacheStore = new File(cacheDirectory, cacheKey.toString() + ".json");
if (cacheStore.exists()) {
Expand All @@ -319,14 +352,15 @@ private <T extends IndexCacheable> IndexCacheStore<T> retrieveStoreFromIncrement
while (reader.peek() != JsonToken.END_DOCUMENT) {
DeltaStorage<T> delta = gson.fromJson(reader, DeltaStorage.class);
store = delta.storedElement.apply(store);
deltaCounter++;
}

}
catch (Exception e) {
log.error("error reading cached symbols", e);
}
}
return store;
return Pair.of(store, deltaCounter);
}


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -333,6 +333,58 @@ void testSymbolAddedToExistingFile() throws Exception {
assertEquals(timeFile1.toMillis() + 2000, cache.getModificationTimestamp(CACHE_KEY_VERSION_1, file1.toString()));
}

@Test
void testStorageFileIncrementallyUpdatedAndCompacted() throws Exception {
	// single source file whose cached symbols get updated repeatedly
	Path file1 = Paths.get(tempDir.toAbsolutePath().toString(), "tempFile1");
	Files.createFile(file1);

	FileTime timeFile1 = Files.getLastModifiedTime(file1);
	String[] files = {file1.toAbsolutePath().toString()};

	String doc1URI = UriUtil.toUri(file1.toFile()).toString();

	// one cached symbol is enough to produce a non-empty delta per update
	List<CachedSymbol> generatedSymbols1 = new ArrayList<>();
	WorkspaceSymbol symbol1 = new WorkspaceSymbol("symbol1", SymbolKind.Field, Either.forLeft(new Location("docURI", new Range(new Position(3, 10), new Position(3, 20)))));
	EnhancedSymbolInformation enhancedSymbol1 = new EnhancedSymbolInformation(symbol1, null);
	generatedSymbols1.add(new CachedSymbol(doc1URI, timeFile1.toMillis(), enhancedSymbol1));

	cache.store(CACHE_KEY_VERSION_1, files, generatedSymbols1, null, CachedSymbol.class);

	// track the on-disc storage file size; incremental (append-only) updates
	// should make it grow with every update until compacting shrinks it again
	Path path = tempDir.resolve(Paths.get(CACHE_KEY_VERSION_1.toString() + STORAGE_FILE_EXTENSION));
	long initialCacheStorageSize = Files.size(path);
	long lastCacheStorageSize = initialCacheStorageSize;

	int compactingBoundary = cache.getCompactingCounterBoundary();

	// perform exactly 'boundary' updates - none of them should trigger compacting yet
	for (int i = 0; i < compactingBoundary; i++) {
		cache.update(CACHE_KEY_VERSION_1, file1.toAbsolutePath().toString(), timeFile1.toMillis() + (100 * i), generatedSymbols1, null, CachedSymbol.class);

		// check storage size (to see if updates are stored incrementally)
		long updatedCacheStorageSize = Files.size(path);
		assertTrue(updatedCacheStorageSize > lastCacheStorageSize, "cache storage size in iteration: " + i);

		lastCacheStorageSize = updatedCacheStorageSize;

		// check internal timestamp updates
		long newModificationTimestamp = cache.getModificationTimestamp(CACHE_KEY_VERSION_1, file1.toString());
		assertEquals(timeFile1.toMillis() + (100 * i), newModificationTimestamp);

	}

	// test compacting after trigger boundary: this update pushes the delta
	// counter past the boundary, so the storage gets rewritten as one snapshot
	cache.update(CACHE_KEY_VERSION_1, file1.toAbsolutePath().toString(), timeFile1.toMillis() + (100 * compactingBoundary), generatedSymbols1, null, CachedSymbol.class);

	// check storage size (compacting should shrink the file below its pre-compacting size)
	long updatedCacheStorageSize = Files.size(path);
	assertTrue(updatedCacheStorageSize < lastCacheStorageSize, "cache storage size after compacting");

	lastCacheStorageSize = updatedCacheStorageSize;

	// check internal timestamp updates - compacting must not lose the latest timestamp
	long newModificationTimestamp = cache.getModificationTimestamp(CACHE_KEY_VERSION_1, file1.toString());
	assertEquals(timeFile1.toMillis() + (100 * compactingBoundary), newModificationTimestamp);
}

@Test
void testSymbolsAddedToMultipleFiles() throws Exception {

Expand Down

0 comments on commit 7d52eab

Please sign in to comment.