Commit

Render modified blocks
Addresses #1

TODO:
 - clear mesh caches for modified blocks
 - (de-)serialize modified blocks
 - trigger re-rendering of all affected meshes when solution is propagated to level
hanslovsky committed May 30, 2018
1 parent da88860 commit 413638e
Showing 5 changed files with 163 additions and 22 deletions.
File 1 of 5
@@ -15,6 +15,7 @@
import org.janelia.saalfeldlab.paintera.data.DataSource;
import org.janelia.saalfeldlab.paintera.data.mask.MaskedSource;
import org.janelia.saalfeldlab.paintera.meshes.InterruptibleFunction;
import org.janelia.saalfeldlab.paintera.meshes.InterruptibleFunctionAndCache;
import org.janelia.saalfeldlab.paintera.meshes.cache.CacheUtils;
import org.janelia.saalfeldlab.paintera.meshes.cache.UniqueLabelListLabelMultisetCacheLoader;
import org.janelia.saalfeldlab.paintera.state.GlobalTransformManager;
@@ -221,13 +222,13 @@ public ExecutorService generalPurposeExecutorService()
return this.generalPurposeExecutorService;
}

public static < D, T > InterruptibleFunction< Long, Interval[] >[] generateLabelBlocksForLabelCache(
public static < D, T > InterruptibleFunctionAndCache< Long, Interval[] >[] generateLabelBlocksForLabelCache(
final DataSource< D, T > spec )
{
return generateLabelBlocksForLabelCache( spec, scaleFactorsFromAffineTransforms( spec ) );
}

public static < D, T > InterruptibleFunction< Long, Interval[] >[] generateLabelBlocksForLabelCache(
public static < D, T > InterruptibleFunctionAndCache< Long, Interval[] >[] generateLabelBlocksForLabelCache(
final DataSource< D, T > spec,
final double[][] scalingFactors )
{
@@ -248,7 +249,7 @@ public static < D, T > InterruptibleFunction< Long, Interval[] >[] generateLabel
return generateLabelBlocksForLabelCacheGeneric( spec, scalingFactors, collectLabels( spec.getDataType() ) );
}

private static < D, T > InterruptibleFunction< Long, Interval[] >[] generateLabelBlocksForLabelCacheGeneric(
private static < D, T > InterruptibleFunctionAndCache< Long, Interval[] >[] generateLabelBlocksForLabelCacheGeneric(
final DataSource< D, T > spec,
final double[][] scalingFactors,
final BiConsumer< D, TLongHashSet > collectLabels )
@@ -262,7 +263,7 @@ private static < D, T > InterruptibleFunction< Long, Interval[] >[] generateLabe
collectLabels,
CacheUtils::toCacheSoftRefLoaderCache );

final InterruptibleFunction< Long, Interval[] >[] blocksForLabelCache = CacheUtils.blocksForLabelCachesLongKeys(
final InterruptibleFunctionAndCache< Long, Interval[] >[] blocksForLabelCache = CacheUtils.blocksForLabelCachesLongKeys(
spec,
uniqueLabelLoaders,
blockSizes,
@@ -273,7 +274,7 @@ private static < D, T > InterruptibleFunction< Long, Interval[] >[] generateLabe

}

private static < T > InterruptibleFunction< Long, Interval[] >[] generateBlocksForLabelCacheLabelMultisetTypeCachedImg(
private static < T > InterruptibleFunctionAndCache< Long, Interval[] >[] generateBlocksForLabelCacheLabelMultisetTypeCachedImg(
final DataSource< LabelMultisetType, T > spec,
final double[][] scalingFactors )
{
@@ -295,7 +296,7 @@ private static < T > InterruptibleFunction< Long, Interval[] >[] generateBlocksF
blockSizes[ level ] = IntStream.range( 0, grid.numDimensions() ).map( grid::cellDimension ).toArray();
}

final InterruptibleFunction< Long, Interval[] >[] blocksForLabelCache = CacheUtils.blocksForLabelCachesLongKeys(
final InterruptibleFunctionAndCache< Long, Interval[] >[] blocksForLabelCache = CacheUtils.blocksForLabelCachesLongKeys(
spec,
uniqueLabelLoaders,
blockSizes,
File 2 of 5
@@ -4,13 +4,15 @@
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.function.BiConsumer;
import java.util.function.Supplier;
import java.util.stream.DoubleStream;
import java.util.stream.IntStream;
import java.util.stream.LongStream;
import java.util.stream.Stream;

import org.janelia.saalfeldlab.paintera.data.DataSource;
import org.janelia.saalfeldlab.paintera.data.mask.PickOne.PickAndConvert;
@@ -54,6 +56,7 @@
import net.imglib2.converter.Converters;
import net.imglib2.converter.TypeIdentity;
import net.imglib2.img.basictypeaccess.LongAccess;
import net.imglib2.img.cell.AbstractCellImg;
import net.imglib2.img.cell.Cell;
import net.imglib2.img.cell.CellGrid;
import net.imglib2.img.cell.LazyCellImg.LazyCells;
@@ -134,6 +137,8 @@ public class MaskedSource< D extends Type< D >, T extends Type< T > > implements

private final ObservableBooleanValue isNotPersisting = isPersisting.not();

private final Map< Long, TLongHashSet >[] affectedBlocksByLabel;

private final ObservableBooleanValue canBePersited = Bindings.createBooleanBinding(
() -> isMaskNotDeployed.get() && isNotPersisting.get() && noMasksCurrentlyApplied.get(),
isNotPersisting,
@@ -207,6 +212,8 @@ public MaskedSource(
this.cacheDirectory.addListener( new CanvasBaseDirChangeListener( dataCanvases, canvases, this.dimensions, this.blockSizes ) );
this.cacheDirectory.set( initialCacheDirectory );

this.affectedBlocksByLabel = Stream.generate( HashMap::new ).limit( this.canvases.length ).toArray( Map[]::new );

setMasksConstant();

}
@@ -300,6 +307,10 @@ public void applyMask( final RandomAccessibleInterval< UnsignedByteType > mask,

final TLongSet paintedBlocksAtHighestResolution = this.scaleBlocksToLevel( affectedBlocks, maskInfo.level, 0 );

System.out.println( 1 );
this.affectedBlocksByLabel[ maskInfo.level ].computeIfAbsent( maskInfo.value.getIntegerLong(), key -> new TLongHashSet() ).addAll( affectedBlocks );
System.out.println( 2 );
LOG.warn( "Added affected block: {}", affectedBlocksByLabel[ maskInfo.level ] );
this.affectedBlocks.addAll( paintedBlocksAtHighestResolution );

this.maskApplyCount.set( this.maskApplyCount.get() + 1 );
Expand Down Expand Up @@ -662,6 +673,12 @@ public static < T extends IntegerType< T > > void downsample(
}
}

public TLongSet getModifiedBlocks( final int level, final long id )
{
LOG.warn( "Getting modified blocks for level={} and id={}", level, id );
return Optional.ofNullable( this.affectedBlocksByLabel[ level ].get( id ) ).map( TLongHashSet::new ).orElseGet( TLongHashSet::new );
}

private void propagateMask(
final RandomAccessibleInterval< UnsignedByteType > mask,
final TLongSet paintedBlocksAtPaintedScale,
@@ -688,6 +705,7 @@ private void propagateMask(
throw new RuntimeException( "Non-integer relative scales: " + Arrays.toString( relativeScales ) );
}
final TLongSet affectedBlocksAtHigherLevel = this.scaleBlocksToLevel( paintedBlocksAtPaintedScale, paintedLevel, level );
this.affectedBlocksByLabel[ level ].get( label.getIntegerLong() ).addAll( affectedBlocksAtHigherLevel );

// downsample
final int[] steps = DoubleStream.of( relativeScales ).mapToInt( d -> ( int ) d ).toArray();
@@ -706,6 +724,7 @@
LOG.debug( "Upsampling for level={}", level );
final TLongSet affectedBlocksAtLowerLevel = this.scaleBlocksToLevel( paintedBlocksAtPaintedScale, paintedLevel, level );
final double[] currentRelativeScaleFromTargetToPainted = DataSource.getRelativeScales( this, 0, level, paintedLevel );
this.affectedBlocksByLabel[ level ].get( label.getIntegerLong() ).addAll( affectedBlocksAtLowerLevel );

final Interval paintedIntervalAtTargetLevel = scaleIntervalToLevel( intervalAtPaintedScale, paintedLevel, level );

@@ -968,6 +987,7 @@ public static boolean isNonEmpty( final long[] min, final long[] max )
private void clearCanvases()
{
this.cacheDirectory.set( this.nextCacheDirectory.get() );
Arrays.stream( this.affectedBlocksByLabel ).forEach( Map::clear );
}

private static class CanvasBaseDirChangeListener implements ChangeListener< String >
@@ -1061,4 +1081,9 @@ public String currentCanvasDirectory()
return this.persistCanvas;
}

public CellGrid getCellGrid( final int t, final int level )
{
return ( ( AbstractCellImg< ?, ?, ?, ? > ) underlyingSource().getSource( t, level ) ).getCellGrid();
}

}
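The new affectedBlocksByLabel bookkeeping, together with getModifiedBlocks( level, id ) and getCellGrid( t, level ), lets a downstream consumer such as a mesh manager find out which blocks a paint operation touched for a given label. The following is a minimal sketch of such a consumer, not part of the commit; it assumes block ids are flat indices into the level's CellGrid (this diff does not state that explicitly), and the helper class name is illustrative.

import java.util.ArrayList;
import java.util.List;

import org.janelia.saalfeldlab.paintera.data.mask.MaskedSource;

import gnu.trove.iterator.TLongIterator;
import gnu.trove.set.TLongSet;
import net.imglib2.FinalInterval;
import net.imglib2.Interval;
import net.imglib2.img.cell.CellGrid;

public class ModifiedBlockIntervals
{

	// Resolve the modified block ids for one label into pixel intervals at the
	// given level, e.g. to decide which meshes need to be regenerated.
	// Assumes block ids are flat CellGrid indices.
	public static List< Interval > intervalsForModifiedBlocks(
			final MaskedSource< ?, ? > source,
			final int t,
			final int level,
			final long labelId )
	{
		final CellGrid grid = source.getCellGrid( t, level );
		final TLongSet blocks = source.getModifiedBlocks( level, labelId );
		final List< Interval > intervals = new ArrayList<>();
		final long[] min = new long[ grid.numDimensions() ];
		final long[] max = new long[ grid.numDimensions() ];
		final int[] dims = new int[ grid.numDimensions() ];
		for ( final TLongIterator it = blocks.iterator(); it.hasNext(); )
		{
			grid.getCellDimensions( it.next(), min, dims );
			for ( int d = 0; d < min.length; ++d )
			{
				max[ d ] = min[ d ] + dims[ d ] - 1;
			}
			intervals.add( new FinalInterval( min.clone(), max.clone() ) );
		}
		return intervals;
	}

}

From there, a mesh cache could invalidate only the meshes overlapping those intervals instead of rebuilding everything for the label.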
File 3 of 5
@@ -1,5 +1,6 @@
package org.janelia.saalfeldlab.paintera.meshes;

import java.util.Arrays;
import java.util.function.Function;

public interface InterruptibleFunction< T, R > extends Function< T, R >, Interruptible< T >
@@ -31,4 +32,13 @@ public static < T, R > InterruptibleFunction< T, R > fromFunction( final Functio
return fromFunctionAndInterruptible( function, t -> {} );
}

@SuppressWarnings( "unchecked" )
public static < T, R > InterruptibleFunction< T, R >[] fromFunction( final Function< T, R >[] functions )
{
return Arrays
.stream( functions )
.map( InterruptibleFunction::fromFunction )
.toArray( InterruptibleFunction[]::new );
}

}
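The new array overload simply maps the existing single-function factory over each element, which is convenient for the per-scale-level arrays used throughout this commit. A small usage sketch (class and variable names are illustrative only):

import java.util.function.Function;

import org.janelia.saalfeldlab.paintera.meshes.InterruptibleFunction;

public class FromFunctionArrayExample
{

	public static void main( final String[] args )
	{
		// One plain function per scale level; the wrapper's interruption hook is a no-op.
		final Function< Long, String > level0 = id -> "level 0: block " + id;
		final Function< Long, String > level1 = id -> "level 1: block " + id;
		@SuppressWarnings( "unchecked" )
		final Function< Long, String >[] perLevel = new Function[] { level0, level1 };

		final InterruptibleFunction< Long, String >[] wrapped = InterruptibleFunction.fromFunction( perLevel );
		System.out.println( wrapped[ 1 ].apply( 42L ) ); // prints: level 1: block 42
	}

}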
File 4 of 5
@@ -108,7 +108,7 @@ public static < D, T > InterruptibleFunction< HashWrapper< long[] >, long[] >[]
* @return Cascade of {@link Cache} that produce list of containing blocks
* for a label (key) at each scale level.
*/
public static < D, T > InterruptibleFunction< Long, Interval[] >[] blocksForLabelCachesLongKeys(
public static < D, T > InterruptibleFunctionAndCache< Long, Interval[] >[] blocksForLabelCachesLongKeys(
final DataSource< D, T > source,
final InterruptibleFunction< HashWrapper< long[] >, long[] >[] uniqueLabelLoaders,
final int[][] blockSizes,
@@ -119,7 +119,7 @@ public static < D, T > InterruptibleFunction< Long, Interval[] >[] blocksForLabe
assert uniqueLabelLoaders.length == numMipmapLevels;

@SuppressWarnings( "unchecked" )
final InterruptibleFunction< Long, Interval[] >[] caches = new InterruptibleFunction[ numMipmapLevels ];
final InterruptibleFunctionAndCache< Long, Interval[] >[] caches = new InterruptibleFunctionAndCache[ numMipmapLevels ];

LOG.debug( "Number of mipmap levels for source {}: {}", source.getName(), source.getNumMipmapLevels() );
LOG.debug( "Provided {} block sizes and {} scaling factors", blockSizes.length, scalingFactors.length );
@@ -136,9 +136,9 @@ public static < D, T > InterruptibleFunction< Long, Interval[] >[] blocksForLabe
final BlocksForLabelCacheLoader< Long > loader = BlocksForLabelCacheLoader.longKeys(
grid,
level == numMipmapLevels - 1 ? InterruptibleFunction.fromFunction( l -> new Interval[] { new FinalInterval( dims.clone() ) } ) : caches[ level + 1 ],
level == numMipmapLevels - 1 ? l -> collectAllOffsets( dims, bs, b -> fromMin( b, max, bs ) ) : relevantBlocksFromLowResInterval( grid, scalingFactors[ level + 1 ], scalingFactors[ level ] ),
key -> uniqueLabelLoaders[ finalLevel ].apply( HashWrapper.longArray( key ) ) );
caches[ level ] = fromCache( makeCache.apply( loader ).unchecked(), ( Interruptible< Long > ) loader );
level == numMipmapLevels - 1 ? l -> collectAllOffsets( dims, bs, b -> fromMin( b, max, bs ) ) : relevantBlocksFromLowResInterval( grid, scalingFactors[ level + 1 ], scalingFactors[ level ] ),
key -> uniqueLabelLoaders[ finalLevel ].apply( HashWrapper.longArray( key ) ) );
caches[ level ] = new InterruptibleFunctionAndCache<>( makeCache.apply( loader ).unchecked(), loader );
}

return caches;
@@ -196,8 +196,8 @@ public static < D, T > InterruptibleFunction< TLongHashSet, Interval[] >[] block
final BlocksForLabelCacheLoader< TLongHashSet > loader = BlocksForLabelCacheLoader.hashSetKeys(
grid,
level == numMipmapLevels - 1 ? InterruptibleFunction.fromFunction( l -> new Interval[] { new FinalInterval( dims.clone() ) } ) : caches[ level + 1 ],
level == numMipmapLevels - 1 ? l -> collectAllOffsets( dims, bs, b -> fromMin( b, max, bs ) ) : relevantBlocksFromLowResInterval( grid, scalingFactors[ level + 1 ], scalingFactors[ level ] ),
key -> uniqueLabelLoaders[ finalLevel ].apply( HashWrapper.longArray( key ) ) );
level == numMipmapLevels - 1 ? l -> collectAllOffsets( dims, bs, b -> fromMin( b, max, bs ) ) : relevantBlocksFromLowResInterval( grid, scalingFactors[ level + 1 ], scalingFactors[ level ] ),
key -> uniqueLabelLoaders[ finalLevel ].apply( HashWrapper.longArray( key ) ) );
caches[ level ] = fromCache( makeCache.apply( loader ).unchecked(), ( Interruptible< TLongHashSet > ) loader );
}

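The return types here switch from InterruptibleFunction to the more specific InterruptibleFunctionAndCache, presumably so callers keep a handle on the underlying cache and can later drop entries for labels whose blocks were modified (the first TODO item in the commit message). The class itself is not part of the diff shown on this page; the following is only a rough guess at its shape, inferred from the constructor call above. The getCache() accessor, the interruptFor method name, and the package of Interruptible are assumptions, not confirmed by this commit.

import java.util.function.Function;

import org.janelia.saalfeldlab.paintera.meshes.Interruptible; // assumed package, next to InterruptibleFunction

import net.imglib2.cache.UncheckedCache;

// Rough guess at the shape of InterruptibleFunctionAndCache, based solely on how it is
// constructed above: new InterruptibleFunctionAndCache<>( cache.unchecked(), loader ).
public class InterruptibleFunctionAndCacheSketch< T, R > implements Function< T, R >
{

	private final UncheckedCache< T, R > cache;

	private final Interruptible< T > interruptible;

	public InterruptibleFunctionAndCacheSketch( final UncheckedCache< T, R > cache, final Interruptible< T > interruptible )
	{
		this.cache = cache;
		this.interruptible = interruptible;
	}

	@Override
	public R apply( final T t )
	{
		// Delegate lookups to the backing cache (computes on miss, returns the cached value on hit).
		return cache.get( t );
	}

	public void interruptFor( final T t )
	{
		// Forward interruption requests to the wrapped loader (method name assumed).
		interruptible.interruptFor( t );
	}

	public UncheckedCache< T, R > getCache()
	{
		// Exposing the cache is what would allow dropping entries for modified labels.
		return cache;
	}

}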
File 5 of 5 (diff not loaded)
