Skip to content

Commit

Permalink
support max number of spots per view
Browse files Browse the repository at this point in the history
  • Loading branch information
StephanPreibisch committed Jul 10, 2024
1 parent 3afede8 commit 5bbad3a
Showing 1 changed file with 39 additions and 9 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -141,6 +141,9 @@ public enum Localization { NONE, QUADRATIC };
protected boolean prefetch = false;


@Option(names = {"--maxSpots" }, required = false, description = "limit the number of spots per view (choose the brightest ones), e.g. --maxSpots 10000 (default: NO LIMIT)")
protected int maxSpots = -1;

@Option(names = "--blockSize", description = "blockSize for running the interest point detection - at the scale of detection (default: 512,512,128)")
protected String blockSizeString = "512,512,128";

Expand Down Expand Up @@ -182,7 +185,9 @@ public Void call() throws Exception
final boolean onlyOverlappingRegions = overlappingOnly;
final double combineDistance = SparkInterestPointDetection.combineDistance;
final Localization localization = this.localization;
final int maxSpots = this.maxSpots;
final boolean prefetch = this.prefetch;
final boolean storeIntensities = this.storeIntensities;
final Integer medianFilter = this.medianFilter;

System.out.println( "label: " + label );
Expand All @@ -196,8 +201,10 @@ public Void call() throws Exception
System.out.println( "downsampleZ: " + downsampleZ );
System.out.println( "overlappingOnly: " + onlyOverlappingRegions );
System.out.println( "prefetching: " + prefetch );
if ( maxSpots > 0 ) System.out.println( "maxSpots: " + maxSpots );
System.out.println( "blockSize: " + Util.printCoordinates( blockSize ) );
System.out.println( "medianFilter: " + medianFilter );
System.out.println( "storeIntensities: " + storeIntensities );

//
// assemble all intervals that need to be processed
Expand Down Expand Up @@ -441,7 +448,7 @@ public Void call() throws Exception

final double[] intensities;

if ( storeIntensities )
if ( storeIntensities || maxSpots > 0 )
{
System.out.println( "Retrieving intensities for interest points '" + label + "' for " + Group.pvid(viewId) + ", " + Util.printInterval( processInterval ) + " ... " );

Expand Down Expand Up @@ -511,7 +518,7 @@ public Void call() throws Exception
interestPointsPerViewId.putIfAbsent(viewId, new ArrayList<>() );
interestPointsPerViewId.get( viewId ).add( Spark.deserializeInterestPoints(points) );

if ( storeIntensities )
if ( storeIntensities || maxSpots > 0 )
{
intensitiesPerViewId.putIfAbsent(viewId, new ArrayList<>() );
intensitiesPerViewId.get( viewId ).add( DoubleStream.of(tuple._4()).boxed().collect(Collectors.toList() ) );
Expand All @@ -531,7 +538,7 @@ public Void call() throws Exception
final List< List< InterestPoint > > ipsList = interestPointsPerViewId.get( viewId );
final List< List< Double > > intensitiesList;

if ( storeIntensities )
if ( storeIntensities || maxSpots > 0 )
intensitiesList = intensitiesPerViewId.get( viewId );
else
intensitiesList = null;
Expand All @@ -542,7 +549,7 @@ public Void call() throws Exception
final List< InterestPoint > ips = ipsList.get( l );
final List< Double > intensities;

if ( storeIntensities )
if ( storeIntensities || maxSpots > 0 )
intensities = intensitiesList.get( l );
else
intensities = null;
Expand All @@ -551,7 +558,7 @@ public Void call() throws Exception
{
myIps.addAll( ips );

if ( storeIntensities )
if ( storeIntensities || maxSpots > 0 )
myIntensities.addAll( intensities );
}
else
Expand All @@ -568,7 +575,7 @@ public Void call() throws Exception
{
myIps.add( ip );

if ( storeIntensities )
if ( storeIntensities || maxSpots > 0 )
myIntensities.add( intensities.get( i ) );
}
}
Expand All @@ -577,18 +584,41 @@ public Void call() throws Exception

if ( myIps.size() > 0 )
{
// we need to sort and assign new ids since order is assumed when loading corresponding interest points, and we will have duplicate ids
// we need to sort and assign new ids since order is assumed when loading corresponding interest points, and we will have duplicate ids otherwise
final ArrayList< InterestPoint > myIpsNewId = new ArrayList<>();

for ( int id = 0; id < myIps.size(); ++id )
myIpsNewId.add( new InterestPoint( id, myIps.get( id ).getL() ) );

System.out.println( Group.pvid( viewId ) + ": " + myIpsNewId.size() );

if ( maxSpots > 0 )
{
// filter for the brightest N spots
final ArrayList< Pair< Double, InterestPoint > > combinedList = new ArrayList<>();

for ( int i = 0; i < myIps.size(); ++i )
combinedList.add( new ValuePair<Double, InterestPoint>(myIntensities.get( i ), myIpsNewId.get( i )));

// sort from large to small
Collections.sort(combinedList, (a,b) -> b.getA().compareTo( a.getA() ) );

myIpsNewId.clear();
myIntensities.clear();

for ( int i = 0; i < Math.min( maxSpots, combinedList.size() ); ++i )
{
myIntensities.add( combinedList.get( i ).getA() );
myIpsNewId.add( combinedList.get( i ).getB() );
}
}

if ( maxSpots > 0 ) System.out.println( Group.pvid( viewId ) + " (after applying maxSpots): " + myIpsNewId.size() );

interestPoints.put(viewId, myIpsNewId);

if ( storeIntensities )
intensitiesIPs.put(viewId, myIntensities );

System.out.println( Group.pvid( viewId ) + ": " + myIpsNewId.size() );
}
else
{
Expand Down

0 comments on commit 5bbad3a

Please sign in to comment.