Skip to content

Commit

Permalink
Fix: flickery frames due to leaky event handling (#36)
Browse files · Browse the repository at this point in the history
  • Loading branch information
froyo-np authored Nov 12, 2024
1 parent 7107135 commit 0e3f675
Show file tree
Hide file tree
Showing 4 changed files with 26 additions and 24 deletions.
4 changes: 2 additions & 2 deletions packages/scatterbrain/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@alleninstitute/vis-scatterbrain",
"version": "0.0.4",
"version": "0.0.5",
"contributors": [
{
"name": "Lane Sawyer",
Expand Down Expand Up @@ -58,4 +58,4 @@
"lodash": "^4.17.21",
"regl": "^2.1.0"
}
}
}
36 changes: 15 additions & 21 deletions packages/scatterbrain/src/abstract/async-frame.ts
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ export function beginFrame<
renderItem(itemToRender, dataset, settings, maybe);
}
};
const reportStatus = (event: AsyncFrameEvent<Dataset, Item>, synchronous: boolean = false) => {
const reportStatus = (event: AsyncFrameEvent<Dataset, Item>, synchronous: boolean) => {
// we want to report our status, however the flow of events can be confusing -
// our callers anticipate an asynchronous (long running) frame to be started,
// but there are scenarios in which the whole thing is completely synchronous
Expand All @@ -102,11 +102,11 @@ export function beginFrame<
if (synchronous) {
lifecycleCallback(event);
} else {
Promise.resolve().then(() => lifecycleCallback(event));
Promise.resolve().then(() => lifecycleCallback(event))
}
};

const doWorkOnQueue = (intervalId: number) => {
const doWorkOnQueue = (intervalId: number, synchronous: boolean = false) => {
// try our best to cleanup if something goes awry
const startWorkTime = performance.now();
const cleanupOnError = (err: unknown) => {
Expand All @@ -117,7 +117,7 @@ export function beginFrame<
abort.abort(err);
clearInterval(intervalId);
// pass the error somewhere better:
reportStatus({ status: 'error', error: err }, true);
reportStatus({ status: 'error', error: err }, synchronous);
};
while (mutableCache.getNumPendingTasks() < Math.max(maximumInflightAsyncTasks, 1)) {
// We know there are items in the queue because of the check above, so we assert the type exist
Expand All @@ -131,7 +131,7 @@ export function beginFrame<
requestsForItem(itemToRender, dataset, settings, abort.signal),
partial(fancy, itemToRender),
toCacheKey,
() => reportStatus({ status: 'progress', dataset, renderedItems: [itemToRender] }, true)
() => reportStatus({ status: 'progress', dataset, renderedItems: [itemToRender] }, synchronous)
);
if (result !== undefined) {
// put this cancel callback in a list where we can invoke if something goes wrong
Expand All @@ -152,30 +152,24 @@ export function beginFrame<
if (mutableCache.getNumPendingTasks() < 1) {
// we do want to wait for that last in-flight task to actually finish though:
clearInterval(intervalId);
reportStatus({ status: 'finished' });
reportStatus({ status: 'finished' }, synchronous);
}
return;
}
};
const interval = setInterval(() => doWorkOnQueue(interval), queueProcessingIntervalMS);

// do some work right now...
reportStatus({ status: 'begin' }, true)
const interval = setInterval(() => doWorkOnQueue(interval), queueProcessingIntervalMS);
if (queue.length > 0) {
reportStatus({ status: 'begin' }, true);
doWorkOnQueue(interval);
// return a function to allow our caller to cancel the frame - guaranteed that no settings/data will be
// touched/referenced after cancellation, unless the author of render() did some super weird bad things
return {
cancelFrame: (reason?: string) => {
taskCancelCallbacks.forEach((cancelMe) => cancelMe());
abort.abort(new DOMException(reason, 'AbortError'));
clearInterval(interval);
reportStatus({ status: 'cancelled' });
},
};
doWorkOnQueue(interval, false)
}
return {
cancelFrame: () => {},
cancelFrame: (reason?: string) => {
taskCancelCallbacks.forEach((cancelMe) => cancelMe());
abort.abort(new DOMException(reason, 'AbortError'));
clearInterval(interval);
reportStatus({ status: 'cancelled' }, true);
}
};
}

Expand Down
9 changes: 8 additions & 1 deletion packages/scatterbrain/src/abstract/render-server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -168,12 +168,19 @@ export class RenderServer {
}
};
this.clients.set(client, {
frame: renderFn(image, this.cache, hijack),
frame: null,
image,
copyBuffer,
resolution,
updateRequested: null,
});
// this is worded rather awkwardly, because sometimes the frameLifecycle object returned by renderFn() represents
// a frame that is already finished!
// this is a good thing for performance, but potentially confusing - so we do our book-keeping before we actually start rendering:
const aboutToStart = this.clients.get(client); // this is the record we just put into the clients map - TS just wants to be sure it really exists:
if (aboutToStart) {
aboutToStart.frame = renderFn(image, this.cache, hijack)
}
}
}
}
1 change: 1 addition & 0 deletions packages/scatterbrain/src/dataset-cache.ts
Original file line number Diff line number Diff line change
Expand Up @@ -225,6 +225,7 @@ export class AsyncDataCache<SemanticKey extends RecordKey, CacheKey extends Reco
workingSet: Record<SemanticKey, () => Promise<D>>,
use: (items: Record<SemanticKey, D>) => void,
toCacheKey: (semanticKey: SemanticKey) => CacheKey,
// TODO: consider removing taskFinished - it would be more simple to let the caller handle this in their use() function
taskFinished?: () => void
): cancelFn | undefined {
const keys: SemanticKey[] = Object.keys(workingSet) as SemanticKey[];
Expand Down

0 comments on commit 0e3f675

Please sign in to comment.