diff --git a/examples/src/dzi/double.tsx b/examples/src/dzi/double.tsx index 19bb3eb..f6d90c4 100644 --- a/examples/src/dzi/double.tsx +++ b/examples/src/dzi/double.tsx @@ -32,8 +32,19 @@ const exampleSettings: DziRenderSettings = { view: Box2D.create([0, 0], [1, 1]), }, }; - +/** + * HEY!!! + * this is an example React Component for rendering two DZI images which share a camera. + * Additionally, both images have an SVG overlay. + * This example is as bare-bones as possible! It is NOT the recommended way to do anything, it's just trying to show + * one way of: + * 1. using our rendering utilities for DZI data, specifically in a React component. Your needs for state-management, + * SVG overlays, etc may all be different! + * + */ export function TwoClientsPOC() { + // the DZI renderer expects a "relative" camera - that means a box, from 0 to 1. 0 is the bottom or left of the image, + // and 1 is the top or right of the image, regardless of the aspect ratio of that image. const [view, setView] = useState(Box2D.create([0, 0], [1, 1])); const zoom = (e: React.WheelEvent) => { const scale = e.deltaY > 0 ? 1.1 : 0.9; diff --git a/examples/src/omezarr/app.tsx b/examples/src/omezarr/app.tsx index 4a93ecb..333dd2f 100644 --- a/examples/src/omezarr/app.tsx +++ b/examples/src/omezarr/app.tsx @@ -7,7 +7,15 @@ const demo_versa = 'https://neuroglancer-vis-prototype.s3.amazonaws.com/VERSA/sc export function AppUi() { return ; } - +/** + * HEY!!! + * this is an example React Component for rendering a single slice of an OMEZARR image in a React component + * This example is as bare-bones as possible! It is NOT the recommended way to do anything, it's just trying to show + * one way of: + * 1. using our rendering utilities for OmeZarr data, specifically in a React component. Your needs for state-management, + * slicing logic, etc might all be different! 
+ * + */ function DataPlease() { // load our canned data for now: const [omezarr, setfile] = useState(undefined); diff --git a/examples/src/omezarr/sliceview.tsx b/examples/src/omezarr/sliceview.tsx index de31c28..d0c2040 100644 --- a/examples/src/omezarr/sliceview.tsx +++ b/examples/src/omezarr/sliceview.tsx @@ -15,6 +15,8 @@ type Props = { }; const settings: RenderSettings = { tileSize: 256, + // in a "real" app, you'd most likely expose sliders to control how the data in the file + // gets mapped to pixel/color intensity on the screen. for now, we just use hardcoded data gamut: { R: { gamut: { min: 0, max: 80 }, index: 0 }, G: { gamut: { min: 0, max: 100 }, index: 1 }, @@ -23,10 +25,17 @@ const settings: RenderSettings = { plane: 'xy', planeIndex: 3, camera: { + // the OMEZARR renderer expects a box in whatever space is given by the omezarr file itself in its + // axes metadata — for example, millimeters. if you load a volume that says it's 30mm X 30mm X 10mm, + // and you want to view XY slices and have them fit perfectly on your screen, then a box + // like [0,0],[30,30] would be appropriate! view: Box2D.create([0, 0], [250, 120]), screenSize: [500, 500], }, }; +// this example uses the RenderServer utility - this lets you render to canvas elements without having to +// initialize WebGL on that canvas itself, at a small cost to performance. the compose function is the configurable +// step used to get the pixels from WebGL to the target canvas.
function compose(ctx: CanvasRenderingContext2D, image: ImageData) { ctx.putImageData(image, 0, 0); } @@ -50,7 +59,7 @@ export function SliceView(props: Props) { useEffect(() => { if (server && renderer.current && cnvs.current && omezarr) { - const hey: RenderFrameFn = (target, cache, callback) => { + const renderFn: RenderFrameFn = (target, cache, callback) => { if (renderer.current) { return renderer.current( omezarr, @@ -63,17 +72,18 @@ return null; }; server.beginRendering( - hey, + renderFn, + // here's where we handle lifecycle events in that rendering function (it's async and slow because it may have to fetch data from far away) (e) => { switch (e.status) { case 'begin': server.regl?.clear({ framebuffer: e.target, color: [0, 0, 0, 0], depth: 1 }); break; case 'progress': - // wanna see the tiles as they arrive? e.server.copyToClient(compose); break; case 'finished': { + // the bare minimum event handling would be this: copy WebGL's work to the target canvas using the compose function e.server.copyToClient(compose); } }