From 360a561973917e6f94b32a6740896738c6e87d15 Mon Sep 17 00:00:00 2001 From: Trevor Manz Date: Fri, 19 Jul 2024 16:32:46 -0400 Subject: [PATCH] feat!: Remove ImJoy API and cleanup notebook with preferred anywidget API (#186) * chore: Move notebooks into python/ * chore: Remove imjoy * migrate mandelbrot * chore: Update notebooks * ignore zarr * cleanup readme * cleanup deps --- .gitignore | 1 + README.md | 90 +++-- binder/environment.yml | 17 - example/README.md | 40 --- example/VizarrDemo.imjoy.html | 44 --- example/getting_started.ipynb | 337 ------------------ example/imjoy_plugin.py | 27 -- example/requirements.txt | 10 - main.ts | 21 -- package-lock.json | 92 +---- package.json | 1 - python/Untitled.ipynb | 40 +++ .../notebooks}/IDR_example.ipynb | 40 ++- python/notebooks/README.md | 35 ++ .../notebooks}/create_fixture.py | 2 +- python/notebooks/getting_started.ipynb | 214 +++++++++++ .../notebooks}/mandelbrot.ipynb | 86 +++-- .../notebooks}/spatial_transformations.ipynb | 43 ++- types/imjoy.d.ts | 15 - 19 files changed, 457 insertions(+), 698 deletions(-) delete mode 100644 binder/environment.yml delete mode 100644 example/README.md delete mode 100644 example/VizarrDemo.imjoy.html delete mode 100644 example/getting_started.ipynb delete mode 100644 example/imjoy_plugin.py delete mode 100644 example/requirements.txt create mode 100644 python/Untitled.ipynb rename {example => python/notebooks}/IDR_example.ipynb (64%) create mode 100644 python/notebooks/README.md rename {example => python/notebooks}/create_fixture.py (98%) create mode 100644 python/notebooks/getting_started.ipynb rename {example => python/notebooks}/mandelbrot.ipynb (81%) rename {example => python/notebooks}/spatial_transformations.ipynb (72%) delete mode 100644 types/imjoy.d.ts diff --git a/.gitignore b/.gitignore index 635e4394..086c2525 100644 --- a/.gitignore +++ b/.gitignore @@ -33,3 +33,4 @@ __pycache__ .venv .ipynb_checkpoints dist/ +astronaut.zarr diff --git a/README.md b/README.md index 151ddb42..e1cbbc11 100644 --- a/README.md +++ b/README.md @@ -1,49 +1,79 @@ +

- Vizarr + vizarr +

+ +

+ view multiscale zarr images online and in notebooks +
+
+ app . + getting started +

+

-[![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/hms-dbmi/vizarr/main?filepath=example%2Fgetting_started.ipynb) -[![launch ImJoy](https://imjoy.io/static/badge/launch-imjoy-badge.svg)](https://imjoy.io/lite?plugin=https://github.com/hms-dbmi/vizarr/blob/main/example/VizarrDemo.imjoy.html) -[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/hms-dbmi/vizarr/blob/main/example/mandelbrot.ipynb) +[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/hms-dbmi/vizarr/blob/main/python/notebooks/mandelbrot.ipynb) -![Multiscale OME-Zarr in Jupyter Notebook with Vizarr](./assets/screenshot.png) +
+<img alt="Multiscale OME-Zarr in Jupyter Notebook with Vizarr" src="./assets/screenshot.png">
+

-Vizarr is a minimal, purely client-side program for viewing Zarr-based images. It is built with
-[Viv](https://github.com/hms-dbmi/viv) and exposes a Python API using the
-[`imjoy-rpc`](https://github.com/imjoy-team/imjoy-rpc), allowing users to programatically view multiplex
-and multiscale images from within a Jupyter Notebook. The ImJoy plugin registers a codec for Python
-`zarr.Array` and `zarr.Group` objects, enabling Viv to securely request chunks lazily via
-[Zarr.js](https://github.com/gzuidhof/zarr.js/). This means that other valid zarr-python
-[stores](https://zarr.readthedocs.io/en/stable/api/storage.html) can be viewed remotely with Viv,
-enabling flexible workflows when working with large datasets.
+**Vizarr** is a minimal, purely client-side program for viewing zarr-based images.

-### Remote image registration workflow
-We created Vizarr to enhance interactive multimodal image alignment using the
-[wsireg](https://github.com/NHPatterson/wsireg) library. We describe a rapid workflow where
-comparison of registration methods as well as visual verification of alignnment can be assessed
-remotely, leveraging high-performance computational resources for rapid image processing and
-Viv for interactive web-based visualization in a laptop computer. The Jupyter Notebook containing
-the workflow described in the manuscript can be found in [`multimodal_registration_vizarr.ipynb`](multimodal_registration_vizarr.ipynb). For more information, please read our preprint [doi:10.31219/osf.io/wd2gu](https://doi.org/10.31219/osf.io/wd2gu).
-
-> Note: The data required to run this notebook is too large to include in this repository and can be made avaiable upon request.

+- ⚡ **GPU-accelerated rendering** with [Viv](https://github.com/hms-dbmi/viv)
+- 💻 Purely **client-side** zarr access with [zarrita.js](https://github.com/manzt/zarrita.js)
+- 🌎 A **standalone [web app](https://hms-dbmi.github.io/vizarr/)** for viewing entirely in the browser.
+- 🐍 An [anywidget](https://github.com/manzt/anywidget) **Python API** for
+  programmatic control in notebooks.
+- 📦 Supports any `zarr-python` [store](https://zarr.readthedocs.io/en/stable/api/storage.html)
+  as a backend.

 ### Data types
-Vizarr supports viewing 2D slices of n-Dimensional Zarr arrays, allowing users to choose
-a single channel or blended composites of multiple channels during analysis. It has special support
-for the developing [OME-Zarr format](https://github.com/ome/omero-ms-zarr/blob/master/spec.md)
-for multiscale and multimodal images. Currently [Viv](https://github.com/hms-dbmi/viv) supports
-`i1`, `i2`, `i4`, `u1`, `u2`, `u4`, and `f4` arrays, but contributions are welcome to support more `np.dtypes`!
-### Getting started
-The easiest way to get started with `vizarr` is to clone this repository and open one of
-the example [Jupyter Notebooks](example/).
+
+**Vizarr** supports viewing 2D slices of n-Dimensional Zarr arrays, allowing
+users to choose a single channel or blended composites of multiple channels
+during analysis. It has special support for the developing OME-NGFF format for
+multiscale and multimodal images. Currently, Viv supports `int8`, `int16`,
+`int32`, `uint8`, `uint16`, `uint32`, `float32`, and `float64` arrays, but
+contributions are welcome to support more `np.dtypes`!
+
+### Getting started
+
+Copy and paste a URL to a Zarr store as the `?source` query parameter in the
+**[web app](https://hms-dbmi.github.io/vizarr/)**. 
For example, to view the +[example data](https://minio-dev.openmicroscopy.org/idr/v0.3/idr0062-blin-nuclearsegmentation/6001240.zarr) +from the IDR, you can use the following URL: + +``` +https://hms-dbmi.github.io/vizarr/?source=https://minio-dev.openmicroscopy.org/idr/v0.3/idr0062-blin-nuclearsegmentation/6001240.zarr +``` + +Otherwise you can try out the Python API in a Jupyter Notebook, following [the +examples](./python/notebooks/getting_started.ipynb). + +```sh +pip install vizarr +``` + +```python +import vizarr +import zarr + +store = zarr.open("./path/to/ome.zarr") +viewer = vizarr.Viewer() +viewer.add_image(store) +viewer +``` ### Limitations + `vizarr` was built to support the registration use case above where multiple, pyramidal OME-Zarr images are viewed within a Jupyter Notebook. Support for other Zarr arrays is supported but not as well tested. More information regarding the viewing of generic Zarr arrays can be found in the example notebooks. ### Citation + If you are using Vizarr in your research, please cite our paper: > Trevor Manz, Ilan Gold, Nathan Heath Patterson, Chuck McCallum, Mark S Keller, Bruce W Herr II, Katy Börner, Jeffrey M Spraggins, Nils Gehlenborg, diff --git a/binder/environment.yml b/binder/environment.yml deleted file mode 100644 index 1128b0d0..00000000 --- a/binder/environment.yml +++ /dev/null @@ -1,17 +0,0 @@ -name: vizarr -channels: - - conda-forge - - defaults -dependencies: - - fsspec - - ipywidgets - - jupyter-server-proxy - - numba - - numpy - - requests - - scikit-image - - zarr - - pip: - - imjoy>=0.10.0 - - imjoy-jupyter-extension>=0.2.14 - - imjoy-rpc>=0.2.12 diff --git a/example/README.md b/example/README.md deleted file mode 100644 index f3d26da9..00000000 --- a/example/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# Examples - -## Install Requirements - -The examples require the `imjoy-jupyter-extension`. If running locally, please install the following: - -```bash -$ pip install -U imjoy-jupyter-extension -$ pip install -r requirements.txt -``` - -## Getting Started [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/hms-dbmi/vizarr/main?filepath=example%2Fgetting_started.ipynb) - -This example will work in the Jupyter Notebook (not jupyterlab). - -```bash -$ jupyter notebook getting_started.ipynb -``` - -## Viewing an Image from the Imaging Data Resource [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/hms-dbmi/vizarr/main?filepath=example%2FIDR_example.ipynb) - -OME-Zarr is a developing open standard for imaging from the OME community. The [Imaging Data Resource](https://idr.openmicroscopy.org) (IDR) has provided serveral images in this experimental format which are publically available. - -```bash -$ jupyter notebook IDR_example.ipynb -``` - -## Display a Zoomable Mandelbrot Set [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/hms-dbmi/vizarr/main?filepath=example%2Fmandelbrot.ipynb) - -This notebook a contains a `vizarr` example visualizing a generic multiscale Zarr. The first cell contains code to create the underlying generative Zarr store. It dynamically creates "chunks" at different zoom levels and associated array metadata. 
- -```bash -$ jupyter notebook mandelbrot.ipynb -``` - -## Display an Image in an ImJoy Plugin [![launch ImJoy](https://imjoy.io/static/badge/launch-imjoy-badge.svg)](https://imjoy.io/lite?plugin=https://github.com/hms-dbmi/vizarr/blob/main/example/VizarrDemo.imjoy.html) - -The [demo plugin](VizarrDemo.imjoy.html) shows how to build an image visualization plugin with `vizarr` in [ImJoy](https://imjoy.io). - - diff --git a/example/VizarrDemo.imjoy.html b/example/VizarrDemo.imjoy.html deleted file mode 100644 index 9bf790a6..00000000 --- a/example/VizarrDemo.imjoy.html +++ /dev/null @@ -1,44 +0,0 @@ - -# VizarrDemo - -A demo plugin which uses Vizarr to visualize zarr images - -See https://github.com/hms-dbmi/vizarr for details. - - - - -{ - "name": "VizarrDemo", - "type": "web-worker", - "tags": [], - "ui": "", - "version": "0.2.0", - "cover": "", - "description": "simple vizarr demo", - "icon": "extension", - "inputs": null, - "outputs": null, - "api_version": "0.1.8", - "env": "", - "permissions": [], - "requirements": [], - "dependencies": [] -} - - - diff --git a/example/getting_started.ipynb b/example/getting_started.ipynb deleted file mode 100644 index 91eb76c5..00000000 --- a/example/getting_started.ipynb +++ /dev/null @@ -1,337 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "# Hello, `vizarr`\n", - "\n", - "This example explains the basic usage of `vizarr` as an Imjoy Plugin in a jupyter notebook. " - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Create a multiscale OME-Zarr\n", - "\n", - "The easiest way to use `vizarr` in a jupyter notebook is to have an OME-Zarr compliant image. OME-Zarr is a developing standard from the OME-Community and currently considered experimental as it is under active development. The specification can be found [here](https://github.com/ome/omero-ms-zarr/blob/master/spec.md). \n", - " \n", - "Creating an OME-Zarr for your own images can be accomplished by using Bio-Formats, `bioformats2raw` with the `--file_type=zarr --dimension-order=XYZCT` options and adding `omero` metadata to the root attrs of the resulting multiscale group. \n", - "\n", - "For convenience, we have included a simple function to create a multiscale OME-Zarr below." - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [], - "source": [ - "from create_fixture import create_ome_zarr\n", - "create_ome_zarr(\"astronaut.zarr\") # creates an example OME-Zarr in the current directory" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Open OME-Zarr in jupyter with `zarr-python`\n", - "\n", - "The root of an OME-Zarr is a group that aheres to the `multiscales` zarr extension (if pyramidal) and also contains `omero` metadata describing _how_ to render the image. Here we open the OME-Zarr as a `zarr.Group` using `zarr-python`. " - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [], - "source": [ - "import zarr\n", - "multiscale_astronaut = zarr.open(\"astronaut.zarr\", mode=\"r\") # open the zarr created above in jupyter kernel" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Create `vizarr` image and view via Imjoy Plugin\n", - "\n", - "A `vizarr` image is simply a python dictionary specifying how to initially render the zarr-based store in the viewer. The `source` field must be a `zarr.Array` or `zarr.Group`. If the `zarr.Group` is for an OME-Zarr, `source` is the only required field. 
If it is an `zarr.Array` or `zarr.Group` that is not OME-Zarr, additional fields are required for rendering (described below)." - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "INFO:ImJoy-RPC:Using jupyter connection for imjoy-rpc\n" - ] - }, - { - "data": { - "application/javascript": [ - "window.connectPlugin && window.connectPlugin()" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "from imjoy_plugin import run_vizarr\n", - "\n", - "# Create Zarr \n", - "astronaut_img = { \"source\": multiscale_astronaut, \"name\": \"astronaut\" }\n", - "\n", - "# Run vizarr\n", - "run_vizarr(astronaut_img)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Create a generic array layer\n", - "\n", - "`vizarr` also supports custom `zarr.Array` and `zarr.Group`s, but requires additional metadata to render. Viewing a custom `zarr` has the following requirements:\n", - "\n", - "- The last two dimensions of the `zarr.Array` must be `YX`.\n", - "- If an `zarr.Array` has more than 2D dimensions, non-YX axis much have a `chunksize` of `1`.\n", - "- If a `zarr.Group` is provided, it must implement the `multiscales` specification and subresolution arrays must adhere to the same `zarr.Array` requirements outlined above." - ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [], - "source": [ - "import numpy as np\n", - "\n", - "# Create in-memory numpy array\n", - "arr = np.random.randint(0, 255, (1024, 1024), dtype=np.uint8)\n", - "\n", - "# Wrap array as `zarr.Array`\n", - "z_zarr = zarr.array(arr)\n", - "\n", - "# Create a vizarr image from custom zarr.Array\n", - "noise = {\n", - " \"source\": zarr.array(arr), \n", - " \"opacity\": 0.5, # optional, default 1\n", - " \"name\": \"noise\", # optional, default image_x\n", - "}" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### View multiple images\n", - "\n", - "`run_vizarr` takes a list of images which can be viewed in the same interactive scene as different image layers." - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": { - "scrolled": true - }, - "outputs": [ - { - "data": { - "application/javascript": [ - "window.connectPlugin && window.connectPlugin()" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "run_vizarr([astronaut_img, noise])" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Use `channel_axis` to choose axis to additively blend\n", - "\n", - "Vizarr was designed to support viewing a subset of multiscale OME-Zarr images, but it can support blending 2D slices of generic `zarr.Array`s. For a `zarr.Array` with more than 2 dimensions, a `channel_axis` must be specified to tell vizarr which dimension to split into separate blended channels. \n", - "\n", - "Here we will load one of the sub-resolution `zarr.Array`s from the OME-Zarr we created and view it with Vizarr." - ] - }, - { - "cell_type": "code", - "execution_count": 6, - "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "(1, 3, 1, 512, 512)" - ] - }, - "execution_count": 6, - "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "z_arr = zarr.open('astronaut.zarr').get('2')\n", - "z_arr.shape" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "The channel axis for an OME-Zarr is always `1`, but loading the image with a different channel axis will load a single plane." - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "data": { - "application/javascript": [ - "window.connectPlugin && window.connectPlugin()" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "run_vizarr({ \"source\": z_arr, \"channel_axis\": 0 }) # first dim is size 1" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "You can specify other rendering properties to tell vizarr how to render the channels along the specified axis." - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "data": { - "application/javascript": [ - "window.connectPlugin && window.connectPlugin()" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - }, - { - "data": { - "text/html": [ - "
" - ], - "text/plain": [ - "" - ] - }, - "metadata": {}, - "output_type": "display_data" - } - ], - "source": [ - "alt_img = {\n", - " \"source\": z_arr,\n", - " \"name\": \"CYM astronaut\", # Image name\n", - " \"channel_axis\": 1, # size == 3, lists below must be of same length if provided\n", - " \"colors\": [\"#FFFF00\", \"#FF00FF\", \"#00FFFF\"], # optional\n", - " \"names\": [\"yellow\", \"magenta\", \"cyan\"], # optional, default channel_X\n", - " \"visibilities\": [True, False, True], # optional, default True\n", - " \"contrast_limits\": [[0, 1] for _ in range(3)], # optional, but recommended\n", - "}\n", - "run_vizarr(alt_img)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.5" - } - }, - "nbformat": 4, - "nbformat_minor": 4 -} diff --git a/example/imjoy_plugin.py b/example/imjoy_plugin.py deleted file mode 100644 index 7f08f0b2..00000000 --- a/example/imjoy_plugin.py +++ /dev/null @@ -1,27 +0,0 @@ -from imjoy import api -from imjoy_rpc import register_default_codecs - -register_default_codecs() - -class Plugin: - def __init__(self, images, view_state=None): - if not isinstance(images, list): - images = [images] - self.images = images - self.view_state = view_state - - async def setup(self): - pass - - async def run(self, ctx): - viewer = await api.createWindow( - type="vizarr", src="https://hms-dbmi.github.io/vizarr" - ) - if self.view_state: - await viewer.set_view_state(self.view_state) - for img in self.images: - await viewer.add_image(img) - - -def run_vizarr(images, view_state=None): - api.export(Plugin(images, view_state)) diff --git a/example/requirements.txt b/example/requirements.txt deleted file mode 100644 index c5e5fce3..00000000 --- a/example/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -numpy -ipywidgets>=7.0.0 -scikit-image -imjoy>=0.10.10 -fsspec -imjoy-jupyter-extension>=0.2.14 -imjoy-rpc>=0.2.30 -zarr -numba -requests diff --git a/main.ts b/main.ts index f51f0613..533d2965 100644 --- a/main.ts +++ b/main.ts @@ -1,19 +1,6 @@ import * as vizarr from './src/index'; import debounce from 'just-debounce-it'; -async function initImjoy(viewer: vizarr.VizarrViewer) { - const { imjoyRPC } = await import('imjoy-rpc'); - const api = await imjoyRPC.setupRPC({ - name: 'vizarr', - description: 'A minimal, purely client-side program for viewing Zarr-based images with Viv & ImJoy.', - version: vizarr.version, - }); - api.export({ - add_image: viewer.addImage, - set_view_state: viewer.setViewState, - }); -} - function initStandaloneApp(viewer: vizarr.VizarrViewer) { const url = new URL(window.location.href); @@ -58,15 +45,7 @@ function initStandaloneApp(viewer: vizarr.VizarrViewer) { async function main() { console.log(`vizarr v${vizarr.version}: https://github.com/hms-dbmi/vizarr`); - const viewer = await vizarr.createViewer(document.querySelector('#root')!); - - // enable imjoy api when loaded as an iframe - if (window.self !== window.top) { - initImjoy(viewer); - return; - } - initStandaloneApp(viewer); } diff --git a/package-lock.json b/package-lock.json index 1ca4a11c..b454a313 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,7 +12,6 @@ "@material-ui/core": "^4.11.0", "@material-ui/icons": "^4.9.1", "deck.gl": "^8.9.34", - "imjoy-rpc": 
"^0.2.23", "jotai": "^1.0.0", "just-debounce-it": "^3.1.1", "p-map": "^5.5.0", @@ -1810,11 +1809,6 @@ "win32" ] }, - "node_modules/@socket.io/component-emitter": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/@socket.io/component-emitter/-/component-emitter-3.1.2.tgz", - "integrity": "sha512-9BCxFwvbGg/RsZK9tjXd8s4UcwR0MWeFQ1XEKIQVVvAGJyINdrqKMcTRyLoK8Rse1GjzLV9cwjWV1olXRWEXVA==" - }, "node_modules/@turf/boolean-clockwise": { "version": "5.1.5", "resolved": "https://registry.npmjs.org/@turf/boolean-clockwise/-/boolean-clockwise-5.1.5.tgz", @@ -2443,6 +2437,7 @@ "version": "4.3.5", "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dev": true, "dependencies": { "ms": "2.1.2" }, @@ -2515,26 +2510,6 @@ "integrity": "sha512-VY+J0e4SFcNfQy19MEoMdaIcZLmDCprqvBtkii1WTCTQHpRvf5N8+3kTYCgL/PcntvwQvmMJWTuDPsq+IlhWKQ==", "dev": true }, - "node_modules/engine.io-client": { - "version": "6.5.4", - "resolved": "https://registry.npmjs.org/engine.io-client/-/engine.io-client-6.5.4.tgz", - "integrity": "sha512-GeZeeRjpD2qf49cZQ0Wvh/8NJNfeXkXXcoGh+F77oEAgo9gUHwT1fCRxSNU+YEEaysOJTnsFHmM5oAcPy4ntvQ==", - "dependencies": { - "@socket.io/component-emitter": "~3.1.0", - "debug": "~4.3.1", - "engine.io-parser": "~5.2.1", - "ws": "~8.17.1", - "xmlhttprequest-ssl": "~2.0.0" - } - }, - "node_modules/engine.io-parser": { - "version": "5.2.3", - "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.3.tgz", - "integrity": "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==", - "engines": { - "node": ">=10.0.0" - } - }, "node_modules/esbuild": { "version": "0.21.5", "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", @@ -2802,14 +2777,6 @@ "node": ">=6.9.0" } }, - "node_modules/imjoy-rpc": { - "version": "0.2.42", - "resolved": "https://registry.npmjs.org/imjoy-rpc/-/imjoy-rpc-0.2.42.tgz", - "integrity": "sha512-XRd1gZeSZuq9UuMVL5QDVG1XwlnE78J6xL5Fx1FA6uWcwyQqYLQCCOrT183YWWQGxP+kRv50bS1CqaWvp9E4IQ==", - "dependencies": { - "socket.io-client": "^4.0.1" - } - }, "node_modules/indefinitely-typed": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/indefinitely-typed/-/indefinitely-typed-1.1.0.tgz", @@ -3142,7 +3109,8 @@ "node_modules/ms": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true }, "node_modules/nanoid": { "version": "3.3.7", @@ -3479,32 +3447,6 @@ "semver": "bin/semver.js" } }, - "node_modules/socket.io-client": { - "version": "4.7.5", - "resolved": "https://registry.npmjs.org/socket.io-client/-/socket.io-client-4.7.5.tgz", - "integrity": "sha512-sJ/tqHOCe7Z50JCBCXrsY3I2k03iOiUe+tj1OmKeD2lXPiGH/RUCdTZFoqVyN7l1MnpIzPrGtLcijffmeouNlQ==", - "dependencies": { - "@socket.io/component-emitter": "~3.1.0", - "debug": "~4.3.2", - "engine.io-client": "~6.5.2", - "socket.io-parser": "~4.2.4" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/socket.io-parser": { - "version": "4.2.4", - "resolved": "https://registry.npmjs.org/socket.io-parser/-/socket.io-parser-4.2.4.tgz", - "integrity": "sha512-/GbIKmo8ioc+NIWIhwdecY0ge+qVBSMdgxGygevmdHj24bsfgtCmcUUcQ5ZzcylGFHsN3k4HB4Cgkl96KVnuew==", - 
"dependencies": { - "@socket.io/component-emitter": "~3.1.0", - "debug": "~4.3.1" - }, - "engines": { - "node": ">=10.0.0" - } - }, "node_modules/source-map-js": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", @@ -3696,39 +3638,11 @@ "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.3.0.tgz", "integrity": "sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==" }, - "node_modules/ws": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ws/-/ws-8.17.1.tgz", - "integrity": "sha512-6XQFvXTkbfUOZOKKILFG1PDK2NDQs4azKQl26T0YS5CxqWLgXajbPZ+h4gZekJyRqFU8pvnbAbbs/3TgRPy+GQ==", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, "node_modules/xml-utils": { "version": "1.10.1", "resolved": "https://registry.npmjs.org/xml-utils/-/xml-utils-1.10.1.tgz", "integrity": "sha512-Dn6vJ1Z9v1tepSjvnCpwk5QqwIPcEFKdgnjqfYOABv1ngSofuAhtlugcUC3ehS1OHdgDWSG6C5mvj+Qm15udTQ==" }, - "node_modules/xmlhttprequest-ssl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/xmlhttprequest-ssl/-/xmlhttprequest-ssl-2.0.0.tgz", - "integrity": "sha512-QKxVRxiRACQcVuQEYFsI1hhkrMlrXHPegbbd1yn9UHOmRxY+si12nQYzri3vbzt8VdTTRviqcKxcyllFas5z2A==", - "engines": { - "node": ">=0.4.0" - } - }, "node_modules/yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", diff --git a/package.json b/package.json index 11fba86f..19e1d371 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,6 @@ "@material-ui/core": "^4.11.0", "@material-ui/icons": "^4.9.1", "deck.gl": "^8.9.34", - "imjoy-rpc": "^0.2.23", "jotai": "^1.0.0", "just-debounce-it": "^3.1.1", "p-map": "^5.5.0", diff --git a/python/Untitled.ipynb b/python/Untitled.ipynb new file mode 100644 index 00000000..2953b38a --- /dev/null +++ b/python/Untitled.ipynb @@ -0,0 +1,40 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "20794451-c89f-497d-aa89-3c6a432326fc", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 5 +} diff --git a/example/IDR_example.ipynb b/python/notebooks/IDR_example.ipynb similarity index 64% rename from example/IDR_example.ipynb rename to python/notebooks/IDR_example.ipynb index 6903b121..c9ba8d45 100644 --- a/example/IDR_example.ipynb +++ b/python/notebooks/IDR_example.ipynb @@ -6,7 +6,16 @@ "source": [ "# Viewing an image from the IDR\n", "\n", - "OME-Zarr is a developing open standard for imaging from the OME community. The Imaging Data Resource (IDR) has provided serveral images in this experimental format which are publically avaiable." + "OME-NGFF is an open standard for imaging from the OME community. The Imaging Data Resource (IDR) provides serveral publically available images in this format." 
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "!pip install --quiet vizarr fsspec zarr requests aiohttp"
    ]
   },
   {
@@ -42,26 +51,20 @@
  {
   "cell_type": "code",
   "execution_count": null,
-  "metadata": {
-   "scrolled": true
-  },
-  "outputs": [],
-  "source": [
-   "from imjoy_plugin import run_vizarr\n",
-   "run_vizarr({ \"source\": z_grp })"
-  ]
- },
- {
-  "cell_type": "markdown",
   "metadata": {},
+  "outputs": [],
   "source": [
-   "OME-Zarr is a developing community standard. If you like to get involved, more information can be found on [`image.sc`](https://image.sc) and the [`ome-zarr-py`](https://github.com/ome/ome-zarr-py) or [`omero-ms-zarr`](https://github.com/ome/omero-ms-zarr) GitHub repositories."
+   "import vizarr\n",
+   "\n",
+   "viewer = vizarr.Viewer()\n",
+   "viewer.add_image(source=z_grp)\n",
+   "viewer"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
-  "display_name": "Python 3",
+  "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
@@ -75,7 +78,14 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-   "version": "3.8.5"
+   "version": "3.12.3"
+  },
+  "widgets": {
+   "application/vnd.jupyter.widget-state+json": {
+    "state": {},
+    "version_major": 2,
+    "version_minor": 0
+   }
  }
 },
 "nbformat": 4,
diff --git a/python/notebooks/README.md b/python/notebooks/README.md
new file mode 100644
index 00000000..5e2a2393
--- /dev/null
+++ b/python/notebooks/README.md
@@ -0,0 +1,35 @@
+# Examples
+
+## Install Requirements
+
+The examples require the `vizarr` [anywidget](https://github.com/manzt/anywidget). If running locally, please install the following:
+
+```bash
+pip install vizarr
+```
+
+## Getting Started
+
+This example works in Jupyter, JupyterLab, VS Code, and more. We recommend trying it in JupyterLab.
+
+```bash
+$ jupyter lab getting_started.ipynb
+```
+
+## Viewing an Image from the Imaging Data Resource
+
+OME-NGFF is an open standard for imaging from the OME community. The [Imaging Data Resource](https://idr.openmicroscopy.org) (IDR)
+provides several images in this format which are publicly available.
+
+```bash
+$ jupyter lab IDR_example.ipynb
+```
+
+## Display a Zoomable Mandelbrot Set
+
+This notebook contains a `vizarr` example visualizing a generic multiscale Zarr. The first cells create the
+underlying generative Zarr store. It dynamically creates "chunks" at different zoom levels and associated array metadata. 
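+
+To give a sense of the mechanism, the sketch below is a single-resolution version of such an "on demand" store: the array metadata is written up front and each chunk is computed the first time it is requested. It assumes the `zarr-python` 2.x API used by the notebook; the class name and the pattern it draws are purely illustrative and are not the notebook's actual code.
+
+```python
+import json
+
+import numpy as np
+import zarr
+
+
+class CheckerboardStore(dict):
+    """A read-only zarr (v2) store whose chunks are computed on demand (illustrative sketch)."""
+
+    def __init__(self, size=1024, chunk=256):
+        super().__init__()
+        self.chunk = chunk
+        # Array metadata is created eagerly, just like the notebook's store.
+        self[".zarray"] = json.dumps({
+            "zarr_format": 2,
+            "shape": [size, size],
+            "chunks": [chunk, chunk],
+            "dtype": "|u1",
+            "compressor": None,
+            "fill_value": 0,
+            "order": "C",
+            "filters": None,
+        }).encode()
+
+    def __missing__(self, key):
+        # Only chunk keys like "3.2" are synthesized; anything else is a normal miss.
+        try:
+            y, x = map(int, key.split("."))
+        except ValueError:
+            raise KeyError(key)
+        value = 255 * ((x + y) % 2)  # checkerboard tile
+        return np.full((self.chunk, self.chunk), value, dtype="u1").tobytes()
+
+
+arr = zarr.open(CheckerboardStore(), mode="r")  # behaves like any other zarr.Array
+print(arr[0, 0], arr[0, 300])  # chunks are generated only when sliced
+```
+
+The notebook extends the same idea to a full image pyramid by also writing `multiscales` metadata and one array per zoom level, so vizarr can request only the tiles needed for the current view.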
+ +```bash +$ jupyter lab mandelbrot.ipynb +``` \ No newline at end of file diff --git a/example/create_fixture.py b/python/notebooks/create_fixture.py similarity index 98% rename from example/create_fixture.py rename to python/notebooks/create_fixture.py index 64b2ea3c..bb1d850b 100644 --- a/example/create_fixture.py +++ b/python/notebooks/create_fixture.py @@ -11,7 +11,7 @@ def create_ome_zarr(zarr_directory, dtype="f4"): base = np.tile(data.astronaut(), (4, 4, 1)) - gaussian = list(pyramid_gaussian(base, downscale=2, max_layer=3, multichannel=True)) + gaussian = list(pyramid_gaussian(base, downscale=2, max_layer=3, channel_axis=-1)) pyramid = [] # convert each level of pyramid into 5D image (t, c, z, y, x) diff --git a/python/notebooks/getting_started.ipynb b/python/notebooks/getting_started.ipynb new file mode 100644 index 00000000..86b93731 --- /dev/null +++ b/python/notebooks/getting_started.ipynb @@ -0,0 +1,214 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Hello, `vizarr`\n", + "\n", + "This example explains the basic usage of `vizarr` [anywidget](https://github.com/manzt/anywidget)." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create a multiscale OME-Zarr\n", + "\n", + "The easiest way to use `vizarr` in a jupyter notebook is to have an OME-Zarr compliant image. OME-Zarr is a developing standard from the OME-Community and currently considered experimental as it is under active development. The specification can be found [here](https://github.com/ome/omero-ms-zarr/blob/master/spec.md). \n", + " \n", + "Creating an OME-Zarr for your own images can be accomplished by using Bio-Formats, `bioformats2raw` with the `--file_type=zarr --dimension-order=XYZCT` options and adding `omero` metadata to the root attrs of the resulting multiscale group. \n", + "\n", + "For convenience, we have included a simple function to create a multiscale OME-Zarr below." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from create_fixture import create_ome_zarr\n", + "create_ome_zarr(\"astronaut.zarr\") # creates an example OME-Zarr in the current directory" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Open OME-Zarr in jupyter with `zarr-python`\n", + "\n", + "The root of an OME-Zarr is a group that aheres to the `multiscales` zarr extension (if pyramidal) and also contains `omero` metadata describing _how_ to render the image. Here we open the OME-Zarr as a `zarr.Group` using `zarr-python`. " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import zarr\n", + "multiscale_astronaut = zarr.open(\"astronaut.zarr\", mode=\"r\") # open the zarr created above in jupyter kernel" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### View the image in `vizarr`\n", + "\n", + "The vizarr Python API allows basic viewing of large zarr datasets in computational notebooks. The `vizarr.Viewer` allows adding images to the view with the `.add_image` API. The `source` field must be a `zarr.Array` or `zarr.Group`. If the `zarr.Group` is for an OME-Zarr, `source` is the only required field. If it is an `zarr.Array` or `zarr.Group` that is not OME-Zarr, additional fields are required for rendering (described below)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import vizarr\n", + "\n", + "viewer = vizarr.Viewer()\n", + "viewer.add_image(source=multiscale_astronaut, name=\"astronaut\")\n", + "viewer" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create a generic array layer\n", + "\n", + "`vizarr` also supports custom `zarr.Array` and `zarr.Group`s, but requires additional metadata to render. Viewing a custom `zarr` has the following requirements:\n", + "\n", + "- The last two dimensions of the `zarr.Array` must be `YX`.\n", + "- If an `zarr.Array` has more than 2D dimensions, non-YX axis much have a `chunksize` of `1`.\n", + "- If a `zarr.Group` is provided, it must implement the `multiscales` specification and subresolution arrays must adhere to the same `zarr.Array` requirements outlined above." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "\n", + "# Create in-memory numpy array\n", + "arr = np.random.randint(0, 255, (1024, 1024), dtype=np.uint8)\n", + "\n", + "viewer.add_image(\n", + " source=zarr.array(arr), # make sure it's a zarr\n", + " opacity=0.5, # optional, default 1\n", + " name=\"noise\", # optional, default image_x\n", + ")\n", + "\n", + "# view canvas above" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "run_vizarr([astronaut_img, noise])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Use `channel_axis` to choose axis to additively blend\n", + "\n", + "Vizarr was designed to support viewing a subset of multiscale OME-Zarr images, but it can support blending 2D slices of generic `zarr.Array`s. For a `zarr.Array` with more than 2 dimensions, a `channel_axis` must be specified to tell vizarr which dimension to split into separate blended channels. \n", + "\n", + "Here we will load one of the sub-resolution `zarr.Array`s from the OME-Zarr we created and view it with Vizarr." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "z_arr = zarr.open('astronaut.zarr').get('2')\n", + "z_arr.shape" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The channel axis for an OME-Zarr is always `1`, but loading the image with a different channel axis will load a single plane." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "viewer = vizarr.Viewer()\n", + "viewer.add_image(source=z_arr, channel_axis=0)\n", + "viewer" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You can specify other rendering properties to tell vizarr how to render the channels along the specified axis." 
+ ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "viewer = vizarr.Viewer()\n", + "viewer.add_image(\n", + " source=z_arr,\n", + " name=\"CYM astronaut\", # Image name\n", + " channel_axis=1, # size == 3, lists below must be of same length if provided\n", + " colors=[\"#FFFF00\", \"#FF00FF\", \"#00FFFF\"], # optional\n", + " names=[\"yellow\", \"magenta\", \"cyan\"], # optional, default channel_X\n", + " visibilities=[True, False, True], # optional, default True\n", + " contrast_limits=[[0, 1] for _ in range(3)], # optional, but recommended\n", + ")\n", + "viewer" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.12.3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/example/mandelbrot.ipynb b/python/notebooks/mandelbrot.ipynb similarity index 81% rename from example/mandelbrot.ipynb rename to python/notebooks/mandelbrot.ipynb index 48347530..c32115ef 100755 --- a/example/mandelbrot.ipynb +++ b/python/notebooks/mandelbrot.ipynb @@ -15,29 +15,19 @@ "metadata": {}, "outputs": [], "source": [ - "!pip install -U imjoy zarr" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "If you are running inside **Google Colab**, please also run the following cell.\n", - "\n", - "(Skip for Jupyter notebooks)" + "!pip install --quiet vizarr zarr numba" ] }, { "cell_type": "code", "execution_count": null, - "metadata": { - "tags": [] - }, + "metadata": {}, "outputs": [], "source": [ - "!git clone https://github.com/hms-dbmi/vizarr\n", - "import os\n", - "os.chdir('vizarr/example')" + "import zarr\n", + "import numcodecs\n", + "import numba\n", + "import numpy as np" ] }, { @@ -55,23 +45,19 @@ "metadata": {}, "outputs": [], "source": [ - "import numpy as np\n", - "from numba import njit\n", - "from zarr.util import json_dumps\n", - "from zarr.storage import init_array, init_group\n", "\n", - "def create_meta_store(levels, tilesize, compressor, dtype):\n", + "def create_meta_store(levels: int, tilesize: int, compressor, dtype: str):\n", " store = dict()\n", - " init_group(store)\n", + " zarr.storage.init_group(store)\n", " \n", " datasets = [{\"path\": str(i)} for i in range(levels)]\n", " root_attrs = {\"multiscales\": [{\"datasets\": datasets, \"version\": \"0.1\"}]}\n", - " store[\".zattrs\"] = json_dumps(root_attrs) \n", + " store[\".zattrs\"] = zarr.util.json_dumps(root_attrs)\n", " \n", " base_width = tilesize * 2 ** levels\n", " for level in range(levels):\n", " width = int(base_width / 2 ** level)\n", - " init_array(\n", + " zarr.storage.init_array(\n", " store,\n", " path=str(level),\n", " shape=(width, width),\n", @@ -82,7 +68,7 @@ " return store\n", "\n", "\n", - "@njit\n", + "@numba.njit\n", "def mandelbrot(out, from_x, from_y, to_x, to_y, grid_size, maxiter):\n", " step_x = (to_x - from_x) / grid_size\n", " step_y = (to_y - from_y) / grid_size\n", @@ -105,7 +91,7 @@ " return out\n", "\n", "\n", - "@njit\n", + "@numba.njit\n", "def tile_bounds(level, x, y, max_level, min_coord=-2.5, max_coord=2.5):\n", " max_width = max_coord - min_coord\n", 
" tile_width = max_width / 2 ** (max_level - level)\n", @@ -118,7 +104,8 @@ " return from_x, from_y, to_x, to_y\n", "\n", "\n", - "class MandlebrotStore:\n", + "class MandlebrotStore(zarr.storage.BaseStore):\n", + " \n", " def __init__(self, levels, tilesize, maxiter=255, compressor=None):\n", " self.levels = levels\n", " self.tilesize = tilesize\n", @@ -149,11 +136,17 @@ "\n", " return tile.tobytes()\n", "\n", - " def keys(self):\n", - " return self._store.keys()\n", - "\n", " def __iter__(self):\n", - " return iter(self._store)" + " return iter(self._store)\n", + " \n", + " def __len__(self):\n", + " return len(self._store)\n", + "\n", + " def __delitem__(self, key):\n", + " raise NotImplementedError(\"read-only store\")\n", + " \n", + " def __setitem__(self, key):\n", + " raise NotImplementedError(\"read-only store\")" ] }, { @@ -162,7 +155,7 @@ "source": [ "### Running vizarr\n", "\n", - "Simply initalize the multiscale store implemented above, and open as a `zarr.Group` for vizarr. " + "Initalize the store implemented above, and open as a `zarr.Group` for vizarr. " ] }, { @@ -171,21 +164,13 @@ "metadata": {}, "outputs": [], "source": [ - "from imjoy_plugin import run_vizarr\n", - "from numcodecs import Blosc\n", - "import zarr\n", - "\n", "# Initialize the store\n", - "store = MandlebrotStore(levels=50, tilesize=512, compressor=Blosc())\n", + "store = MandlebrotStore(levels=50, tilesize=512, compressor=numcodecs.Blosc())\n", "# Wrap in a cache so that tiles don't need to be computed as often\n", "store = zarr.LRUStoreCache(store, max_size=1e9)\n", "\n", "# This store implements the 'multiscales' zarr specfiication which is recognized by vizarr\n", - "z_grp = zarr.open(store, mode=\"r\")\n", - "\n", - "img = { \"source\": z_grp, \"name\": \"mandelbrot\" }\n", - "\n", - "run_vizarr(img)" + "grp = zarr.open(store, mode=\"r\")" ] }, { @@ -193,7 +178,13 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "import vizarr\n", + "\n", + "viewer = vizarr.Viewer()\n", + "viewer.add_image(source=grp, name=\"mandelbrot\")\n", + "viewer" + ] } ], "metadata": { @@ -212,7 +203,14 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.12.3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/example/spatial_transformations.ipynb b/python/notebooks/spatial_transformations.ipynb similarity index 72% rename from example/spatial_transformations.ipynb rename to python/notebooks/spatial_transformations.ipynb index 22ad610e..a93ab2ef 100644 --- a/example/spatial_transformations.ipynb +++ b/python/notebooks/spatial_transformations.ipynb @@ -1,5 +1,15 @@ { "cells": [ + { + "cell_type": "code", + "execution_count": null, + "id": "e9ac663e-a7ed-41c9-9050-2c69b89e7e74", + "metadata": {}, + "outputs": [], + "source": [ + "!pip install --quiet zarr scikit-image" + ] + }, { "cell_type": "markdown", "id": "2c447021", @@ -7,7 +17,10 @@ "source": [ "## Affine Transformations\n", "\n", - "Spatial transformations are defined per image layer via a [4x4 transformation matrix](https://www.brainvoyager.com/bv/doc/UsersGuide/CoordsAndTransforms/SpatialTransformationMatrices.html) using the `model_matrix` property. This features is very useful for pre-processing (e.g. 
flipping, rotating, scaling etc) your data, since these operations are executed performantly on the GPU thanks to [Viv](https://github.com/hms-dbmi/viv).\n", + "Spatial transformations are defined per image layer via a [4x4 transformation matrix](https://www.brainvoyager.com/bv/doc/UsersGuide/CoordsAndTransforms/SpatialTransformationMatrices.html) using the `model_matrix` property.\n", + "\n", + "This features is useful for pre-processing (e.g. flipping, rotating, scaling etc) your data, since these operations \n", + "are executed performantly on the GPU thanks to [Viv](https://github.com/hms-dbmi/viv).\n", "\n", "In this notebook we will load a simple OME-NGFF dataset from the `Getting Started` notebook and apply a simple rotation." ] @@ -19,26 +32,32 @@ "metadata": {}, "outputs": [], "source": [ - "from imjoy_plugin import run_vizarr\n", "from create_fixture import create_ome_zarr\n", "import zarr\n", "import numpy as np\n", "\n", + "import vizarr\n", + "\n", "# creates an example OME-NGFF, see `getting_started.ipynb` for more details\n", "create_ome_zarr(\"astronaut.zarr\") \n", "\n", + "# Create a viewer\n", + "viewer = vizarr.Viewer()\n", + "\n", "astronaut = {\n", " \"source\": zarr.open(\"astronaut.zarr\", mode=\"r\"),\n", - " \"name\": \"astronaut\"\n", + " \"name\": \"astronaut\",\n", "}\n", "\n", "noise = {\n", " \"source\": zarr.array(np.random.randint(0, 255, (1024, 1024), dtype=np.uint8)),\n", " \"opacity\": 0.8,\n", - " \"name\": \"noise\"\n", + " \"name\": \"noise\",\n", "}\n", "\n", - "run_vizarr([astronaut, noise]) # render without rotation" + "viewer.add_image(**astronaut)\n", + "viewer.add_image(**noise)\n", + "viewer" ] }, { @@ -81,7 +100,10 @@ "source": [ "translated_noise = { **noise, **{\"model_matrix\": M.ravel().tolist() } } # need to ravel into 1D list\n", "\n", - "run_vizarr([astronaut, translated_noise])" + "v = vizarr.Viewer()\n", + "v.add_image(**astronaut)\n", + "v.add_image(**translated_noise)\n", + "v" ] }, { @@ -109,7 +131,14 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.9.7" + "version": "3.12.3" + }, + "widgets": { + "application/vnd.jupyter.widget-state+json": { + "state": {}, + "version_major": 2, + "version_minor": 0 + } } }, "nbformat": 4, diff --git a/types/imjoy.d.ts b/types/imjoy.d.ts deleted file mode 100644 index 23f496e9..00000000 --- a/types/imjoy.d.ts +++ /dev/null @@ -1,15 +0,0 @@ -declare module 'imjoy-rpc' { - type ImJoySetupRPCProps = { - name: string; - description: string; - version: string; - }; - - interface ImJoyAPI { - export: (funcs: Record void>) => void; - } - - declare const imjoyRPC: { - setupRPC(props: ImJoySetupRPCProps): Promise; - }; -}