From 72d0458b93b123a040d58d433991c520532b2ad3 Mon Sep 17 00:00:00 2001 From: Jai Date: Fri, 1 Dec 2023 21:34:34 +0000 Subject: [PATCH 01/16] Ensure data filtered by bitmask --- aeon/io/reader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aeon/io/reader.py b/aeon/io/reader.py index 67570608..14d0f700 100644 --- a/aeon/io/reader.py +++ b/aeon/io/reader.py @@ -212,7 +212,7 @@ def read(self, file): specified unique identifier. """ data = super().read(file) - data = data[data.event & self.value > 0] + data = data[data.event == self.value] data["event"] = self.tag return data From f20c24837aab4ecfc8fc0d1d73991040faafd326 Mon Sep 17 00:00:00 2001 From: Jai Date: Fri, 1 Dec 2023 22:54:18 +0000 Subject: [PATCH 02/16] Fix for #290 --- aeon/io/reader.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aeon/io/reader.py b/aeon/io/reader.py index 14d0f700..035e22a3 100644 --- a/aeon/io/reader.py +++ b/aeon/io/reader.py @@ -212,7 +212,7 @@ def read(self, file): specified unique identifier. 
""" data = super().read(file) - data = data[data.event == self.value] + data = data[(data.event & self.value) == self.value] data["event"] = self.tag return data From f8349c0ed08d2e99dfbd38a0399313cfd91b5d20 Mon Sep 17 00:00:00 2001 From: Jai Date: Mon, 4 Dec 2023 17:01:33 +0000 Subject: [PATCH 03/16] Changed 'dataset' name to 'schemas' and refactored --- aeon/dj_pipeline/acquisition.py | 2 +- aeon/schema/{dataset.py => schemas.py} | 55 ++++++++++++++++++++++++++ tests/io/test_api.py | 2 +- 3 files changed, 57 insertions(+), 2 deletions(-) rename aeon/schema/{dataset.py => schemas.py} (56%) diff --git a/aeon/dj_pipeline/acquisition.py b/aeon/dj_pipeline/acquisition.py index 4c9df4af..916dda23 100644 --- a/aeon/dj_pipeline/acquisition.py +++ b/aeon/dj_pipeline/acquisition.py @@ -5,7 +5,7 @@ import pandas as pd from aeon.io import api as io_api -from aeon.schema import dataset as aeon_schema +from aeon.schema import schemas as aeon_schema from aeon.io import reader as io_reader from aeon.analysis import utils as analysis_utils diff --git a/aeon/schema/dataset.py b/aeon/schema/schemas.py similarity index 56% rename from aeon/schema/dataset.py rename to aeon/schema/schemas.py index b9586de4..8a3dc333 100644 --- a/aeon/schema/dataset.py +++ b/aeon/schema/schemas.py @@ -57,3 +57,58 @@ Device("Wall8", octagon.Wall), ] ) + +# All recorded social01 streams: + +# *Note* regiser 8 is always the harp heartbeat for any device that has this stream. 
+ +# - Metadata.yml +# - Environment_BlockState +# - Environment_EnvironmentState +# - Environment_LightEvents +# - Environment_MessageLog +# - Environment_SubjectState +# - Environment_SubjectVisits +# - Environment_SubjectWeight +# - CameraTop (200, 201, avi, csv, ,) +# - 200: position +# - 201: region +# - CameraNorth (avi, csv) +# - CameraEast (avi, csv) +# - CameraSouth (avi, csv) +# - CameraWest (avi, csv) +# - CameraPatch1 (avi, csv) +# - CameraPatch2 (avi, csv) +# - CameraPatch3 (avi, csv) +# - CameraNest (avi, csv) +# - ClockSynchronizer (8, 36) +# - 36: +# - Nest (200, 201, 202, 203) +# - 200: weight_raw +# - 201: weight_tare +# - 202: weight_filtered +# - 203: weight_baseline +# - 204: weight_subject +# - Patch1 (8, 32, 35, 36, 87, 90, 91, 200, 201, 202, 203, State) +# - 32: beam_break +# - 35: delivery_set +# - 36: delivery_clear +# - 87: expansion_board +# - 90: encoder_read +# - 91: encoder_mode +# - 200: dispenser_state +# - 201: delivery_manual +# - 202: missed_pellet +# - 203: delivery_retry +# - Patch2 (8, 32, 35, 36, 87, 90, 91, State) +# - Patch3 (8, 32, 35, 36, 87, 90, 91, 200, 203, State) +# - RfidEventsGate (8, 32, 35) +# - 32: entry_id +# - 35: hardware_notifications +# - RfidEventsNest1 (8, 32, 35) +# - RfidEventsNest2 (8, 32, 35) +# - RfidEventsPatch1 (8, 32, 35) +# - RfidEventsPatch2 (8, 32, 35) +# - RfidEventsPatch3 (8, 32, 35) +# - VideoController (8, 32, 33, 34, 35, 36, 45, 52) +# - 32: frame_number \ No newline at end of file diff --git a/tests/io/test_api.py b/tests/io/test_api.py index 48986830..8f9d8c0b 100644 --- a/tests/io/test_api.py +++ b/tests/io/test_api.py @@ -5,7 +5,7 @@ from pytest import mark import aeon -from aeon.schema.dataset import exp02 +from aeon.schema.schemas import exp02 nonmonotonic_path = Path(__file__).parent.parent / "data" / "nonmonotonic" monotonic_path = Path(__file__).parent.parent / "data" / "monotonic" From 38d46c1dd98ee4525d5bee5e1550b3f9bce6b5d5 Mon Sep 17 00:00:00 2001 From: Jai Date: Mon, 4 Dec 
2023 17:04:54 +0000 Subject: [PATCH 04/16] Added notebook showing finding harp event bitmasks --- .../get_harp_stream_event_bitmask.ipynb | 123 ++++++++++++++++++ 1 file changed, 123 insertions(+) create mode 100644 docs/examples/get_harp_stream_event_bitmask.ipynb diff --git a/docs/examples/get_harp_stream_event_bitmask.ipynb b/docs/examples/get_harp_stream_event_bitmask.ipynb new file mode 100644 index 00000000..ffbeb6ef --- /dev/null +++ b/docs/examples/get_harp_stream_event_bitmask.ipynb @@ -0,0 +1,123 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Jupyter settings and Imports\"\"\"\n", + "\n", + "%load_ext autoreload\n", + "%autoreload 2\n", + "\n", + "from pathlib import Path\n", + "\n", + "import aeon.io.api as api\n", + "from aeon.io import reader" + ] + }, + { + "cell_type": "code", + "execution_count": 44, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "data:\n", + " beambreak\n", + "time \n", + "2023-06-21 10:01:16.633728027 34\n", + "2023-06-21 10:01:16.649184227 32\n", + "2023-06-21 10:01:28.314400196 34\n", + "2023-06-21 10:01:28.331103802 32\n", + "2023-06-21 10:01:38.428864002 34\n", + "... ...\n", + "2023-06-21 11:16:43.647552013 32\n", + "2023-06-21 11:16:43.655648232 34\n", + "2023-06-21 11:16:43.674079895 32\n", + "2023-06-21 11:21:40.381728172 34\n", + "2023-06-21 11:21:40.397024155 32\n", + "\n", + "[196 rows x 1 columns]\n", + "\n", + "\n", + "bitmask:\n", + " 34\n", + "\n", + "\n", + "stream_data:\n", + " event\n", + "time \n", + "2023-06-21 10:01:16.633728027 beambreak\n", + "2023-06-21 10:01:28.314400196 beambreak\n", + "2023-06-21 10:01:38.428864002 beambreak\n", + "2023-06-21 10:01:53.453343868 beambreak\n", + "2023-06-21 10:04:14.685791969 beambreak\n", + "... 
...\n", + "2023-06-21 11:15:20.406752110 beambreak\n", + "2023-06-21 11:16:24.036767960 beambreak\n", + "2023-06-21 11:16:43.625472069 beambreak\n", + "2023-06-21 11:16:43.655648232 beambreak\n", + "2023-06-21 11:21:40.381728172 beambreak\n", + "\n", + "[98 rows x 1 columns]\n" + ] + } + ], + "source": [ + "\"\"\"How to find the bitmask associated with any harp stream event and create a new reader: \n", + "example with patch beambreak\"\"\"\n", + "\n", + "# Ensure you have the pattern of the stream (look at the filename), and the expected event name\n", + "pattern = \"Patch1_32*\"\n", + "event_name = \"beambreak\"\n", + "# Set the reader for the stream\n", + "harp_reader = reader.Harp(pattern=pattern, columns=[event_name])\n", + "# Set the root dir and a time range in which you know the stream acquired data\n", + "root = Path(\"/ceph/aeon/aeon/data/raw/AEON3/presocial0.1\")\n", + "start = pd.Timestamp(\"2023-06-21 10:00:00\")\n", + "end = pd.Timestamp(\"2023-06-21 12:00:10\")\n", + "# Get the bitmask as the first value of the loaded stream\n", + "data = api.load(root, harp_reader, start=start, end=end)\n", + "bitmask = data.iloc[0, 0]\n", + "new_reader = reader.BitmaskEvent(pattern, bitmask, event_name)\n", + "stream_data = api.load(root, new_reader, start=start, end=end)\n", + "\n", + "print(f\"data:\\n {data}\\n\\n\")\n", + "print(f\"bitmask:\\n {bitmask}\\n\\n\")\n", + "print(f\"stream_data:\\n {stream_data}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "aeon", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 
967f32258092eec4bdb3e7e19b94a59f32106489 Mon Sep 17 00:00:00 2001 From: Jai Date: Mon, 4 Dec 2023 17:31:42 +0000 Subject: [PATCH 05/16] Refactored according to #293 --- aeon/dj_pipeline/acquisition.py | 2 +- aeon/{schema => io/binder}/__init__.py | 0 aeon/{schema => io/binder}/core.py | 0 aeon/{schema => io/binder}/foraging.py | 2 +- aeon/{schema => io/binder}/octagon.py | 0 aeon/{schema => io/binder}/schemas.py | 4 ++-- aeon/{schema => io/binder}/social.py | 0 aeon/io/device.py | 16 ++++++++-------- tests/io/test_api.py | 2 +- 9 files changed, 13 insertions(+), 13 deletions(-) rename aeon/{schema => io/binder}/__init__.py (100%) rename aeon/{schema => io/binder}/core.py (100%) rename aeon/{schema => io/binder}/foraging.py (98%) rename aeon/{schema => io/binder}/octagon.py (100%) rename aeon/{schema => io/binder}/schemas.py (97%) rename aeon/{schema => io/binder}/social.py (100%) diff --git a/aeon/dj_pipeline/acquisition.py b/aeon/dj_pipeline/acquisition.py index 916dda23..64842d65 100644 --- a/aeon/dj_pipeline/acquisition.py +++ b/aeon/dj_pipeline/acquisition.py @@ -5,7 +5,7 @@ import pandas as pd from aeon.io import api as io_api -from aeon.schema import schemas as aeon_schema +from aeon.io.binder import schemas as aeon_schema from aeon.io import reader as io_reader from aeon.analysis import utils as analysis_utils diff --git a/aeon/schema/__init__.py b/aeon/io/binder/__init__.py similarity index 100% rename from aeon/schema/__init__.py rename to aeon/io/binder/__init__.py diff --git a/aeon/schema/core.py b/aeon/io/binder/core.py similarity index 100% rename from aeon/schema/core.py rename to aeon/io/binder/core.py diff --git a/aeon/schema/foraging.py b/aeon/io/binder/foraging.py similarity index 98% rename from aeon/schema/foraging.py rename to aeon/io/binder/foraging.py index ffd8fdd9..9267dc77 100644 --- a/aeon/schema/foraging.py +++ b/aeon/io/binder/foraging.py @@ -4,7 +4,7 @@ import aeon.io.device as _device import aeon.io.reader as _reader -import 
aeon.schema.core as _stream +import aeon.io.binder.core as _stream class Area(_Enum): diff --git a/aeon/schema/octagon.py b/aeon/io/binder/octagon.py similarity index 100% rename from aeon/schema/octagon.py rename to aeon/io/binder/octagon.py diff --git a/aeon/schema/schemas.py b/aeon/io/binder/schemas.py similarity index 97% rename from aeon/schema/schemas.py rename to aeon/io/binder/schemas.py index 8a3dc333..782767e4 100644 --- a/aeon/schema/schemas.py +++ b/aeon/io/binder/schemas.py @@ -1,8 +1,8 @@ from dotmap import DotMap -import aeon.schema.core as stream +import aeon.io.binder.core as stream from aeon.io.device import Device -from aeon.schema import foraging, octagon +from aeon.io.binder import foraging, octagon exp02 = DotMap( [ diff --git a/aeon/schema/social.py b/aeon/io/binder/social.py similarity index 100% rename from aeon/schema/social.py rename to aeon/io/binder/social.py diff --git a/aeon/io/device.py b/aeon/io/device.py index 1a4916e6..23018aaf 100644 --- a/aeon/io/device.py +++ b/aeon/io/device.py @@ -3,16 +3,16 @@ def compositeStream(pattern, *args): """Merges multiple data streams into a single composite stream.""" - composite = {} + registry = {} if args: for stream in args: if inspect.isclass(stream): for method in vars(stream).values(): if isinstance(method, staticmethod): - composite.update(method.__func__(pattern)) + registry.update(method.__func__(pattern)) else: - composite.update(stream(pattern)) - return composite + registry.update(stream(pattern)) + return registry class Device: @@ -31,11 +31,11 @@ class Device: def __init__(self, name, *args, pattern=None): self.name = name - self.stream = compositeStream(name if pattern is None else pattern, *args) + self.registry = compositeStream(name if pattern is None else pattern, *args) def __iter__(self): - if len(self.stream) == 1: - singleton = self.stream.get(self.name, None) + if len(self.registry) == 1: + singleton = self.registry.get(self.name, None) if singleton: return 
iter((self.name, singleton)) - return iter((self.name, self.stream)) + return iter((self.name, self.registry)) diff --git a/tests/io/test_api.py b/tests/io/test_api.py index 8f9d8c0b..486f1d3f 100644 --- a/tests/io/test_api.py +++ b/tests/io/test_api.py @@ -5,7 +5,7 @@ from pytest import mark import aeon -from aeon.schema.schemas import exp02 +from aeon.io.binder.schemas import exp02 nonmonotonic_path = Path(__file__).parent.parent / "data" / "nonmonotonic" monotonic_path = Path(__file__).parent.parent / "data" / "monotonic" From fe3a7990ba21b1a74faf36db823d4c320fbdb4cd Mon Sep 17 00:00:00 2001 From: Jai Date: Tue, 5 Dec 2023 17:04:54 +0000 Subject: [PATCH 06/16] Fixed #294 --- aeon/io/api.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/aeon/io/api.py b/aeon/io/api.py index 2b9bc745..5c16159f 100644 --- a/aeon/io/api.py +++ b/aeon/io/api.py @@ -115,7 +115,8 @@ def load(root, reader, start=None, end=None, time=None, tolerance=None, epoch=No # to fill missing values previous = reader.read(files[i - 1]) data = pd.concat([previous, frame]) - data = data.reindex(values, method="pad", tolerance=tolerance) + data = data.reindex(values, tolerance=tolerance) + data.dropna(inplace=True) else: data.drop(columns="time", inplace=True) dataframes.append(data) From 3164415c7248ccd2dc32a951500b780a81af054c Mon Sep 17 00:00:00 2001 From: Jai Date: Wed, 6 Dec 2023 14:26:43 +0000 Subject: [PATCH 07/16] WIP schema-from-scratch tutorial --- ...understanding_aeon_data_architecture.ipynb | 1163 +++++++++++++++++ 1 file changed, 1163 insertions(+) create mode 100644 docs/examples/understanding_aeon_data_architecture.ipynb diff --git a/docs/examples/understanding_aeon_data_architecture.ipynb b/docs/examples/understanding_aeon_data_architecture.ipynb new file mode 100644 index 00000000..f1f453e8 --- /dev/null +++ b/docs/examples/understanding_aeon_data_architecture.ipynb @@ -0,0 +1,1163 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ 
+ "## Questions\n", + "\n", + "1. What is the usecase of the `DigitalBitmask` reader? How is it different to `BitmaskEvent`?" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Aeon data file structure on Ceph" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Raw data**: `/ceph/aeon/aeon/data/raw/////`\n", + "\n", + "e.g. `/ceph/aeon/aeon/data/raw/AEON3/social0.1/2023-12-01T14-30-34/Patch1/Patch1_90_2023-12-02T12-00-00.bin`\n", + "\n", + "**Processed data (e.g. trained and exported SLEAP model)**: `/ceph/aeon/aeon/data/processed/////frozen_graph.pb`\n", + "\n", + "e.g. `/ceph/aeon/aeon/data/processed/test-node1/0000005/2023-11-30T01-29-00/topdown_multianimal_id/frozen_graph.pb`" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Reading Aeon data in Python" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Terminology\n", + "\n", + "**_Chunk Duration_**: The time duration over which experiment data files are written out. Currently, all Aeon experiments write out acquired data to files every hour (1-hour chunks).\n", + "\n", + "**_Acquisition Epoch_**: One run of an experiment workflow. When an experiment workflow restarts, a new epoch starts.\n", + "\n", + "E.g. `ceph/aeon/aeon/data/raw/AEON3/social0.1/2023-12-03T13-05-15` is an acquisition epoch in the Social0.1 experiment. Because the next epoch directory is `ceph/aeon/aeon/data/raw/AEON3/social0.1/2023-12-03T13-30-30`, we know this first epoch lasted only ~25 minutes.\n", + "\n", + "**_Stream_**: Data that comes from a single source.\n", + "\n", + "A single data file is associated with each stream, so often 'stream' and 'file' can be interchanged. If the stream comes from a harp device, the stream-file contains information about the register of the harp device which generated the stream, as well as the associated chunk datetime.\n", + "\n", + "For a harp stream, the filename format is as follows:
\n", + "`__` e.g. `Patch1_90_2023-12-02T12-00-00.bin`
\n", + "By convention, harp streams which are acquired in software start with register number '200'; e.g. the largest-blob-centroid-tracking stream filename is: `CameraTop_200*.bin`\n", + "\n", + "Each stream can contain single or multi-dimensional data (e.g. a patch wheel magnetic encoder stream contains information about both the magnetic field strength and angle: however, each dimension is associated with a unique bitmask, and thus can be isolated by applying this bitmask to the stream).\n", + "\n", + "**_Reader_**: A Python class whose instantiated objects each read one particular stream. Simple working principle: each `Reader` has a `read` method which takes in a single stream-file and reads the data in that file into a pandas `DataFrame` (see `aeon/io/reader.py` and `aeon/schema/*.py`).\n", + "\n", + "e.g. `Encoder` readers read values from `Patch__` files (these contain a patch wheel's magnetic encoder readings, to determine how much the wheel has been spun).\n", + "\n", + "Whenever a new device is implemented in an Aeon experiment, a new `Reader` should be created for the acquired data, such that the data can be read and returned in the form of a pandas `DataFrame`.\n", + "\n", + "**_Device_**: A collection of streams grouped together for convenience, often for related streams.\n", + "\n", + "On ceph, we organize streams into device folders:
e.g. `ceph/aeon/aeon/data/raw/AEON3/social0.1/2023-12-01T14-30-34/Patch1` contains the patch-heartbeat stream (`Patch1_8`), the patch-beambreak stream (`Patch1_32`), the patch-pellet delivery-pin-set stream (`Patch1_35`), the patch-pellet-delivery-pin-cleared stream (`Patch1_36`), the patch-wheel-magnetic-encoder stream (`Patch1_90`), the patch-wheel-magnetic-encoder-mode stream (`Patch1_91`), the patch-feeder-dispenser-state stream (`Patch1_200`), the patch-pellet-manual-delivery stream (`Patch1_201`), the patch-missed-pellet-stream (`Patch1_202`), the patch-pellet-delivery-retry stream (`Patch1_203`), and the patch-state stream (`Patch1_State`).\n", + "\n", + "In code, we create logical devices via the `Device` class (see `aeon/io/device.py`)
\n", + "e.g. We often define 'Patch' devices that contain `Reader` objects associated with specific streams (as experimenters may not care about analyzing all streams in a `Patch` device folder on ceph), e.g. wheel-magnetic-encoder, state, pellet-delivery-pin-set, and beambreak.\n", + "\n", + "**_Schema_**: A list of devices grouped within a `DotMap` object (see `aeon/docs/examples/schemas.py`). Each experiment is associated with a schema. If a schema changes, then the experiment neccesarily must be different (either in name or version number), as the acquired data is now different.\n", + "\n", + "**_Dataset_**: All data belonging to a particular experiment. \n", + "\n", + "e.g. All data in `ceph/aeon/aeon/data/raw/AEON3/social0.1`" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "#### Code\n", + "\n", + "With this terminology in mind, let's get to the code!" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "> \u001b[0;32m/nfs/nhome/live/jbhagat/ProjectAeon/aeon_mecha/aeon/io/binder/core.py\u001b[0m(33)\u001b[0;36menvironment_state\u001b[0;34m()\u001b[0m\n", + "\u001b[0;32m 32 \u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mipdb\u001b[0m\u001b[0;34m;\u001b[0m \u001b[0mipdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\u001b[0;32m---> 33 \u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m\"EnvironmentState\"\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0m_reader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mCsv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"{pattern}_EnvironmentState_*\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m\"state\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", + 
"\u001b[0m\u001b[0;32m 34 \u001b[0;31m\u001b[0;34m\u001b[0m\u001b[0m\n", + "\u001b[0m\n" + ] + } + ], + "source": [ + "\"\"\"Imports\"\"\"\n", + "\n", + "%load_ext autoreload\n", + "%autoreload 2\n", + "# %flow mode reactive\n", + "\n", + "from datetime import date\n", + "import ipdb\n", + "from itertools import product\n", + "from pathlib import Path\n", + "\n", + "from dotmap import DotMap\n", + "import matplotlib.pyplot as plt\n", + "import numpy as np\n", + "import pandas as pd\n", + "import plotly.express as px\n", + "import plotly.graph_objs as go\n", + "import seaborn as sns\n", + "\n", + "import aeon\n", + "import aeon.io.binder.core as stream\n", + "from aeon.io import api\n", + "from aeon.io import reader\n", + "from aeon.io.device import Device\n", + "from aeon.io.binder.schemas import exp02, exp01\n", + "from aeon.analysis.utils import visits, distancetravelled" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Set experiment root path and time range / set to load data\"\"\"\n", + "\n", + "root = Path(\"/ceph/aeon/aeon/data/raw/AEON3/social0.1\")\n", + "start_time = pd.Timestamp(\"2023-12-02 10:30:00\")\n", + "end_time = pd.Timestamp(\"2023-12-02 12:30:00\")\n", + "time_set = pd.concat(\n", + " [\n", + " pd.Series(pd.date_range(start_time, start_time + pd.Timedelta(hours=1), freq=\"1s\")),\n", + " pd.Series(pd.date_range(end_time, end_time + pd.Timedelta(hours=1), freq=\"1s\"))\n", + " ]\n", + ")\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Creating a new `Reader` class\"\"\"\n", + "\n", + "# All readers are subclassed from the base `Reader` class. 
They thus all contain a `read` method,\n", + "# for returning data from a file in the form of a pandas DataFrame, and the following attributes, \n", + "# which must be specified on object construction:\n", + "# `pattern`: a prefix in the filename used by `aeon.io.api.load` to find matching files to load\n", + "# `columns`: a list of column names for the returned DataFrame\n", + "# `extension`: the file extension of the files to be read\n", + "\n", + "# Using these principles, we can recreate a simple reader for reading subject weight data from the \n", + "# social0.1 experiments, which are saved in .csv format.\n", + "\n", + "# First, we'll create a general Csv reader, subclassed from `Reader`.\n", + "class Csv(reader.Reader):\n", + " \"\"\"Reads data from csv text files, where the first column stores the Aeon timestamp, in seconds.\"\"\"\n", + "\n", + " def __init__(self, pattern, columns, extension=\"csv\"):\n", + " super().__init__(pattern, columns, extension)\n", + "\n", + " def read(self, file):\n", + " return pd.read_csv(file, header=0, names=self.columns, index_col=0)\n", + " \n", + "# Next, we'll create a reader for the subject weight data, subclassed from `Csv`.\n", + "\n", + "# We know from our data that the files of interest start with 'Environment_SubjectWeight' and columns are: \n", + "# 1) Aeon timestamp in seconds from 1904/01/01 (1904 date system)\n", + "# 2) Weight in grams\n", + "# 3) Weight stability confidence (0-1)\n", + "# 4) Subject ID (string)\n", + "# 5) Subject ID (integer)\n", + "# Since the first column (Aeon timestamp) will be set as the index, we'll use the rest as DataFrame columns.\n", + "# And we don't need to define `read`, as it will use the `Csv` class's `read` method.\n", + "\n", + "class Subject_Weight(Csv):\n", + " \"\"\"Reads subject weight data from csv text files.\"\"\"\n", + " \n", + " def __init__(\n", + " self, \n", + " pattern=\"Environment_SubjectWeight*\",\n", + " columns=[\"weight\", \"confidence\", \"subject_id\", 
\"int_id\"], \n", + " extension=\"csv\"\n", + " ):\n", + " super().__init__(pattern, columns, extension)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
weightconfidencesubject_idint_id
3.784363e+0929.7999991CAA-11207460
3.784363e+0929.7999991CAA-11207460
3.784363e+0929.7999991CAA-11207460
3.784363e+0929.7999991CAA-11207460
3.784363e+0929.7999991CAA-11207460
...............
3.784367e+0931.2000011CAA-11207470
3.784367e+0931.2000011CAA-11207470
3.784367e+0931.2000011CAA-11207470
3.784367e+0931.2000011CAA-11207470
3.784367e+0931.2000011CAA-11207470
\n", + "

4382 rows × 4 columns

\n", + "
" + ], + "text/plain": [ + " weight confidence subject_id int_id\n", + "3.784363e+09 29.799999 1 CAA-1120746 0\n", + "3.784363e+09 29.799999 1 CAA-1120746 0\n", + "3.784363e+09 29.799999 1 CAA-1120746 0\n", + "3.784363e+09 29.799999 1 CAA-1120746 0\n", + "3.784363e+09 29.799999 1 CAA-1120746 0\n", + "... ... ... ... ...\n", + "3.784367e+09 31.200001 1 CAA-1120747 0\n", + "3.784367e+09 31.200001 1 CAA-1120747 0\n", + "3.784367e+09 31.200001 1 CAA-1120747 0\n", + "3.784367e+09 31.200001 1 CAA-1120747 0\n", + "3.784367e+09 31.200001 1 CAA-1120747 0\n", + "\n", + "[4382 rows x 4 columns]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
weightconfidencesubject_idint_id
time
2023-12-02 10:31:12.84000015329.51CAA-11207460
2023-12-02 10:31:12.94000005729.51CAA-11207460
2023-12-02 10:31:13.03999996229.51CAA-11207460
2023-12-02 10:31:13.09999990529.51CAA-11207460
2023-12-02 10:31:13.19999980929.51CAA-11207460
...............
2023-12-02 12:27:29.46000003831.11CAA-11207470
2023-12-02 12:27:29.55999994331.11CAA-11207470
2023-12-02 12:27:29.61999988631.11CAA-11207470
2023-12-02 12:27:29.71999979031.11CAA-11207471
2023-12-02 12:27:29.82000017231.11CAA-11207470
\n", + "

10525 rows × 4 columns

\n", + "
" + ], + "text/plain": [ + " weight confidence subject_id int_id\n", + "time \n", + "2023-12-02 10:31:12.840000153 29.5 1 CAA-1120746 0\n", + "2023-12-02 10:31:12.940000057 29.5 1 CAA-1120746 0\n", + "2023-12-02 10:31:13.039999962 29.5 1 CAA-1120746 0\n", + "2023-12-02 10:31:13.099999905 29.5 1 CAA-1120746 0\n", + "2023-12-02 10:31:13.199999809 29.5 1 CAA-1120746 0\n", + "... ... ... ... ...\n", + "2023-12-02 12:27:29.460000038 31.1 1 CAA-1120747 0\n", + "2023-12-02 12:27:29.559999943 31.1 1 CAA-1120747 0\n", + "2023-12-02 12:27:29.619999886 31.1 1 CAA-1120747 0\n", + "2023-12-02 12:27:29.719999790 31.1 1 CAA-1120747 1\n", + "2023-12-02 12:27:29.820000172 31.1 1 CAA-1120747 0\n", + "\n", + "[10525 rows x 4 columns]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
weightconfidencesubject_idint_id
2023-12-02 10:30:0030.0000001.0CAA-11207470.0
2023-12-02 10:30:0130.0000001.0CAA-11207470.0
2023-12-02 10:30:0230.0000001.0CAA-11207470.0
2023-12-02 10:30:0330.0000001.0CAA-11207470.0
2023-12-02 10:30:0430.0000001.0CAA-11207470.0
...............
2023-12-02 13:18:2529.7999991.0CAA-11207460.0
2023-12-02 13:18:2629.7999991.0CAA-11207460.0
2023-12-02 13:22:0529.9000001.0CAA-11207460.0
2023-12-02 13:22:1429.7999991.0CAA-11207460.0
2023-12-02 13:22:1829.7999991.0CAA-11207460.0
\n", + "

3691 rows × 4 columns

\n", + "
" + ], + "text/plain": [ + " weight confidence subject_id int_id\n", + "2023-12-02 10:30:00 30.000000 1.0 CAA-1120747 0.0\n", + "2023-12-02 10:30:01 30.000000 1.0 CAA-1120747 0.0\n", + "2023-12-02 10:30:02 30.000000 1.0 CAA-1120747 0.0\n", + "2023-12-02 10:30:03 30.000000 1.0 CAA-1120747 0.0\n", + "2023-12-02 10:30:04 30.000000 1.0 CAA-1120747 0.0\n", + "... ... ... ... ...\n", + "2023-12-02 13:18:25 29.799999 1.0 CAA-1120746 0.0\n", + "2023-12-02 13:18:26 29.799999 1.0 CAA-1120746 0.0\n", + "2023-12-02 13:22:05 29.900000 1.0 CAA-1120746 0.0\n", + "2023-12-02 13:22:14 29.799999 1.0 CAA-1120746 0.0\n", + "2023-12-02 13:22:18 29.799999 1.0 CAA-1120746 0.0\n", + "\n", + "[3691 rows x 4 columns]" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "\"\"\"Loading data via a `Reader` object\"\"\"\n", + "\n", + "# We can now load data by specifying a file \n", + "subject_weight_reader = Subject_Weight()\n", + "acq_epoch = \"2023-12-01T14-30-34\"\n", + "weight_file = root / acq_epoch / \"Environment/Environment_SubjectWeight_2023-12-02T12-00-00.csv\"\n", + "display(subject_weight_reader.read(weight_file))\n", + "\n", + "# And we can use `load` to load data across many same-stream files given a time range or time set.\n", + "display(aeon.load(root, subject_weight_reader, start=start_time, end=end_time))\n", + "display(aeon.load(root, subject_weight_reader, time=time_set.values))" + ] + }, + { + "cell_type": "code", + "execution_count": 34, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "bitmasks: [32 34]\n", + "raw data:\n", + " beambreak\n", + "time \n", + "2023-12-02 11:33:03.942463875 34\n", + "2023-12-02 11:33:03.951744080 32\n", + "2023-12-02 11:33:18.500351906 34\n", + "2023-12-02 11:33:18.503456115 32\n", + "2023-12-02 11:33:18.509632111 34\n", + "... 
...\n", + "2023-12-02 12:07:19.810304165 32\n", + "2023-12-02 12:07:29.802591801 34\n", + "2023-12-02 12:07:29.808767796 32\n", + "2023-12-02 12:07:40.692639828 34\n", + "2023-12-02 12:07:40.705023766 32\n", + "\n", + "[102 rows x 1 columns]\n", + "\n", + "\n", + "bitmasked data:\n", + " event\n", + "time \n", + "2023-12-02 11:33:03.942463875 beambreak\n", + "2023-12-02 11:33:03.951744080 beambreak\n", + "2023-12-02 11:33:18.500351906 beambreak\n", + "2023-12-02 11:33:18.503456115 beambreak\n", + "2023-12-02 11:33:18.509632111 beambreak\n", + "... ...\n", + "2023-12-02 12:07:19.810304165 beambreak\n", + "2023-12-02 12:07:29.802591801 beambreak\n", + "2023-12-02 12:07:29.808767796 beambreak\n", + "2023-12-02 12:07:40.692639828 beambreak\n", + "2023-12-02 12:07:40.705023766 beambreak\n", + "\n", + "[102 rows x 1 columns]\n" + ] + } + ], + "source": [ + "\"\"\"Updating a `Reader` object\"\"\"\n", + "\n", + "# Occasionally, we may want to tweak the output from a `Reader` object's `read` method, or some tweaks to \n", + "# streams on the acquisition side may require us to make corresponding tweaks to a `Reader` object to\n", + "# ensure it works properly. We'll cover some of these cases here.\n", + "\n", + "# 1. Column changes\n", + "\n", + "# First, if we want to simply change the output from `read`, we can change the columns of an instantiated\n", + "# `Reader` object. Let's change `subject_id` to `id`, and after reading, drop the `confidence` and `int_id`\n", + "# columns.\n", + "subject_weight_reader.columns = [\"weight\", \"confidence\", \"id\", \"int_id\"]\n", + "data = subject_weight_reader.read(weight_file)\n", + "data.drop([\"confidence\", \"int_id\"], axis=1, inplace=True)\n", + "display(data)\n", + "\n", + "\n", + "# 2. 
Pattern changes\n", + "\n", + "# Next, occasionally a stream's filename may change, in which case we'll need to update the `Reader` \n", + "# object's `pattern` to find the new files using `load`: \n", + "\n", + "# Let's simulate a case where the old SubjectWeight stream was called Weight, and create a `Reader` class.\n", + "class Subject_Weight(Csv):\n", + " \"\"\"Reads subject weight data from csv text files.\"\"\"\n", + " \n", + " def __init__(\n", + " self, \n", + " pattern=\"Environment_Weight*\",\n", + " columns=[\"weight\", \"confidence\", \"subject_id\", \"int_id\"], \n", + " extension=\"csv\"\n", + " ):\n", + " super().__init__(pattern, columns, extension)\n", + "\n", + "# We'll see that we can't find any files with this pattern.\n", + "subject_weight_reader = Subject_Weight()\n", + "data = aeon.load(root, subject_weight_reader, start=start_time, end=end_time)\n", + "display(data) # empty dataframe\n", + "\n", + "# But if we just update the pattern, `load` will find the files.\n", + "subject_weight_reader.pattern = \"Environment_SubjectWeight*\"\n", + "data = aeon.load(root, subject_weight_reader, start=start_time, end=end_time)\n", + "display(data) \n", + "\n", + "\n", + "# 3. Bitmask changes for Harp streams\n", + "\n", + "# Lastly, some Harp streams use bitmasks to distinguish writing out different events to the same file.\n", + "# e.g. The beambreak stream `Patch_32*` writes out events both for when the beam is broken and when\n", + "# it gets reset. 
Given a Harp stream, we can find all bitmasks associated with it, and choose which one\n", + "# to use to filter the data:\n", + "\n", + "# Given a stream, we can create a `Harp` reader object to find all bitmasks associated with it.\n", + "pattern = \"Patch1_32*\"\n", + "event_name = \"beambreak\"\n", + "harp_reader = reader.Harp(pattern=pattern, columns=[event_name])\n", + "data = api.load(root, harp_reader, start=start_time, end=end_time)\n", + "bitmasks = np.unique(data[event_name].values)\n", + "print(f\"bitmasks: {bitmasks}\")\n", + "\n", + "# Let's set the bitmasks to the first returned unique value, and create a new `Reader` object to use this.\n", + "bitmask = bitmasks[0]\n", + "beambreak_reader = reader.BitmaskEvent(pattern, bitmask, event_name)\n", + "bitmasked_data = api.load(root, beambreak_reader, start=start_time, end=end_time)\n", + "\n", + "print(f\"raw data:\\n {data}\\n\\n\")\n", + "print(f\"bitmasked data:\\n {bitmasked_data}\")" + ] + }, + { + "cell_type": "code", + "execution_count": 29, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "data:\n", + " beambreak\n", + "time \n", + "2023-12-02 11:33:03.942463875 34\n", + "2023-12-02 11:33:03.951744080 32\n", + "2023-12-02 11:33:18.500351906 34\n", + "2023-12-02 11:33:18.503456115 32\n", + "2023-12-02 11:33:18.509632111 34\n", + "... 
...\n", + "2023-12-02 12:07:19.810304165 32\n", + "2023-12-02 12:07:29.802591801 34\n", + "2023-12-02 12:07:29.808767796 32\n", + "2023-12-02 12:07:40.692639828 34\n", + "2023-12-02 12:07:40.705023766 32\n", + "\n", + "[102 rows x 1 columns]\n", + "\n", + "\n", + "bitmask:\n", + " 34\n", + "\n", + "\n", + "stream_data:\n", + " event\n", + "time \n", + "2023-12-02 11:33:03.942463875 beambreak\n", + "2023-12-02 11:33:18.500351906 beambreak\n", + "2023-12-02 11:33:18.509632111 beambreak\n", + "2023-12-02 11:33:18.515808104 beambreak\n", + "2023-12-02 11:33:43.750751972 beambreak\n", + "2023-12-02 11:33:43.760032177 beambreak\n", + "2023-12-02 11:34:13.048543930 beambreak\n", + "2023-12-02 11:34:13.057824135 beambreak\n", + "2023-12-02 11:34:13.076320171 beambreak\n", + "2023-12-02 11:34:35.263328075 beambreak\n", + "2023-12-02 11:34:35.269504070 beambreak\n", + "2023-12-02 11:34:49.161056042 beambreak\n", + "2023-12-02 11:35:01.140063763 beambreak\n", + "2023-12-02 11:35:24.542560101 beambreak\n", + "2023-12-02 11:35:24.548736095 beambreak\n", + "2023-12-02 11:35:35.697792053 beambreak\n", + "2023-12-02 11:35:35.731743813 beambreak\n", + "2023-12-02 11:35:50.357567787 beambreak\n", + "2023-12-02 11:36:05.535552025 beambreak\n", + "2023-12-02 11:36:05.541759968 beambreak\n", + "2023-12-02 11:36:19.920832157 beambreak\n", + "2023-12-02 11:36:34.256608009 beambreak\n", + "2023-12-02 11:36:51.954944134 beambreak\n", + "2023-12-02 11:37:03.847680092 beambreak\n", + "2023-12-02 11:37:03.853856087 beambreak\n", + "2023-12-02 11:40:01.529439926 beambreak\n", + "2023-12-02 11:44:22.924352169 beambreak\n", + "2023-12-02 11:44:33.175744057 beambreak\n", + "2023-12-02 11:44:51.966368198 beambreak\n", + "2023-12-02 11:45:04.593088150 beambreak\n", + "2023-12-02 11:45:18.151519775 beambreak\n", + "2023-12-02 11:45:18.157663822 beambreak\n", + "2023-12-02 11:45:43.645567894 beambreak\n", + "2023-12-02 11:46:01.303775787 beambreak\n", + "2023-12-02 11:46:26.813504219 beambreak\n", + 
"2023-12-02 11:46:26.819680214 beambreak\n", + "2023-12-02 11:46:43.139167786 beambreak\n", + "2023-12-02 11:46:43.148416042 beambreak\n", + "2023-12-02 11:46:57.703455925 beambreak\n", + "2023-12-02 11:47:15.047423840 beambreak\n", + "2023-12-02 11:47:33.655744076 beambreak\n", + "2023-12-02 11:47:46.538911819 beambreak\n", + "2023-12-02 11:57:26.466911793 beambreak\n", + "2023-12-02 11:57:38.874559879 beambreak\n", + "2023-12-02 11:57:58.827775955 beambreak\n", + "2023-12-02 11:57:58.833951950 beambreak\n", + "2023-12-02 11:58:22.878240108 beambreak\n", + "2023-12-02 12:07:19.794847965 beambreak\n", + "2023-12-02 12:07:19.807199955 beambreak\n", + "2023-12-02 12:07:29.802591801 beambreak\n", + "2023-12-02 12:07:40.692639828 beambreak\n" + ] + } + ], + "source": [ + "pattern = \"Patch1_32*\"\n", + "event_name = \"beambreak\"\n", + "# Set the reader for the stream\n", + "harp_reader = reader.Harp(pattern=pattern, columns=[event_name])\n", + "# Get the bitmask as the first value of the loaded stream\n", + "data = api.load(root, harp_reader, start=start_time, end=end_time)\n", + "bitmask = data.iloc[0, 0]\n", + "new_reader = reader.BitmaskEvent(pattern, bitmask, event_name)\n", + "stream_data = api.load(root, new_reader, start=start_time, end=end_time)\n", + "\n", + "print(f\"data:\\n {data}\\n\\n\")\n", + "print(f\"bitmask:\\n {bitmask}\\n\\n\")\n", + "print(f\"stream_data:\\n {stream_data}\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Instantiating a `Device` object\"\"\"\n", + "\n", + "# A `Device` object is instantiated from a name, followed by one or more 'binder functions', which \n", + "# return a dictionary of a name paired with a `Reader` object. 
We call such a dictionary of `:Reader`\n",
+ "# key-value pairs a 'registry'.\n",
+ "\n",
+ "\n",
+ "# On creation, the `Device` object puts all registries into a single registry, which is accessible via the\n",
+ "# `registry` attribute.\n",
+ "\n",
+ "\n",
+ "# This is done so that we can create a 'schema' (a DotMap of a list of `Device` objects), where a `Device`\n",
+ "# object name is a key for the schema, and the `registry` names' (which are keys for the `Device` object) \n",
+ "# corresponding values are the `Reader` objects associated with that `Device` object.\n",
+ "# This works because, when a list of `Device` objects are passed into the `DotMap` constructor, the\n",
+ "# `__iter__` method of the `Device` object returns a tuple of the object's name with its `stream` \n",
+ "# attribute, which is passed in directly to the DotMap constructor to create a nested DotMap:\n",
+ "# device_name -> stream_name -> stream `Reader` object.\n",
+ "\n",
+ "d = Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog)\n",
+ "print(d.registry)\n",
+ "DotMap(d.registry)\n",
+ "DotMap([d])\n",
+ "s = DotMap([d])"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "#### Social0.1 Data Streams\n",
+ "\n",
+ "Now that we've covered streams, devices, and schemas, let's build a schema for the Social0.1 Experiment!\n",
+ "\n",
+ "First we'll need to know all the streams we recorded during the Social0.1 experiment: these can be found via\n",
+ "looking through all devices in an acquisition epoch \n",
+ "(e.g. 
`ceph/aeon/aeon/data/raw/AEON3/social0.1/2023-12-01T14-30-34`)\n",
+ "\n",
+ "And here they are: (*note: register 8 is always the harp heartbeat for any device that has this stream.*)\n",
+ "\n",
+ "- Metadata.yml\n",
+ "- Environment\n",
+ " - BlockState\n",
+ " - EnvironmentState\n",
+ " - LightEvents\n",
+ " - MessageLog\n",
+ " - SubjectState\n",
+ " - SubjectVisits\n",
+ " - SubjectWeight\n",
+ "- CameraTop (200, 201, avi, csv, ,)\n",
+ " - 200: position\n",
+ " - 201: region\n",
+ "- CameraNorth (avi, csv)\n",
+ "- CameraEast (avi, csv)\n",
+ "- CameraSouth (avi, csv)\n",
+ "- CameraWest (avi, csv)\n",
+ "- CameraPatch1 (avi, csv)\n",
+ "- CameraPatch2 (avi, csv)\n",
+ "- CameraPatch3 (avi, csv)\n",
+ "- CameraNest (avi, csv)\n",
+ "- ClockSynchronizer (8, 36)\n",
+ " - 36: heartbeat_out\n",
+ "- Nest (200, 201, 202, 203)\n",
+ " - 200: weight_raw\n",
+ " - 201: weight_tare\n",
+ " - 202: weight_filtered\n",
+ " - 203: weight_baseline\n",
+ " - 204: weight_subject\n",
+ "- Patch1 (8, 32, 35, 36, 87, 90, 91, 200, 201, 202, 203, State)\n",
+ " - 32: beam_break\n",
+ " - 35: delivery_set\n",
+ " - 36: delivery_clear\n",
+ " - 87: expansion_board\n",
+ " - 90: encoder_read\n",
+ " - 91: encoder_mode\n",
+ " - 200: dispenser_state\n",
+ " - 201: delivery_manual\n",
+ " - 202: missed_pellet\n",
+ " - 203: delivery_retry\n",
+ "- Patch2 (8, 32, 35, 36, 87, 90, 91, State)\n",
+ "- Patch3 (8, 32, 35, 36, 87, 90, 91, 200, 203, State)\n",
+ "- RfidEventsGate (8, 32, 35)\n",
+ " - 32: entry_id\n",
+ " - 35: hardware_notifications\n",
+ "- RfidEventsNest1 (8, 32, 35)\n",
+ "- RfidEventsNest2 (8, 32, 35)\n",
+ "- RfidEventsPatch1 (8, 32, 35)\n",
+ "- RfidEventsPatch2 (8, 32, 35)\n",
+ "- RfidEventsPatch3 (8, 32, 35)\n",
+ "- VideoController (8, 32, 33, 34, 35, 36, 45, 52)"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "\"\"\"Creating the Social 0.1 schema\"\"\"\n",
+ "\n",
+ "# Above we've listed out all 
the streams we recorded from during Social0.1, but we won't care to analyze all of them.\n", + "# Instead, we'll create a schema that only contains the streams we want to analyze:\n", + "\n", + "# Metadata\n", + "\n", + "# Environment\n", + "\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "DotMap(\n", + " [\n", + " Device(\"Metadata\", stream.metadata),\n", + " Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog),\n", + " Device(\"CameraEast\", stream.video),\n", + " Device(\"CameraNest\", stream.video),\n", + " Device(\"CameraNorth\", stream.video),\n", + " Device(\"CameraPatch1\", stream.video),\n", + " Device(\"CameraPatch2\", stream.video),\n", + " Device(\"CameraSouth\", stream.video),\n", + " Device(\"CameraWest\", stream.video),\n", + " ]\n", + ")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog)\n", + "print(d.registry)\n", + "DotMap(d.registry)\n", + "DotMap([d])\n", + "s = DotMap([d])\n", + "s.ExperimentalMetadata.EnvironmentState.pattern" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Metadata\"\"\"\n", + "\n", + "data = api.load(root, reader.Metadata(), start=start, end=end)\n", + "data.metadata.iloc[0] # get device metadata dotmap" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "Device(\"Metadata\", stream.metadata).stream" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d = Device(\"test\", reader.Video, reader.Metadata)" + ] + }, + 
{ + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "d.name" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exp02.ExperimentalMetadata" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exp02.Metadata" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "exp02.ExperimentalMetadata.keys()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "api.load(root, exp02.ExperimentalMetadata., start=start, end=end)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Environment\"\"\"\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"CameraTop\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Top quadrant and zoomed in patch cameras\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Nest\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Patches\"\"\"" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "\"\"\"Rfids\"\"\"" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "aeon", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.4" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} From 
f9441d5eb36b86d33a4eea9297781f9b5439e573 Mon Sep 17 00:00:00 2001 From: Jai Date: Wed, 6 Dec 2023 15:02:10 +0000 Subject: [PATCH 08/16] Started 'device' section --- ...understanding_aeon_data_architecture.ipynb | 478 ++++++++++++------ 1 file changed, 322 insertions(+), 156 deletions(-) diff --git a/docs/examples/understanding_aeon_data_architecture.ipynb b/docs/examples/understanding_aeon_data_architecture.ipynb index f1f453e8..e8f69f15 100644 --- a/docs/examples/understanding_aeon_data_architecture.ipynb +++ b/docs/examples/understanding_aeon_data_architecture.ipynb @@ -1,14 +1,5 @@ { "cells": [ - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Questions\n", - "\n", - "1. What is the usecase of the `DigitalBitmask` reader? How is it different to `BitmaskEvent`?" - ] - }, { "cell_type": "markdown", "metadata": {}, @@ -91,19 +82,7 @@ "cell_type": "code", "execution_count": 1, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "> \u001b[0;32m/nfs/nhome/live/jbhagat/ProjectAeon/aeon_mecha/aeon/io/binder/core.py\u001b[0m(33)\u001b[0;36menvironment_state\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 32 \u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mipdb\u001b[0m\u001b[0;34m;\u001b[0m \u001b[0mipdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m---> 33 \u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0;34m{\u001b[0m\u001b[0;34m\"EnvironmentState\"\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0m_reader\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mCsv\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf\"{pattern}_EnvironmentState_*\"\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m\"state\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m}\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 34 
\u001b[0;31m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n" - ] - } - ], + "outputs": [], "source": [ "\"\"\"Imports\"\"\"\n", "\n", @@ -624,9 +603,301 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 5, "metadata": {}, "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
weightid
3.784363e+0929.799999CAA-1120746
3.784363e+0929.799999CAA-1120746
3.784363e+0929.799999CAA-1120746
3.784363e+0929.799999CAA-1120746
3.784363e+0929.799999CAA-1120746
.........
3.784367e+0931.200001CAA-1120747
3.784367e+0931.200001CAA-1120747
3.784367e+0931.200001CAA-1120747
3.784367e+0931.200001CAA-1120747
3.784367e+0931.200001CAA-1120747
\n", + "

4382 rows × 2 columns

\n", + "
" + ], + "text/plain": [ + " weight id\n", + "3.784363e+09 29.799999 CAA-1120746\n", + "3.784363e+09 29.799999 CAA-1120746\n", + "3.784363e+09 29.799999 CAA-1120746\n", + "3.784363e+09 29.799999 CAA-1120746\n", + "3.784363e+09 29.799999 CAA-1120746\n", + "... ... ...\n", + "3.784367e+09 31.200001 CAA-1120747\n", + "3.784367e+09 31.200001 CAA-1120747\n", + "3.784367e+09 31.200001 CAA-1120747\n", + "3.784367e+09 31.200001 CAA-1120747\n", + "3.784367e+09 31.200001 CAA-1120747\n", + "\n", + "[4382 rows x 2 columns]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
weightconfidencesubject_idint_id
time
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [weight, confidence, subject_id, int_id]\n", + "Index: []" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
weightconfidencesubject_idint_id
time
2023-12-02 10:31:12.84000015329.51CAA-11207460
2023-12-02 10:31:12.94000005729.51CAA-11207460
2023-12-02 10:31:13.03999996229.51CAA-11207460
2023-12-02 10:31:13.09999990529.51CAA-11207460
2023-12-02 10:31:13.19999980929.51CAA-11207460
...............
2023-12-02 12:27:29.46000003831.11CAA-11207470
2023-12-02 12:27:29.55999994331.11CAA-11207470
2023-12-02 12:27:29.61999988631.11CAA-11207470
2023-12-02 12:27:29.71999979031.11CAA-11207471
2023-12-02 12:27:29.82000017231.11CAA-11207470
\n", + "

10525 rows × 4 columns

\n", + "
" + ], + "text/plain": [ + " weight confidence subject_id int_id\n", + "time \n", + "2023-12-02 10:31:12.840000153 29.5 1 CAA-1120746 0\n", + "2023-12-02 10:31:12.940000057 29.5 1 CAA-1120746 0\n", + "2023-12-02 10:31:13.039999962 29.5 1 CAA-1120746 0\n", + "2023-12-02 10:31:13.099999905 29.5 1 CAA-1120746 0\n", + "2023-12-02 10:31:13.199999809 29.5 1 CAA-1120746 0\n", + "... ... ... ... ...\n", + "2023-12-02 12:27:29.460000038 31.1 1 CAA-1120747 0\n", + "2023-12-02 12:27:29.559999943 31.1 1 CAA-1120747 0\n", + "2023-12-02 12:27:29.619999886 31.1 1 CAA-1120747 0\n", + "2023-12-02 12:27:29.719999790 31.1 1 CAA-1120747 1\n", + "2023-12-02 12:27:29.820000172 31.1 1 CAA-1120747 0\n", + "\n", + "[10525 rows x 4 columns]" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, { "name": "stdout", "output_type": "stream", @@ -640,14 +911,6 @@ "2023-12-02 11:33:18.500351906 34\n", "2023-12-02 11:33:18.503456115 32\n", "2023-12-02 11:33:18.509632111 34\n", - "... ...\n", - "2023-12-02 12:07:19.810304165 32\n", - "2023-12-02 12:07:29.802591801 34\n", - "2023-12-02 12:07:29.808767796 32\n", - "2023-12-02 12:07:40.692639828 34\n", - "2023-12-02 12:07:40.705023766 32\n", - "\n", - "[102 rows x 1 columns]\n", "\n", "\n", "bitmasked data:\n", @@ -657,15 +920,7 @@ "2023-12-02 11:33:03.951744080 beambreak\n", "2023-12-02 11:33:18.500351906 beambreak\n", "2023-12-02 11:33:18.503456115 beambreak\n", - "2023-12-02 11:33:18.509632111 beambreak\n", - "... 
...\n", - "2023-12-02 12:07:19.810304165 beambreak\n", - "2023-12-02 12:07:29.802591801 beambreak\n", - "2023-12-02 12:07:29.808767796 beambreak\n", - "2023-12-02 12:07:40.692639828 beambreak\n", - "2023-12-02 12:07:40.705023766 beambreak\n", - "\n", - "[102 rows x 1 columns]\n" + "2023-12-02 11:33:18.509632111 beambreak\n" ] } ], @@ -735,132 +990,43 @@ "beambreak_reader = reader.BitmaskEvent(pattern, bitmask, event_name)\n", "bitmasked_data = api.load(root, beambreak_reader, start=start_time, end=end_time)\n", "\n", - "print(f\"raw data:\\n {data}\\n\\n\")\n", - "print(f\"bitmasked data:\\n {bitmasked_data}\")" + "print(f\"raw data:\\n {data.head()}\\n\\n\")\n", + "print(f\"bitmasked data:\\n {bitmasked_data.head()}\")" ] }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 6, "metadata": {}, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "data:\n", - " beambreak\n", - "time \n", - "2023-12-02 11:33:03.942463875 34\n", - "2023-12-02 11:33:03.951744080 32\n", - "2023-12-02 11:33:18.500351906 34\n", - "2023-12-02 11:33:18.503456115 32\n", - "2023-12-02 11:33:18.509632111 34\n", - "... 
...\n", - "2023-12-02 12:07:19.810304165 32\n", - "2023-12-02 12:07:29.802591801 34\n", - "2023-12-02 12:07:29.808767796 32\n", - "2023-12-02 12:07:40.692639828 34\n", - "2023-12-02 12:07:40.705023766 32\n", - "\n", - "[102 rows x 1 columns]\n", - "\n", - "\n", - "bitmask:\n", - " 34\n", - "\n", - "\n", - "stream_data:\n", - " event\n", - "time \n", - "2023-12-02 11:33:03.942463875 beambreak\n", - "2023-12-02 11:33:18.500351906 beambreak\n", - "2023-12-02 11:33:18.509632111 beambreak\n", - "2023-12-02 11:33:18.515808104 beambreak\n", - "2023-12-02 11:33:43.750751972 beambreak\n", - "2023-12-02 11:33:43.760032177 beambreak\n", - "2023-12-02 11:34:13.048543930 beambreak\n", - "2023-12-02 11:34:13.057824135 beambreak\n", - "2023-12-02 11:34:13.076320171 beambreak\n", - "2023-12-02 11:34:35.263328075 beambreak\n", - "2023-12-02 11:34:35.269504070 beambreak\n", - "2023-12-02 11:34:49.161056042 beambreak\n", - "2023-12-02 11:35:01.140063763 beambreak\n", - "2023-12-02 11:35:24.542560101 beambreak\n", - "2023-12-02 11:35:24.548736095 beambreak\n", - "2023-12-02 11:35:35.697792053 beambreak\n", - "2023-12-02 11:35:35.731743813 beambreak\n", - "2023-12-02 11:35:50.357567787 beambreak\n", - "2023-12-02 11:36:05.535552025 beambreak\n", - "2023-12-02 11:36:05.541759968 beambreak\n", - "2023-12-02 11:36:19.920832157 beambreak\n", - "2023-12-02 11:36:34.256608009 beambreak\n", - "2023-12-02 11:36:51.954944134 beambreak\n", - "2023-12-02 11:37:03.847680092 beambreak\n", - "2023-12-02 11:37:03.853856087 beambreak\n", - "2023-12-02 11:40:01.529439926 beambreak\n", - "2023-12-02 11:44:22.924352169 beambreak\n", - "2023-12-02 11:44:33.175744057 beambreak\n", - "2023-12-02 11:44:51.966368198 beambreak\n", - "2023-12-02 11:45:04.593088150 beambreak\n", - "2023-12-02 11:45:18.151519775 beambreak\n", - "2023-12-02 11:45:18.157663822 beambreak\n", - "2023-12-02 11:45:43.645567894 beambreak\n", - "2023-12-02 11:46:01.303775787 beambreak\n", - "2023-12-02 11:46:26.813504219 beambreak\n", - 
"2023-12-02 11:46:26.819680214 beambreak\n", - "2023-12-02 11:46:43.139167786 beambreak\n", - "2023-12-02 11:46:43.148416042 beambreak\n", - "2023-12-02 11:46:57.703455925 beambreak\n", - "2023-12-02 11:47:15.047423840 beambreak\n", - "2023-12-02 11:47:33.655744076 beambreak\n", - "2023-12-02 11:47:46.538911819 beambreak\n", - "2023-12-02 11:57:26.466911793 beambreak\n", - "2023-12-02 11:57:38.874559879 beambreak\n", - "2023-12-02 11:57:58.827775955 beambreak\n", - "2023-12-02 11:57:58.833951950 beambreak\n", - "2023-12-02 11:58:22.878240108 beambreak\n", - "2023-12-02 12:07:19.794847965 beambreak\n", - "2023-12-02 12:07:19.807199955 beambreak\n", - "2023-12-02 12:07:29.802591801 beambreak\n", - "2023-12-02 12:07:40.692639828 beambreak\n" + "ename": "TypeError", + "evalue": "subject_weight_binder() takes 0 positional arguments but 1 was given", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", + "Cell \u001b[0;32mIn[6], line 12\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21msubject_state_binder\u001b[39m(): \u001b[38;5;66;03m# an example subject state binder function\u001b[39;00m\n\u001b[1;32m 10\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msubject_state\u001b[39m\u001b[38;5;124m\"\u001b[39m: reader\u001b[38;5;241m.\u001b[39mSubject(pattern\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mEnvironment_SubjectState_*\u001b[39m\u001b[38;5;124m\"\u001b[39m)}\n\u001b[0;32m---> 12\u001b[0m d \u001b[38;5;241m=\u001b[39m \u001b[43mDevice\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mSubjectMetadata\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43msubject_weight_binder\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43msubject_state_binder\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 15\u001b[0m \u001b[38;5;66;03m# On creation, the `Device` object puts all registries into a single registry, which is accessible via the\u001b[39;00m\n\u001b[1;32m 16\u001b[0m \u001b[38;5;66;03m# `registry` attribute.\u001b[39;00m\n\u001b[1;32m 17\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 30\u001b[0m \u001b[38;5;66;03m# DotMap([d])\u001b[39;00m\n\u001b[1;32m 31\u001b[0m \u001b[38;5;66;03m# s = DotMap([d])\u001b[39;00m\n", + "File \u001b[0;32m~/ProjectAeon/aeon_mecha/aeon/io/device.py:34\u001b[0m, in \u001b[0;36mDevice.__init__\u001b[0;34m(self, name, pattern, *args)\u001b[0m\n\u001b[1;32m 32\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__init__\u001b[39m(\u001b[38;5;28mself\u001b[39m, name, \u001b[38;5;241m*\u001b[39margs, pattern\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m):\n\u001b[1;32m 33\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mname \u001b[38;5;241m=\u001b[39m name\n\u001b[0;32m---> 34\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mregistry \u001b[38;5;241m=\u001b[39m \u001b[43mcompositeStream\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mpattern\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mis\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mpattern\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/ProjectAeon/aeon_mecha/aeon/io/device.py:14\u001b[0m, in \u001b[0;36mcompositeStream\u001b[0;34m(pattern, *args)\u001b[0m\n\u001b[1;32m 12\u001b[0m 
registry\u001b[38;5;241m.\u001b[39mupdate(method\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__func__\u001b[39m(pattern))\n\u001b[1;32m 13\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m---> 14\u001b[0m registry\u001b[38;5;241m.\u001b[39mupdate(\u001b[43mstream\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpattern\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 15\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m registry\n", + "\u001b[0;31mTypeError\u001b[0m: subject_weight_binder() takes 0 positional arguments but 1 was given" ] } ], - "source": [ - "pattern = \"Patch1_32*\"\n", - "event_name = \"beambreak\"\n", - "# Set the reader for the stream\n", - "harp_reader = reader.Harp(pattern=pattern, columns=[event_name])\n", - "# Get the bitmask as the first value of the loaded stream\n", - "data = api.load(root, harp_reader, start=start_time, end=end_time)\n", - "bitmask = data.iloc[0, 0]\n", - "new_reader = reader.BitmaskEvent(pattern, bitmask, event_name)\n", - "stream_data = api.load(root, new_reader, start=start_time, end=end_time)\n", - "\n", - "print(f\"data:\\n {data}\\n\\n\")\n", - "print(f\"bitmask:\\n {bitmask}\\n\\n\")\n", - "print(f\"stream_data:\\n {stream_data}\")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], "source": [ "\"\"\"Instantiating a `Device` object\"\"\"\n", "\n", "# A `Device` object is instantiated from a name, followed by one or more 'binder functions', which \n", "# return a dictionary of a name paired with a `Reader` object. We call such a dictionary of `:Reader`\n", - "# key-value pairs a 'registry'.\n", + "# key-value pairs a 'registry'. 
Each binder function must take in a `pattern` argument, which is used to\n", + "# set the pattern of the `Reader` object it returns.\n", + "def subject_weight_binder(pattern): # an example subject weight binder function\n", + " return {\"subject_weight\": Subject_Weight(pattern=pattern)}\n", + "\n", + "def subject_state_binder(): # an example subject state binder function\n", + " return {\"subject_state\": reader.Subject(pattern=pattern)}\n", + "\n", + "d = Device(\"SubjectMetadata\", subject_weight_binder, subject_state_binder)\n", "\n", "\n", "# On creation, the `Device` object puts all registries into a single registry, which is accessible via the\n", @@ -875,11 +1041,11 @@ "# attribute, which is passed in directly to the DotMap constructor to create a nested DotMap:\n", "# device_name -> stream_name -> stream `Reader` object.\n", "\n", - "d = Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog)\n", - "print(d.registry)\n", - "DotMap(d.registry)\n", - "DotMap([d])\n", - "s = DotMap([d])" + "# d = Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog)\n", + "# print(d.registry)\n", + "# DotMap(d.registry)\n", + "# DotMap([d])\n", + "# s = DotMap([d])" ] }, { From 26394c9c5ee16a62b54fee44a28ab58e9467f4ae Mon Sep 17 00:00:00 2001 From: Jai Date: Wed, 13 Dec 2023 14:51:00 +0000 Subject: [PATCH 09/16] Updated device terminology --- aeon/io/device.py | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/aeon/io/device.py b/aeon/io/device.py index 23018aaf..d9435a8c 100644 --- a/aeon/io/device.py +++ b/aeon/io/device.py @@ -1,26 +1,25 @@ import inspect -def compositeStream(pattern, *args): - """Merges multiple data streams into a single composite stream.""" +def register(pattern, *args): + """Merges multiple Readers into a single registry.""" registry = {} if args: - for stream in args: - if inspect.isclass(stream): - for method in vars(stream).values(): + for binder_fn in args: + if 
inspect.isclass(binder_fn): + for method in vars(binder_fn).values(): if isinstance(method, staticmethod): registry.update(method.__func__(pattern)) else: - registry.update(stream(pattern)) + registry.update(binder_fn(pattern)) return registry class Device: - """Groups multiple data streams into a logical device. + """Groups multiple Readers into a logical device. - If a device contains a single stream with the same pattern as the device - `name`, it will be considered a singleton, and the stream reader will be - paired directly with the device without nesting. + If a device contains a single stream reader with the same pattern as the device `name`, it will be + considered a singleton, and the stream reader will be paired directly with the device without nesting. Attributes: name (str): Name of the device. @@ -31,7 +30,7 @@ class Device: def __init__(self, name, *args, pattern=None): self.name = name - self.registry = compositeStream(name if pattern is None else pattern, *args) + self.registry = register(name if pattern is None else pattern, *args) def __iter__(self): if len(self.registry) == 1: From fc779f3002c4df341404dab9bdbfb0efc94b163b Mon Sep 17 00:00:00 2001 From: Jai Date: Wed, 13 Dec 2023 14:51:38 +0000 Subject: [PATCH 10/16] Finished 'device' and 'schema' explanation --- ...understanding_aeon_data_architecture.ipynb | 84 +++++++++++-------- 1 file changed, 47 insertions(+), 37 deletions(-) diff --git a/docs/examples/understanding_aeon_data_architecture.ipynb b/docs/examples/understanding_aeon_data_architecture.ipynb index e8f69f15..c67effe7 100644 --- a/docs/examples/understanding_aeon_data_architecture.ipynb +++ b/docs/examples/understanding_aeon_data_architecture.ipynb @@ -80,9 +80,18 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 19, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. 
To reload it, use:\n", + " %reload_ext autoreload\n" + ] + } + ], "source": [ "\"\"\"Imports\"\"\"\n", "\n", @@ -104,7 +113,7 @@ "import seaborn as sns\n", "\n", "import aeon\n", - "import aeon.io.binder.core as stream\n", + "import aeon.io.binder.core as core_binder\n", "from aeon.io import api\n", "from aeon.io import reader\n", "from aeon.io.device import Device\n", @@ -594,16 +603,19 @@ "subject_weight_reader = Subject_Weight()\n", "acq_epoch = \"2023-12-01T14-30-34\"\n", "weight_file = root / acq_epoch / \"Environment/Environment_SubjectWeight_2023-12-02T12-00-00.csv\"\n", + "print(\"Read from a single file:\")\n", "display(subject_weight_reader.read(weight_file))\n", "\n", "# And we can use `load` to load data across many same-stream files given a time range or time set.\n", + "print(\"Read from a contiguous time range:\")\n", "display(aeon.load(root, subject_weight_reader, start=start_time, end=end_time))\n", + "print(\"Read from a set of times:\")\n", "display(aeon.load(root, subject_weight_reader, time=time_set.values))" ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -917,10 +929,10 @@ " event\n", "time \n", "2023-12-02 11:33:03.942463875 beambreak\n", - "2023-12-02 11:33:03.951744080 beambreak\n", "2023-12-02 11:33:18.500351906 beambreak\n", - "2023-12-02 11:33:18.503456115 beambreak\n", - "2023-12-02 11:33:18.509632111 beambreak\n" + "2023-12-02 11:33:18.509632111 beambreak\n", + "2023-12-02 11:33:18.515808104 beambreak\n", + "2023-12-02 11:33:43.750751972 beambreak\n" ] } ], @@ -928,7 +940,7 @@ "\"\"\"Updating a `Reader` object\"\"\"\n", "\n", "# Occasionally, we may want to tweak the output from a `Reader` object's `read` method, or some tweaks to \n", - "# streams on the acquisition side may require us to make corresponding tweaks to a `Reader` object to\n", + "# streams on the acquisition side may require us to make corresponding tweaks to a `Reader` object to\n", "# ensure it works 
properly. We'll cover some of these cases here.\n", "\n", "# 1. Column changes\n", @@ -985,8 +997,8 @@ "bitmasks = np.unique(data[event_name].values)\n", "print(f\"bitmasks: {bitmasks}\")\n", "\n", - "# Let's set the bitmasks to the first returned unique value, and create a new `Reader` object to use this.\n", - "bitmask = bitmasks[0]\n", + "# Let's set the bitmask to '34', and create a new `Reader` object to use this.\n", + "bitmask = 34\n", "beambreak_reader = reader.BitmaskEvent(pattern, bitmask, event_name)\n", "bitmasked_data = api.load(root, beambreak_reader, start=start_time, end=end_time)\n", "\n", @@ -996,20 +1008,17 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 21, "metadata": {}, "outputs": [ { - "ename": "TypeError", - "evalue": "subject_weight_binder() takes 0 positional arguments but 1 was given", - "output_type": "error", - "traceback": [ - "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", - "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)", - "Cell \u001b[0;32mIn[6], line 12\u001b[0m\n\u001b[1;32m 9\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21msubject_state_binder\u001b[39m(): \u001b[38;5;66;03m# an example subject state binder function\u001b[39;00m\n\u001b[1;32m 10\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m {\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msubject_state\u001b[39m\u001b[38;5;124m\"\u001b[39m: reader\u001b[38;5;241m.\u001b[39mSubject(pattern\u001b[38;5;241m=\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mEnvironment_SubjectState_*\u001b[39m\u001b[38;5;124m\"\u001b[39m)}\n\u001b[0;32m---> 12\u001b[0m d \u001b[38;5;241m=\u001b[39m \u001b[43mDevice\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mSubjectMetadata\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43msubject_weight_binder\u001b[49m\u001b[43m,\u001b[49m\u001b[43m 
\u001b[49m\u001b[43msubject_state_binder\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 15\u001b[0m \u001b[38;5;66;03m# On creation, the `Device` object puts all registries into a single registry, which is accessible via the\u001b[39;00m\n\u001b[1;32m 16\u001b[0m \u001b[38;5;66;03m# `registry` attribute.\u001b[39;00m\n\u001b[1;32m 17\u001b[0m \n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 30\u001b[0m \u001b[38;5;66;03m# DotMap([d])\u001b[39;00m\n\u001b[1;32m 31\u001b[0m \u001b[38;5;66;03m# s = DotMap([d])\u001b[39;00m\n", - "File \u001b[0;32m~/ProjectAeon/aeon_mecha/aeon/io/device.py:34\u001b[0m, in \u001b[0;36mDevice.__init__\u001b[0;34m(self, name, pattern, *args)\u001b[0m\n\u001b[1;32m 32\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__init__\u001b[39m(\u001b[38;5;28mself\u001b[39m, name, \u001b[38;5;241m*\u001b[39margs, pattern\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mNone\u001b[39;00m):\n\u001b[1;32m 33\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mname \u001b[38;5;241m=\u001b[39m name\n\u001b[0;32m---> 34\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mregistry \u001b[38;5;241m=\u001b[39m \u001b[43mcompositeStream\u001b[49m\u001b[43m(\u001b[49m\u001b[43mname\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mif\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mpattern\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;129;43;01mis\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01mNone\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[38;5;28;43;01melse\u001b[39;49;00m\u001b[43m \u001b[49m\u001b[43mpattern\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m)\u001b[49m\n", - "File \u001b[0;32m~/ProjectAeon/aeon_mecha/aeon/io/device.py:14\u001b[0m, in \u001b[0;36mcompositeStream\u001b[0;34m(pattern, *args)\u001b[0m\n\u001b[1;32m 12\u001b[0m 
registry\u001b[38;5;241m.\u001b[39mupdate(method\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__func__\u001b[39m(pattern))\n\u001b[1;32m 13\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m---> 14\u001b[0m registry\u001b[38;5;241m.\u001b[39mupdate(\u001b[43mstream\u001b[49m\u001b[43m(\u001b[49m\u001b[43mpattern\u001b[49m\u001b[43m)\u001b[49m)\n\u001b[1;32m 15\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m registry\n", - "\u001b[0;31mTypeError\u001b[0m: subject_weight_binder() takes 0 positional arguments but 1 was given" + "name": "stdout", + "output_type": "stream", + "text": [ + "d.registry={'subject_weight': <__main__.Subject_Weight object at 0x7f590e2eec90>, 'subject_state': }\n", + "schema.SubjectMetadata=DotMap(subject_weight=<__main__.Subject_Weight object at 0x7f590e2eec90>, subject_state=)\n", + "schema.SubjectMetadata.subject_weight=<__main__.Subject_Weight object at 0x7f590e2eec90>\n", + "schema.Metadata=\n" ] } ], @@ -1018,34 +1027,35 @@ "\n", "# A `Device` object is instantiated from a name, followed by one or more 'binder functions', which \n", "# return a dictionary of a name paired with a `Reader` object. We call such a dictionary of `:Reader`\n", - "# key-value pairs a 'registry'. Each binder function must take in a `pattern` argument, which is used to\n", - "# set the pattern of the `Reader` object it returns.\n", + "# key-value pairs a 'registry'. Each binder function must take in a `pattern` argument, which can used to \n", + "# set the pattern of the `Reader` object it returns. 
This requirement for binder functions is for allowing\n", + "# the `Device` to optionally pass its name to appropriately set the pattern of `Reader` objects it contains.\n", "def subject_weight_binder(pattern): # an example subject weight binder function\n", - " return {\"subject_weight\": Subject_Weight(pattern=pattern)}\n", + " return {\"subject_weight\": subject_weight_reader}\n", "\n", - "def subject_state_binder(): # an example subject state binder function\n", - " return {\"subject_state\": reader.Subject(pattern=pattern)}\n", + "def subject_state_binder(pattern): # an example subject state binder function\n", + " return {\"subject_state\": reader.Subject(pattern=\"Environment_SubjectState*\")}\n", "\n", "d = Device(\"SubjectMetadata\", subject_weight_binder, subject_state_binder)\n", "\n", - "\n", "# On creation, the `Device` object puts all registries into a single registry, which is accessible via the\n", "# `registry` attribute.\n", - "\n", + "print(f\"{d.registry=}\")\n", "\n", "# This is done so that we can create a 'schema' (a DotMap of a list of `Device` objects), where a `Device`\n", - "# object name is a key for the schema, and the `registry` names' (which are keys for the `Device` object) \n", - "# corresponding values are the `Reader` objects associated with that `Device` object.\n", + "# object name is a key for the schema, and the corresponding values of the `registry` names (which are keys\n", + "# for the `Device` object) are the `Reader` objects associated with that `Device` object.\n", + "\n", "# This works because, when a list of `Device` objects are passed into the `DotMap` constructor, the\n", - "# `__iter__` method of the `Device` object returns a tuple of the object's name with its `stream` \n", + "# `__iter__` method of the `Device` object returns a tuple of the object's name with its `stream` \n", "# attribute, which is passed in directly to the DotMap constructor to create a nested DotMap:\n", - "# device_name -> stream_name -> 
stream `Reader` object.\n", + "# device_name -> stream_name -> stream `Reader` object. This is shown below:\n", "\n", - "# d = Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog)\n", - "# print(d.registry)\n", - "# DotMap(d.registry)\n", - "# DotMap([d])\n", - "# s = DotMap([d])" + "d2 = Device(\"Metadata\", core_binder.metadata) # instantiate Device from a defined binder function\n", + "schema = DotMap([d, d2]) # create schema as DotMap of list of Device objects\n", + "print(f\"{schema.SubjectMetadata=}\") # Device object name as key to schema\n", + "print(f\"{schema.SubjectMetadata.subject_weight=}\") # binder function name yields the Reader object\n", + "print(f\"{schema.Metadata=}\") # for a singleton Device object, Device name alone yields the Reader object" ] }, { From 422395ffab76d1aa03d651df8068b87f91de8e04 Mon Sep 17 00:00:00 2001 From: Jai Date: Wed, 13 Dec 2023 19:54:51 +0000 Subject: [PATCH 11/16] reintroduce aeon.schema --- aeon/dj_pipeline/acquisition.py | 2 +- aeon/{io/binder => schema}/__init__.py | 0 aeon/{io/binder => schema}/core.py | 0 aeon/{io/binder => schema}/foraging.py | 2 +- aeon/{io/binder => schema}/octagon.py | 0 aeon/{io/binder => schema}/schemas.py | 52 +++++++++++++------------- aeon/{io/binder => schema}/social.py | 0 tests/io/test_api.py | 2 +- 8 files changed, 28 insertions(+), 30 deletions(-) rename aeon/{io/binder => schema}/__init__.py (100%) rename aeon/{io/binder => schema}/core.py (100%) rename aeon/{io/binder => schema}/foraging.py (98%) rename aeon/{io/binder => schema}/octagon.py (100%) rename aeon/{io/binder => schema}/schemas.py (63%) rename aeon/{io/binder => schema}/social.py (100%) diff --git a/aeon/dj_pipeline/acquisition.py b/aeon/dj_pipeline/acquisition.py index 64842d65..6fa0a31f 100644 --- a/aeon/dj_pipeline/acquisition.py +++ b/aeon/dj_pipeline/acquisition.py @@ -5,7 +5,7 @@ import pandas as pd from aeon.io import api as io_api -from aeon.io.binder import schemas as aeon_schema +from aeon.io 
import schemas as aeon_schema from aeon.io import reader as io_reader from aeon.analysis import utils as analysis_utils diff --git a/aeon/io/binder/__init__.py b/aeon/schema/__init__.py similarity index 100% rename from aeon/io/binder/__init__.py rename to aeon/schema/__init__.py diff --git a/aeon/io/binder/core.py b/aeon/schema/core.py similarity index 100% rename from aeon/io/binder/core.py rename to aeon/schema/core.py diff --git a/aeon/io/binder/foraging.py b/aeon/schema/foraging.py similarity index 98% rename from aeon/io/binder/foraging.py rename to aeon/schema/foraging.py index 9267dc77..ffd8fdd9 100644 --- a/aeon/io/binder/foraging.py +++ b/aeon/schema/foraging.py @@ -4,7 +4,7 @@ import aeon.io.device as _device import aeon.io.reader as _reader -import aeon.io.binder.core as _stream +import aeon.schema.core as _stream class Area(_Enum): diff --git a/aeon/io/binder/octagon.py b/aeon/schema/octagon.py similarity index 100% rename from aeon/io/binder/octagon.py rename to aeon/schema/octagon.py diff --git a/aeon/io/binder/schemas.py b/aeon/schema/schemas.py similarity index 63% rename from aeon/io/binder/schemas.py rename to aeon/schema/schemas.py index 782767e4..778bf140 100644 --- a/aeon/io/binder/schemas.py +++ b/aeon/schema/schemas.py @@ -1,21 +1,19 @@ from dotmap import DotMap - -import aeon.io.binder.core as stream from aeon.io.device import Device -from aeon.io.binder import foraging, octagon +from aeon.schema import core, foraging, octagon exp02 = DotMap( [ - Device("Metadata", stream.metadata), - Device("ExperimentalMetadata", stream.environment, stream.messageLog), - Device("CameraTop", stream.video, stream.position, foraging.region), - Device("CameraEast", stream.video), - Device("CameraNest", stream.video), - Device("CameraNorth", stream.video), - Device("CameraPatch1", stream.video), - Device("CameraPatch2", stream.video), - Device("CameraSouth", stream.video), - Device("CameraWest", stream.video), + Device("Metadata", core.metadata), + 
Device("ExperimentalMetadata", core.environment, core.messageLog), + Device("CameraTop", core.video, core.position, foraging.region), + Device("CameraEast", core.video), + Device("CameraNest", core.video), + Device("CameraNorth", core.video), + Device("CameraPatch1", core.video), + Device("CameraPatch2", core.video), + Device("CameraSouth", core.video), + Device("CameraWest", core.video), Device("Nest", foraging.weight), Device("Patch1", foraging.patch), Device("Patch2", foraging.patch), @@ -25,25 +23,25 @@ exp01 = DotMap( [ Device("SessionData", foraging.session), - Device("FrameTop", stream.video, stream.position), - Device("FrameEast", stream.video), - Device("FrameGate", stream.video), - Device("FrameNorth", stream.video), - Device("FramePatch1", stream.video), - Device("FramePatch2", stream.video), - Device("FrameSouth", stream.video), - Device("FrameWest", stream.video), - Device("Patch1", foraging.depletionFunction, stream.encoder, foraging.feeder), - Device("Patch2", foraging.depletionFunction, stream.encoder, foraging.feeder), + Device("FrameTop", core.video, core.position), + Device("FrameEast", core.video), + Device("FrameGate", core.video), + Device("FrameNorth", core.video), + Device("FramePatch1", core.video), + Device("FramePatch2", core.video), + Device("FrameSouth", core.video), + Device("FrameWest", core.video), + Device("Patch1", foraging.depletionFunction, core.encoder, foraging.feeder), + Device("Patch2", foraging.depletionFunction, core.encoder, foraging.feeder), ] ) octagon01 = DotMap( [ - Device("Metadata", stream.metadata), - Device("CameraTop", stream.video, stream.position), - Device("CameraColorTop", stream.video), - Device("ExperimentalMetadata", stream.subject_state), + Device("Metadata", core.metadata), + Device("CameraTop", core.video, core.position), + Device("CameraColorTop", core.video), + Device("ExperimentalMetadata", core.subject_state), Device("Photodiode", octagon.photodiode), Device("OSC", octagon.OSC), Device("TaskLogic", 
octagon.TaskLogic), diff --git a/aeon/io/binder/social.py b/aeon/schema/social.py similarity index 100% rename from aeon/io/binder/social.py rename to aeon/schema/social.py diff --git a/tests/io/test_api.py b/tests/io/test_api.py index 486f1d3f..8f9d8c0b 100644 --- a/tests/io/test_api.py +++ b/tests/io/test_api.py @@ -5,7 +5,7 @@ from pytest import mark import aeon -from aeon.io.binder.schemas import exp02 +from aeon.schema.schemas import exp02 nonmonotonic_path = Path(__file__).parent.parent / "data" / "nonmonotonic" monotonic_path = Path(__file__).parent.parent / "data" / "monotonic" From f11f97ffe0ef4351b533b95a1c0f9d7fe87ce915 Mon Sep 17 00:00:00 2001 From: Jai Date: Thu, 14 Dec 2023 15:57:32 +0000 Subject: [PATCH 12/16] Refactored to use 'register' --- aeon/schema/core.py | 2 +- aeon/schema/foraging.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/aeon/schema/core.py b/aeon/schema/core.py index 8181c710..4bc75f96 100644 --- a/aeon/schema/core.py +++ b/aeon/schema/core.py @@ -24,7 +24,7 @@ def encoder(pattern): def environment(pattern): """Metadata for environment mode and subjects.""" - return _device.compositeStream(pattern, environment_state, subject_state) + return _device.register(pattern, environment_state, subject_state) def environment_state(pattern): diff --git a/aeon/schema/foraging.py b/aeon/schema/foraging.py index ffd8fdd9..7382f124 100644 --- a/aeon/schema/foraging.py +++ b/aeon/schema/foraging.py @@ -65,7 +65,7 @@ def depletionFunction(pattern): def feeder(pattern): """Feeder commands and events.""" - return _device.compositeStream(pattern, beam_break, deliver_pellet) + return _device.register(pattern, beam_break, deliver_pellet) def beam_break(pattern): @@ -80,12 +80,12 @@ def deliver_pellet(pattern): def patch(pattern): """Data streams for a patch.""" - return _device.compositeStream(pattern, depletionFunction, _stream.encoder, feeder) + return _device.register(pattern, depletionFunction, _stream.encoder, feeder) def 
weight(pattern): """Weight measurement data streams for a specific nest.""" - return _device.compositeStream(pattern, weight_raw, weight_filtered, weight_subject) + return _device.register(pattern, weight_raw, weight_filtered, weight_subject) def weight_raw(pattern): From 5a095c73daee881f49678d1b267dc63565599512 Mon Sep 17 00:00:00 2001 From: Jai Date: Thu, 14 Dec 2023 15:57:58 +0000 Subject: [PATCH 13/16] Social schema WIP --- ...understanding_aeon_data_architecture.ipynb | 513 ++++++++++++------ 1 file changed, 340 insertions(+), 173 deletions(-) diff --git a/docs/examples/understanding_aeon_data_architecture.ipynb b/docs/examples/understanding_aeon_data_architecture.ipynb index c67effe7..8a1ff942 100644 --- a/docs/examples/understanding_aeon_data_architecture.ipynb +++ b/docs/examples/understanding_aeon_data_architecture.ipynb @@ -80,45 +80,28 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 1, "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "The autoreload extension is already loaded. 
To reload it, use:\n", - " %reload_ext autoreload\n" - ] - } - ], + "outputs": [], "source": [ - "\"\"\"Imports\"\"\"\n", + "\"\"\"Notebook settings and imports.\"\"\"\n", "\n", "%load_ext autoreload\n", "%autoreload 2\n", "# %flow mode reactive\n", "\n", - "from datetime import date\n", - "import ipdb\n", - "from itertools import product\n", + "import numpy as np\n", + "import pandas as pd\n", + "\n", "from pathlib import Path\n", "\n", "from dotmap import DotMap\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import pandas as pd\n", - "import plotly.express as px\n", - "import plotly.graph_objs as go\n", - "import seaborn as sns\n", "\n", "import aeon\n", - "import aeon.io.binder.core as core_binder\n", - "from aeon.io import api\n", "from aeon.io import reader\n", - "from aeon.io.device import Device\n", - "from aeon.io.binder.schemas import exp02, exp01\n", - "from aeon.analysis.utils import visits, distancetravelled" + "from aeon.io.device import Device, register\n", + "from aeon.schema import core, foraging, social\n", + "from aeon.schema.schemas import exp02" ] }, { @@ -137,8 +120,7 @@ " pd.Series(pd.date_range(start_time, start_time + pd.Timedelta(hours=1), freq=\"1s\")),\n", " pd.Series(pd.date_range(end_time, end_time + pd.Timedelta(hours=1), freq=\"1s\"))\n", " ]\n", - ")\n", - "\n" + ")" ] }, { @@ -197,6 +179,13 @@ "execution_count": 4, "metadata": {}, "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Read from a single file:\n" + ] + }, { "data": { "text/html": [ @@ -327,6 +316,13 @@ "metadata": {}, "output_type": "display_data" }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Read from a contiguous time range:\n" + ] + }, { "data": { "text/html": [ @@ -465,6 +461,13 @@ "metadata": {}, "output_type": "display_data" }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Read from a set of times:\n" + ] + }, { "data": { "text/html": [ @@ -615,7 +618,7 @@ }, { 
"cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -993,14 +996,14 @@ "pattern = \"Patch1_32*\"\n", "event_name = \"beambreak\"\n", "harp_reader = reader.Harp(pattern=pattern, columns=[event_name])\n", - "data = api.load(root, harp_reader, start=start_time, end=end_time)\n", + "data = aeon.load(root, harp_reader, start=start_time, end=end_time)\n", "bitmasks = np.unique(data[event_name].values)\n", "print(f\"bitmasks: {bitmasks}\")\n", "\n", "# Let's set the bitmask to '34', and create a new `Reader` object to use this.\n", "bitmask = 34\n", "beambreak_reader = reader.BitmaskEvent(pattern, bitmask, event_name)\n", - "bitmasked_data = api.load(root, beambreak_reader, start=start_time, end=end_time)\n", + "bitmasked_data = aeon.load(root, beambreak_reader, start=start_time, end=end_time)\n", "\n", "print(f\"raw data:\\n {data.head()}\\n\\n\")\n", "print(f\"bitmasked data:\\n {bitmasked_data.head()}\")" @@ -1008,17 +1011,17 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "d.registry={'subject_weight': <__main__.Subject_Weight object at 0x7f590e2eec90>, 'subject_state': }\n", - "schema.SubjectMetadata=DotMap(subject_weight=<__main__.Subject_Weight object at 0x7f590e2eec90>, subject_state=)\n", - "schema.SubjectMetadata.subject_weight=<__main__.Subject_Weight object at 0x7f590e2eec90>\n", - "schema.Metadata=\n" + "d.registry={'subject_weight': <__main__.Subject_Weight object at 0x7f1a47076690>, 'subject_state': }\n", + "schema.SubjectMetadata=DotMap(subject_weight=<__main__.Subject_Weight object at 0x7f1a47076690>, subject_state=)\n", + "schema.SubjectMetadata.subject_weight=<__main__.Subject_Weight object at 0x7f1a47076690>\n", + "schema.Metadata=\n" ] } ], @@ -1027,9 +1030,11 @@ "\n", "# A `Device` object is instantiated from a name, followed by one or more 'binder functions', which \n", "# return a 
dictionary of a name paired with a `Reader` object. We call such a dictionary of `:Reader`\n", - "# key-value pairs a 'registry'. Each binder function must take in a `pattern` argument, which can used to \n", + "# key-value pairs a 'registry'. Each binder function requires a `pattern` argument, which can be used to\n", "# set the pattern of the `Reader` object it returns. This requirement for binder functions is for allowing\n", "# the `Device` to optionally pass its name to appropriately set the pattern of `Reader` objects it contains.\n", + "\n", + "# Below are examples of \"empty pattern\" binder functions, where the pattern doesn't get used.\n", "def subject_weight_binder(pattern): # an example subject weight binder function\n", " return {\"subject_weight\": subject_weight_reader}\n", "\n", @@ -1051,13 +1056,73 @@ "# attribute, which is passed in directly to the DotMap constructor to create a nested DotMap:\n", "# device_name -> stream_name -> stream `Reader` object. This is shown below:\n", "\n", - "d2 = Device(\"Metadata\", core_binder.metadata) # instantiate Device from a defined binder function\n", + "d2 = Device(\"Metadata\", core.metadata) # instantiate Device from a defined binder function\n", "schema = DotMap([d, d2]) # create schema as DotMap of list of Device objects\n", "print(f\"{schema.SubjectMetadata=}\") # Device object name as key to schema\n", "print(f\"{schema.SubjectMetadata.subject_weight=}\") # binder function name yields the Reader object\n", "print(f\"{schema.Metadata=}\") # for a singleton Device object, Device name alone yields the Reader object" ] }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "feeder_device.registry={'pellet_trigger': , 'pellet_beambreak': }\n", + "feeder_device_nested.registry={'pellet_trigger': , 'pellet_beambreak': }\n", + "patch_device.registry={'pellet_trigger': , 'pellet_beambreak': , 'Encoder': }\n" + ] + } 
+ ], + "source": [ + "\"\"\"Nested binder functions\"\"\"\n", + "\n", + "# Binder functions can return a dict whose value is actually composed of multiple, rather than a single,\n", + "# `Reader` objects. This is done by creating nested binder functions, via `register`.\n", + "\n", + "# First let's define two standard binder functions, for pellet delivery trigger and beambreak events. \n", + "# In all examples below we'll define \"device-name passed\" binder functions, since the `Device` object which\n", + "# will be instantiated from these functions will pass its name to set the pattern of the corresponding\n", + "# Reader objects.\n", + "def pellet_trigger(pattern):\n", + " \"\"\"Pellet delivery trigger events.\"\"\"\n", + " return {\"pellet_trigger\": reader.BitmaskEvent(f\"{pattern}_35_*\", 0x80, \"PelletTriggered\")}\n", + "\n", + "\n", + "def pellet_beambreak(pattern):\n", + " \"\"\"Pellet beambreak events.\"\"\"\n", + " return {\"pellet_beambreak\": reader.BitmaskEvent(f\"{pattern}_32_*\", 0x22, \"PelletDetected\")}\n", + "\n", + "# Next, we'll define a nested binder function for a \"feeder\", which returns the two binder functions above.\n", + "def feeder(pattern):\n", + " \"\"\"Feeder commands and events.\"\"\"\n", + " return register(pattern, pellet_trigger, pellet_beambreak)\n", + "\n", + "# And further, we can define a higher-level nested binder function for a \"patch\", which includes the\n", + "# magnetic encoder values for a patch's wheel in addition to `feeder`.\n", + "def patch(pattern):\n", + " \"\"\"Data streams for a patch.\"\"\"\n", + " return register(pattern, feeder, core.encoder)\n", + "\n", + "\n", + "# We can now instantiate a `Device` object as done previously, from combinations of binder functions, but \n", + "# also from nested binder functions.\n", + "feeder_device = Device(\"Patch1\", pellet_trigger, pellet_beambreak)\n", + "feeder_device_nested = Device(\"Patch1\", feeder)\n", + "patch_device = Device(\"Patch1\", patch)\n", + "\n", + 
"# And we can see that `feeder_device` and `feeder_device_nested` are equivalent.\n", + "print(f\"{feeder_device.registry=}\")\n", + "print(f\"{feeder_device_nested.registry=}\")\n", + "\n", + "# And `patch_device` contains the same Reader objects as these plus an `Encoder` Reader.\n", + "print(f\"{patch_device.registry=}\")" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -1073,6 +1138,7 @@ "And here they are: (*note: register 8 is always the harp heartbeat for any device that has this stream.*)\n", "\n", "- Metadata.yml\n", + "- AudioAmbient\n", "- Environment\n", " - BlockState\n", " - EnvironmentState\n", @@ -1126,165 +1192,244 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "\"\"\"Creating the Social 0.1 schema\"\"\"\n", "\n", - "# Above we've listed out all the streams we recorded from during Social0.1, but we won't care to analyze all of them.\n", - "# Instead, we'll create a schema that only contains the streams we want to analyze:\n", + "# Above we've listed out all the streams we recorded from during Social0.1, but we won't care to analyze all\n", + "# of them. Instead, we'll create a DotMap schema from Device objects that only contains Readers for the\n", + "# streams we want to analyze.\n", "\n", - "# Metadata\n", + "# We'll see both examples of binder functions we saw previously: 1. \"empty pattern\", and\n", + "# 2. 
\"device-name passed\".\n", "\n", - "# Environment\n", - "\n" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "DotMap(\n", - " [\n", - " Device(\"Metadata\", stream.metadata),\n", - " Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog),\n", - " Device(\"CameraEast\", stream.video),\n", - " Device(\"CameraNest\", stream.video),\n", - " Device(\"CameraNorth\", stream.video),\n", - " Device(\"CameraPatch1\", stream.video),\n", - " Device(\"CameraPatch2\", stream.video),\n", - " Device(\"CameraSouth\", stream.video),\n", - " Device(\"CameraWest\", stream.video),\n", - " ]\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "d = Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog)\n", - "print(d.registry)\n", - "DotMap(d.registry)\n", - "DotMap([d])\n", - "s = DotMap([d])\n", - "s.ExperimentalMetadata.EnvironmentState.pattern" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"Metadata\"\"\"\n", + "# And we'll see both examples of instantiating Device objects we saw previously: 1. from singleton binder\n", + "# functions; 2. 
from multiple and/or nested binder functions.\n", "\n", - "data = api.load(root, reader.Metadata(), start=start, end=end)\n", - "data.metadata.iloc[0] # get device metadata dotmap" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "d = Device(\"ExperimentalMetadata\", stream.environment, stream.messageLog)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "Device(\"Metadata\", stream.metadata).stream" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "d = Device(\"test\", reader.Video, reader.Metadata)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "d.name" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "exp02.ExperimentalMetadata" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "exp02.Metadata" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "exp02.ExperimentalMetadata.keys()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "api.load(root, exp02.ExperimentalMetadata., start=start, end=end)" + "# (Note, in the simplest case, a schema can always be created from / reduced to \"empty pattern\" binder\n", + "# functions as singletons in Device objects.)\n", + "\n", + "# Metadata (will be a singleton binder function Device object)\n", + "# ---\n", + "\n", + "# `core.metadata` is a \"device-name passed\" binder function that returns a `reader.Metadata` Reader object\n", + "metadata_device = Device(\"Metadata\", core.metadata)\n", + "\n", + "# ---\n", + "\n", + "# Environment (will be a nested, multiple binder function Device object)\n", + "# ---\n", + 
"\n", + "# BlockState\n", + "cols = [\"pellet_ct\", \"pellet_ct_thresh\", \"due_time\"]\n", + "block_state_reader = reader.Csv(\"Environment_BlockState*\", cols)\n", + "# \"Empty pattern\" binder fn.\n", + "block_state_binder_fn = lambda pattern: {\"block_state\": block_state_reader} \n", + "\n", + "# EnvironmentState\n", + "\n", + "# LightEvents\n", + "\n", + "# MessageLog\n", + "\n", + "# SubjectState\n", + "\n", + "# SubjectVisits\n", + "\n", + "# SubjectWeight\n", + "\n", + "# Nested binder fn Device object.\n", + "environment_device = Device(\n", + " \"Environment\", \n", + " block_state_binder_fn, \n", + " core.environment # readers for \n", + ")\n", + "\n", + "# ---" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 10, "metadata": {}, "outputs": [], "source": [ - "\"\"\"Environment\"\"\"\n" + "d = Device(\"Environment\", block_state_binder_fn)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 13, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "\"\"\"CameraTop\"\"\"" + "d.registry[\"block_state\"]" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 14, "metadata": {}, - "outputs": [], + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
pellet_ctpellet_ct_threshdue_time
time
2023-12-02 10:30:44.8520002376420001-01-01T00:00:00.0000000
2023-12-02 10:30:58.8680000317420001-01-01T00:00:00.0000000
2023-12-02 10:31:14.0159997948420001-01-01T00:00:00.0000000
2023-12-02 10:31:29.2859840399420001-01-01T00:00:00.0000000
2023-12-02 10:31:45.99200010310420001-01-01T00:00:00.0000000
............
2023-12-02 12:27:07.51200008313430001-01-01T00:00:00.0000000
2023-12-02 12:27:20.61999988614430001-01-01T00:00:00.0000000
2023-12-02 12:28:36.51398420315430001-01-01T00:00:00.0000000
2023-12-02 12:29:02.42598390616430001-01-01T00:00:00.0000000
2023-12-02 12:29:17.12998390217430001-01-01T00:00:00.0000000
\n", + "

97 rows × 3 columns

\n", + "
" + ], + "text/plain": [ + " pellet_ct pellet_ct_thresh \\\n", + "time \n", + "2023-12-02 10:30:44.852000237 6 42 \n", + "2023-12-02 10:30:58.868000031 7 42 \n", + "2023-12-02 10:31:14.015999794 8 42 \n", + "2023-12-02 10:31:29.285984039 9 42 \n", + "2023-12-02 10:31:45.992000103 10 42 \n", + "... ... ... \n", + "2023-12-02 12:27:07.512000083 13 43 \n", + "2023-12-02 12:27:20.619999886 14 43 \n", + "2023-12-02 12:28:36.513984203 15 43 \n", + "2023-12-02 12:29:02.425983906 16 43 \n", + "2023-12-02 12:29:17.129983902 17 43 \n", + "\n", + " due_time \n", + "time \n", + "2023-12-02 10:30:44.852000237 0001-01-01T00:00:00.0000000 \n", + "2023-12-02 10:30:58.868000031 0001-01-01T00:00:00.0000000 \n", + "2023-12-02 10:31:14.015999794 0001-01-01T00:00:00.0000000 \n", + "2023-12-02 10:31:29.285984039 0001-01-01T00:00:00.0000000 \n", + "2023-12-02 10:31:45.992000103 0001-01-01T00:00:00.0000000 \n", + "... ... \n", + "2023-12-02 12:27:07.512000083 0001-01-01T00:00:00.0000000 \n", + "2023-12-02 12:27:20.619999886 0001-01-01T00:00:00.0000000 \n", + "2023-12-02 12:28:36.513984203 0001-01-01T00:00:00.0000000 \n", + "2023-12-02 12:29:02.425983906 0001-01-01T00:00:00.0000000 \n", + "2023-12-02 12:29:17.129983902 0001-01-01T00:00:00.0000000 \n", + "\n", + "[97 rows x 3 columns]" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "\"\"\"Top quadrant and zoomed in patch cameras\"\"\"" + "start_time = pd.Timestamp(\"2023-12-02 10:30:00\")\n", + "end_time = pd.Timestamp(\"2023-12-02 12:30:00\")\n", + "r = d.registry[\"block_state\"]\n", + "aeon.load(root, r, start=start_time, end=end_time)" ] }, { @@ -1293,7 +1438,12 @@ "metadata": {}, "outputs": [], "source": [ - "\"\"\"Nest\"\"\"" + "social01 = DotMap(\n", + " [\n", + " metadata_device,\n", + " \n", + " ]\n", + ")" ] }, { @@ -1302,7 +1452,7 @@ "metadata": {}, "outputs": [], "source": [ - "\"\"\"Patches\"\"\"" + "exp02.ExperimentalMetadata.EnvironmentState" ] }, { @@ -1311,7 
+1461,24 @@ "metadata": {}, "outputs": [], "source": [ - "\"\"\"Rfids\"\"\"" + "\"\"\"Test all readers in schema.\"\"\"\n", + "\n", + "def find_obj(dotmap, obj):\n", + " \"\"\"Returns a list of objects of type `obj` found in a DotMap.\"\"\"\n", + " objs = []\n", + " for value in dotmap.values():\n", + " if isinstance(value, obj):\n", + " objs.append(value)\n", + " elif isinstance(value, DotMap):\n", + " objs.extend(find_obj(value, obj))\n", + " return objs\n", + "\n", + "readers = find_obj(social01, reader.Reader)\n", + "for r in readers:\n", + " data = aeon.load(root, r, start=start_time, end=end_time)\n", + " #assert not data.empty, f\"No data found with {r}.\"\n", + " print(f\"{r}: {data.head()=}\")\n", + " " ] } ], From 14d463525d28d4f57c361c1751a6ef6020ff9cd1 Mon Sep 17 00:00:00 2001 From: Jai Date: Thu, 14 Dec 2023 17:35:53 +0000 Subject: [PATCH 14/16] Refactored for consistent snake_case --- aeon/schema/core.py | 2 +- aeon/schema/foraging.py | 4 ++-- aeon/schema/schemas.py | 6 +++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/aeon/schema/core.py b/aeon/schema/core.py index 4bc75f96..cf60dc46 100644 --- a/aeon/schema/core.py +++ b/aeon/schema/core.py @@ -37,7 +37,7 @@ def subject_state(pattern): return {"SubjectState": _reader.Subject(f"{pattern}_SubjectState_*")} -def messageLog(pattern): +def message_log(pattern): """Message log data.""" return {"MessageLog": _reader.Log(f"{pattern}_MessageLog_*")} diff --git a/aeon/schema/foraging.py b/aeon/schema/foraging.py index 7382f124..df42cc1a 100644 --- a/aeon/schema/foraging.py +++ b/aeon/schema/foraging.py @@ -58,7 +58,7 @@ def region(pattern): return {"Region": _RegionReader(f"{pattern}_201_*")} -def depletionFunction(pattern): +def depletion_function(pattern): """State of the linear depletion function for foraging patches.""" return {"DepletionState": _PatchState(f"{pattern}_State_*")} @@ -80,7 +80,7 @@ def deliver_pellet(pattern): def patch(pattern): """Data streams for a patch.""" - return 
_device.register(pattern, depletionFunction, _stream.encoder, feeder) + return _device.register(pattern, depletion_function, _stream.encoder, feeder) def weight(pattern): diff --git a/aeon/schema/schemas.py b/aeon/schema/schemas.py index 778bf140..7b61c2d7 100644 --- a/aeon/schema/schemas.py +++ b/aeon/schema/schemas.py @@ -5,7 +5,7 @@ exp02 = DotMap( [ Device("Metadata", core.metadata), - Device("ExperimentalMetadata", core.environment, core.messageLog), + Device("ExperimentalMetadata", core.environment, core.message_log), Device("CameraTop", core.video, core.position, foraging.region), Device("CameraEast", core.video), Device("CameraNest", core.video), @@ -31,8 +31,8 @@ Device("FramePatch2", core.video), Device("FrameSouth", core.video), Device("FrameWest", core.video), - Device("Patch1", foraging.depletionFunction, core.encoder, foraging.feeder), - Device("Patch2", foraging.depletionFunction, core.encoder, foraging.feeder), + Device("Patch1", foraging.depletion_function, core.encoder, foraging.feeder), + Device("Patch2", foraging.depletion_function, core.encoder, foraging.feeder), ] ) From 517110852b40417478a7e294973917a390fe6723 Mon Sep 17 00:00:00 2001 From: Jai Date: Fri, 15 Dec 2023 23:20:06 +0000 Subject: [PATCH 15/16] Removed device name from path --- aeon/schema/social.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aeon/schema/social.py b/aeon/schema/social.py index 97453af3..6ad46ea0 100644 --- a/aeon/schema/social.py +++ b/aeon/schema/social.py @@ -32,7 +32,7 @@ def read( ) -> pd.DataFrame: """Reads data from the Harp-binarized tracking file.""" # Get config file from `file`, then bodyparts from config file. 
- model_dir = Path(file.stem.replace("_", "/")).parent + model_dir = Path(*Path(file.stem.replace("_", "/")).parent.parts[1:]) config_file_dir = ceph_proc_dir / model_dir if not config_file_dir.exists(): raise FileNotFoundError(f"Cannot find model dir {config_file_dir}") From 8683fee5cb1df7d0e4b2d694fd4e208e7af4fc1d Mon Sep 17 00:00:00 2001 From: Jai Date: Sat, 16 Dec 2023 01:14:00 +0000 Subject: [PATCH 16/16] Finished and tested social schema for data between 2023-12-01 -- 2023-12-08 --- aeon/io/device.py | 2 +- .../get_harp_stream_event_bitmask.ipynb | 1 + ...understanding_aeon_data_architecture.ipynb | 3061 +++++++++++++++-- 3 files changed, 2868 insertions(+), 196 deletions(-) diff --git a/aeon/io/device.py b/aeon/io/device.py index d9435a8c..e8e5cf0f 100644 --- a/aeon/io/device.py +++ b/aeon/io/device.py @@ -23,7 +23,7 @@ class Device: Attributes: name (str): Name of the device. - args (Any): Data streams collected from the device. + args (any): A binder function or class that returns a dictionary of Readers. pattern (str, optional): Pattern used to find raw chunk files, usually in the format `_`. """ diff --git a/docs/examples/get_harp_stream_event_bitmask.ipynb b/docs/examples/get_harp_stream_event_bitmask.ipynb index ffbeb6ef..3c01212b 100644 --- a/docs/examples/get_harp_stream_event_bitmask.ipynb +++ b/docs/examples/get_harp_stream_event_bitmask.ipynb @@ -12,6 +12,7 @@ "%autoreload 2\n", "\n", "from pathlib import Path\n", + "import pandas as pd\n", "\n", "import aeon.io.api as api\n", "from aeon.io import reader" diff --git a/docs/examples/understanding_aeon_data_architecture.ipynb b/docs/examples/understanding_aeon_data_architecture.ipynb index 8a1ff942..e3df6981 100644 --- a/docs/examples/understanding_aeon_data_architecture.ipynb +++ b/docs/examples/understanding_aeon_data_architecture.ipynb @@ -60,9 +60,11 @@ "On ceph, we organize streams into device folders:
e.g. `ceph/aeon/aeon/data/raw/AEON3/social0.1/2023-12-01T14-30-34/Patch1` contains the patch-heartbeat stream (`Patch1_8`), the patch-beambreak stream (`Patch1_32`), the patch-pellet delivery-pin-set stream (`Patch1_35`), the patch-pellet-delivery-pin-cleared stream (`Patch1_36`), the patch-wheel-magnetic-encoder stream (`Patch1_90`), the patch-wheel-magnetic-encoder-mode stream (`Patch1_91`), the patch-feeder-dispenser-state stream (`Patch1_200`), the patch-pellet-manual-delivery stream (`Patch1_201`), the patch-missed-pellet-stream (`Patch1_202`), the patch-pellet-delivery-retry stream (`Patch1_203`), and the patch-state stream (`Patch1_State`).\n", "\n", "In code, we create logical devices via the `Device` class (see `aeon/io/device.py`)
\n", - "e.g. We often define 'Patch' devices that contain `Reader` objects associated with specific streams (as experimenters may not care about analyzing all streams in a `Patch` device folder on ceph), e.g. wheel-magnetic-encoder, state, pellet-delivery-pin-set, and beambreak.\n", + "e.g. We often define 'Patch' devices that contain `Reader` objects (in the _`register`_ attribute) that are associated with specific streams (as experimenters may not care about analyzing all streams in a `Patch` device folder on ceph), e.g. wheel-magnetic-encoder, state, pellet-delivery-pin-set, and beambreak.\n", "\n", - "**_Schema_**: A list of devices grouped within a `DotMap` object (see `aeon/docs/examples/schemas.py`). Each experiment is associated with a schema. If a schema changes, then the experiment neccesarily must be different (either in name or version number), as the acquired data is now different.\n", + "One last important aspect of `Device` objects are _binder functions_: on instantiation, `Device` requires at least one argument that is a function that returns a dict of `Reader` objects (these get set into the `Device` object's `registry`). We'll explain this more in detail and show examples below.\n", + "\n", + "**_Schema_**: A list of devices grouped within a `DotMap` object (see `aeon/docs/examples/schemas.py`). Each experiment is associated with a schema. If a schema changes, then the experiment neccesarily must be different (either in name or version number), as the acquired data is now different.\n", "\n", "**_Dataset_**: All data belonging to a particular experiment. \n", "\n", @@ -80,9 +82,18 @@ }, { "cell_type": "code", - "execution_count": 1, + "execution_count": 45, "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "The autoreload extension is already loaded. 
To reload it, use:\n", + " %reload_ext autoreload\n" + ] + } + ], "source": [ "\"\"\"Notebook settings and imports.\"\"\"\n", "\n", @@ -106,7 +117,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 46, "metadata": {}, "outputs": [], "source": [ @@ -125,7 +136,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 47, "metadata": {}, "outputs": [], "source": [ @@ -176,7 +187,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 48, "metadata": {}, "outputs": [ { @@ -618,7 +629,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 49, "metadata": {}, "outputs": [ { @@ -1011,17 +1022,17 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 50, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "d.registry={'subject_weight': <__main__.Subject_Weight object at 0x7f1a47076690>, 'subject_state': }\n", - "schema.SubjectMetadata=DotMap(subject_weight=<__main__.Subject_Weight object at 0x7f1a47076690>, subject_state=)\n", - "schema.SubjectMetadata.subject_weight=<__main__.Subject_Weight object at 0x7f1a47076690>\n", - "schema.Metadata=\n" + "d.registry={'subject_weight': <__main__.Subject_Weight object at 0x7fa4433d5f50>, 'subject_state': }\n", + "schema.SubjectMetadata=DotMap(subject_weight=<__main__.Subject_Weight object at 0x7fa4433d5f50>, subject_state=)\n", + "schema.SubjectMetadata.subject_weight=<__main__.Subject_Weight object at 0x7fa4433d5f50>\n", + "schema.Metadata=\n" ] } ], @@ -1065,16 +1076,16 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 51, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "feeder_device.registry={'pellet_trigger': , 'pellet_beambreak': }\n", - "feeder_device_nested.registry={'pellet_trigger': , 'pellet_beambreak': }\n", - "patch_device.registry={'pellet_trigger': , 'pellet_beambreak': , 'Encoder': }\n" + 
"feeder_device.registry={'pellet_trigger': , 'pellet_beambreak': }\n", + "feeder_device_nested.registry={'pellet_trigger': , 'pellet_beambreak': }\n", + "patch_device.registry={'pellet_trigger': , 'pellet_beambreak': , 'Encoder': }\n" ] } ], @@ -1129,7 +1140,7 @@ "source": [ "#### Social0.1 Data Streams\n", "\n", - "Now that we've covered streams, devices, and schemas, let's build a schema for the Social0.1 Experiment!\n", + "Now that we've covered streams, readers, binder functions, devices, and schemas, let's build a schema for the Social0.1 Experiment!\n", "\n", "First we'll need to know all the streams we recorded during the Social0.1 experiment: these can be found via\n", "looking through all devices in an acqusition epoch \n", @@ -1138,7 +1149,8 @@ "And here they are: (*note: register 8 is always the harp heartbeat for any device that has this stream.*)\n", "\n", "- Metadata.yml\n", - "- AudioAmbient\n", + "- AudioAmbient (.wav)\n", + " - .wav: raw audio\n", "- Environment\n", " - BlockState\n", " - EnvironmentState\n", @@ -1147,7 +1159,7 @@ " - SubjectState\n", " - SubjectVisits\n", " - SubjectWeight\n", - "- CameraTop (200, 201, avi, csv, ,)\n", + "- CameraTop (200, 201, .avi, .csv, )\n", " - 200: position\n", " - 201: region\n", "- CameraNorth (avi, csv)\n", @@ -1159,40 +1171,42 @@ "- CameraPatch3 (avi, csv)\n", "- CameraNest (avi, csv)\n", "- ClockSynchronizer (8, 36)\n", - " - 36: hearbeat_out\n", + " - 36: hearbeat out\n", "- Nest (200, 201, 202, 203)\n", - " - 200: weight_raw\n", - " - 201: weight_tare\n", - " - 202: weight_filtered\n", - " - 203: weight_baseline\n", - " - 204: weight_subject\n", + " - 200: raw weight\n", + " - 201: tare weight\n", + " - 202: filtered weight\n", + " - 203: baseline weight\n", + " - 204: subject weight\n", "- Patch1 (8, 32, 35, 36, 87, 90, 91, 200, 201, 202, 203, State)\n", - " - 32: beam_break\n", - " - 35: delivery_set\n", - " - 36: delivery_clear\n", - " - 87: expansion_board\n", - " - 90: encoder_read\n", - " - 91: 
encoder_mode\n", - " - 200: dispenser_state\n", - " - 201: delivery_manual\n", - " - 202: missed_pellet\n", - " - 203: delivery_retry\n", + " - 32: beambreak\n", + " - 35: set delivery\n", + " - 36: clear delivery\n", + " - 87: expansion board state\n", + " - 90: encoder read\n", + " - 91: encoder mode\n", + " - 200: dispenser state\n", + " - 201: manual delivery\n", + " - 202: missed pellet\n", + " - 203: retry delivery\n", "- Patch2 (8, 32, 35, 36, 87, 90, 91, State)\n", "- Patch3 (8, 32, 35, 36, 87, 90, 91, 200, 203, State)\n", "- RfidEventsGate (8, 32, 35)\n", - " - 32: entry_id\n", - " - 35: hardware_notifications\n", + " - 32: entry id\n", + " - 35: hardware notifications\n", "- RfidEventsNest1 (8, 32, 35)\n", "- RfidEventsNest2 (8, 32, 35)\n", "- RfidEventsPatch1 (8, 32, 35)\n", "- RfidEventsPatch2 (8, 32, 35)\n", "- RfidEventsPatch3 (8, 32, 35)\n", + "- System\n", + " - AvailableMemory\n", "- VideoController (8, 32, 33, 34, 35, 36, 45, 52)" ] }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 52, "metadata": {}, "outputs": [], "source": [ @@ -1211,11 +1225,11 @@ "# (Note, in the simplest case, a schema can always be created from / reduced to \"empty pattern\" binder\n", "# functions as singletons in Device objects.)\n", "\n", - "# Metadata (will be a singleton binder function Device object)\n", + "# Metadata.yml (will be a singleton binder function Device object)\n", "# ---\n", "\n", - "# `core.metadata` is a \"device-name passed\" binder function that returns a `reader.Metadata` Reader object\n", - "metadata_device = Device(\"Metadata\", core.metadata)\n", + "core.metadata # binder function: \"device-name passed\": returns a `reader.Metadata` Reader object\n", + "metadata = Device(\"Metadata\", core.metadata)\n", "\n", "# ---\n", "\n", @@ -1223,28 +1237,137 @@ "# ---\n", "\n", "# BlockState\n", - "cols = [\"pellet_ct\", \"pellet_ct_thresh\", \"due_time\"]\n", - "block_state_reader = reader.Csv(\"Environment_BlockState*\", cols)\n", - "# 
\"Empty pattern\" binder fn.\n", - "block_state_binder_fn = lambda pattern: {\"block_state\": block_state_reader} \n", + "# binder function: \"device-name passed\"; `pattern` will be set by `Device` object name: \"Environment\"\n", + "block_state_b = lambda pattern: {\n", + " \"BlockState\": reader.Csv(f\"{pattern}_BlockState*\", [\"pellet_ct\", \"pellet_ct_thresh\", \"due_time\"])\n", + "}\n", "\n", "# EnvironmentState\n", + "core.environment_state # binder function: \"device-name passed\"\n", + "\n", + "# Combine EnvironmentState and BlockState\n", + "env_block_state_b = lambda pattern: register(pattern, core.environment_state, block_state_b)\n", "\n", "# LightEvents\n", + "cols = [\"channel\", \"value\"]\n", + "light_events_r = reader.Csv(\"Environment_LightEvents*\", cols)\n", + "light_events_b = lambda pattern: {\"LightEvents\": light_events_r} # binder function: \"empty pattern\"\n", "\n", "# MessageLog\n", + "core.message_log # binder function: \"device-name passed\"\n", "\n", "# SubjectState\n", + "cols = [\"id\", \"weight\", \"type\"]\n", + "subject_state_r = reader.Csv(\"Environment_SubjectState*\", cols)\n", + "subject_state_b = lambda pattern: {\"SubjectState\": subject_state_r} # binder function: \"empty pattern\"\n", "\n", "# SubjectVisits\n", + "cols = [\"id\", \"type\", \"region\"]\n", + "subject_visits_r = reader.Csv(\"Environment_SubjectVisits*\", cols)\n", + "subject_visits_b = lambda pattern: {\"SubjectVisits\": subject_visits_r} # binder function: \"empty pattern\"\n", "\n", "# SubjectWeight\n", + "cols = [\"weight\", \"confidence\", \"subject_id\", \"int_id\"]\n", + "subject_weight_r = reader.Csv(\"Environment_SubjectWeight*\", cols)\n", + "subject_weight_b = lambda pattern: {\"SubjectWeight\": subject_weight_r} # binder function: \"empty pattern\"\n", "\n", "# Nested binder fn Device object.\n", - "environment_device = Device(\n", - " \"Environment\", \n", - " block_state_binder_fn, \n", - " core.environment # readers for \n", + "environment 
= Device(\n", + " \"Environment\", # device name\n", + " env_block_state_b,\n", + " light_events_b,\n", + " core.message_log\n", + ")\n", + "\n", + "# Separate Device object for subject-specific streams.\n", + "subject = Device(\n", + " \"Subject\",\n", + " subject_state_b,\n", + " subject_visits_b,\n", + " subject_weight_b\n", + ")\n", + "\n", + "# ---\n", + "\n", + "# Camera\n", + "# ---\n", + "\n", + "camera_top_b = lambda pattern: {\"CameraTop\": reader.Video(\"CameraTop*\")}\n", + "camera_top_pos_b = lambda pattern: {\"CameraTopPos\": social.Pose(\"CameraTop_test-node1*\")}\n", + "\n", + "cam_names = [\"North\", \"South\", \"East\", \"West\", \"Patch1\", \"Patch2\", \"Patch3\", \"Nest\"]\n", + "cam_names = [\"Camera\" + name for name in cam_names]\n", + "camera_b = [lambda pattern, name=name: {name: reader.Video(name + \"*\")} for name in cam_names]\n", + "\n", + "camera = Device(\n", + " \"Camera\", \n", + " camera_top_b, \n", + " camera_top_pos_b, \n", + " *camera_b\n", + ")\n", + "\n", + "# ---\n", + "\n", + "# Nest\n", + "# ---\n", + "\n", + "weight_raw_b = lambda pattern: {\"WeightRaw\": reader.Harp(\"Nest_200*\", [\"weight(g)\", \"stability\"])}\n", + "weight_filtered_b = lambda pattern: {\"WeightFiltered\": reader.Harp(\"Nest_202*\", [\"weight(g)\", \"stability\"])}\n", + "\n", + "nest = Device(\n", + " \"Nest\", \n", + " weight_raw_b, \n", + " weight_filtered_b, \n", + ")\n", + "\n", + "# ---\n", + "\n", + "# Patch\n", + "# ---\n", + "\n", + "patches = [\"1\", \"2\", \"3\"]\n", + "patch_streams = [\"32\", \"35\", \"90\", \"201\", \"202\", \"203\", \"State\"]\n", + "patch_names = [\"Patch\" + name + \"_\" + stream for name in patches for stream in patch_streams]\n", + "patch_b = []\n", + "for stream in patch_names:\n", + " if \"32\" in stream:\n", + " fn = lambda pattern, stream=stream: {\n", + " stream: reader.BitmaskEvent(stream + \"*\", value=34, tag=\"beambreak\")\n", + " }\n", + " elif \"35\" in stream:\n", + " fn = lambda pattern, stream=stream: 
{\n", + " stream: reader.BitmaskEvent(stream + \"*\", value=1, tag=\"delivery\")\n", + " }\n", + " elif \"90\" in stream:\n", + " fn = lambda pattern, stream=stream: {stream: reader.Encoder(stream + \"*\")}\n", + " elif \"201\" in stream:\n", + " fn = lambda pattern, stream=stream: {stream: reader.Harp(stream + \"*\", [\"manual_delivery\"])}\n", + " elif \"202\" in stream:\n", + " fn = lambda pattern, stream=stream: {stream: reader.Harp(stream + \"*\", [\"missed_pellet\"])}\n", + " elif \"203\" in stream:\n", + " fn = lambda pattern, stream=stream: {stream: reader.Harp(stream + \"*\", [\"retried_delivery\"])}\n", + " elif \"State\" in stream:\n", + " fn = lambda pattern, stream=stream: {\n", + " stream: reader.Csv(stream + \"*\", [\"threshold\", \"offset\", \"rate\"])\n", + " }\n", + " patch_b.append(fn)\n", + "\n", + "patch = Device(\n", + " \"Patch\", \n", + " *patch_b\n", + ")\n", + "# ---\n", + "\n", + "# Rfid\n", + "# ---\n", + "\n", + "rfid_names = [\"EventsGate\", \"EventsNest1\", \"EventsNest2\", \"EventsPatch1\", \"EventsPatch2\", \"EventsPatch3\"]\n", + "rfid_names = [\"Rfid\" + name for name in rfid_names]\n", + "rfid_b = [lambda pattern, name=name: {name: reader.Harp(name + \"*\", [\"rfid\"])} for name in rfid_names]\n", + "\n", + "rfid = Device(\n", + " \"Rfid\", \n", + " *rfid_b\n", ")\n", "\n", "# ---" @@ -1252,38 +1375,49 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 53, "metadata": {}, "outputs": [], "source": [ - "d = Device(\"Environment\", block_state_binder_fn)" + "social01 = DotMap(\n", + " [\n", + " metadata,\n", + " environment,\n", + " subject,\n", + " camera,\n", + " nest,\n", + " patch,\n", + " rfid\n", + " ]\n", + ")" ] }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 54, "metadata": {}, - "outputs": [ - { - "data": { - "text/plain": [ - "" - ] - }, - "execution_count": 13, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ - 
"d.registry[\"block_state\"]" + "# cols = [\"1\", \"2\", \"3\", \"4\", \"5\"]\n", + "# r = reader.Harp(\"RfidEventsGate_32*\", cols)\n", + "# start_time = pd.Timestamp(\"2023-12-02 10:30:00\")\n", + "# end_time = pd.Timestamp(\"2023-12-02 12:30:00\")\n", + "# aeon.load(root, r, start=start_time, end=end_time)" ] }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 56, "metadata": {}, "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Metadata:\n" + ] + }, { "data": { "text/html": [ @@ -1305,9 +1439,9 @@ " \n", " \n", " \n", - " pellet_ct\n", - " pellet_ct_thresh\n", - " due_time\n", + " workflow\n", + " commit\n", + " metadata\n", " \n", " \n", " time\n", @@ -1317,169 +1451,2706 @@ " \n", " \n", " \n", - " \n", - " 2023-12-02 10:30:44.852000237\n", - " 6\n", - " 42\n", - " 0001-01-01T00:00:00.0000000\n", + " \n", + "\n", + "" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [workflow, commit, metadata]\n", + "Index: []" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Environment_EnvironmentState_*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", " \n", + " \n", + " \n", " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", " \n", - " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + "
state
2023-12-02 10:30:58.8680000317420001-01-01T00:00:00.0000000time
2023-12-02 10:31:14.0159997948420001-01-01T00:00:00.00000002023-12-05 15:28:04.552000046Maintenance
2023-12-02 10:31:29.2859840399420001-01-01T00:00:00.00000002023-12-05 15:30:23.199999809Experiment
2023-12-02 10:31:45.99200010310420001-01-01T00:00:00.0000000
\n", + "
" + ], + "text/plain": [ + " state\n", + "time \n", + "2023-12-05 15:28:04.552000046 Maintenance\n", + "2023-12-05 15:30:23.199999809 Experiment" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Environment_BlockState*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", " \n", " \n", - " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", + " \n", " \n", + " \n", + " \n", " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", " \n", " \n", " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", " \n", " \n", " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", " \n", " \n", " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", " \n", " \n", " \n", - " \n", - " \n", - " \n", + " \n", + " \n", + " \n", " \n", " \n", " \n", "
pellet_ctpellet_ct_threshdue_time
............time
2023-12-02 12:27:07.51200008313432023-12-05 15:02:21.03200006533390001-01-01T00:00:00.0000000
2023-12-02 12:27:20.61999988614432023-12-05 15:02:45.59999990534390001-01-01T00:00:00.0000000
2023-12-02 12:28:36.51398420315432023-12-05 15:02:56.76399993935390001-01-01T00:00:00.0000000
2023-12-02 12:29:02.42598390616432023-12-05 15:09:38.00400018636390001-01-01T00:00:00.0000000
2023-12-02 12:29:17.12998390217432023-12-05 15:09:59.62799978337390001-01-01T00:00:00.0000000
\n", - "

97 rows × 3 columns

\n", "
" ], "text/plain": [ " pellet_ct pellet_ct_thresh \\\n", "time \n", - "2023-12-02 10:30:44.852000237 6 42 \n", - "2023-12-02 10:30:58.868000031 7 42 \n", - "2023-12-02 10:31:14.015999794 8 42 \n", - "2023-12-02 10:31:29.285984039 9 42 \n", - "2023-12-02 10:31:45.992000103 10 42 \n", - "... ... ... \n", - "2023-12-02 12:27:07.512000083 13 43 \n", - "2023-12-02 12:27:20.619999886 14 43 \n", - "2023-12-02 12:28:36.513984203 15 43 \n", - "2023-12-02 12:29:02.425983906 16 43 \n", - "2023-12-02 12:29:17.129983902 17 43 \n", + "2023-12-05 15:02:21.032000065 33 39 \n", + "2023-12-05 15:02:45.599999905 34 39 \n", + "2023-12-05 15:02:56.763999939 35 39 \n", + "2023-12-05 15:09:38.004000186 36 39 \n", + "2023-12-05 15:09:59.627999783 37 39 \n", "\n", " due_time \n", "time \n", - "2023-12-02 10:30:44.852000237 0001-01-01T00:00:00.0000000 \n", - "2023-12-02 10:30:58.868000031 0001-01-01T00:00:00.0000000 \n", - "2023-12-02 10:31:14.015999794 0001-01-01T00:00:00.0000000 \n", - "2023-12-02 10:31:29.285984039 0001-01-01T00:00:00.0000000 \n", - "2023-12-02 10:31:45.992000103 0001-01-01T00:00:00.0000000 \n", - "... ... 
\n", - "2023-12-02 12:27:07.512000083 0001-01-01T00:00:00.0000000 \n", - "2023-12-02 12:27:20.619999886 0001-01-01T00:00:00.0000000 \n", - "2023-12-02 12:28:36.513984203 0001-01-01T00:00:00.0000000 \n", - "2023-12-02 12:29:02.425983906 0001-01-01T00:00:00.0000000 \n", - "2023-12-02 12:29:17.129983902 0001-01-01T00:00:00.0000000 \n", - "\n", - "[97 rows x 3 columns]" + "2023-12-05 15:02:21.032000065 0001-01-01T00:00:00.0000000 \n", + "2023-12-05 15:02:45.599999905 0001-01-01T00:00:00.0000000 \n", + "2023-12-05 15:02:56.763999939 0001-01-01T00:00:00.0000000 \n", + "2023-12-05 15:09:38.004000186 0001-01-01T00:00:00.0000000 \n", + "2023-12-05 15:09:59.627999783 0001-01-01T00:00:00.0000000 " ] }, - "execution_count": 14, "metadata": {}, - "output_type": "execute_result" - } - ], - "source": [ - "start_time = pd.Timestamp(\"2023-12-02 10:30:00\")\n", - "end_time = pd.Timestamp(\"2023-12-02 12:30:00\")\n", - "r = d.registry[\"block_state\"]\n", - "aeon.load(root, r, start=start_time, end=end_time)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "social01 = DotMap(\n", - " [\n", - " metadata_device,\n", - " \n", - " ]\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "exp02.ExperimentalMetadata.EnvironmentState" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "\"\"\"Test all readers in schema.\"\"\"\n", - "\n", - "def find_obj(dotmap, obj):\n", - " \"\"\"Returns a list of objects of type `obj` found in a DotMap.\"\"\"\n", - " objs = []\n", - " for value in dotmap.values():\n", - " if isinstance(value, obj):\n", - " objs.append(value)\n", - " elif isinstance(value, DotMap):\n", - " objs.extend(find_obj(value, obj))\n", - " return objs\n", - "\n", - "readers = find_obj(social01, reader.Reader)\n", - "for r in readers:\n", - " data = aeon.load(root, r, start=start_time, 
end=end_time)\n", - " #assert not data.empty, f\"No data found with {r}.\"\n", - " print(f\"{r}: {data.head()=}\")\n", - " " - ] + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Environment_LightEvents*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
channelvalue
time
2023-12-05 15:00:00378
2023-12-05 15:00:00778
2023-12-05 15:00:0050
2023-12-05 15:00:00180
2023-12-05 15:00:00350
\n", + "
" + ], + "text/plain": [ + " channel value\n", + "time \n", + "2023-12-05 15:00:00 3 78\n", + "2023-12-05 15:00:00 7 78\n", + "2023-12-05 15:00:00 5 0\n", + "2023-12-05 15:00:00 18 0\n", + "2023-12-05 15:00:00 35 0" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Environment_MessageLog_*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
prioritytypemessage
time
2023-12-05 15:03:02.760000229AlertTrackingFailureCAA-1120747
2023-12-05 15:06:32.019999981AlertTrackingFailureCAA-1120747
2023-12-05 15:11:06.400000095AlertTrackingFailureCAA-1120747
2023-12-05 15:14:37.320000172AlertTrackingFailureCAA-1120747
2023-12-05 15:19:46.980000019AlertTrackingFailureCAA-1120747
\n", + "
" + ], + "text/plain": [ + " priority type message\n", + "time \n", + "2023-12-05 15:03:02.760000229 Alert TrackingFailure CAA-1120747\n", + "2023-12-05 15:06:32.019999981 Alert TrackingFailure CAA-1120747\n", + "2023-12-05 15:11:06.400000095 Alert TrackingFailure CAA-1120747\n", + "2023-12-05 15:14:37.320000172 Alert TrackingFailure CAA-1120747\n", + "2023-12-05 15:19:46.980000019 Alert TrackingFailure CAA-1120747" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Environment_SubjectState*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
idweighttype
time
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [id, weight, type]\n", + "Index: []" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Environment_SubjectVisits*:\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "/nfs/nhome/live/jbhagat/ProjectAeon/aeon_mecha/aeon/io/api.py:149: UserWarning: data index for Environment_SubjectVisits* contains duplicate keys!\n", + " warnings.warn(f\"data index for {reader.pattern} contains duplicate keys!\")\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
idtyperegion
time
2023-12-05 15:02:09.440000057CAA-1120747EnterPatch2
2023-12-05 15:02:09.519999981CAA-1120747ExitPatch2
2023-12-05 15:02:14.900000095CAA-1120747EnterPatch3
2023-12-05 15:02:15.000000000CAA-1120747ExitPatch3
2023-12-05 15:02:15.380000114CAA-1120747EnterPatch3
\n", + "
" + ], + "text/plain": [ + " id type region\n", + "time \n", + "2023-12-05 15:02:09.440000057 CAA-1120747 Enter Patch2\n", + "2023-12-05 15:02:09.519999981 CAA-1120747 Exit Patch2\n", + "2023-12-05 15:02:14.900000095 CAA-1120747 Enter Patch3\n", + "2023-12-05 15:02:15.000000000 CAA-1120747 Exit Patch3\n", + "2023-12-05 15:02:15.380000114 CAA-1120747 Enter Patch3" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Environment_SubjectWeight*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
weightconfidencesubject_idint_id
time
2023-12-05 15:06:48.53999996229.01CAA-11207471
2023-12-05 15:06:48.63999986629.01CAA-11207471
2023-12-05 15:06:48.69999980929.01CAA-11207471
2023-12-05 15:06:48.80000019129.01CAA-11207471
2023-12-05 15:06:48.90000009529.01CAA-11207471
\n", + "
" + ], + "text/plain": [ + " weight confidence subject_id int_id\n", + "time \n", + "2023-12-05 15:06:48.539999962 29.0 1 CAA-1120747 1\n", + "2023-12-05 15:06:48.639999866 29.0 1 CAA-1120747 1\n", + "2023-12-05 15:06:48.699999809 29.0 1 CAA-1120747 1\n", + "2023-12-05 15:06:48.800000191 29.0 1 CAA-1120747 1\n", + "2023-12-05 15:06:48.900000095 29.0 1 CAA-1120747 1" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Nest_200*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
weight(g)stability
time
2023-12-05 15:00:00.000000000-1.21.0
2023-12-05 15:00:00.159999847-1.21.0
2023-12-05 15:00:00.260000229-1.21.0
2023-12-05 15:00:00.340000153-1.21.0
2023-12-05 15:00:00.420000076-1.21.0
\n", + "
" + ], + "text/plain": [ + " weight(g) stability\n", + "time \n", + "2023-12-05 15:00:00.000000000 -1.2 1.0\n", + "2023-12-05 15:00:00.159999847 -1.2 1.0\n", + "2023-12-05 15:00:00.260000229 -1.2 1.0\n", + "2023-12-05 15:00:00.340000153 -1.2 1.0\n", + "2023-12-05 15:00:00.420000076 -1.2 1.0" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Nest_202*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
weight(g)stability
time
2023-12-05 15:00:00.000000000-1.21.0
2023-12-05 15:00:00.159999847-1.21.0
2023-12-05 15:00:00.260000229-1.21.0
2023-12-05 15:00:00.340000153-1.21.0
2023-12-05 15:00:00.420000076-1.21.0
\n", + "
" + ], + "text/plain": [ + " weight(g) stability\n", + "time \n", + "2023-12-05 15:00:00.000000000 -1.2 1.0\n", + "2023-12-05 15:00:00.159999847 -1.2 1.0\n", + "2023-12-05 15:00:00.260000229 -1.2 1.0\n", + "2023-12-05 15:00:00.340000153 -1.2 1.0\n", + "2023-12-05 15:00:00.420000076 -1.2 1.0" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch1_32*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
event
time
2023-12-05 15:02:21.213376045beambreak
2023-12-05 15:02:45.747712135beambreak
2023-12-05 15:02:56.878367901beambreak
2023-12-05 15:09:38.138751984beambreak
2023-12-05 15:09:59.770847797beambreak
\n", + "
" + ], + "text/plain": [ + " event\n", + "time \n", + "2023-12-05 15:02:21.213376045 beambreak\n", + "2023-12-05 15:02:45.747712135 beambreak\n", + "2023-12-05 15:02:56.878367901 beambreak\n", + "2023-12-05 15:09:38.138751984 beambreak\n", + "2023-12-05 15:09:59.770847797 beambreak" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch1_35*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
event
time
2023-12-05 15:02:21.035488129delivery
2023-12-05 15:02:45.601503849delivery
2023-12-05 15:02:56.767488003delivery
2023-12-05 15:09:38.005504131delivery
2023-12-05 15:09:59.629504204delivery
\n", + "
" + ], + "text/plain": [ + " event\n", + "time \n", + "2023-12-05 15:02:21.035488129 delivery\n", + "2023-12-05 15:02:45.601503849 delivery\n", + "2023-12-05 15:02:56.767488003 delivery\n", + "2023-12-05 15:09:38.005504131 delivery\n", + "2023-12-05 15:09:59.629504204 delivery" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch1_90*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
angleintensity
time
2023-12-05 15:00:00.000000000140562894
2023-12-05 15:00:00.001984119140552902
2023-12-05 15:00:00.004000186140572896
2023-12-05 15:00:00.005983829140532898
2023-12-05 15:00:00.007999897140572897
\n", + "
" + ], + "text/plain": [ + " angle intensity\n", + "time \n", + "2023-12-05 15:00:00.000000000 14056 2894\n", + "2023-12-05 15:00:00.001984119 14055 2902\n", + "2023-12-05 15:00:00.004000186 14057 2896\n", + "2023-12-05 15:00:00.005983829 14053 2898\n", + "2023-12-05 15:00:00.007999897 14057 2897" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch1_201*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
manual_delivery
time
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [manual_delivery]\n", + "Index: []" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch1_202*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
missed_pellet
time
2023-12-06 13:06:33.9416961671
2023-12-06 21:19:29.8788480761
2023-12-07 09:53:59.8712639811
2023-12-07 10:08:04.8767681121
2023-12-07 10:16:46.1244478231
\n", + "
" + ], + "text/plain": [ + " missed_pellet\n", + "time \n", + "2023-12-06 13:06:33.941696167 1\n", + "2023-12-06 21:19:29.878848076 1\n", + "2023-12-07 09:53:59.871263981 1\n", + "2023-12-07 10:08:04.876768112 1\n", + "2023-12-07 10:16:46.124447823 1" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch1_203*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
retried_delivery
time
2023-12-05 16:04:15.7034878731
2023-12-05 16:17:31.9724798201
2023-12-05 17:30:23.1815037731
2023-12-05 17:30:24.1975040441
2023-12-05 17:30:41.3695039751
\n", + "
" + ], + "text/plain": [ + " retried_delivery\n", + "time \n", + "2023-12-05 16:04:15.703487873 1\n", + "2023-12-05 16:17:31.972479820 1\n", + "2023-12-05 17:30:23.181503773 1\n", + "2023-12-05 17:30:24.197504044 1\n", + "2023-12-05 17:30:41.369503975 1" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch1_State*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
thresholdoffsetrate
time
2023-12-05 15:02:21.032000065255.501030750.01
2023-12-05 15:02:45.599999905100.117430750.01
2023-12-05 15:02:56.763999939355.328025750.01
2023-12-05 15:09:38.004000186307.886556750.01
2023-12-05 15:09:59.62799978386.638658750.01
\n", + "
" + ], + "text/plain": [ + " threshold offset rate\n", + "time \n", + "2023-12-05 15:02:21.032000065 255.501030 75 0.01\n", + "2023-12-05 15:02:45.599999905 100.117430 75 0.01\n", + "2023-12-05 15:02:56.763999939 355.328025 75 0.01\n", + "2023-12-05 15:09:38.004000186 307.886556 75 0.01\n", + "2023-12-05 15:09:59.627999783 86.638658 75 0.01" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch2_32*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
event
time
2023-12-05 21:57:40.172095776beambreak
2023-12-05 21:58:17.694560051beambreak
2023-12-05 21:58:17.703807831beambreak
2023-12-05 21:58:39.021152020beambreak
2023-12-05 21:59:02.698304176beambreak
\n", + "
" + ], + "text/plain": [ + " event\n", + "time \n", + "2023-12-05 21:57:40.172095776 beambreak\n", + "2023-12-05 21:58:17.694560051 beambreak\n", + "2023-12-05 21:58:17.703807831 beambreak\n", + "2023-12-05 21:58:39.021152020 beambreak\n", + "2023-12-05 21:59:02.698304176 beambreak" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch2_35*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
event
time
2023-12-05 21:57:40.035488129delivery
2023-12-05 21:58:17.564479828delivery
2023-12-05 21:58:38.883488178delivery
2023-12-05 21:59:00.546495914delivery
2023-12-05 21:59:01.559487820delivery
\n", + "
" + ], + "text/plain": [ + " event\n", + "time \n", + "2023-12-05 21:57:40.035488129 delivery\n", + "2023-12-05 21:58:17.564479828 delivery\n", + "2023-12-05 21:58:38.883488178 delivery\n", + "2023-12-05 21:59:00.546495914 delivery\n", + "2023-12-05 21:59:01.559487820 delivery" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch2_90*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
angleintensity
time
2023-12-05 15:00:00.00000000039083164
2023-12-05 15:00:00.00198411939033172
2023-12-05 15:00:00.00400018639003167
2023-12-05 15:00:00.00598382938993166
2023-12-05 15:00:00.00799989739023170
\n", + "
" + ], + "text/plain": [ + " angle intensity\n", + "time \n", + "2023-12-05 15:00:00.000000000 3908 3164\n", + "2023-12-05 15:00:00.001984119 3903 3172\n", + "2023-12-05 15:00:00.004000186 3900 3167\n", + "2023-12-05 15:00:00.005983829 3899 3166\n", + "2023-12-05 15:00:00.007999897 3902 3170" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch2_201*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
manual_delivery
time
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [manual_delivery]\n", + "Index: []" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch2_202*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
missed_pellet
time
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [missed_pellet]\n", + "Index: []" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch2_203*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
retried_delivery
time
2023-12-05 21:59:01.5604801181
2023-12-05 21:59:02.5695037841
2023-12-06 03:47:32.9184961321
2023-12-06 05:24:27.8015041351
2023-12-06 05:31:37.3375039101
\n", + "
" + ], + "text/plain": [ + " retried_delivery\n", + "time \n", + "2023-12-05 21:59:01.560480118 1\n", + "2023-12-05 21:59:02.569503784 1\n", + "2023-12-06 03:47:32.918496132 1\n", + "2023-12-06 05:24:27.801504135 1\n", + "2023-12-06 05:31:37.337503910 1" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch2_State*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
thresholdoffsetrate
time
2023-12-05 15:10:27.000000000NaN750.0033
2023-12-05 15:10:27.001984119316.702028750.0033
2023-12-05 15:10:27.007999897316.702028750.0033
2023-12-05 16:28:21.000000000NaN750.0020
2023-12-05 16:28:21.001984119219.666377750.0020
\n", + "
" + ], + "text/plain": [ + " threshold offset rate\n", + "time \n", + "2023-12-05 15:10:27.000000000 NaN 75 0.0033\n", + "2023-12-05 15:10:27.001984119 316.702028 75 0.0033\n", + "2023-12-05 15:10:27.007999897 316.702028 75 0.0033\n", + "2023-12-05 16:28:21.000000000 NaN 75 0.0020\n", + "2023-12-05 16:28:21.001984119 219.666377 75 0.0020" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch3_32*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
event
time
2023-12-05 15:44:45.612192154beambreak
2023-12-06 06:07:05.146624088beambreak
2023-12-06 07:04:29.012159824beambreak
2023-12-06 08:34:13.545279980beambreak
2023-12-06 08:34:35.653376102beambreak
\n", + "
" + ], + "text/plain": [ + " event\n", + "time \n", + "2023-12-05 15:44:45.612192154 beambreak\n", + "2023-12-06 06:07:05.146624088 beambreak\n", + "2023-12-06 07:04:29.012159824 beambreak\n", + "2023-12-06 08:34:13.545279980 beambreak\n", + "2023-12-06 08:34:35.653376102 beambreak" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch3_35*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
event
time
2023-12-05 15:44:45.477503777delivery
2023-12-06 06:07:04.042496204delivery
2023-12-06 06:07:05.049503803delivery
2023-12-06 07:04:28.900479794delivery
2023-12-06 08:34:13.445504189delivery
\n", + "
" + ], + "text/plain": [ + " event\n", + "time \n", + "2023-12-05 15:44:45.477503777 delivery\n", + "2023-12-06 06:07:04.042496204 delivery\n", + "2023-12-06 06:07:05.049503803 delivery\n", + "2023-12-06 07:04:28.900479794 delivery\n", + "2023-12-06 08:34:13.445504189 delivery" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch3_90*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
angleintensity
time
2023-12-05 15:00:00.000000000106394119
2023-12-05 15:00:00.001984119106394120
2023-12-05 15:00:00.004000186106414121
2023-12-05 15:00:00.005983829106404118
2023-12-05 15:00:00.007999897106384118
\n", + "
" + ], + "text/plain": [ + " angle intensity\n", + "time \n", + "2023-12-05 15:00:00.000000000 10639 4119\n", + "2023-12-05 15:00:00.001984119 10639 4120\n", + "2023-12-05 15:00:00.004000186 10641 4121\n", + "2023-12-05 15:00:00.005983829 10640 4118\n", + "2023-12-05 15:00:00.007999897 10638 4118" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch3_201*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
manual_delivery
time
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [manual_delivery]\n", + "Index: []" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch3_202*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
missed_pellet
time
\n", + "
" + ], + "text/plain": [ + "Empty DataFrame\n", + "Columns: [missed_pellet]\n", + "Index: []" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch3_203*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
retried_delivery
time
2023-12-06 06:07:05.0504961011
2023-12-06 08:51:15.8424959181
\n", + "
" + ], + "text/plain": [ + " retried_delivery\n", + "time \n", + "2023-12-06 06:07:05.050496101 1\n", + "2023-12-06 08:51:15.842495918 1" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "Patch3_State*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
thresholdoffsetrate
time
2023-12-05 15:10:27.001984119NaN750.0020
2023-12-05 15:10:27.004000186545.555314750.0020
2023-12-05 15:10:27.009984016545.555314750.0020
2023-12-05 15:44:45.4759998321024.856116750.0020
2023-12-05 16:28:21.000000000NaN750.0033
\n", + "
" + ], + "text/plain": [ + " threshold offset rate\n", + "time \n", + "2023-12-05 15:10:27.001984119 NaN 75 0.0020\n", + "2023-12-05 15:10:27.004000186 545.555314 75 0.0020\n", + "2023-12-05 15:10:27.009984016 545.555314 75 0.0020\n", + "2023-12-05 15:44:45.475999832 1024.856116 75 0.0020\n", + "2023-12-05 16:28:21.000000000 NaN 75 0.0033" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "RfidEventsGate*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
rfid
time
2023-12-05 15:03:03.993120193977200010163729
2023-12-05 15:03:30.682623863977200010164323
2023-12-05 15:03:31.019872189977200010164323
2023-12-05 15:03:31.395616055977200010164323
2023-12-05 15:06:38.510911942977200010164323
\n", + "
" + ], + "text/plain": [ + " rfid\n", + "time \n", + "2023-12-05 15:03:03.993120193 977200010163729\n", + "2023-12-05 15:03:30.682623863 977200010164323\n", + "2023-12-05 15:03:31.019872189 977200010164323\n", + "2023-12-05 15:03:31.395616055 977200010164323\n", + "2023-12-05 15:06:38.510911942 977200010164323" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "RfidEventsNest1*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
rfid
time
2023-12-05 15:00:00.0000000003784633200
2023-12-05 15:00:00.0015039443784633200
2023-12-05 15:00:01.0000000003784633201
2023-12-05 15:00:01.0015039443784633201
2023-12-05 15:00:02.0000000003784633202
\n", + "
" + ], + "text/plain": [ + " rfid\n", + "time \n", + "2023-12-05 15:00:00.000000000 3784633200\n", + "2023-12-05 15:00:00.001503944 3784633200\n", + "2023-12-05 15:00:01.000000000 3784633201\n", + "2023-12-05 15:00:01.001503944 3784633201\n", + "2023-12-05 15:00:02.000000000 3784633202" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "RfidEventsNest2*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
rfid
time
2023-12-05 15:03:33.940767765977200010163729
2023-12-05 15:08:28.597375870977200010164323
2023-12-05 15:08:34.070496082977200010164323
2023-12-05 15:08:50.152063847977200010164323
2023-12-05 15:08:50.489439964977200010164323
\n", + "
" + ], + "text/plain": [ + " rfid\n", + "time \n", + "2023-12-05 15:03:33.940767765 977200010163729\n", + "2023-12-05 15:08:28.597375870 977200010164323\n", + "2023-12-05 15:08:34.070496082 977200010164323\n", + "2023-12-05 15:08:50.152063847 977200010164323\n", + "2023-12-05 15:08:50.489439964 977200010164323" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "RfidEventsPatch1*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
rfid
time
2023-12-05 15:00:00.0000000003784633200
2023-12-05 15:00:00.0015039443784633200
2023-12-05 15:00:01.0000000003784633201
2023-12-05 15:00:01.0015039443784633201
2023-12-05 15:00:02.0000000003784633202
\n", + "
" + ], + "text/plain": [ + " rfid\n", + "time \n", + "2023-12-05 15:00:00.000000000 3784633200\n", + "2023-12-05 15:00:00.001503944 3784633200\n", + "2023-12-05 15:00:01.000000000 3784633201\n", + "2023-12-05 15:00:01.001503944 3784633201\n", + "2023-12-05 15:00:02.000000000 3784633202" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "RfidEventsPatch2*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
rfid
time
2023-12-05 15:02:33.719103813977200010163729
2023-12-05 15:02:34.209599972977200010163729
2023-12-05 15:02:34.608064175977200010163729
2023-12-05 15:02:35.006527901977200010163729
2023-12-05 15:02:35.251743793977200010163729
\n", + "
" + ], + "text/plain": [ + " rfid\n", + "time \n", + "2023-12-05 15:02:33.719103813 977200010163729\n", + "2023-12-05 15:02:34.209599972 977200010163729\n", + "2023-12-05 15:02:34.608064175 977200010163729\n", + "2023-12-05 15:02:35.006527901 977200010163729\n", + "2023-12-05 15:02:35.251743793 977200010163729" + ] + }, + "metadata": {}, + "output_type": "display_data" + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "RfidEventsPatch3*:\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
rfid
time
2023-12-05 15:02:19.841599941977200010164323
2023-12-05 15:02:20.271039963977200010164323
2023-12-05 15:02:20.731232166977200010164323
2023-12-05 15:02:21.130015849977200010164323
2023-12-05 15:02:21.896927834977200010164323
\n", + "
" + ], + "text/plain": [ + " rfid\n", + "time \n", + "2023-12-05 15:02:19.841599941 977200010164323\n", + "2023-12-05 15:02:20.271039963 977200010164323\n", + "2023-12-05 15:02:20.731232166 977200010164323\n", + "2023-12-05 15:02:21.130015849 977200010164323\n", + "2023-12-05 15:02:21.896927834 977200010164323" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "\"\"\"Test all readers in schema.\"\"\"\n", + "\n", + "def find_obj(dotmap, obj):\n", + " \"\"\"Returns a list of objects of type `obj` found in a DotMap.\"\"\"\n", + " objs = []\n", + " for value in dotmap.values():\n", + " if isinstance(value, obj):\n", + " objs.append(value)\n", + " elif isinstance(value, DotMap):\n", + " objs.extend(find_obj(value, obj))\n", + " return objs\n", + "\n", + "readers = find_obj(social01, reader.Reader)\n", + "start_time = pd.Timestamp(\"2023-12-05 15:00:00\")\n", + "end_time = pd.Timestamp(\"2023-12-07 11:00:00\")\n", + "for r in readers:\n", + " data = aeon.load(root, r, start=start_time, end=end_time)\n", + " #assert not data.empty, f\"No data found with {r}.\"\n", + " print(f\"\\n{r.pattern}:\")\n", + " display(data.head())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] } ], "metadata": {