From 8cf0b02fd0152a3591e1a96221e84256ef4d8e70 Mon Sep 17 00:00:00 2001 From: Kim Scott Date: Wed, 2 Dec 2020 11:13:37 -0500 Subject: [PATCH 1/2] Feature/152 pause resume (#191) * save data when navigating forwards/backwards by assigning uniquely suffixed frame IDs upon starting each frame. * implement pause-unpause mixin and adapt video, calibration, change-detection, and images-audio frames to use --- app/components/exp-frame-base/component.js | 220 ++++---- app/components/exp-frame-base/doc.rst | 4 + .../exp-lookit-calibration/component.js | 56 +- app/components/exp-lookit-calibration/doc.rst | 78 ++- .../exp-lookit-calibration/template.hbs | 9 - .../exp-lookit-change-detection/component.js | 500 ++++-------------- .../exp-lookit-change-detection/doc.rst | 43 +- .../exp-lookit-change-detection/template.hbs | 41 +- .../component.js | 102 +--- .../exp-lookit-images-audio/component.js | 39 +- .../exp-lookit-images-audio/doc.rst | 30 +- .../exp-lookit-images-audio/template.hbs | 7 - .../exp-lookit-start-recording/component.js | 8 +- .../exp-lookit-stop-recording/component.js | 9 +- .../component.js | 103 +--- .../exp-lookit-video-infant-control/doc.rst | 6 +- app/components/exp-lookit-video/component.js | 417 ++------------- app/components/exp-lookit-video/doc.rst | 63 +-- app/components/exp-lookit-video/template.hbs | 46 +- app/components/exp-player/component.js | 17 +- app/components/exp-player/template.hbs | 1 - app/index.rst | 1 + app/mixins/full-screen.js | 135 ++--- app/mixins/infant-controlled-timing.js | 2 +- app/mixins/infant-controlled-timing.rst | 82 ++- app/mixins/pause-unpause.js | 392 ++++++++++++++ app/mixins/pause-unpause.rst | 188 +++++++ app/mixins/session-record.js | 1 + app/mixins/video-record.js | 31 +- app/styles/app.scss | 9 +- app/styles/full-screen.scss | 8 + app/templates/participate.hbs | 2 +- app/templates/preview.hbs | 2 +- app/utils/is-color.js | 10 +- app/utils/parse-experiment.js | 13 +- app/utils/protocol.rst | 8 +- app/utils/replace-values.js | 23 + images/Pause-unpause.png | Bin 0 -> 75769 bytes 38 files changed, 1262 insertions(+), 1444 deletions(-) create mode 100644 app/mixins/pause-unpause.js create mode 100644 app/mixins/pause-unpause.rst create mode 100644 images/Pause-unpause.png diff --git a/app/components/exp-frame-base/component.js b/app/components/exp-frame-base/component.js index e15add6e..93fb8be4 100644 --- a/app/components/exp-frame-base/component.js +++ b/app/components/exp-frame-base/component.js @@ -31,7 +31,6 @@ import SessionRecord from '../../mixins/session-record'; exit=(action 'exit') previous=(action 'previous') saveHandler=(action 'saveFrame') - skipone=(action 'skipone') extra=extra }}`` * @@ -152,7 +151,6 @@ let ExpFrameBase = Ember.Component.extend(FullScreen, SessionRecord, { frameContext: null, frameType: 'DEFAULT', eventTimings: null, - _oldFrameIndex: null, /** * Function to generate additional properties for this frame (like {"kind": "exp-lookit-text"}) @@ -384,105 +382,112 @@ let ExpFrameBase = Ember.Component.extend(FullScreen, SessionRecord, { let currentFrameIndex = this.get('frameIndex', null); - let clean = currentFrameIndex !== this.get('_oldFrameIndex'); + let defaultParams = this.setupParams(true); + Object.keys(defaultParams).forEach((key) => { + this.set(key, defaultParams[key]); + }); - var defaultParams = this.setupParams(clean); - if (clean) { - Object.keys(defaultParams).forEach((key) => { - this.set(key, defaultParams[key]); - }); + if (!this.get('id')) { + this.set('id', 
`${this.get('kind')}-${currentFrameIndex}`);
         }
 
-        if (!this.get('id')) {
-            var frameIndex = this.get('frameIndex');
-            var kind = this.get('kind');
-            this.set('id', `${kind}-${frameIndex}`);
+        // Finalize the frame ID! Handle case where due to navigation, frameId already exists in the sequence.
+        let sequence = this.get('session').get('sequence');
+        let origId = this.get('id');
+        if (sequence.includes(origId)) {
+            // Get stub: This ID with any -repeat-N removed
+            let repeatedFramePattern = /-repeat-(\d+)$/;
+            let stub = origId;
+            if (repeatedFramePattern.test(origId)) {
+                stub = origId.replace(repeatedFramePattern, '');
+            }
+            // Find the next repeat index: one more than the highest existing N for this stub
+            let framePatternString = `^${stub}-repeat-(?<repeat>\\d+)$`;
+            let thisFramePattern = new RegExp(framePatternString);
+            let existingRepeatIndices = [];
+            sequence.forEach(function (frId) {
+                let match = frId.match(thisFramePattern);
+                if (match) {
+                    existingRepeatIndices.push(match.groups.repeat);
+                }
+            });
+            // Call this frame stub-repeat-N+1
+            let repeatIndex = existingRepeatIndices.length ? Math.max(...existingRepeatIndices) + 1 : 1;
+            this.set('id', stub + '-repeat-' + repeatIndex);
         }
 
-        if (clean) {
-            var session = this.get('session');
-            var expData = session ? session.get('expData') : null;
+        let session = this.get('session');
+        let expData = session ? session.get('expData') : null;
 
-            // Load any existing data for this particular frame - e.g. for a survey that
-            // the participant is returning to via a previous button.
-            if (session && session.get('expData')) {
-                var key = this.get('frameIndex') + '-' + this.get('id');
-                if (expData[key]) {
-                    this.loadData(expData[key]);
-                }
+        // Use the provided generateProperties fn, if any, to generate properties for this
+        // frame on-the-fly based on expData, sequence, child, & pastSessions.
+        if (this.get('generateProperties')) { // Only if generateProperties is non-empty
+            try {
+                this.set('_generatePropertiesFn', Function('return ' + this.get('generateProperties'))());
+            } catch (error) {
+                console.error(error);
+                throw new Error('generateProperties provided for this frame, but cannot be evaluated.');
             }
-
-            // Use the provided generateProperties fn, if any, to generate properties for this
-            // frame on-the-fly based on expData, sequence, child, & pastSessions.
-            if (this.get('generateProperties')) { // Only if generateProperties is non-empty
-                try {
-                    this.set('_generatePropertiesFn', Function('return ' + this.get('generateProperties'))());
-                } catch (error) {
-                    console.error(error);
-                    throw new Error('generateProperties provided for this frame, but cannot be evaluated.');
-                }
-                if (typeof (this.get('_generatePropertiesFn')) === 'function') {
-                    var sequence = session ? session.get('sequence', null) : null;
-                    var child = session ? session.get('child', null) : null;
-                    var conditions = session ? session.get('conditions', {}) : {};
-                    var frameContext = this.get('frameContext');
-                    var pastSessions = frameContext ? frameContext.pastSessions : null;
-                    var generatedParams = this._generatePropertiesFn(expData, sequence, child, pastSessions, conditions);
-                    if (typeof (generatedParams) === 'object') {
-                        this.set('generatedProperties', generatedParams);
-                        Object.keys(generatedParams).forEach((key) => {
-                            this.set(key, generatedParams[key]);
-                        });
-                    } else {
-                        throw new Error('generateProperties function provided for this frame, but did not return an object');
-                    }
+            if (typeof (this.get('_generatePropertiesFn')) === 'function') {
+                let sequence = session ? 
session.get('sequence', null) : null; + let child = session ? session.get('child', null) : null; + let conditions = session ? session.get('conditions', {}) : {}; + let frameContext = this.get('frameContext'); + let pastSessions = frameContext ? frameContext.pastSessions : null; + let generatedParams = this._generatePropertiesFn(expData, sequence, child, pastSessions, conditions); + if (typeof (generatedParams) === 'object') { + this.set('generatedProperties', generatedParams); + Object.keys(generatedParams).forEach((key) => { + this.set(key, generatedParams[key]); + }); } else { - throw new Error('generateProperties provided for this frame, but does not evaluate to a function'); + throw new Error('generateProperties function provided for this frame, but did not return an object'); } + } else { + throw new Error('generateProperties provided for this frame, but does not evaluate to a function'); } + } - // Use the provided selectNextFrame fn, if any, to determine which frame should come - // next. - if (this.get('selectNextFrame')) { // Only if selectNextFrame is non-empty - try { - this.set('_selectNextFrameFn', Function('return ' + this.get('selectNextFrame'))()); - } catch (error) { - console.error(error); - throw new Error('selectNextFrame provided for this frame, but cannot be evaluated.'); - } - if (!(typeof (this.get('_selectNextFrameFn')) === 'function')) { - throw new Error('selectNextFrame provided for this frame, but does not evaluate to a function'); - } + // Use the provided selectNextFrame fn, if any, to determine which frame should come + // next. + if (this.get('selectNextFrame')) { // Only if selectNextFrame is non-empty + try { + this.set('_selectNextFrameFn', Function('return ' + this.get('selectNextFrame'))()); + } catch (error) { + console.error(error); + throw new Error('selectNextFrame provided for this frame, but cannot be evaluated.'); } - - // After adding any generated properties, check that all required fields are set - if (this.get('frameSchemaProperties').hasOwnProperty('required')) { - var requiredFields = this.get('frameSchemaProperties.required', []); - requiredFields.forEach((key) => { - if (!this.hasOwnProperty(key) || this.get(key) === undefined) { - // Don't actually throw an error here because the frame may actually still function and that's probably good - console.error(`Missing required parameter '${key}' for frame of kind '${this.get('kind')}'.`); - } - }); + if (!(typeof (this.get('_selectNextFrameFn')) === 'function')) { + throw new Error('selectNextFrame provided for this frame, but does not evaluate to a function'); } + } - // Use JSON schema validator to check that all values are within specified constraints - var ajv = new Ajv({ - allErrors: true, - verbose: true - }); - var frameSchema = {type: 'object', properties: this.get('frameSchemaProperties')}; - try { - var validate = ajv.compile(frameSchema); - var valid = validate(this); - if (!valid) { - console.warn('Invalid: ' + ajv.errorsText(validate.errors)); + // After adding any generated properties, check that all required fields are set + if (this.get('frameSchemaProperties').hasOwnProperty('required')) { + var requiredFields = this.get('frameSchemaProperties.required', []); + requiredFields.forEach((key) => { + if (!this.hasOwnProperty(key) || this.get(key) === undefined) { + // Don't actually throw an error here because the frame may actually still function and that's probably good + console.error(`Missing required parameter '${key}' for frame of kind '${this.get('kind')}'.`); } - } - 
catch (error) { - console.error(`Failed to compile frameSchemaProperties to use for validating researcher usage of frame type '${this.get('kind')}.`); - } + }); + } + // Use JSON schema validator to check that all values are within specified constraints + var ajv = new Ajv({ + allErrors: true, + verbose: true + }); + var frameSchema = {type: 'object', properties: this.get('frameSchemaProperties')}; + try { + var validate = ajv.compile(frameSchema); + var valid = validate(this); + if (!valid) { + console.warn('Invalid: ' + ajv.errorsText(validate.errors)); + } + } + catch (error) { + console.error(`Failed to compile frameSchemaProperties to use for validating researcher usage of frame type '${this.get('kind')}.`); } this.set('_oldFrameIndex', currentFrameIndex); @@ -501,7 +506,7 @@ let ExpFrameBase = Ember.Component.extend(FullScreen, SessionRecord, { // If the save failure was a server error, warn the user. This error should never disappear. // Note: errors are not visible in FS mode, which is generally the desired behavior so as not to silently // bias infant looking time towards right. - const msg = 'Check your internet connection. If another error like this still shows up as you continue, please contact lookit-tech@mit.edu to let us know!'; + const msg = 'Please check your internet connection and (in another tab or window) that you are still logged in to Lookit. If another error like this still shows up as you continue, please contact lookit-tech@mit.edu to let us know!'; this.get('toast').error(msg, 'Error: Could not save data', {timeOut: 0, extendedTimeOut: 0}); }, @@ -533,6 +538,7 @@ let ExpFrameBase = Ember.Component.extend(FullScreen, SessionRecord, { defaultParams.generateProperties = null; defaultParams.generatedProperties = null; defaultParams.selectNextFrame = null; + defaultParams.parameters = null; Ember.assign(defaultParams, params); return defaultParams; @@ -663,27 +669,41 @@ let ExpFrameBase = Ember.Component.extend(FullScreen, SessionRecord, { this.send('save'); if (this.get('endSessionRecording') && this.get('sessionRecorder')) { - var _this = this; + let _this = this; if (!(this.get('session').get('recordingInProgress'))) { _this.sendAction('next', iNextFrame); - window.scrollTo(0, 0); } else { this.get('session').set('recordingInProgress', false); this.stopSessionRecorder().finally(() => { _this.sendAction('next', iNextFrame); - window.scrollTo(0, 0); }); } - } else { this.sendAction('next', iNextFrame); - window.scrollTo(0, 0); } }, - exit() { - this.sendAction('exit'); + goToFrameIndex(frameIndex) { + + // Note: this will allow participant to proceed even if saving fails as in next() + this.send('save'); + this.set('_oldFrameIndex', -1); // Pretend no old frame index so we treat this as "clean" during didReceiveAttrs + + + if (this.get('endSessionRecording') && this.get('sessionRecorder')) { + let _this = this; + if (!(this.get('session').get('recordingInProgress'))) { + _this.sendAction('next', frameIndex); + } else { + this.get('session').set('recordingInProgress', false); + this.stopSessionRecorder().finally(() => { + _this.sendAction('next', frameIndex); + }); + } + } else { + this.sendAction('next', frameIndex); + } }, previous() { @@ -693,10 +713,12 @@ let ExpFrameBase = Ember.Component.extend(FullScreen, SessionRecord, { * @event previousFrame */ this.send('setTimeEvent', 'previousFrame'); - var frameId = `${this.get('id')}`; // don't prepend frameindex, done by parser - console.log(`Previous: Leaving frame ID ${frameId}`); + this.send('save'); 
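+        // Note: as in next() and goToFrameIndex(), navigation proceeds without waiting
+        // for the save to complete, so the participant can move on even if saving fails.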
this.sendAction('previous'); - window.scrollTo(0, 0); + }, + + exit() { + this.sendAction('exit'); } }, @@ -704,7 +726,7 @@ let ExpFrameBase = Ember.Component.extend(FullScreen, SessionRecord, { // Add different classes depending on whether fullscreen mode is // being triggered as part of standard frame operation or as an override to a frame // that is not typically fullscreen. In latter case, keep formatting as close to - // before as possible, to enable forms etc. to work ok in fullscreen mode. + // before as possible, to enable forms etc. to work ok in fullscreen mode Ember.$('*').removeClass('player-fullscreen'); Ember.$('*').removeClass('player-fullscreen-override'); @@ -720,9 +742,9 @@ let ExpFrameBase = Ember.Component.extend(FullScreen, SessionRecord, { // Note: if this is defined the same way in full-screen.js, it gets called twice // for reasons I don't yet understand. if (this.get('displayFullscreenOverride') || this.get('displayFullscreen')) { - this.send('showFullscreen'); + this.showFullscreen(); } else { - this.send('exitFullscreen'); + this.exitFullscreen(); } this._super(...arguments); } diff --git a/app/components/exp-frame-base/doc.rst b/app/components/exp-frame-base/doc.rst index 77934880..09db90de 100644 --- a/app/components/exp-frame-base/doc.rst +++ b/app/components/exp-frame-base/doc.rst @@ -155,6 +155,10 @@ parameters There are also some miscellaneous parameters you can set on any frame: +id [String] + Setting the id explicitly allows you to override the frame ID that will be used in data downloads and video filenames. + This may be useful to identify specific frames within randomizers or frame groups. + displayFullscreenOverride [Boolean | ``false``] Set to `true` to display this frame in fullscreen mode, even if the frame type is not always displayed fullscreen. (For instance, you might use this to keep diff --git a/app/components/exp-lookit-calibration/component.js b/app/components/exp-lookit-calibration/component.js index 87e01537..41c8fef0 100644 --- a/app/components/exp-lookit-calibration/component.js +++ b/app/components/exp-lookit-calibration/component.js @@ -1,9 +1,8 @@ import Ember from 'ember'; import layout from './template'; import ExpFrameBaseComponent from '../exp-frame-base/component'; -import FullScreen from '../../mixins/full-screen'; -import MediaReload from '../../mixins/media-reload'; import VideoRecord from '../../mixins/video-record'; +import PauseUnpause from '../../mixins/pause-unpause'; import ExpandAssets from '../../mixins/expand-assets'; import isColor from '../../utils/is-color'; import { audioAssetOptions, imageAssetOptions, videoAssetOptions } from '../../mixins/expand-assets'; @@ -106,13 +105,11 @@ let { * @uses Expand-assets */ -export default ExpFrameBaseComponent.extend(FullScreen, MediaReload, VideoRecord, ExpandAssets, { +export default ExpFrameBaseComponent.extend(VideoRecord, PauseUnpause, ExpandAssets, { layout: layout, type: 'exp-lookit-calibration', displayFullscreen: true, // force fullscreen for all uses of this component - fullScreenElementId: 'experiment-player', - fsButtonID: 'fsButton', assetsToExpand: { 'audio': [ @@ -263,6 +260,7 @@ export default ExpFrameBaseComponent.extend(FullScreen, MediaReload, VideoRecord // to call next AFTER recording is stopped and we don't want this to have // already been destroyed at that point. 
window.clearInterval(this.get('calTimer')); + this.disablePausing(); var _this = this; if (this.get('doRecording')) { this.stopRecorder().then(() => { @@ -324,18 +322,23 @@ export default ExpFrameBaseComponent.extend(FullScreen, MediaReload, VideoRecord _this.set('calTimer', window.setTimeout(function() { - _this.set('retryCalibrationAudio', false); - doCalibrationSegments(calList, thisLoc); + if (!_this.get('_isPaused')) { + _this.set('retryCalibrationAudio', false); + _this.enablePausing(true); // On 2nd+ cal, require FS mode + doCalibrationSegments(calList, thisLoc); + } }, _this.get('calibrationLength'))); + } }; - doCalibrationSegments(this.get('calibrationPositions').slice(), ''); + if (!this.get('_isPaused')) { + doCalibrationSegments(this.get('calibrationPositions').slice(), ''); + } }, reloadObserver: Ember.observer('reloadingMedia', function() { - console.log('reloadObserver'); if (!this.get('reloadingMedia')) { // done with most recent reload if (this.get('retryCalibrationAudio')) { $('#player-calibration-audio')[0].play(); @@ -344,12 +347,38 @@ export default ExpFrameBaseComponent.extend(FullScreen, MediaReload, VideoRecord }), onRecordingStarted() { - this.startCalibration(); + if (!this.get('_isPaused')) { + this.enablePausing(true); + this.startCalibration(); + } }, onSessionRecordingStarted() { $('#waitForVideo').hide(); - this.startCalibration(); + if (!this.get('_isPaused')) { + this.enablePausing(true); + this.startCalibration(); + } + }, + + onStudyPause() { + window.clearInterval(this.get('calTimer')); + if ($('#player-calibration-audio').length) { + $('#player-calibration-audio')[0].pause(); + } + $('.exp-lookit-calibration').hide(); + this.set('retryCalibrationAudio', false); + if (this.get('doRecording')) { + let _this = this; + return this.stopRecorder().finally(() => { + _this.set('stoppedRecording', true); + _this.destroyRecorder(); + }); + } else { + return new Promise((resolve) => { + resolve(); + }); + } }, didInsertElement() { @@ -368,7 +397,10 @@ export default ExpFrameBaseComponent.extend(FullScreen, MediaReload, VideoRecord if (this.get('calibrationImage')) { $('#calibration-image').addClass(this.get('calibrationImageAnimation')); } - if (!(this.get('doRecording') || this.get('startSessionRecording'))) { + if (!(this.get('doRecording') && !(this.get('startSessionRecording')))) { + if (this.checkFullscreen()) { + this.enablePausing(); // allow pausing right away if not in process of entering FS, otherwise give a moment + } this.startCalibration(); } }, diff --git a/app/components/exp-lookit-calibration/doc.rst b/app/components/exp-lookit-calibration/doc.rst index 34010057..ea82a65a 100644 --- a/app/components/exp-lookit-calibration/doc.rst +++ b/app/components/exp-lookit-calibration/doc.rst @@ -28,30 +28,45 @@ Recording Generally you will want to have webcam video of this frame. You can set doRecording to true to make a video clip just for this frame. Recording will begin at the same time the first calibration -stimulus is shown. Alternately, you can use session-level recording (set -startSessionRecording to true on this or a previous frame). If either type of recording -is starting on this frame, it waits until recording starts to display the first calibration -segment. +stimulus is shown. Alternately, you can use session-level recording by using an exp-lookit-start-recording sometime +before this one. Fullscreen display ~~~~~~~~~~~~~~~~~~~ This frame is displayed fullscreen, to match the frames you will likely want to compare looking behavior on. 
If the participant leaves fullscreen, that will be
-recorded as an event, and a large "return to fullscreen" button will be displayed. Don't
-use video coding from any intervals where the participant isn't in fullscreen mode - the
+recorded as an event, and a large "return to fullscreen" button will be displayed. By default, leaving fullscreen
+will pause the study. Don't use video coding from any intervals where the participant isn't in fullscreen mode - the
 position of the attention-grabbers won't be as expected.
 
-If the frame before this is not fullscreen, that frame
-needs to include a manual "next" button so that there's a user interaction
-event to trigger fullscreen mode. (Browsers don't allow us to switch to FS
-without a user event.)
+If the frame before this is not fullscreen, that frame needs to include a manual "next" button so that there's a user
+interaction event to trigger fullscreen mode. (Browsers don't allow us to switch to FS without a user event.)
+
+Pausing
+~~~~~~~~~~
+
+This frame supports flexible pausing behavior because it uses :ref:`pause-unpause`. See that link for more detailed
+information about how to adjust pausing behavior.
+
+If the user pauses using the ``pauseKey`` (space bar by default) or leaves fullscreen mode, the study will be paused. You can optionally disable
+either form of pausing; see :ref:`pause-unpause`. While paused, audio is paused and stimuli are
+not displayed, and instead a ``pauseImage`` or looping ``pauseVideo`` and some ``pausedText`` are displayed. Audio can be played upon pausing and
+upon unpausing.
+
+Upon unpausing, either this frame will restart (default) or the study can proceed to a frame of your choice (see the
+``frameOffsetAfterPause`` parameter in :ref:`pause-unpause`).
+
+If ``doRecording`` is true and you are recording webcam video during this frame, that recording will stop when the study
+is paused. If you are doing session-level recording, you can optionally stop that upon pausing; if you do, you
+will probably want to send families back to an exp-lookit-start-recording frame upon unpausing.
 
 Specifying where files are
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
 Several of the parameters for this frame can be specified either by providing a list of full URLs and file types, or
-by providing just a filename that will be interpreted relative to the ``baseDir``. See the :ref:`expand-assets` mixin that this frame uses.
+by providing just a filename that will be interpreted relative to the ``baseDir``. See the :ref:`expand-assets`
+mixin that this frame uses.
 
 More general functionality
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 
@@ -60,6 +75,7 @@ Below is information specific to this particular frame. 
There may also be availa and data collected that come from the following more general sources: - the :ref:`base frame` (things all frames do) +- :ref:`pause-unpause` - :ref:`video-record` - :ref:`expand-assets` @@ -82,6 +98,7 @@ This frame will show an image at center, left, and right, along with chimes each "webm", "mp4" ], + "calibrationImage": "peekaboo_remy.jpg", "calibrationLength": 3000, "calibrationPositions": [ @@ -90,7 +107,16 @@ This frame will show an image at center, left, and right, along with chimes each "right" ], "calibrationAudio": "chimes", - "calibrationImageAnimation": "spin" + "calibrationImageAnimation": "spin", + + "doRecording": true, + "showWaitForUploadMessage": true, + "waitForUploadImage": "peekaboo_remy.jpg", + + "pauseVideo": "attentiongrabber", + "pauseAudio": "pause", + "unpauseAudio": "return_after_pause", + "frameOffsetAfterPause": 0 } This frame will show a small video at center, left, and right, along with chimes each time. @@ -108,6 +134,7 @@ This frame will show a small video at center, left, and right, along with chimes "webm", "mp4" ], + "calibrationLength": 3000, "calibrationPositions": [ "center", @@ -115,7 +142,16 @@ This frame will show a small video at center, left, and right, along with chimes "right" ], "calibrationAudio": "chimes", - "calibrationVideo": "attentiongrabber" + "calibrationVideo": "attentiongrabber", + + "doRecording": true, + "showWaitForUploadMessage": true, + "waitForUploadImage": "peekaboo_remy.jpg", + + "pauseVideo": "attentiongrabber", + "pauseAudio": "pause", + "unpauseAudio": "return_after_pause", + "frameOffsetAfterPause": 0 } Parameters @@ -246,14 +282,16 @@ rename ``calibrationAudioSources`` and ``calibrationVideoSources``, and remove t "calibrationLength": 3000, <-- leave this the same "calibrationAudio": "chimes", <-- just rename from "calibrationAudioSources" - "calibrationVideo": "attentiongrabber" <-- just rename from "calibrationVideoSources" + "calibrationVideo": "attentiongrabber", <-- just rename from "calibrationVideoSources" + + "pauseAudio": "pause", <-- leave these the same + "unpauseAudio": "return_after_pause", + "pauseVideo": "attentiongrabber" <-- just rename from "attnSources" } If your old frame defined ``calibrationPositions``, you can leave that the same too. Otherwise this will continue to use the default of ``['center', 'left', 'right', 'center']``. -The one difference is that you will not yet be able to pause the study during the calibration phase. - .. _update_preferential_to_calibration: Updating an exp-lookit-preferential-looking frame @@ -316,7 +354,9 @@ You can change it to an ``exp-lookit-calibration`` frame just by changing the `` "calibrationLength": 0, <-- leave this the same "calibrationAudio": "chimes", <-- leave this the same - "calibrationVideo": "attentiongrabber" <-- leave this the same - } + "calibrationVideo": "attentiongrabber", <-- leave this the same -The one difference is that you will not yet be able to pause the study during the calibration phase. 
\ No newline at end of file + "pauseAudio": "pause", <-- leave these the same + "unpauseAudio": "return_after_pause", + "pauseVideo": "attentiongrabber" <-- copy this from announcementVideo + } \ No newline at end of file diff --git a/app/components/exp-lookit-calibration/template.hbs b/app/components/exp-lookit-calibration/template.hbs index c6a27af6..d9b9dc2e 100644 --- a/app/components/exp-lookit-calibration/template.hbs +++ b/app/components/exp-lookit-calibration/template.hbs @@ -35,12 +35,3 @@ - - -{{#unless updatedIsFullscreen}} -
- -
-{{/unless}} \ No newline at end of file diff --git a/app/components/exp-lookit-change-detection/component.js b/app/components/exp-lookit-change-detection/component.js index 83775de2..38c8854d 100644 --- a/app/components/exp-lookit-change-detection/component.js +++ b/app/components/exp-lookit-change-detection/component.js @@ -1,9 +1,9 @@ import Ember from 'ember'; import layout from './template'; import ExpFrameBaseComponent from '../exp-frame-base/component'; -import FullScreen from '../../mixins/full-screen'; import VideoRecord from '../../mixins/video-record'; import ExpandAssets from '../../mixins/expand-assets'; +import PauseUnpause from "../../mixins/pause-unpause"; import { audioAssetOptions, videoAssetOptions, imageAssetOptions } from '../../mixins/expand-assets'; import isColor from '../../utils/is-color'; import { observer } from '@ember/object'; @@ -23,143 +23,20 @@ function shuffleArrayInPlace(array) { return array; } -/** - * @module exp-player - * @submodule frames - */ - -/** +/* * * Frame for a preferential looking "alternation" or "change detection" paradigm trial, * in which separate streams of images are displayed on the left and right of the screen. * Typically, on one side images would be alternating between two categories - e.g., images * of 8 vs. 16 dots, images of cats vs. dogs - and on the other side the images would all * be in the same category. - * - * - * The frame starts with an optional brief "announcement" segment, where an attention-getter - * video is displayed and audio is played. During this segment, the trial can be paused - * and restarted. - * - * - * If `doRecording` is true (default), then we wait for recording to begin before the - * actual test trial can begin. We also always wait for all images to pre-load, so that - * there are no delays in loading images that affect the timing of presentation. - * - * - * You can customize the appearance of the frame: background color overall, color of the - * two rectangles that contain the image streams, and border of those rectangles. You can - * also specify how long to present the images for, how long to clear the screen in between - * image pairs, and how long the test trial should be altogether. - * - * - * You provide four lists of images to use in this frame: `leftImagesA`, `leftImagesB`, - * `rightImagesA`, and `rightImagesB`. The left stream will alternate between images in - * `leftImagesA` and `leftImagesB`. The right stream will alternate between images in - * `rightImagesA` and `rightImagesB`. They are either presented in random order (default) - * within those lists, or can be presented in the exact order listed by setting - * `randomizeImageOrder` to false. - * - * - * The timing of all image presentations and the specific images presented is recorded in - * the event data. - * - * - * This frame is displayed fullscreen; if the frame before it is not, that frame - * needs to include a manual "next" button so that there's a user interaction - * event to trigger fullscreen mode. (Browsers don't allow switching to fullscreen - * without a user event.) If the user leaves fullscreen, that event is recorded, but the - * trial is not paused. 
- * - * - * Specifying media locations: - * - * - * For any parameters that expect a list of audio/video sources, you can EITHER provide - * a list of src/type pairs with full paths like this: - ```json - [ - { - 'src': 'http://.../video1.mp4', - 'type': 'video/mp4' - }, - { - 'src': 'http://.../video1.webm', - 'type': 'video/webm' - } - ] - ``` - * OR you can provide a single string 'stub', which will be expanded - * based on the parameter baseDir and the media types expected - either audioTypes or - * videoTypes as appropriate. For example, if you provide the audio source `intro` - * and baseDir is https://mystimuli.org/mystudy/, with audioTypes ['mp3', 'ogg'], then this - * will be expanded to: - ```json - [ - { - src: 'https://mystimuli.org/mystudy/mp3/intro.mp3', - type: 'audio/mp3' - }, - { - src: 'https://mystimuli.org/mystudy/ogg/intro.ogg', - type: 'audio/ogg' - } - ] - ``` - * This allows you to simplify your JSON document a bit and also easily switch to a - * new version of your stimuli without changing every URL. You can mix source objects with - * full URLs and those using stubs within the same directory. However, any stimuli - * specified using stubs MUST be - * organized as expected under baseDir/MEDIATYPE/filename.MEDIATYPE. - * - * - * Example usage: - - ```json - "frames": { - "alt-trial": { - "kind": "exp-lookit-change-detection", - "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", - "videoTypes": ["mp4", "webm"], - "audioTypes": ["mp3", "ogg"], - "trialLength": 15, - "attnLength": 2, - "fsAudio": "sample_1", - "unpauseAudio": "return_after_pause", - "pauseAudio": "pause", - "videoSources": "attentiongrabber", - "musicSources": "music_01", - "audioSources": "video_01", - "endAudioSources": "all_done", - "border": "thick solid black", - "leftImagesA": ["apple.jpg", "orange.jpg"], - "rightImagesA": ["square.png", "tall.png", "wide.png"], - "leftImagesB": ["apple.jpg", "orange.jpg"], - "rightImagesB": ["apple.jpg", "orange.jpg"], - "startWithA": true, - "randomizeImageOrder": true, - "displayMs": 500, - "blankMs": 250, - "containerColor": "white", - "backgroundColor": "#abc", - } - } - - * ``` - * @class Exp-lookit-change-detection - * @extends Exp-frame-base - * @uses Full-screen - * @uses Video-record - * @uses Expand-assets */ -export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAssets, { +export default ExpFrameBaseComponent.extend(VideoRecord, PauseUnpause, ExpandAssets, { - type: 'exp-lookit-geometry-alternation', + type: 'exp-lookit-change-detection', layout: layout, displayFullscreen: true, // force fullscreen for all uses of this component - fullScreenElementId: 'experiment-player', - fsButtonID: 'fsButton', // Track state of experiment completedAudio: false, @@ -209,245 +86,88 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset var nImages = this.get('leftImagesA_parsed').length + this.get('leftImagesB_parsed').length + this.get('rightImagesA_parsed').length + this.get('rightImagesB_parsed').length; - return ((recordingStarted || !this.get('doRecording')) && this.get('completedAudio') && this.get('completedAttn') && this.get('image_loaded_count') >= nImages); + return ((recordingStarted || !this.get('doRecording')) && this.get('completedAudio') && this.get('completedAttn') && this.get('image_loaded_count') >= nImages && !this.get('_isPaused')); }), doingIntro: Ember.computed('currentSegment', function() { return (this.get('currentSegment') === 'intro'); }), - isPaused: false, - hasBeenPaused: 
false, - // Timers for intro & stimuli introTimer: null, // minimum length of intro segment stimTimer: null, frameSchemaProperties: { - /** - * Whether to do webcam recording on this frame - * - * @property {Boolean} doRecording - */ doRecording: { type: 'boolean', description: 'Whether to do webcam recording', default: true }, - /** - * minimum amount of time to show attention-getter in seconds. If 0, attention-getter - * segment is skipped. - * - * @property {Number} attnLength - * @default 0 - */ attnLength: { type: 'number', description: 'minimum amount of time to show attention-getter in seconds', default: 0 }, - /** - * length of alternation trial in seconds. This refers only to the section of the - * trial where the alternating image streams are presented - it does not count - * any announcement phase. - * - * @property {Number} trialLength - * @default 60 - */ trialLength: { type: 'number', description: 'length of alternation trial in seconds', default: 60 }, - /** - * Sources Array of {src: 'url', type: 'MIMEtype'} objects for - * instructions during attention-getter video - * - * @property {Object[]} audioSources - */ audioSources: { oneOf: audioAssetOptions, description: 'List of objects specifying audio src and type for instructions during attention-getter video', default: [] }, - /** - * Sources Array of {src: 'url', type: 'MIMEtype'} objects for - * music during trial - * - * @property {Object[]} musicSources - */ musicSources: { oneOf: audioAssetOptions, description: 'List of objects specifying audio src and type for music during trial', default: [] }, - /** - * Sources Array of {src: 'url', type: 'MIMEtype'} objects for - * audio after completion of trial (optional; used for last - * trial "okay to open your eyes now" announcement) - * - * @property {Object[]} endAudioSources - */ endAudioSources: { oneOf: audioAssetOptions, description: 'Supply this to play audio at the end of the trial; list of objects specifying audio src and type', default: [] }, - /** - * Sources Array of {src: 'url', type: 'MIMEtype'} objects for - * attention-getter video (should be loopable) - * - * @property {Object[]} videoSources - */ videoSources: { oneOf: videoAssetOptions, description: 'List of objects specifying video src and type for attention-getter video', default: [] }, - /** - * Sources Array of {src: 'url', type: 'MIMEtype'} objects for - * audio played upon pausing study - * - * @property {Object[]} pauseAudio - */ - pauseAudio: { - oneOf: audioAssetOptions, - description: 'List of objects specifying audio src and type for audio played when pausing study', - default: [] - }, - /** - * Sources Array of {src: 'url', type: 'MIMEtype'} objects for - * audio played upon unpausing study - * - * @property {Object[]} unpauseAudio - */ - unpauseAudio: { - oneOf: audioAssetOptions, - description: 'List of objects specifying audio src and type for audio played when pausing study', - default: [] - }, - /** - * Sources Array of {src: 'url', type: 'MIMEtype'} objects for - * audio played when study is paused due to not being fullscreen - * - * @property {Object[]} fsAudio - */ - fsAudio: { - oneOf: audioAssetOptions, - description: 'List of objects specifying audio src and type for audio played when pausing study if study is not fullscreen', - default: [] - }, - /** - * Whether to start with the 'A' image list on both left and right. If true, both - * sides start with their respective A image lists; if false, both lists start with - * their respective B image lists. 
- * - * @property {Boolean} startWithA - * @default true - */ startWithA: { type: 'boolean', description: 'Whether to start with image list A', default: true }, - /** - * Whether to randomize image presentation order within the lists leftImagesA, - * leftImagesB, rightImagesA, and rightImagesB. If true (default), the order - * of presentation is randomized. Each time all the images in one list have been - * presented, the order is randomized again for the next 'round.' If false, the - * order of presentation is as written in the list. Once all images are presented, - * we loop back around to the first image and start again. - * - * Example of randomization: suppose we have defined - * ``` - * leftImagesA: ['apple', 'banana', 'cucumber'], - * leftImagesB: ['aardvark', 'bat'], - * randomizeImageOrder: true, - * startWithA: true - * ``` - * - * And suppose the timing is such that we end up with 10 images total. Here is a - * possible sequence of images shown on the left: - * - * ['banana', 'aardvark', 'apple', 'bat', 'cucumber', 'bat', 'cucumber', 'aardvark', 'apple', 'bat'] - * - * @property {Boolean} randomizeImageOrder - * @default true - */ randomizeImageOrder: { type: 'boolean', description: 'Whether to randomize image presentation order within lists', default: true }, - /** - * Amount of time to display each image, in milliseconds - * - * @property {Number} displayMs - * @default 750 - */ displayMs: { type: 'number', description: 'Amount of time to display each image, in milliseconds', default: 500 }, - /** - * Amount of time for blank display between each image, in milliseconds - * - * @property {Number} blankMs - * @default 750 - */ blankMs: { type: 'number', description: 'Amount of time for blank display between each image, in milliseconds', default: 250 }, - /** - * Format of border to display around alternation streams, if any. See - * https://developer.mozilla.org/en-US/docs/Web/CSS/border for syntax. - * - * @property {String} border - * @default 'thin solid gray' - */ border: { type: 'string', description: 'Amount of time for blank display between each image, in milliseconds', default: 'thin solid gray' }, - /** - * Color of background. See https://developer.mozilla.org/en-US/docs/Web/CSS/color_value - * for acceptable syntax: can use color names ('blue', 'red', 'green', etc.), or - * rgb hex values (e.g. '#800080' - include the '#') - * - * @property {String} backgroundColor - * @default 'white' - */ backgroundColor: { type: 'string', description: 'Color of background', default: 'white' }, - /** - * Color of image stream container, if different from overall background. - * Defaults to backgroundColor if one is provided. - * https://developer.mozilla.org/en-US/docs/Web/CSS/color_value - * for acceptable syntax: can use color names ('blue', 'red', 'green', etc.), or - * rgb hex values (e.g. '#800080' - include the '#') - * - * @property {String} containerColor - * @default 'white' - */ containerColor: { type: 'string', description: 'Color of image stream container', default: 'white' }, - /** - * Set A of images to display on left of screen. Left stream will alternate between - * images from set A and from set B. Elements of list can be full URLs or relative - * paths starting from `baseDir`. 
- * - * @property {String[]} leftImagesA - */ leftImagesA: { type: 'array', description: 'Set A of images to display on left of screen', @@ -456,13 +176,6 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset oneOf: imageAssetOptions } }, - /** - * Set B of images to display on left of screen. Left stream will alternate between - * images from set A and from set B. Elements of list can be full URLs or relative - * paths starting from `baseDir`. - * - * @property {String[]} leftImagesB - */ leftImagesB: { type: 'array', description: 'Set B of images to display on left of screen', @@ -471,13 +184,6 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset oneOf: imageAssetOptions } }, - /** - * Set A of images to display on right of screen. Right stream will alternate between - * images from set A and from set B. Elements of list can be full URLs or relative - * paths starting from `baseDir`. - * - * @property {String[]} rightImagesA - */ rightImagesA: { type: 'array', description: 'Set A of images to display on right of screen', @@ -486,13 +192,6 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset oneOf: imageAssetOptions } }, - /** - * Set B of images to display on right of screen. Right stream will alternate between - * images from set A and from set B. Elements of list can be full URLs or relative - * paths starting from `baseDir`. - * - * @property {String[]} rightImagesA - */ rightImagesB: { type: 'array', description: 'Set B of images to display on right of screen', @@ -536,18 +235,14 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset }, calObserver: observer('readyToStartCalibration', function(frame) { - if (frame.get('readyToStartCalibration') && frame.get('currentSegment') === 'intro') { - if (!frame.checkFullscreen()) { - frame.pauseStudy(); - } else { - frame.set('currentSegment', 'test'); - } + if (frame.get('readyToStartCalibration') && frame.get('currentSegment') === 'intro' && !this.get('_isPaused')) { + frame.set('currentSegment', 'test'); } }), segmentObserver: observer('currentSegment', function(frame) { // Don't trigger starting intro; that'll be done manually. 
- if (frame.get('currentSegment') === 'test') { + if (frame.get('currentSegment') === 'test' && !this.get('_isPaused')) { frame.startTrial(); } }), @@ -577,6 +272,7 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset * * @event stoppingCapture */ + this.disablePausing(); var _this = this; this.stopRecorder().then(() => { _this.set('stoppedRecording', true); @@ -593,10 +289,7 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset }, startIntro() { - // Allow pausing during intro var _this = this; - $(document).off('keyup.pauser'); - $(document).on('keyup.pauser', function(e) {_this.handleSpace(e, _this);}); // Start placeholder video right away /** @@ -628,25 +321,28 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset * * @event startTestTrial */ - _this.send('setTimeEvent', 'startTestTrial'); - - // Begin playing music; fade in and set to fade out at end of trial - var $musicPlayer = $('#player-music'); - $musicPlayer.prop('volume', 0.1); - $musicPlayer[0].play(); - $musicPlayer.animate({volume: 1}, _this.get('musicFadeLength')); - window.setTimeout(function() { - $musicPlayer.animate({volume: 0}, _this.get('musicFadeLength')); - }, _this.get('trialLength') * 1000 - _this.get('musicFadeLength')); - - // Start presenting triangles and set to stop after trial length - $('#allstimuli').show(); - _this.presentImages(); - window.setTimeout(function() { - window.clearTimeout(_this.get('stimTimer')); - _this.clearImages(); - _this.endTrial(); - }, _this.get('trialLength') * 1000); + this.send('setTimeEvent', 'startTestTrial'); + let paused = this.enablePausing(true); // Now that we should definitely be in FS mode, check! + + if (!paused) { + // Begin playing music; fade in and set to fade out at end of trial + var $musicPlayer = $('#player-music'); + $musicPlayer.prop('volume', 0.1); + $musicPlayer[0].play(); + $musicPlayer.animate({volume: 1}, this.get('musicFadeLength')); + this.set('musicFadeTimer', window.setTimeout(function () { + $musicPlayer.animate({volume: 0}, _this.get('musicFadeLength')); + }, _this.get('trialLength') * 1000 - _this.get('musicFadeLength'))); + + // Start presenting triangles and set to stop after trial length + $('#allstimuli').show(); + this.presentImages(); + this.set('trialTimer', window.setTimeout(function () { + window.clearTimeout(_this.get('stimTimer')); + _this.clearImages(); + _this.endTrial(); + }, _this.get('trialLength') * 1000)); + } }, // When triangles have been shown for time indicated: play end-audio if @@ -671,91 +367,77 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset }, presentImages() { - var A = this.get('doingA'); - var leftImageList = A ? this.get('leftImagesA_parsed') : this.get('leftImagesB_parsed'); - var rightImageList = A ? this.get('rightImagesA_parsed') : this.get('rightImagesB_parsed'); - var imageIndex = A ? this.get('imageIndexA') : this.get('imageIndexB'); + if (!this.get('_isPaused')) { + var A = this.get('doingA'); + var leftImageList = A ? this.get('leftImagesA_parsed') : this.get('leftImagesB_parsed'); + var rightImageList = A ? this.get('rightImagesA_parsed') : this.get('rightImagesB_parsed'); + var imageIndex = A ? 
this.get('imageIndexA') : this.get('imageIndexB'); - var leftImageIndex = imageIndex % leftImageList.length; - var rightImageIndex = imageIndex % rightImageList.length; - - if (leftImageIndex == 0 && this.get('randomizeImageOrder')) { - shuffleArrayInPlace(leftImageList); - } - if (rightImageIndex == 0 && this.get('randomizeImageOrder')) { - shuffleArrayInPlace(rightImageList); - } - if (A) { - this.set('imageIndexA', this.get('imageIndexA') + 1); - } else { - this.set('imageIndexB', this.get('imageIndexB') + 1); - } - this.set('doingA', !this.get('doingA')); - var _this = this; - _this.clearImages(); - _this.set('stimTimer', window.setTimeout(function() { - $('#left-stream-container').html(`left image`); - $('#right-stream-container').html(`right image`); - /** - * Immediately after making images visible - * - * @event presentImages - * @param {String} left url of left image - * @param {String} right url of right image - */ - _this.send('setTimeEvent', 'presentImages', { - left: leftImageList[leftImageIndex], - right: rightImageList[rightImageIndex] - }); - _this.set('stimTimer', window.setTimeout(function() { - _this.presentImages(); - }, _this.get('displayMs'))); - }, _this.get('blankMs'))); - }, + var leftImageIndex = imageIndex % leftImageList.length; + var rightImageIndex = imageIndex % rightImageList.length; - handleSpace(event, frame) { - if (frame.checkFullscreen() || !frame.isPaused) { - if (event.which === 32) { // space - frame.pauseStudy(); + if (leftImageIndex == 0 && this.get('randomizeImageOrder')) { + shuffleArrayInPlace(leftImageList); + } + if (rightImageIndex == 0 && this.get('randomizeImageOrder')) { + shuffleArrayInPlace(rightImageList); + } + if (A) { + this.set('imageIndexA', this.get('imageIndexA') + 1); + } else { + this.set('imageIndexB', this.get('imageIndexB') + 1); } + this.set('doingA', !this.get('doingA')); + var _this = this; + _this.clearImages(); + _this.set('stimTimer', window.setTimeout(function () { + $('#left-stream-container').html(`left image`); + $('#right-stream-container').html(`right image`); + /** + * Immediately after making images visible + * + * @event presentImages + * @param {String} left url of left image + * @param {String} right url of right image + */ + _this.send('setTimeEvent', 'presentImages', { + left: leftImageList[leftImageIndex], + right: rightImageList[rightImageIndex] + }); + _this.set('stimTimer', window.setTimeout(function () { + _this.presentImages(); + }, _this.get('displayMs'))); + }, _this.get('blankMs'))); } }, - // Pause/unpause study; only called if doing intro. 
-    pauseStudy() {
-
-        $('#player-audio')[0].pause();
-        $('#player-audio')[0].currentTime = 0;
-        $('#player-pause-audio')[0].pause();
-        $('#player-pause-audio')[0].currentTime = 0;
-        $('#player-pause-audio-leftfs')[0].pause();
-        $('#player-pause-audio-leftfs')[0].currentTime = 0;
-
+    onStudyPause() {
+        window.clearTimeout(this.get('introTimer'));
+        window.clearTimeout(this.get('stimTimer'));
+        window.clearTimeout(this.get('trialTimer'));
+        window.clearTimeout(this.get('musicFadeTimer'));
+        this.set('currentSegment', 'intro');
         this.set('completedAudio', false);
         this.set('completedAttn', false);
-
-        Ember.run.once(this, () => {
-            this.set('hasBeenPaused', true);
-            var wasPaused = this.get('isPaused');
-            this.set('currentSegment', 'intro');
-
-            // Currently paused: RESUME
-            if (wasPaused) {
-                this.startIntro();
-                this.set('isPaused', false);
-            } else { // Not currently paused: PAUSE
-                window.clearTimeout(this.get('introTimer'));
-                if (this.checkFullscreen()) {
-                    $('#player-pause-audio')[0].play();
-                } else {
-                    $('#player-pause-audio-leftfs')[0].play();
-                }
-                this.set('isPaused', true);
-            }
+        $('#alternation-container').hide();
+        $('audio#player-audio, audio#player-music').each(function() {
+            this.pause();
         });
+        if (this.get('doRecording')) {
+            let _this = this;
+            return this.stopRecorder().finally(() => {
+                _this.set('stoppedRecording', true);
+                _this.destroyRecorder();
+            });
+        } else {
+            return new Promise((resolve) => {
+                resolve();
+            });
+        }
     },
 
+
     image_loaded_count: 0,
 
     didInsertElement() {
@@ -801,10 +483,10 @@ export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAsset
         $('#allstimuli').hide();
         this.startIntro();
+        this.enablePausing(false); // Don't do a FS check at this point because we may be *entering* fullscreen.
     },
 
     willDestroyElement() { // remove event handler
-        $(document).off('keyup.pauser');
         window.clearInterval(this.get('introTimer'));
         window.clearInterval(this.get('stimTimer'));
         this._super(...arguments);
diff --git a/app/components/exp-lookit-change-detection/doc.rst b/app/components/exp-lookit-change-detection/doc.rst
index aca64605..ac636ce8 100644
--- a/app/components/exp-lookit-change-detection/doc.rst
+++ b/app/components/exp-lookit-change-detection/doc.rst
@@ -53,6 +53,24 @@ needs to include a manual "next" button so that there's a user interaction
 event to trigger fullscreen mode. (Browsers don't allow us to switch to FS
 without a user event.)
 
+Pausing
+~~~~~~~~~~
+
+This frame supports flexible pausing behavior because it uses :ref:`pause-unpause`. See that link for more detailed
+information about how to adjust pausing behavior.
+
+If the user pauses using the ``pauseKey`` (space bar by default) or leaves fullscreen mode, the study will be paused. You can optionally disable
+either form of pausing; see :ref:`pause-unpause`. While paused, audio is paused and stimuli are
+not displayed, and instead a ``pauseImage`` or looping ``pauseVideo`` and some ``pausedText`` are displayed. Audio can be played upon pausing and
+upon unpausing.
+
+Upon unpausing, either this frame will restart (default) or the study can proceed to a frame of your choice (see the
+``frameOffsetAfterPause`` parameter in :ref:`pause-unpause`).
+
+If ``doRecording`` is true and you are recording webcam video during this frame, that recording will stop when the study
+is paused. If you are doing session-level recording, you can optionally stop that upon pausing; if you do, you
+will probably want to send families back to an exp-lookit-start-recording frame upon unpausing.
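+
+For concreteness, here is a rough sketch of the pause-related parameters for this frame, written as a JavaScript
+object. This is illustrative only, not part of this patch: the stimulus names are the placeholder assets used in the
+examples below, and the ``pauseKey`` value shown is an assumed default.
+
+.. code:: javascript
+
+    // Pause-related parameters only; merge these into a full frame definition.
+    const pauseSettings = {
+        pauseKey: ' ',                       // space bar (assumed default)
+        pauseVideo: 'attentiongrabber',      // looping video shown while paused
+        pauseAudio: 'pause',                 // played once upon pausing
+        unpauseAudio: 'return_after_pause',  // played before resuming
+        frameOffsetAfterPause: 0             // 0 = restart this same frame upon unpausing
+    };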
+ Specifying where files are ~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -66,6 +84,7 @@ Below is information specific to this particular frame. There may also be availa and data collected that come from the following more general sources: - the :ref:`base frame` (things all frames do) +- :ref:`pause-unpause` - :ref:`video-record` - :ref:`expand-assets` @@ -79,18 +98,23 @@ This frame will alternate between fruit and shapes on the left, and just fruit o "alt-trial": { "kind": "exp-lookit-change-detection", + "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/", "videoTypes": ["mp4", "webm"], "audioTypes": ["mp3", "ogg"], - "trialLength": 15, - "attnLength": 2, - "fsAudio": "sample_1", + "unpauseAudio": "return_after_pause", "pauseAudio": "pause", + "pauseVideo": "attentiongrabber", + "frameOffsetAfterPause": 0, + + "trialLength": 15, + "attnLength": 2, "videoSources": "attentiongrabber", "musicSources": "music_01", "audioSources": "video_01", "endAudioSources": "all_done", + "border": "thick solid black", "leftImagesA": ["apple.jpg", "orange.jpg"], "rightImagesA": ["square.png", "tall.png", "wide.png"], @@ -100,6 +124,7 @@ This frame will alternate between fruit and shapes on the left, and just fruit o "randomizeImageOrder": true, "displayMs": 500, "blankMs": 250, + "containerColor": "white", "backgroundColor": "#abc" } @@ -137,18 +162,6 @@ videoSources [String or Array | ``[]``] Array of {src: 'url', type: 'MIMEtype'} objects for attention-getter video, OR string relative to ``baseDir``. Will play in a loop for announcement phase. -pauseAudio [String or Array | ``[]``] - Array of {src: 'url', type: 'MIMEtype'} objects for audio to play upon pausing study, OR string relative to - ``baseDir``. - -unpauseAudio [String or Array | ``[]``] - Array of {src: 'url', type: 'MIMEtype'} objects for audio to play upon unpausing study, OR string relative to - ``baseDir``. - -fsAudio [String or Array | ``[]``] - Array of {src: 'url', type: 'MIMEtype'} objects for audio to play upon pausing study due to leaving fullscreen - mode, OR string relative to ``baseDir``. - startWithA [Boolean | ``true``] Whether to start with the 'A' image list on both left and right. If true, both sides start with their respective A image lists; if false, both lists start with diff --git a/app/components/exp-lookit-change-detection/template.hbs b/app/components/exp-lookit-change-detection/template.hbs index 1ee75e51..5b1eeee2 100644 --- a/app/components/exp-lookit-change-detection/template.hbs +++ b/app/components/exp-lookit-change-detection/template.hbs @@ -15,50 +15,15 @@ {{/each}} - {{#if hasBeenPaused}} - - {{else}} - - {{/if}} - - - -
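For reference, the frame-ID suffixing that this patch adds to exp-frame-base (the first bullet in the commit
message) amounts to the following standalone algorithm. This is an illustrative sketch, not code from the patch:
the function name and example values are invented, and the real implementation operates on the Ember component's
`session.sequence` as shown in the diff above.

```js
// Sketch of the repeat-ID assignment added in exp-frame-base/component.js above.
// Given the frame IDs already in the session's sequence, returns the ID to use
// for a frame that is being (re)started.
function assignFrameId(sequence, origId) {
    if (!sequence.includes(origId)) {
        return origId; // First visit to this frame: keep the plain ID.
    }
    // Strip any existing '-repeat-N' suffix to get the stub.
    const stub = origId.replace(/-repeat-(\d+)$/, '');
    // Collect the repeat indices already used for this stub.
    const pattern = new RegExp(`^${stub}-repeat-(\\d+)$`);
    const used = sequence
        .map((id) => id.match(pattern))
        .filter(Boolean)
        .map((match) => Number(match[1]));
    // Use one more than the highest existing index (or 1 if none yet).
    const next = used.length ? Math.max(...used) + 1 : 1;
    return `${stub}-repeat-${next}`;
}

// For example, navigating back to the same frame twice:
// assignFrameId(['video-1'], 'video-1')                     -> 'video-1-repeat-1'
// assignFrameId(['video-1', 'video-1-repeat-1'], 'video-1') -> 'video-1-repeat-2'
```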