'use strict';

/*
 * Purpose:
 *   Ties together all the components of the interface. Creates each component, fetches
 *   task data, and updates the components. When the user submits their work, this class
 *   collects the worker's annotations and other data and posts them to the backend.
 * Dependencies:
 *   AnnotationStages (src/annotation_stages.js), PlayBar & WorkflowBtns (src/components.js),
 *   HiddenImg (src/hidden_image.js), colormap (colormap/colormap.min.js), WaveSurfer (lib/wavesurfer.min.js)
 * Global variables from other files:
 *   colormap.min.js:
 *     magma // color scheme array that maps 0 - 255 to rgb values
 */
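
// Note: this file also reads two globals that are not defined here — dataUrl
// (used by loadNextTask) and postUrl (used by post). They are assumed to be
// provided by the embedding page or template before this script runs.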

function Annotator() {
    this.wavesurfer;
    this.playBar;
    this.stages;
    this.workflowBtns;
    this.currentTask;
    this.taskStartTime;
    this.hiddenImage;
    // Only automatically open the instructions modal when first loaded
    this.instructionsViewed = false;
    // Boolean, true if currently sending an HTTP POST request
    this.sendingResponse = false;

    // Create the color map for the spectrogram
    var spectrogramColorMap = colormap({
        colormap: magma,
        nshades: 256,
        format: 'rgb',
        alpha: 1
    });

    // Create wavesurfer (audio visualization component)
    var height = 256;
    this.wavesurfer = Object.create(WaveSurfer);
    this.wavesurfer.init({
        container: '.audio_visual',
        waveColor: '#FF00FF',
        progressColor: '#FF00FF',
        // For the spectrogram, the height is half the number of fftSamples
        fftSamples: height * 2,
        height: height,
        colorMap: spectrogramColorMap
    });

    // Create labels (the labels that appear above each region)
    var labels = Object.create(WaveSurfer.Labels);
    labels.init({
        wavesurfer: this.wavesurfer,
        container: '.labels'
    });

    // Create hiddenImage, an image that is slowly revealed to the user as they annotate
    // (only when this.currentTask.feedback === 'hiddenImage')
    this.hiddenImage = new HiddenImg('.hidden_img', 100);
    this.hiddenImage.create();

    // Create the play button and time display that appear below the wavesurfer
    this.playBar = new PlayBar(this.wavesurfer);
    this.playBar.create();

    // Create the annotation stages that appear below the wavesurfer. The stages contain
    // the tags users use to label a region in the audio clip
    this.stages = new AnnotationStages(this.wavesurfer, this.hiddenImage);
    this.stages.create();

    // Create the workflow buttons (submit and exit)
    this.workflowBtns = new WorkflowBtns();
    this.workflowBtns.create();

    this.addEvents();
}

Annotator.prototype = {
    addWaveSurferEvents: function() {
        var my = this;

        // Function that moves the vertical progress bar to the current time in the audio clip
        var updateProgressBar = function() {
            var progress = my.wavesurfer.getCurrentTime() / my.wavesurfer.getDuration();
            my.wavesurfer.seekTo(progress);
        };

        // Update the vertical progress bar to the current time when the sound clip is
        // finished or paused, since it is otherwise only updated on audioprocess
        this.wavesurfer.on('pause', updateProgressBar);
        this.wavesurfer.on('finish', updateProgressBar);

        // When a new sound file is loaded into the wavesurfer, update the play bar, reset the
        // annotation stages back to stage 1, record when the user started the task, and update
        // the workflow buttons. Also, if the user is supposed to get hidden-image feedback,
        // append that component to the page
        this.wavesurfer.on('ready', function() {
            my.playBar.update();
            my.stages.updateStage(1);
            my.updateTaskTime();
            my.workflowBtns.update();
            if (my.currentTask.feedback === 'hiddenImage') {
                my.hiddenImage.append(my.currentTask.imgUrl);
            }
        });

        this.wavesurfer.on('click', function(e) {
            my.stages.clickDeselectCurrentRegion();
        });
    },

    updateTaskTime: function() {
        this.taskStartTime = new Date().getTime();
    },

    // Event handler: if the user clicks submit annotations, call submitAnnotations
    addWorkflowBtnEvents: function() {
        $(this.workflowBtns).on('submit-annotations', this.submitAnnotations.bind(this));
    },

    addEvents: function() {
        this.addWaveSurferEvents();
        this.addWorkflowBtnEvents();
    },

    // Update the task-specific data of the interface's components
    update: function() {
        var my = this;
        var mainUpdate = function(annotationSolutions) {
            // Update the different tags the user can use to annotate, and also update the
            // solutions to the annotation task if the user is supposed to receive feedback
            var proximityTags = my.currentTask.proximityTag;
            var annotationTags = my.currentTask.annotationTag;
            var tutorialVideoURL = my.currentTask.tutorialVideoURL;
            var alwaysShowTags = my.currentTask.alwaysShowTags;
            var instructions = my.currentTask.instructions;
            my.stages.reset(
                proximityTags,
                annotationTags,
                annotationSolutions,
                alwaysShowTags
            );

            // Set the tutorial video url
            $('#tutorial-video').attr('src', tutorialVideoURL);

            // Add the instructions
            var instructionsContainer = $('#instructions-container');
            instructionsContainer.empty();
            if (typeof instructions !== 'undefined') {
                $('.modal-trigger').leanModal();
                instructions.forEach(function(instruction, index) {
                    var instr;
                    if (index === 0) {
                        // The first instruction is the header
                        instr = $('<h4>', {
                            html: instruction
                        });
                    } else {
                        instr = $('<h6>', {
                            "class": "instruction",
                            html: instruction
                        });
                    }
                    instructionsContainer.append(instr);
                });
                if (!my.instructionsViewed) {
                    $('#instructions-modal').openModal();
                    my.instructionsViewed = true;
                }
            } else {
                $('#instructions-container').hide();
                $('#trigger').hide();
            }

            // Update the visualization type and the feedback type, then load in the new audio clip
            my.wavesurfer.params.visualization = my.currentTask.visualization; // invisible, spectrogram, waveform
            my.wavesurfer.params.feedback = my.currentTask.feedback; // hiddenImage, silent, notify, none
            my.wavesurfer.load(my.currentTask.url);
        };

        if (this.currentTask.feedback !== 'none') {
            // If the current task gives the user feedback, load the task's solutions and then
            // update the interface components
            $.getJSON(this.currentTask.annotationSolutionsUrl)
                .done(function(data) {
                    mainUpdate(data);
                })
                .fail(function() {
                    alert('Error: Unable to retrieve annotation solution set');
                });
        } else {
            // If not, there is no need to make an additional request. Just update the
            // task-specific data right away
            mainUpdate({});
        }
    },
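
    // Shape of the task object this file expects (inferred from how currentTask is
    // used in update, loadNextTask, and post; the exact schema lives in the backend,
    // so treat this as an illustrative sketch rather than a contract):
    //   {
    //     url: '...',                    // audio clip to load
    //     visualization: 'spectrogram',  // invisible | spectrogram | waveform
    //     feedback: 'none',              // hiddenImage | silent | notify | none
    //     imgUrl: '...',                 // only used when feedback === 'hiddenImage'
    //     annotationSolutionsUrl: '...', // only fetched when feedback !== 'none'
    //     proximityTag: [...],
    //     annotationTag: [...],
    //     tutorialVideoURL: '...',
    //     alwaysShowTags: true,
    //     instructions: [...]            // first entry is the header
    //   }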

    // Update the interface with the next task's data
    loadNextTask: function() {
        var my = this;
        $.getJSON(dataUrl)
            .done(function(data) {
                my.currentTask = data.task;
                my.update();
            });
    },

    // Collect data about the user's annotations and submit it to the backend
    submitAnnotations: function() {
        // Check that all the regions have been labeled before submitting
        if (this.stages.annotationDataValidationCheck()) {
            if (this.sendingResponse) {
                // If a post with the data is already being sent, do nothing
                return;
            }
            this.sendingResponse = true;
            // Collect data about the annotations the user has created
            var content = {
                task_start_time: this.taskStartTime,
                task_end_time: new Date().getTime(),
                visualization: this.wavesurfer.params.visualization,
                annotations: this.stages.getAnnotations(),
                deleted_annotations: this.stages.getDeletedAnnotations(),
                // List of the different types of actions the user took to create the annotations
                annotation_events: this.stages.getEvents(),
                // List of actions the user took to play and pause the audio
                play_events: this.playBar.getEvents(),
                // Boolean: whether, at the end, the user was shown what city the clip was recorded in
                final_solution_shown: this.stages.aboveThreshold()
            };

            if (this.stages.aboveThreshold()) {
                // If the user is supposed to receive feedback and got enough of the annotations
                // correct, display the city the clip was recorded in for 2 seconds and then
                // submit their work
                var my = this;
                this.stages.displaySolution();
                setTimeout(function() {
                    my.post(content);
                }, 2000);
            } else {
                this.post(content);
            }
        }
    },
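
    // For reference, the JSON body posted by post() below looks roughly like this
    // (values are illustrative, not real data):
    //   {
    //     "task_start_time": 1500000000000,
    //     "task_end_time": 1500000060000,
    //     "visualization": "spectrogram",
    //     "annotations": [...],
    //     "deleted_annotations": [...],
    //     "annotation_events": [...],
    //     "play_events": [...],
    //     "final_solution_shown": false
    //   }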

    // Make a POST request, passing back the content data. On success, load in the next task
    post: function(content) {
        var my = this;
        $.ajax({
            type: 'POST',
            url: postUrl,
            contentType: 'application/json',
            data: JSON.stringify(content)
        })
        .done(function(data) {
            // If the last task had a hiddenImage component, remove it
            if (my.currentTask.feedback === 'hiddenImage') {
                my.hiddenImage.remove();
            }
            my.loadNextTask();
        })
        .fail(function() {
            alert('Error: Unable to Submit Annotations');
        })
        .always(function() {
            // No longer sending a response
            my.sendingResponse = false;
        });
    }
};
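
// Note: main() runs as soon as this script executes, so the container elements the
// components attach to ('.audio_visual', '.labels', '.hidden_img', and the play bar /
// stages / workflow button containers) must already exist in the DOM — presumably the
// script is loaded after those elements, e.g. at the end of the page body.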

function main() {
    // Create all the components
    var annotator = new Annotator();
    // Load the first audio annotation task
    annotator.loadNextTask();
}

main();