API Docs for: 0.5.1
Show:

File: addon/components/exp-lookit-story-page/component.js

  1. import Ember from 'ember';
  2. import layout from './template';
  3. import ExpFrameBaseUnsafeComponent from '../../components/exp-frame-base-unsafe/component';
  4. import FullScreen from '../../mixins/full-screen';
  5. import VideoRecord from '../../mixins/video-record';
  6.  
  7. let {
  8. $
  9. } = Ember;
  10.  
  11. /**
  12. * @module exp-player
  13. * @submodule frames
  14. */
  15.  
  16. /**
  17. * Frame to implement a basic "storybook page" trial, with images placed on the
  18. * screen within a display area and a sequence of audio files played.
  19. * Optionally, images may be highlighted at specified times during the audio
  20. * files.
  21. *
  22. * Webcam recording may be turned on or off; if on, the page is not displayed
  23. * or audio started until recording begins.
  24. *
  25. * Frame is displayed fullscreen, but is not paused or otherwise disabled if the
  26. * user leaves fullscreen. A button appears prompting the user to return to
  27. * fullscreen mode.
  28. *
  29. * The parent may press 'next' to proceed, or the study may proceed
  30. * automatically when audio finishes (autoProceed).
  31. *
  32. * Any number of images may be placed on the screen, and their position
  33. * specified. (Aspect ratio will be the same as the original image.)
  34. *
  35. * These frames extend ExpFrameBaseUnsafe because they are displayed fullscreen
  36. * and expected to be repeated.
*
* ```json
* "frames": {
*     "story-intro-1": {
*         "doRecording": false,
*         "autoProceed": true,
*         "baseDir": "https://s3.amazonaws.com/lookitcontents/ingroupobligations/",
*         "audioTypes": ["mp3", "ogg"],
*         "parentTextBlock": {
*             "title": "Parents!",
*             "text": "some instructions",
*             "emph": true
*         },
*         "images": [
*             {
*                 "id": "leftA",
*                 "src": "flurps1.jpg",
*                 "left": "10",
*                 "width": "30",
*                 "top": "34.47"
*             },
*             {
*                 "id": "rightA",
*                 "src": "zazzes1.jpg",
*                 "left": "60",
*                 "width": "30",
*                 "top": "34.47"
*             }
*         ],
*         "kind": "exp-lookit-story-page",
*         "id": "story-intro-1",
*         "audioSources": [
*             {
*                 "audioId": "firstAudio",
*                 "sources": [
*                     {
*                         "stub": "intro1"
*                     }
*                 ],
*                 "highlights": [
*                     {"range": [3.017343, 5.600283], "image": "leftA"},
*                     {"range": [5.752911, 8.899402], "image": "rightA"}
*                 ]
*             }
*         ]
*     }
* }
*
  85. * ```
  86. * @class ExpLookitStoryPage
  87. * @extends ExpFrameBaseUnsafe
  88. * @uses FullScreen
  89. * @uses VideoRecord
  90. */
  91.  
  92. export default ExpFrameBaseUnsafeComponent.extend(FullScreen, VideoRecord, {
  93. // In the Lookit use case, the frame BEFORE the one that goes fullscreen
  94. // must use "unsafe" saves (in order for the fullscreen event to register as
  95. // being user-initiated and not from a promise handler) #LEI-369.
  96. // exp-alternation frames are expected to be repeated, so they need to be
  97. // unsafe.
  98. type: 'exp-lookit-story-page',
  99. layout: layout,
  100. displayFullscreen: true, // force fullscreen for all uses of this component
  101. fullScreenElementId: 'experiment-player',
  102. fsButtonID: 'fsButton',
  103. videoRecorder: Ember.inject.service(),
  104. recorder: null,
  105. hasCamAccess: Ember.computed.alias('recorder.hasCamAccess'),
  106. videoUploadConnected: Ember.computed.alias('recorder.connected'),
  107.  
  108. // Track state of experiment
  109. completedAudio: false,
  110. completedAttn: false,
  111. currentSegment: 'intro', // 'calibration', 'test', 'finalaudio' (mutually exclusive)
  112. previousSegment: 'intro', // used when pausing/unpausing - refers to segment that study was paused during
  113.  
  114. currentAudioIndex: -1, // during initial sequential audio, holds an index into audioSources
  115.  
  116. readyToStartAudio: Ember.computed('hasCamAccess', 'videoUploadConnected',
  117. function() {
  118. return (this.get('hasCamAccess') && this.get('videoUploadConnected'));
  119. }),
  120.  
  121. meta: {
  122. name: 'ExpLookitStoryPage',
  123. description: 'Frame to [TODO]',
  124. parameters: {
  125. type: 'object',
  126. properties: {
  127. /**
  128. * Whether to do webcam recording (will wait for webcam
  129. * connection before starting audio if so)
  130. *
  131. * @property {Boolean} doRecording
  132. */
  133. doRecording: {
  134. type: 'boolean',
  135. description: 'Whether to do webcam recording (will wait for webcam connection before starting audio if so'
  136. },
  137. /**
  138. * Whether to proceed automatically after audio (and hide
  139. * replay/next buttons)
  140. *
  141. * @property {Boolean} autoProceed
  142. */
  143. autoProceed: {
  144. type: 'boolean',
  145. description: 'Whether to proceed automatically after audio (and hide replay/next buttons)'
  146. },
  147. /**
  148. * Base directory for where to find stimuli. Any image src
  149. * values that are not full paths will be expanded by prefixing
  150. * with `baseDir` + `img/`. Any audio/video src values that give
  151. * a value for 'stub' rather than 'src' and 'type' will be
  152. * expanded out to
  153. * `baseDir/avtype/[stub].avtype`, where the potential avtypes
  154. * are given by audioTypes and videoTypes.
  155. *
  156. * Note that baseDir SHOULD include a trailing slash
  157. * (e.g., `http://stimuli.org/myexperiment/`, not
  158. * `http://stimuli.org/myexperiment`)
  159. *
  160. * @property {String} baseDir
  161. * @default ''
  162. */
  163. baseDir: {
  164. type: 'string',
  165. default: '',
  166. description: 'Base directory for all stimuli'
  167. },
  168. /**
  169. * List of audio types to expect for any audio specified just
  170. * with a string rather than with a list of src/type pairs.
  171. * If audioTypes is ['typeA', 'typeB'] and an audio source
  172. * is given as [{'stub': 'intro'}], the audio source will be
  173. * expanded out to
  174. *
  175. ```json
  176. [
  177. {
  178. src: 'baseDir' + 'typeA/intro.typeA',
  179. type: 'audio/typeA'
  180. },
  181. {
  182. src: 'baseDir' + 'typeB/intro.typeB',
  183. type: 'audio/typeB'
  184. }
  185. ]
  186. ```
  187. *
  188. * @property {String[]} audioTypes
  189. * @default ['mp3', 'ogg']
  190. */
  191. audioTypes: {
  192. type: 'array',
  193. default: ['mp3', 'ogg'],
  194. description: 'List of audio types to expect for any audio sources specified as strings rather than lists of src/type pairs'
  195. },
  196. /**
  197. * Array of objects describing audio to play at the start of
  198. * this frame. Each element describes a separate audio segment.
  199. *
  200. * @property {Object[]} audioSources
  201. * @param {String} audioId unique string identifying this
  202. * audio segment
  203. * @param {Object[]} sources Array of {src: 'url', type:
  204. * 'MIMEtype'} objects with audio sources for this segment
  205. *
  206. * Can also give a single element {stub: 'filename'}, which will
  207. * be expanded out to the appropriate array based on `baseDir`
  208. * and `audioTypes` values; see `audioTypes`.
  209. *
  210. * @param {Object[]} highlights Array of {'range': [startT,
  211. * endT], 'image': 'imageId'} objects, where the imageId
  212. * values correspond to the ids given in images
  213. */
  214. audioSources: {
  215. type: 'array',
  216. description: 'List of objects specifying audio src and type for audio played during test trial',
  217. default: [],
  218. items: {
  219. type: 'object',
  220. properties: {
  221. 'audioId': {
  222. type: 'string'
  223. },
  224. 'sources': {
  225. type: 'array',
  226. items: {
  227. type: 'object',
  228. properties: {
  229. 'src': {
  230. type: 'string'
  231. },
  232. 'type': {
  233. type: 'string'
  234. },
  235. 'stub': {
  236. type: 'string'
  237. }
  238. }
  239. }
  240. },
  241. 'highlights': {
  242. type: 'array',
  243. items: {
  244. type: 'object',
  245. properties: {
  246. 'range': {
  247. type: 'array',
  248. items: {
  249. type: 'number'
  250. }
  251. },
  252. 'image': {
  253. 'type': 'string'
  254. }
  255. }
  256. }
  257. }
  258. }
  259. }
  260. },
  261. /**
  262. * Text block to display to parent.
  263. *
  264. * @property {Object} parentTextBlock
  265. * @param {String} title title to display
  266. * @param {String} text paragraph of text
  267. * @param {Boolean} emph whether to bold this paragraph
  268. */
  269. parentTextBlock: {
  270. type: 'object',
  271. properties: {
  272. title: {
  273. type: 'string'
  274. },
  275. text: {
  276. type: 'string'
  277. },
  278. emph: {
  279. type: 'boolean'
  280. }
  281. },
  282. default: []
  283. },
  284. /**
  285. * Array of images to display and information about their placement
  286. *
  287. * @property {Object[]} images
  288. * @param {String} id unique ID for this image
  289. * @param {String} src URL of image source. This can be a full
  290. * URL, or relative to baseDir (see baseDir).
  291. * @param {String} left left margin, as percentage of story area width
  292. * @param {String} width image width, as percentage of story area width
  293. * @param {String} top top margin, as percentage of story area height
  294.  
  295. */
  296. images: {
  297. type: 'array',
  298. items: {
  299. type: 'object',
  300. properties: {
  301. 'id': {
  302. type: 'string'
  303. },
  304. 'src': {
  305. type: 'string'
  306. },
  307. 'left': {
  308. type: 'string'
  309. },
  310. 'width': {
  311. type: 'string'
  312. },
  313. 'top': {
  314. type: 'string'
  315. }
  316. }
  317. }
  318. }
  319. }
  320. },
  321. data: {
  322. /**
  323. * Parameters captured and sent to the server
  324. *
  325. * @method serializeContent
  326. * @param {String} videoID The ID of any video recorded during this frame
  327. * @param {Object} eventTimings
  328. * @return {Object} The payload sent to the server
  329. */
  330. type: 'object',
  331. properties: {
  332. videoId: {
  333. type: 'string'
  334. }
  335. }
  336. }
  337. },
  338.  
  339. audioObserver: Ember.observer('readyToStartAudio', function(frame) {
  340. if (frame.get('readyToStartAudio')) {
  341. $('#waitForVideo').hide();
  342. $('.story-image-container').show();
  343. frame.set('currentAudioIndex', -1);
  344. frame.send('playNextAudioSegment');
  345. }
  346. }),
  347.  
  348. actions: {
  349.  
  350. // During playing audio
  351. updateCharacterHighlighting() {
  352.  
  353. var thisAudioData = this.get('audioSources')[this.currentAudioIndex];
  354. var t = $('#' + thisAudioData.audioId)[0].currentTime;
  355.  
  356. $('.story-image-container').removeClass('highlight');
  357.  
  358. thisAudioData.highlights.forEach(function (h) {
  359. if (t > h.range[0] && t < h.range[1]) {
  360. $('#' + h.image).addClass('highlight');
  361. }
  362. });
  363. },
  364.  
  365. replay() {
  366. // pause any current audio, and set times to 0
  367. $('audio').each(function() {
  368. this.pause();
  369. this.currentTime = 0;
  370. });
  371. // reset to index -1 as at start of study
  372. this.set('currentAudioIndex', -1);
  373. // restart audio
  374. this.send('playNextAudioSegment');
  375. },
  376.  
  377. next() {
  378. if (this.get('recorder')) {
  379. /**
  380. * Just before stopping webcam video capture
  381. *
  382. * @event stoppingCapture
  383. */
  384. this.sendTimeEvent('stoppingCapture');
  385. this.get('recorder').stop();
  386. }
  387. this._super(...arguments);
  388. },
  389.  
  390. playNextAudioSegment() {
  391. this.set('currentAudioIndex', this.get('currentAudioIndex') + 1);
  392. if (this.currentAudioIndex < this.get('audioSources').length) {
  393. $('#' + this.get('audioSources')[this.currentAudioIndex].audioId)[0].play();
  394. } else {
  395. if (this.get('autoProceed')) {
  396. this.send('next');
  397. } else {
  398. $('#nextbutton').prop('disabled', false);
  399. }
  400. }
  401. }
  402.  
  403. },
  404.  
  405. // Utility to expand stubs into either full URLs (for images) or
  406. // array of {src: 'url', type: 'MIMEtype'} objects (for audio).
  407. expandAsset(asset, type) {
  408. var fullAsset = asset;
  409. var _this = this;
  410.  
  411. if (type === 'image' && typeof asset === 'string' && !(asset.includes('://'))) {
  412. // Image: replace stub with full URL if needed
  413. fullAsset = this.baseDir + 'img/' + asset;
  414. } else if (type === 'audio') {
  415. // Audio: replace any source objects that have a
  416. // 'stub' attribute with the appropriate expanded source
  417. // objects
  418. fullAsset = [];
  419. var types = this.audioTypes;
  420. asset.forEach(function(srcObj) {
  421. if (srcObj.hasOwnProperty('stub')) {
  422. for (var iType = 0; iType < types.length; iType++) {
  423. fullAsset.push({
  424. src: _this.baseDir + types[iType] + '/' + srcObj.stub + '.' + types[iType],
  425. type: type + '/' + types[iType]
  426. });
  427. }
  428. } else {
  429. fullAsset.push(srcObj);
  430. }
  431. });
  432. }
  433. return fullAsset;
  434. },
  435.  
  436. // TODO: should this be moved to the recording mixin?
  437. sendTimeEvent(name, opts = {}) {
  438. var streamTime = this.get('recorder') ? this.get('recorder').getTime() : null;
  439. Ember.merge(opts, {
  440. streamTime: streamTime,
  441. videoId: this.get('videoId')
  442. });
  443. this.send('setTimeEvent', `exp-lookit-story-page:${name}`, opts);
  444. },
  445.  
  446. // TODO: should the events here be moved to the fullscreen mixin?
  447. onFullscreen() {
  448. if (this.get('isDestroyed')) {
  449. return;
  450. }
  451. this._super(...arguments);
  452. if (!this.checkFullscreen()) {
  453. /**
  454. * Upon detecting change out of fullscreen mode
  455. *
  456. * @event leftFullscreen
  457. */
  458. this.sendTimeEvent('leftFullscreen');
  459. } else {
  460. /**
  461. * Upon detecting change to fullscreen mode
  462. *
  463. * @event enteredFullscreen
  464. */
  465. this.sendTimeEvent('enteredFullscreen');
  466. }
  467. },
  468.  
  469. didInsertElement() {
  470. this._super(...arguments);
  471.  
  472. // Expand any image src stubs
  473. var _this = this;
  474. var images = this.get('images');
  475. images.forEach(function(im) {
  476. Ember.set(im, 'src', _this.expandAsset(im.src, 'image'));
  477. });
  478. this.set('images_parsed', images);
  479.  
  480. // Expand any audio src stubs
  481. var audioSources = this.get('audioSources');
  482. audioSources.forEach(function(aud) {
  483. Ember.set(aud, 'sources_parsed', _this.expandAsset(aud.sources, 'audio'));
  484. });
  485. this.set('audioSources', audioSources);
  486.  
  487. this.send('showFullscreen');
  488. $('#nextbutton').prop('disabled', true);
  489.  
  490. if (_this.get('doRecording')) {
  491. $('.story-image-container').hide();
  492. if (_this.get('experiment') && _this.get('id') && _this.get('session')) {
  493. let recorder = _this.get('videoRecorder').start(_this.get('videoId'), _this.$('#videoRecorder'), {
  494. hidden: true
  495. });
  496. recorder.install({
  497. record: true
  498. }).then(() => {
  499. _this.sendTimeEvent('recorderReady');
  500. _this.set('recordingIsReady', true);
  501. _this.notifyPropertyChange('readyToStartAudio');
  502. });
  503. // TODO: move handlers that just record events to the VideoRecord mixin?
  504. /**
  505. * When recorder detects a change in camera access
  506. *
  507. * @event onCamAccess
  508. * @param {Boolean} hasCamAccess
  509. */
  510. recorder.on('onCamAccess', (hasAccess) => {
  511. _this.sendTimeEvent('hasCamAccess', {
  512. hasCamAccess: hasAccess
  513. });
  514. _this.notifyPropertyChange('readyToStartAudio');
  515. });
  516. /**
  517. * When recorder detects a change in video stream connection status
  518. *
  519. * @event videoStreamConnection
  520. * @param {String} status status of video stream connection, e.g.
  521. * 'NetConnection.Connect.Success' if successful
  522. */
  523. recorder.on('onConnectionStatus', (status) => {
  524. _this.sendTimeEvent('videoStreamConnection', {
  525. status: status
  526. });
  527. _this.notifyPropertyChange('readyToStartAudio');
  528. });
  529. _this.set('recorder', recorder);
  530. }
  531. } else {
  532. _this.send('playNextAudioSegment');
  533. }
  534.  
  535. },
  536.  
  537. willDestroyElement() {
  538. this.sendTimeEvent('destroyingElement');
  539.  
  540. // Whenever the component is destroyed, make sure that event handlers are removed and video recorder is stopped
  541. if (this.get('recorder')) {
  542. this.get('recorder').hide(); // Hide the webcam config screen
  543. this.get('recorder').stop();
  544. }
  545.  
  546. this._super(...arguments);
  547. }
  548.  
  549. });
  550.