
File: app/components/exp-lookit-change-detection/component.js

import Ember from 'ember';
import layout from './template';
import ExpFrameBaseComponent from '../exp-frame-base/component';
import FullScreen from '../../mixins/full-screen';
import VideoRecord from '../../mixins/video-record';
import ExpandAssets from '../../mixins/expand-assets';
import { audioAssetOptions, videoAssetOptions, imageAssetOptions } from '../../mixins/expand-assets';
import isColor from '../../utils/is-color';
import { observer } from '@ember/object';

let {
    $
} = Ember;

// http://stackoverflow.com/a/12646864
function shuffleArrayInPlace(array) {
    for (var i = array.length - 1; i > 0; i--) {
        var j = Math.floor(Math.random() * (i + 1));
        var temp = array[i];
        array[i] = array[j];
        array[j] = temp;
    }
    return array;
}
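// e.g. shuffleArrayInPlace(['a', 'b', 'c']) might leave the array as ['c', 'a', 'b'];
// the array is reordered in place (Fisher-Yates) and also returned for convenience.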

/**
 * @module exp-player
 * @submodule frames
 */

/**
 *
 * Frame for a preferential looking "alternation" or "change detection" paradigm trial,
 * in which separate streams of images are displayed on the left and right of the screen.
 * Typically, on one side images would alternate between two categories - e.g., images
 * of 8 vs. 16 dots, or images of cats vs. dogs - and on the other side the images would
 * all be in the same category.
 *
 * The frame starts with an optional brief "announcement" segment, where an attention-getter
 * video is displayed and audio is played. During this segment, the trial can be paused
 * and restarted.
 *
 * If `doRecording` is true (the default), we wait for recording to begin before the
 * actual test trial starts. We also always wait for all images to pre-load, so that
 * there are no delays in loading images that affect the timing of presentation.
 *
 * You can customize the appearance of the frame: the overall background color, the color
 * of the two rectangles that contain the image streams, and the border of those rectangles.
 * You can also specify how long to present each image, how long to clear the screen in
 * between image pairs, and how long the test trial should last altogether.
 *
 * You provide four lists of images to use in this frame: `leftImagesA`, `leftImagesB`,
 * `rightImagesA`, and `rightImagesB`. The left stream will alternate between images in
 * `leftImagesA` and `leftImagesB`. The right stream will alternate between images in
 * `rightImagesA` and `rightImagesB`. Images are either presented in random order (default)
 * within those lists, or can be presented in the exact order listed, by setting
 * `randomizeImageOrder` to false.
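 *
 * For example, with `leftImagesA: ['A1.jpg', 'A2.jpg']`, `leftImagesB: ['B1.jpg', 'B2.jpg']`,
 * `startWithA: true`, and `randomizeImageOrder: false`, the left stream would show
 * A1, B1, A2, B2, A1, B1, ... for the duration of the trial.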
 *
 * The timing of all image presentations, and which specific images were presented, is
 * recorded in the event data.
 *
 * This frame is displayed fullscreen; if the frame before it is not, that frame
 * needs to include a manual "next" button so that there's a user interaction
 * event to trigger fullscreen mode. (Browsers don't allow switching to fullscreen
 * without a user event.) If the user leaves fullscreen, that event is recorded, but the
 * trial is not paused.
 *
 * Specifying media locations:
 *
 * For any parameters that expect a list of audio/video sources, you can EITHER provide
 * a list of src/type pairs with full paths like this:
```json
[
    {
        "src": "http://.../video1.mp4",
        "type": "video/mp4"
    },
    {
        "src": "http://.../video1.webm",
        "type": "video/webm"
    }
]
```
 * OR you can provide a single string 'stub', which will be expanded
 * based on the parameter baseDir and the media types expected - either audioTypes or
 * videoTypes as appropriate. For example, if you provide the audio source `intro`
 * and baseDir is https://mystimuli.org/mystudy/, with audioTypes ['mp3', 'ogg'], then this
 * will be expanded to:
```json
[
    {
        "src": "https://mystimuli.org/mystudy/mp3/intro.mp3",
        "type": "audio/mp3"
    },
    {
        "src": "https://mystimuli.org/mystudy/ogg/intro.ogg",
        "type": "audio/ogg"
    }
]
```
 * This allows you to simplify your JSON document a bit and also easily switch to a
 * new version of your stimuli without changing every URL. You can mix source objects with
 * full URLs and those using stubs within the same directory. However, any stimuli
 * specified using stubs MUST be organized as expected under
 * baseDir/MEDIATYPE/filename.MEDIATYPE.
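 *
 * As a rough sketch of the expansion rule (the helper name here is hypothetical; the
 * actual logic lives in the expand-assets mixin, and baseDir is assumed to end in '/'):
```js
// Sketch only: expands a stub like 'intro' into a list of src/type objects.
function expandStub(stub, baseDir, types, mediaKind) {
    return types.map(function(type) {
        return {
            src: baseDir + type + '/' + stub + '.' + type,
            type: mediaKind + '/' + type
        };
    });
}
// expandStub('intro', 'https://mystimuli.org/mystudy/', ['mp3', 'ogg'], 'audio')
// yields the two-object list shown above.
```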
 *
 * Example usage:

```json
"frames": {
    "alt-trial": {
        "kind": "exp-lookit-change-detection",
        "baseDir": "https://www.mit.edu/~kimscott/placeholderstimuli/",
        "videoTypes": ["mp4", "webm"],
        "audioTypes": ["mp3", "ogg"],
        "trialLength": 15,
        "attnLength": 2,
        "fsAudio": "sample_1",
        "unpauseAudio": "return_after_pause",
        "pauseAudio": "pause",
        "videoSources": "attentiongrabber",
        "musicSources": "music_01",
        "audioSources": "video_01",
        "endAudioSources": "all_done",
        "border": "thick solid black",
        "leftImagesA": ["apple.jpg", "orange.jpg"],
        "rightImagesA": ["square.png", "tall.png", "wide.png"],
        "leftImagesB": ["apple.jpg", "orange.jpg"],
        "rightImagesB": ["apple.jpg", "orange.jpg"],
        "startWithA": true,
        "randomizeImageOrder": true,
        "displayMs": 500,
        "blankMs": 250,
        "containerColor": "white",
        "backgroundColor": "#abc"
    }
}
```
 * @class Exp-lookit-change-detection
 * @extends Exp-frame-base
 * @uses Full-screen
 * @uses Video-record
 * @uses Expand-assets
 */

export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAssets, {

    type: 'exp-lookit-change-detection',
    layout: layout,
    displayFullscreen: true, // force fullscreen for all uses of this component
    fullScreenElementId: 'experiment-player',
    fsButtonID: 'fsButton',

    // Track state of experiment
    completedAudio: false,
    completedAttn: false,
    currentSegment: 'intro', // 'intro' or 'test' (mutually exclusive)
    alreadyStartedCalibration: false,

    // Override setting in VideoRecord mixin - only use camera if doing recording
    doUseCamera: Ember.computed.alias('doRecording'),
    startRecordingAutomatically: Ember.computed.alias('doRecording'),

    recordingStarted: false,

    imageIndexA: 0,
    imageIndexB: 0,
    doingA: false,
    musicFadeLength: 2000,

    assetsToExpand: {
        'audio': [
            'audioSources',
            'musicSources',
            'endAudioSources',
            'pauseAudio',
            'unpauseAudio',
            'fsAudio'
        ],
        'video': [
            'videoSources'
        ],
        'image': [
            'leftImagesA',
            'rightImagesA',
            'leftImagesB',
            'rightImagesB'
        ]
    },

    readyToStartCalibration: Ember.computed('recordingStarted', 'completedAudio', 'completedAttn', 'image_loaded_count',
        function() {
            var recordingStarted = false;
            if (this.get('session').get('recorder')) {
                recordingStarted = this.get('session').get('recorder').get('recording');
            } else {
                recordingStarted = this.get('recordingStarted');
            }
            var nImages = this.get('leftImagesA_parsed').length + this.get('leftImagesB_parsed').length +
                this.get('rightImagesA_parsed').length + this.get('rightImagesB_parsed').length;

            return ((recordingStarted || !this.get('doRecording')) && this.get('completedAudio') && this.get('completedAttn') && this.get('image_loaded_count') >= nImages);
        }),

    doingIntro: Ember.computed('currentSegment', function() {
        return (this.get('currentSegment') === 'intro');
    }),

    isPaused: false,
    hasBeenPaused: false,

    // Timers for intro & stimuli
    introTimer: null, // minimum length of intro segment
    stimTimer: null,

    frameSchemaProperties: {
        /**
         * Whether to do webcam recording on this frame
         *
         * @property {Boolean} doRecording
         */
        doRecording: {
            type: 'boolean',
            description: 'Whether to do webcam recording',
            default: true
        },
        /**
         * Minimum amount of time to show attention-getter in seconds. If 0, the
         * attention-getter segment is skipped.
         *
         * @property {Number} attnLength
         * @default 0
         */
        attnLength: {
            type: 'number',
            description: 'minimum amount of time to show attention-getter in seconds',
            default: 0
        },
        /**
         * Length of alternation trial in seconds. This refers only to the section of the
         * trial where the alternating image streams are presented - it does not count
         * any announcement phase.
         *
         * @property {Number} trialLength
         * @default 60
         */
        trialLength: {
            type: 'number',
            description: 'length of alternation trial in seconds',
            default: 60
        },
        /**
         * Array of {src: 'url', type: 'MIMEtype'} objects for
         * instructions during attention-getter video
         *
         * @property {Object[]} audioSources
         */
        audioSources: {
            oneOf: audioAssetOptions,
            description: 'List of objects specifying audio src and type for instructions during attention-getter video',
            default: []
        },
        /**
         * Array of {src: 'url', type: 'MIMEtype'} objects for
         * music during trial
         *
         * @property {Object[]} musicSources
         */
        musicSources: {
            oneOf: audioAssetOptions,
            description: 'List of objects specifying audio src and type for music during trial',
            default: []
        },
        /**
         * Array of {src: 'url', type: 'MIMEtype'} objects for
         * audio after completion of trial (optional; used for last
         * trial "okay to open your eyes now" announcement)
         *
         * @property {Object[]} endAudioSources
         */
        endAudioSources: {
            oneOf: audioAssetOptions,
            description: 'Supply this to play audio at the end of the trial; list of objects specifying audio src and type',
            default: []
        },
        /**
         * Array of {src: 'url', type: 'MIMEtype'} objects for
         * attention-getter video (should be loopable)
         *
         * @property {Object[]} videoSources
         */
        videoSources: {
            oneOf: videoAssetOptions,
            description: 'List of objects specifying video src and type for attention-getter video',
            default: []
        },
        /**
         * Array of {src: 'url', type: 'MIMEtype'} objects for
         * audio played upon pausing study
         *
         * @property {Object[]} pauseAudio
         */
        pauseAudio: {
            oneOf: audioAssetOptions,
            description: 'List of objects specifying audio src and type for audio played when pausing study',
            default: []
        },
        /**
         * Array of {src: 'url', type: 'MIMEtype'} objects for
         * audio played upon unpausing study
         *
         * @property {Object[]} unpauseAudio
         */
        unpauseAudio: {
            oneOf: audioAssetOptions,
            description: 'List of objects specifying audio src and type for audio played when unpausing study',
            default: []
        },
        /**
         * Array of {src: 'url', type: 'MIMEtype'} objects for
         * audio played when study is paused due to not being fullscreen
         *
         * @property {Object[]} fsAudio
         */
        fsAudio: {
            oneOf: audioAssetOptions,
            description: 'List of objects specifying audio src and type for audio played when pausing study if study is not fullscreen',
            default: []
        },
        /**
         * Whether to start with the 'A' image list on both left and right. If true, both
         * sides start with their respective A image lists; if false, both sides start with
         * their respective B image lists.
         *
         * @property {Boolean} startWithA
         * @default true
         */
        startWithA: {
            type: 'boolean',
            description: 'Whether to start with image list A',
            default: true
        },
        /**
         * Whether to randomize image presentation order within the lists leftImagesA,
         * leftImagesB, rightImagesA, and rightImagesB. If true (default), the order
         * of presentation is randomized. Each time all the images in one list have been
         * presented, the order is randomized again for the next 'round.' If false, the
         * order of presentation is as written in the list. Once all images are presented,
         * we loop back around to the first image and start again.
         *
         * Example of randomization: suppose we have defined
         * ```
         * leftImagesA: ['apple', 'banana', 'cucumber'],
         * leftImagesB: ['aardvark', 'bat'],
         * randomizeImageOrder: true,
         * startWithA: true
         * ```
         *
         * And suppose the timing is such that we end up with 10 images total. Here is a
         * possible sequence of images shown on the left:
         *
         * ['banana', 'aardvark', 'apple', 'bat', 'cucumber', 'bat', 'cucumber', 'aardvark', 'apple', 'bat']
         *
         * @property {Boolean} randomizeImageOrder
         * @default true
         */
        randomizeImageOrder: {
            type: 'boolean',
            description: 'Whether to randomize image presentation order within lists',
            default: true
        },
        /**
         * Amount of time to display each image, in milliseconds
         *
         * @property {Number} displayMs
         * @default 500
         */
        displayMs: {
            type: 'number',
            description: 'Amount of time to display each image, in milliseconds',
            default: 500
        },
        /**
         * Amount of time for blank display between each image, in milliseconds
         *
         * @property {Number} blankMs
         * @default 250
         */
        blankMs: {
            type: 'number',
            description: 'Amount of time for blank display between each image, in milliseconds',
            default: 250
        },
        /**
         * Format of border to display around alternation streams, if any. See
         * https://developer.mozilla.org/en-US/docs/Web/CSS/border for syntax.
         *
         * @property {String} border
         * @default 'thin solid gray'
         */
        border: {
            type: 'string',
            description: 'Format of border to display around alternation streams',
            default: 'thin solid gray'
        },
        /**
         * Color of background. See https://developer.mozilla.org/en-US/docs/Web/CSS/color_value
         * for acceptable syntax: can use color names ('blue', 'red', 'green', etc.), or
         * rgb hex values (e.g. '#800080' - include the '#')
         *
         * @property {String} backgroundColor
         * @default 'white'
         */
        backgroundColor: {
            type: 'string',
            description: 'Color of background',
            default: 'white'
        },
        /**
         * Color of image stream container, if different from overall background.
         * Defaults to backgroundColor if one is provided. See
         * https://developer.mozilla.org/en-US/docs/Web/CSS/color_value
         * for acceptable syntax: can use color names ('blue', 'red', 'green', etc.), or
         * rgb hex values (e.g. '#800080' - include the '#')
         *
         * @property {String} containerColor
         * @default 'white'
         */
        containerColor: {
            type: 'string',
            description: 'Color of image stream container',
            default: 'white'
        },
        /**
         * Set A of images to display on left of screen. Left stream will alternate between
         * images from set A and from set B. Elements of list can be full URLs or relative
         * paths starting from `baseDir`.
         *
         * @property {String[]} leftImagesA
         */
        leftImagesA: {
            type: 'array',
            description: 'Set A of images to display on left of screen',
            default: [],
            items: {
                oneOf: imageAssetOptions
            }
        },
        /**
         * Set B of images to display on left of screen. Left stream will alternate between
         * images from set A and from set B. Elements of list can be full URLs or relative
         * paths starting from `baseDir`.
         *
         * @property {String[]} leftImagesB
         */
        leftImagesB: {
            type: 'array',
            description: 'Set B of images to display on left of screen',
            default: [],
            items: {
                oneOf: imageAssetOptions
            }
        },
        /**
         * Set A of images to display on right of screen. Right stream will alternate between
         * images from set A and from set B. Elements of list can be full URLs or relative
         * paths starting from `baseDir`.
         *
         * @property {String[]} rightImagesA
         */
        rightImagesA: {
            type: 'array',
            description: 'Set A of images to display on right of screen',
            default: [],
            items: {
                oneOf: imageAssetOptions
            }
        },
        /**
         * Set B of images to display on right of screen. Right stream will alternate between
         * images from set A and from set B. Elements of list can be full URLs or relative
         * paths starting from `baseDir`.
         *
         * @property {String[]} rightImagesB
         */
        rightImagesB: {
            type: 'array',
            description: 'Set B of images to display on right of screen',
            default: [],
            items: {
                oneOf: imageAssetOptions
            }
        }
    },

    meta: {
        data: {
            type: 'object',
            properties: {
                /**
                 * Sequence of images shown on the left
                 * @attribute leftSequence
                 */
                leftSequence: {
                    type: 'Object'
                },
                /**
                 * Sequence of images shown on the right
                 * @attribute rightSequence
                 */
                rightSequence: {
                    type: 'Object'
                },
                videoId: {
                    type: 'string'
                },
                /**
                 * Whether this trial was paused
                 * @attribute hasBeenPaused
                 */
                hasBeenPaused: {
                    type: 'boolean'
                }
            }
        }
    },

    calObserver: observer('readyToStartCalibration', function(frame) {
        if (frame.get('readyToStartCalibration') && frame.get('currentSegment') === 'intro') {
            if (!frame.checkFullscreen()) {
                frame.pauseStudy();
            } else {
                frame.set('currentSegment', 'test');
            }
        }
    }),

    segmentObserver: observer('currentSegment', function(frame) {
        // Don't trigger starting intro; that'll be done manually.
        if (frame.get('currentSegment') === 'test') {
            frame.startTrial();
        }
    }),

    didRender() {
        this._super(...arguments);
        if (this.get('doingCalibration') && !this.get('alreadyStartedCalibration')) {
            this.set('alreadyStartedCalibration', true);
            this.startCalibration();
        }
    },

    actions: {
        // When intro audio is complete
        endAudio() {
            this.set('completedAudio', true);
            this.notifyPropertyChange('readyToStartCalibration');
        },

        finish() {

            // Call this something separate from next because stopRecorder promise needs
            // to call next AFTER recording is stopped and we don't want this to have
            // already been destroyed at that point.
            /**
             * Just before stopping webcam video capture
             *
             * @event stoppingCapture
             */
            var _this = this;
            this.stopRecorder().then(() => {
                _this.set('stoppedRecording', true);
                _this.send('next');
                return;
            }, () => {
                _this.send('next');
                return;
            });

            this._super(...arguments);
        }

    },

    startIntro() {
        // Allow pausing during intro
        var _this = this;
        $(document).off('keyup.pauser');
        $(document).on('keyup.pauser', function(e) {
            _this.handleSpace(e, _this);
        });

        // Start placeholder video right away
        /**
         * Immediately before starting intro/announcement segment
         *
         * @event startIntro
         */
        this.send('setTimeEvent', 'startIntro');
        if (this.get('attnLength')) {
            $('#player-video')[0].play();
            // Set a timer for the minimum length for the intro/break
            $('#player-audio')[0].play();
            this.set('introTimer', window.setTimeout(function() {
                _this.set('completedAttn', true);
                _this.notifyPropertyChange('readyToStartCalibration');
            }, _this.get('attnLength') * 1000));
        } else {
            _this.set('completedAttn', true);
            _this.set('completedAudio', true);
            _this.notifyPropertyChange('readyToStartCalibration');
        }
    },

    startTrial() {

        var _this = this;
        /**
         * Immediately before starting test trial segment
         *
         * @event startTestTrial
         */
        _this.send('setTimeEvent', 'startTestTrial');

        // Begin playing music; fade in and set to fade out at end of trial
        var $musicPlayer = $('#player-music');
        $musicPlayer.prop('volume', 0.1);
        $musicPlayer[0].play();
        $musicPlayer.animate({volume: 1}, _this.get('musicFadeLength'));
        window.setTimeout(function() {
            $musicPlayer.animate({volume: 0}, _this.get('musicFadeLength'));
        }, _this.get('trialLength') * 1000 - _this.get('musicFadeLength'));

        // Start presenting images and set to stop after trial length
        $('#allstimuli').show();
        _this.presentImages();
        window.setTimeout(function() {
            window.clearTimeout(_this.get('stimTimer'));
            _this.clearImages();
            _this.endTrial();
        }, _this.get('trialLength') * 1000);
    },

    // When images have been shown for the time indicated: play end-audio if
    // present, or just move on.
    endTrial() {
        this.stopRecorder();
        if (this.get('endAudioSources').length) {
            $('#player-endaudio')[0].play();
        } else {
            this.send('finish');
        }
    },

    clearImages() {
        /**
         * Records each time images are cleared from display
         *
         * @event clearImages
         */
        this.send('setTimeEvent', 'clearImages');
        $('.stream-container').html('');
    },

    presentImages() {
        // Alternate between the A and B image lists on each call
        var A = this.get('doingA');
        var leftImageList = A ? this.get('leftImagesA_parsed') : this.get('leftImagesB_parsed');
        var rightImageList = A ? this.get('rightImagesA_parsed') : this.get('rightImagesB_parsed');
        var imageIndex = A ? this.get('imageIndexA') : this.get('imageIndexB');

        var leftImageIndex = imageIndex % leftImageList.length;
        var rightImageIndex = imageIndex % rightImageList.length;

        // At the start of each 'round' through a list, re-shuffle it if requested
        if (leftImageIndex == 0 && this.get('randomizeImageOrder')) {
            shuffleArrayInPlace(leftImageList);
        }
        if (rightImageIndex == 0 && this.get('randomizeImageOrder')) {
            shuffleArrayInPlace(rightImageList);
        }
        if (A) {
            this.set('imageIndexA', this.get('imageIndexA') + 1);
        } else {
            this.set('imageIndexB', this.get('imageIndexB') + 1);
        }
        this.set('doingA', !this.get('doingA'));
        var _this = this;
        // Clear the screen for blankMs, then show the next image pair for displayMs
        _this.clearImages();
        _this.set('stimTimer', window.setTimeout(function() {
            $('#left-stream-container').html(`<img src=${leftImageList[leftImageIndex]} class="stim-image" alt="left image">`);
            $('#right-stream-container').html(`<img src=${rightImageList[rightImageIndex]} class="stim-image" alt="right image">`);
            /**
             * Immediately after making images visible
             *
             * @event presentImages
             * @param {String} left url of left image
             * @param {String} right url of right image
             */
            _this.send('setTimeEvent', 'presentImages', {
                left: leftImageList[leftImageIndex],
                right: rightImageList[rightImageIndex]
            });
            _this.set('stimTimer', window.setTimeout(function() {
                _this.presentImages();
            }, _this.get('displayMs')));
        }, _this.get('blankMs')));
    },

    handleSpace(event, frame) {
        if (frame.checkFullscreen() || !frame.isPaused) {
            if (event.which === 32) { // space
                frame.pauseStudy();
            }
        }
    },

    // Pause/unpause study; only called if doing intro.
    pauseStudy() {

        $('#player-audio')[0].pause();
        $('#player-audio')[0].currentTime = 0;
        $('#player-pause-audio')[0].pause();
        $('#player-pause-audio')[0].currentTime = 0;
        $('#player-pause-audio-leftfs')[0].pause();
        $('#player-pause-audio-leftfs')[0].currentTime = 0;

        this.set('completedAudio', false);
        this.set('completedAttn', false);

        Ember.run.once(this, () => {
            this.set('hasBeenPaused', true);
            var wasPaused = this.get('isPaused');
            this.set('currentSegment', 'intro');

            // Currently paused: RESUME
            if (wasPaused) {
                this.startIntro();
                this.set('isPaused', false);
            } else { // Not currently paused: PAUSE
                window.clearTimeout(this.get('introTimer'));
                if (this.checkFullscreen()) {
                    $('#player-pause-audio')[0].play();
                } else {
                    $('#player-pause-audio-leftfs')[0].play();
                }
                this.set('isPaused', true);
            }
        });

    },

    image_loaded_count: 0,

    didInsertElement() {
        this._super(...arguments);
        this.set('doingA', this.get('startWithA'));
        this.notifyPropertyChange('readyToStartCalibration');
        var _this = this;

        $.each([this.get('leftImagesA_parsed'), this.get('leftImagesB_parsed'), this.get('rightImagesA_parsed'), this.get('rightImagesB_parsed')],
            function(idx, imgList) {
                $.each(imgList, function(idx, url) {
                    var img = new Image();
                    img.onload = function() { // set onload fn before source to ensure we catch it
                        _this.set('image_loaded_count', _this.get('image_loaded_count') + 1);
                        _this.notifyPropertyChange('readyToStartCalibration');
                    };
                    img.onerror = function() {
                        _this.set('image_loaded_count', _this.get('image_loaded_count') + 1);
                        _this.notifyPropertyChange('readyToStartCalibration');
                        console.error('Unable to load image at ', url, ' - will skip loading but this may cause the exp-lookit-change-detection frame to fail');
                    };
                    img.src = url;
                });
            });

        if (this.get('border').includes(';')) {
            console.warn('Invalid border css provided to exp-lookit-change-detection; not applying.');
        } else {
            $('#allstimuli div.stream-container').css('border', this.get('border'));
        }

        if (isColor(this.get('backgroundColor'))) {
            $('div.exp-lookit-change-detection').css('background-color', this.get('backgroundColor'));
        } else {
            console.warn('Invalid background color provided to exp-lookit-change-detection; not applying.');
        }

        if (isColor(this.get('containerColor'))) {
            $('div.exp-lookit-change-detection div.stream-container').css('background-color', this.get('containerColor'));
        } else {
            console.warn('Invalid container color provided to exp-lookit-change-detection; not applying.');
        }

        $('#allstimuli').hide();
        this.startIntro();
    },

    willDestroyElement() { // remove event handler
        $(document).off('keyup.pauser');
        window.clearTimeout(this.get('introTimer'));
        window.clearTimeout(this.get('stimTimer'));
        this._super(...arguments);
    },

    /**
     * What to do when individual-frame recording starts.
     * @method onRecordingStarted
     * @private
     */
    onRecordingStarted() {
        this.set('recordingStarted', true);
        this.notifyPropertyChange('readyToStartCalibration');
    },

    /**
     * What to do when session-level recording starts.
     * @method onSessionRecordingStarted
     * @private
     */
    onSessionRecordingStarted() {
        this.set('recordingStarted', true);
        this.notifyPropertyChange('readyToStartCalibration');
    }

});