Show:

File: app/components/exp-lookit-geometry-alternation-open/component.js

  1. import Ember from 'ember';
  2. import layout from './template';
  3. import ExpFrameBaseComponent from '../exp-frame-base/component';
  4. import FullScreen from '../../mixins/full-screen';
  5. import VideoRecord from '../../mixins/video-record';
  6. import ExpandAssets from '../../mixins/expand-assets';
  7. import { audioAssetOptions, videoAssetOptions } from '../../mixins/expand-assets';
  8. import { observer } from '@ember/object';
  9.  
  10. let {
  11. $
  12. } = Ember;
  13.  
  14. /**
  15. * @module exp-player
  16. * @submodule frames-deprecated
  17. */
  18.  
  19. /**
  20. * This frame is a bespoke frame for a beta tester. It is deprecated and will not be
  21. * included in release 2.x. For new studies, use {{#crossLink "Exp-lookit-change-detection"}}{{/crossLink}} instead.
  22. *
  23. * Frame to implement specific test trial structure for geometry alternation
  24. * replication study. Includes announcement, calibration, and alternation (test)
  25. * phases. During "alternation," two streams of "open triangles" are shown, in
26. * rectangles on the left and right of the screen: on one side both size and
  27. * shape change, on the other only size changes. Frame is displayed fullscreen
  28. * and video recording is conducted during calibration/test.
  29. *
  30. * This frame is displayed fullscreen; if the frame before it is not, that frame
  31. * needs to include a manual "next" button so that there's a user interaction
  32. * event to trigger fullscreen mode. (Browsers don't allow us to switch to FS
  33. * without a user event.)
  34. *
  35. * Specifying media locations:
  36. * For any parameters that expect a list of audio/video sources, you can EITHER provide
  37. * a list of src/type pairs with full paths like this:
  38. ```json
  39. [
  40. {
  41. 'src': 'http://.../video1.mp4',
  42. 'type': 'video/mp4'
  43. },
  44. {
  45. 'src': 'http://.../video1.webm',
  46. 'type': 'video/webm'
  47. }
  48. ]
  49. ```
  50. * OR you can provide a single string 'stub', which will be expanded
  51. * based on the parameter baseDir and the media types expected - either audioTypes or
  52. * videoTypes as appropriate. For example, if you provide the audio source `intro`
  53. * and baseDir is https://mystimuli.org/mystudy/, with audioTypes ['mp3', 'ogg'], then this
  54. * will be expanded to:
  55. ```json
  56. [
  57. {
  58. src: 'https://mystimuli.org/mystudy/mp3/intro.mp3',
  59. type: 'audio/mp3'
  60. },
  61. {
  62. src: 'https://mystimuli.org/mystudy/ogg/intro.ogg',
  63. type: 'audio/ogg'
  64. }
  65. ]
  66. ```
  67. * This allows you to simplify your JSON document a bit and also easily switch to a
  68. * new version of your stimuli without changing every URL. You can mix source objects with
  69. * full URLs and those using stubs within the same directory. However, any stimuli
  70. * specified using stubs MUST be
  71. * organized as expected under baseDir/MEDIATYPE/filename.MEDIATYPE.
  72. *
  73. * Example usage:
  74.  
  75. ```json
  76. "frames": {
  77. "alt-trial": {
  78. "kind": "exp-lookit-geometry-alternation-open",
  79. "triangleLineWidth": 8,
  80. "baseDir": "https://s3.amazonaws.com/lookitcontents/geometry/",
  81. "videoTypes": ["mp4", "webm"],
  82. "audioTypes": ["mp3", "ogg"],
  83. "calibrationVideoSources": "attention",
  84. "trialLength": 60,
  85. "attnLength": 10,
  86. "calibrationLength": 3000,
  87. "fsAudio": "fullscreen",
  88. "triangleColor": "#056090",
  89. "unpauseAudio": "return_after_pause",
  90. "pauseAudio": "pause",
  91. "videoSources": "attentiongrabber",
  92. "musicSources": "happy-stroll",
  93. "calibrationAudioSources": "chimes",
  94. "altOnLeft": true,
  95. "context": true,
  96. "audioSources": "video_01",
  97. "endAudioSources": "all_done"
  98. }
  99. }
  100.  
  101. * ```
  102. * @class Exp-lookit-geometry-alternation-open
  103. * @extends Exp-frame-base
  104. * @uses Full-screen
  105. * @uses Video-record
  106. * @uses Expand-assets
  107. * @deprecated
  108. */
  109.  
  110. export default ExpFrameBaseComponent.extend(FullScreen, VideoRecord, ExpandAssets, {
  111.  
  112. type: 'exp-lookit-geometry-alternation',
  113. layout: layout,
  114. displayFullscreen: true, // force fullscreen for all uses of this component
  115. fullScreenElementId: 'experiment-player',
  116. fsButtonID: 'fsButton',
  117.  
  118. // Track state of experiment
  119. completedAudio: false,
  120. completedAttn: false,
  121. currentSegment: 'intro', // 'calibration', 'test' (mutually exclusive)
  122. alreadyStartedCalibration: false,
  123.  
  124. // Override setting in VideoRecord mixin - only use camera if doing recording
  125. doUseCamera: Ember.computed.alias('doRecording'),
  126. startRecordingAutomatically: Ember.computed.alias('doRecording'),
  127.  
  128. recordingStarted: false,
  129.  
  130. assetsToExpand: {
  131. 'audio': [
  132. 'audioSources',
  133. 'musicSources',
  134. 'calibrationAudioSources',
  135. 'endAudioSources',
  136. 'pauseAudio',
  137. 'unpauseAudio',
  138. 'fsAudio'
  139. ],
  140. 'video': [
  141. 'calibrationVideoSources',
  142. 'videoSources'
  143. ],
  144. 'image': [
  145. ]
  146. },
  147.  
  148. readyToStartCalibration: Ember.computed('recordingStarted', 'completedAudio', 'completedAttn',
  149. function() {
  150. var recordingStarted = false;
  151. if (this.get('session').get('recorder')) {
  152. recordingStarted = this.get('session').get('recorder').get('recording');
  153. } else {
  154. recordingStarted = this.get('recordingStarted');
  155. }
  156. return (recordingStarted && this.get('completedAudio') && this.get('completedAttn'));
  157. }),
  158.  
  159. // used only by template
  160. doingCalibration: Ember.computed('currentSegment', function() {
  161. return (this.get('currentSegment') === 'calibration');
  162. }),
  163. doingIntro: Ember.computed('currentSegment', function() {
  164. return (this.get('currentSegment') === 'intro');
  165. }),
  166.  
  167. isPaused: false,
  168. hasBeenPaused: false,
  169.  
  170. // Timers for intro & stimuli
  171. introTimer: null, // minimum length of intro segment
  172. stimTimer: null,
  173.  
  174. // Store data about triangles to show, display lengths, etc. in frame
  175. settings: null,
  176. triangleBases: null,
  177.  
  178. frameSchemaProperties: {
  179. /**
  180. * Whether to do webcam recording on this frame
  181. *
  182. * @property {Boolean} doRecording
  183. */
  184. doRecording: {
  185. type: 'boolean',
  186. description: 'Whether to do webcam recording',
  187. default: true
  188. },
  189. /**
  190. * True to use big uneven triangle as context figure, or false to use small even triangle as context.
  191. *
  192. * @property {Boolean} context
  193. * @default true
  194. */
  195. context: {
  196. type: 'boolean',
  197. description: 'True to use big uneven triangle as context figure, or false to use small even triangle as context.',
  198. default: true
  199. },
  200. /**
  201. * Whether to put the shape+size alternating stream on the left (other stream alternates only in size)
  202. *
  203. * @property {Boolean} altOnLeft
  204. * @default true
  205. */
  206. altOnLeft: {
  207. type: 'boolean',
  208. description: 'Whether to put the shape+size alternating stream on the left.',
  209. default: true
  210. },
  211. /**
  212. * color of triangle outline (3 or 6 char hex, starting with #)
  213. *
  214. * @property {String} triangleColor
  215. * @default '#056090'
  216. */
  217. triangleColor: {
  218. type: 'string',
  219. description: 'color of triangle outline (3 or 6 char hex, starting with #)',
  220. default: '#056090'
  221. },
  222. /**
  223. * triangle line width in pixels
  224. *
  225. * @property {Integer} triangleLineWidth
  226. * @default 5
  227. */
  228. triangleLineWidth: {
  229. type: 'integer',
  230. description: 'triangle line width in pixels',
  231. default: 5
  232. },
  233. /**
  234. * minimum amount of time to show attention-getter in seconds
  235. *
  236. * @property {Number} attnLength
  237. * @default 5
  238. */
  239. attnLength: {
  240. type: 'number',
  241. description: 'minimum amount of time to show attention-getter in seconds',
  242. default: 5
  243. },
  244. /**
  245. * length of alternation trial in seconds
  246. *
  247. * @property {Number} trialLength
  248. * @default 6
  249. */
  250. trialLength: {
  251. type: 'number',
  252. description: 'length of alternation trial in seconds',
  253. default: 6
  254. },
  255. /**
  256. * length of single calibration segment in ms
  257. *
  258. * @property {Number} calibrationLength
  259. * @default 3000
  260. */
  261. calibrationLength: {
  262. type: 'number',
  263. description: 'length of single calibration segment in ms',
  264. default: 3000
  265. },
  266. /**
  267. * Sources Array of {src: 'url', type: 'MIMEtype'} objects for
  268. * instructions during attention-getter video
  269. *
  270. * @property {Object[]} audioSources
  271. */
  272. audioSources: {
  273. oneOf: audioAssetOptions,
  274. description: 'List of objects specifying audio src and type for instructions during attention-getter video',
  275. default: []
  276. },
  277. /**
  278. * Sources Array of {src: 'url', type: 'MIMEtype'} objects for
  279. * music during trial
  280. *
  281. * @property {Object[]} musicSources
  282. */
  283. musicSources: {
  284. oneOf: audioAssetOptions,
  285. description: 'List of objects specifying audio src and type for music during trial',
  286. default: []
  287. },
  288. /**
  289. * Sources Array of {src: 'url', type: 'MIMEtype'} objects for
  290. * audio after completion of trial (optional; used for last
  291. * trial "okay to open your eyes now" announcement)
  292. *
  293. * @property {Object[]} endAudioSources
  294. */
  295. endAudioSources: {
  296. oneOf: audioAssetOptions,
  297. description: 'Supply this to play audio at the end of the trial; list of objects specifying audio src and type',
  298. default: []
  299. },
  300. /**
  301. * Sources Array of {src: 'url', type: 'MIMEtype'} objects for
  302. * calibration audio (played 4 times during calibration)
  303. *
  304. * @property {Object[]} calibrationAudioSources
  305. */
  306. calibrationAudioSources: {
  307. oneOf: audioAssetOptions,
  308. description: 'list of objects specifying audio src and type for calibration audio',
  309. default: []
  310. },
  311. /**
  312. * Sources Array of {src: 'url', type: 'MIMEtype'} objects for
  313. * calibration video (played from start 4 times during
  314. * calibration)
  315. *
  316. * @property {Object[]} calibrationVideoSources
  317. */
  318. calibrationVideoSources: {
  319. oneOf: videoAssetOptions,
  320. description: 'list of objects specifying video src and type for calibration audio',
  321. default: []
  322. },
  323. /**
  324. * Sources Array of {src: 'url', type: 'MIMEtype'} objects for
  325. * attention-getter video (should be loopable)
  326. *
  327. * @property {Object[]} videoSources
  328. */
  329. videoSources: {
  330. oneOf: videoAssetOptions,
  331. description: 'List of objects specifying video src and type for attention-getter video',
  332. default: []
  333. },
  334. /**
  335. * Sources Array of {src: 'url', type: 'MIMEtype'} objects for
  336. * audio played upon pausing study
  337. *
  338. * @property {Object[]} pauseAudio
  339. */
  340. pauseAudio: {
  341. oneOf: audioAssetOptions,
  342. description: 'List of objects specifying audio src and type for audio played when pausing study',
  343. default: []
  344. },
  345. /**
  346. * Sources Array of {src: 'url', type: 'MIMEtype'} objects for
  347. * audio played upon unpausing study
  348. *
  349. * @property {Object[]} unpauseAudio
  350. */
  351. unpauseAudio: {
  352. oneOf: audioAssetOptions,
  353. description: 'List of objects specifying audio src and type for audio played when pausing study',
  354. default: []
  355. },
  356. /**
  357. * Sources Array of {src: 'url', type: 'MIMEtype'} objects for
  358. * audio played when study is paused due to not being fullscreen
  359. *
  360. * @property {Object[]} fsAudio
  361. */
  362. fsAudio: {
  363. oneOf: audioAssetOptions,
  364. description: 'List of objects specifying audio src and type for audio played when pausing study if study is not fullscreen',
  365. default: []
  366. }
  367. },
  368.  
  369. meta: {
  370. data: {
  371. type: 'object',
  372. properties: {
  373. /**
  374. * True to use big fat triangle as context figure, or false to use small skinny triangle as context. [same as passed to this frame]
  375. * @attribute context
  376. */
  377. context: {
  378. type: 'boolean'
  379. },
  380. /**
  381. * Whether to put the shape+size alternating stream on the left (other stream alternates only in size) [same as passed to this frame]
  382. * @attribute altOnLeft
  383. */
  384. altOnLeft: {
  385. type: 'boolean'
  386. },
  387. videoId: {
  388. type: 'string'
  389. },
  390. /**
  391. * whether this trial was paused
  392. * @attribute hasBeenPaused
  393. */
  394. hasBeenPaused: {
  395. type: 'boolean'
  396. }
  397. }
  398. }
  399. },
  400.  
  401. calObserver: observer('readyToStartCalibration', function(frame) {
  402. if (frame.get('readyToStartCalibration') && frame.get('currentSegment') === 'intro') {
  403. if (!frame.checkFullscreen()) {
  404. frame.pauseStudy();
  405. } else {
  406. frame.set('currentSegment', 'calibration');
  407. }
  408. }
  409. }),
  410.  
  411. segmentObserver: observer('currentSegment', function(frame) {
  412. // Don't trigger starting intro; that'll be done manually.
  413. if (frame.get('currentSegment') === 'calibration') {
  414. frame.notifyPropertyChange('doingCalibration');
  415. frame.set('alreadyStartedCalibration', false);
  416. frame.rerender(); // Defer starting calibration until re-render completes, to
  417. // wait for video to be available. Forcing rerender due to idiosyncratic
  418. // calibration display problem
  419.  
  420. } else if (frame.get('currentSegment') === 'test') {
  421. frame.startTrial();
  422. }
  423. }),
  424.  
  425. didRender() {
  426. this._super(...arguments);
  427. if (this.get('doingCalibration') && !this.get('alreadyStartedCalibration')) {
  428. this.set('alreadyStartedCalibration', true);
  429. this.startCalibration();
  430. }
  431. },
  432.  
  433. actions: {
  434. // When intro audio is complete
  435. endAudio() {
  436. this.set('completedAudio', true);
  437. this.notifyPropertyChange('readyToStartCalibration');
  438. },
  439.  
  440. finish() {
  441.  
  442. // Call this something separate from next because stopRecorder promise needs
  443. // to call next AFTER recording is stopped and we don't want this to have
  444. // already been destroyed at that point.
  445. /**
  446. * Just before stopping webcam video capture
  447. *
  448. * @event stoppingCapture
  449. */
  450. var _this = this;
  451. this.stopRecorder().then(() => {
  452. _this.set('stoppedRecording', true);
  453. _this.send('next');
  454. return;
  455. }, () => {
  456. _this.send('next');
  457. return;
  458. });
  459.  
  460. this._super(...arguments);
  461. }
  462.  
  463. },
  464.  
  465. startIntro() {
  466. // Allow pausing during intro
  467. var _this = this;
  468. $(document).off('keyup.pauser');
  469. $(document).on('keyup.pauser', function(e) {_this.handleSpace(e, _this);});
  470.  
  471. // Start placeholder video right away
  472. /**
  473. * Immediately before starting intro/announcement segment
  474. *
  475. * @event startIntro
  476. */
  477. this.send('setTimeEvent', 'startIntro');
  478. $('#player-video')[0].play();
  479.  
  480. // Set a timer for the minimum length for the intro/break
  481. $('#player-audio')[0].play();
  482. this.set('introTimer', window.setTimeout(function() {
  483. _this.set('completedAttn', true);
  484. _this.notifyPropertyChange('readyToStartCalibration');
  485. }, _this.get('attnLength') * 1000));
  486.  
  487. },
  488.  
  489. startCalibration() {
  490. var _this = this;
  491.  
  492. // Don't allow pausing during calibration/test.
  493. $(document).off('keyup.pauser');
  494.  
  495. // Attempt to fix calibration display similar to exit-fullscreen-and-return fix
  496. $('#allstimuli').css('background-color', 'white');
  497.  
  498. var calAudio = $('#player-calibration-audio')[0];
  499. var calVideo = $('#player-calibration-video')[0];
  500. $('#player-calibration-video').show();
  501.  
  502. // Show the calibration segment at center, left, right, center, each
  503. // time recording an event and playing the calibration audio.
  504. var doCalibrationSegments = function(calList, lastLoc) {
  505. if (calList.length === 0) {
  506. $('#player-calibration-video').hide();
  507. _this.set('currentSegment', 'test');
  508. } else {
  509. var thisLoc = calList.shift();
  510. /**
  511. * Start of EACH calibration segment
  512. *
  513. * @event startCalibration
  514. * @param {String} location location of calibration ball, relative to child: 'left', 'right', or 'center'
  515. */
  516. _this.send('setTimeEvent', 'startCalibration',
  517. {location: thisLoc});
  518. calAudio.pause();
  519. calAudio.currentTime = 0;
  520. calAudio.play();
  521. calVideo.pause();
  522. calVideo.currentTime = 0;
  523. calVideo.play();
  524. $('#player-calibration-video').removeClass(lastLoc);
  525. $('#player-calibration-video').addClass(thisLoc);
  526. window.setTimeout(function() {
  527. doCalibrationSegments(calList, thisLoc);
  528. }, _this.settings.calLength);
  529. }
  530. };
  531.  
  532. doCalibrationSegments(['center', 'left', 'right', 'center'], '');
  533.  
  534. },
  535.  
  536. startTrial() {
  537.  
  538. var _this = this;
  539. /**
  540. * Immediately before starting test trial segment
  541. *
  542. * @event startTestTrial
  543. */
  544. _this.send('setTimeEvent', 'startTestTrial');
  545.  
  546. // Begin playing music; fade in and set to fade out at end of trial
  547. var $musicPlayer = $('#player-music');
  548. $musicPlayer.prop('volume', 0.1);
  549. $musicPlayer[0].play();
  550. $musicPlayer.animate({volume: 1}, _this.settings.musicFadeLength);
  551. window.setTimeout(function() {
  552. $musicPlayer.animate({volume: 0}, _this.settings.musicFadeLength);
  553. }, _this.settings.trialLength * 1000 - _this.settings.musicFadeLength);
  554.  
  555. // Start presenting triangles and set to stop after trial length
  556. _this.presentTriangles(_this.settings.LshapesStart,
  557. _this.settings.RshapesStart,
  558. _this.settings.LsizeBaseStart,
  559. _this.settings.RsizeBaseStart);
  560. window.setTimeout(function() {
  561. window.clearTimeout(_this.get('stimTimer'));
  562. _this.clearTriangles();
  563. _this.endTrial();
  564. }, _this.settings.trialLength * 1000);
  565. },
  566.  
  567. // When triangles have been shown for time indicated: play end-audio if
  568. // present, or just move on.
  569. endTrial() {
  570. this.stopRecorder();
  571. if (this.get('endAudioSources').length) {
  572. $('#player-endaudio')[0].play();
  573. } else {
  574. this.send('finish');
  575. }
  576. },
  577.  
  578. getRandomElement(arr) {
  579. return arr[Math.floor(Math.random() * arr.length)];
  580. },
  581.  
  582. getRandom(min, max) {
  583. return Math.random() * (max - min) + min;
  584. },
  585.  
  586. drawTriangles(Lshape, LX, LY, LRot, LFlip, LSize, Rshape, RX, RY, RRot, RFlip, RSize) {
  587. /**
  588. * records EACH triangle presentation during test trial
  589. *
  590. * @event videoStreamConnection
  591. * @param {String} Lshape shape of left triangle: 'skinny' or 'fat'
  592. * @param {String} Rshape shape of right triangle: 'skinny' or 'fat'
  593. * @param {Number} LX Horizontal offset of left triangle from rectangle center, in units where rectangle width = 70; positive = to right
  594. * @param {Number} LY Vertical offset of left triangle from rectangle center, in units where rectangle height = 100.8; positive = down
  595. * @param {Number} RX Horizontal offset of right triangle from rectangle center, in units where screen width = 200 and rectangle width = 70; positive = to right
  596. * @param {Number} RY Vertical offset of right triangle from rectangle center, in units where rectangle height = 100.8; positive = down
  597. * @param {Number} LRot rotation of left triangle in degrees. 0 degrees has long side horizontal and 15 degree angle (skinny triangle) or 60 degree angle (fat triangle) on left.
  598. * @param {Number} RRot rotation of right triangle in degrees. 0 degrees has long side horizontal and 15 degree angle (skinny triangle) or 60 degree angle (fat triangle) on left.
  599. * @param {Number} LFlip whether left triangle is flipped (1 = no, -1 = yes)
  600. * @param {Number} RFlip whether right triangle is flipped (1 = no, -1 = yes)
  601. * @param {Number} LSize size of left triangle, relative to standard ('standard' sizes are set so that areas of skinny & fat triangles are equal), in terms of side length (e.g. for a rectangle, 2 would mean take a 1x3 rectangle and make it a 2x6 rectangle, quadrupling the area)
  602. * @param {Number} RSize size of right triangle, relative to standard ('standard' sizes are set so that areas of skinny & fat triangles are equal), in terms of side length (e.g. for a rectangle, 2 would mean take a 1x3 rectangle and make it a 2x6 rectangle, quadrupling the area)
  603. */
  604. this.send('setTimeEvent', 'presentTriangles', {
  605. Lshape: Lshape,
  606. LX: LX,
  607. LY: LY,
  608. LRot: LRot,
  609. LFlip: LFlip,
  610. LSize: LSize,
  611. Rshape: Rshape,
  612. RX: RX,
  613. RY: RY,
  614. RRot: RRot,
  615. RFlip: RFlip,
  616. RSize: RSize
  617. });
  618.  
  619. var leftTriangle = `${this.triangleBases[Lshape]}
  620. transform=" translate(${LX}, ${LY})
  621. translate(37.5, 56)
  622. rotate(${LRot})
  623. scale(${LSize})
  624. scale(${LFlip}, 1)" />`;
  625. var rightTriangle = `${this.triangleBases[Rshape]}
  626. transform=" translate(${RX}, ${RY})
  627. translate(162.5, 56)
  628. rotate(${RRot})
  629. scale(${RSize})
  630. scale(${RFlip}, 1)" />`;
  631. $('#stimuli').html(leftTriangle + rightTriangle);
  632. },
  633.  
  634. clearTriangles() {
  635. /**
  636. * Records each time triangles are cleared from display
  637. *
  638. * @event clearTriangles
  639. */
  640. this.send('setTimeEvent', 'clearTriangles');
  641. $('#stimuli').html('');
  642. },
  643.  
  644. presentTriangles(Lshapes, Rshapes, LsizeBase, RsizeBase) {
  645. // select X and Y positions for each shape
  646. var LX = this.getRandom(this.settings.XRange[0],
  647. this.settings.XRange[1]);
  648. var RX = this.getRandom(this.settings.XRange[0],
  649. this.settings.XRange[1]);
  650. var LY = this.getRandom(this.settings.YRange[0],
  651. this.settings.YRange[1]);
  652. var RY = this.getRandom(this.settings.YRange[0],
  653. this.settings.YRange[1]);
  654. // select rotation, flip, size per shape
  655. var LRot = this.getRandom(this.settings.rotRange[0],
  656. this.settings.rotRange[1]);
  657. var RRot = this.getRandom(this.settings.rotRange[0],
  658. this.settings.rotRange[1]);
  659. var LFlip = this.getRandomElement(this.settings.flipVals);
  660. var RFlip = this.getRandomElement(this.settings.flipVals);
  661. var LSize = this.getRandom(this.settings.sizeRange[0],
  662. this.settings.sizeRange[1]) * LsizeBase[0];
  663. var RSize = this.getRandom(this.settings.sizeRange[0],
  664. this.settings.sizeRange[1]) * RsizeBase[0];
  665.  
  666. var _this = this;
  667. _this.clearTriangles();
  668. _this.set('stimTimer', window.setTimeout(function() {
  669. _this.drawTriangles(Lshapes[0], LX, LY, LRot, LFlip, LSize,
  670. Rshapes[0], RX, RY, RRot, RFlip, RSize);
  671. _this.set('stimTimer', window.setTimeout(function() {
  672. _this.presentTriangles(Lshapes.reverse(), Rshapes.reverse(),
  673. LsizeBase.reverse(), RsizeBase.reverse());
  674. }, _this.settings.msTriangles));
  675. }, _this.settings.msBlank));
  676. },
  677.  
  678. handleSpace(event, frame) {
  679. if (frame.checkFullscreen() || !frame.isPaused) {
  680. if (event.which === 32) { // space
  681. frame.pauseStudy();
  682. }
  683. }
  684. },
  685.  
  686. // Pause/unpause study; only called if doing intro.
  687. pauseStudy() {
  688.  
  689. $('#player-audio')[0].pause();
  690. $('#player-audio')[0].currentTime = 0;
  691. $('#player-pause-audio')[0].pause();
  692. $('#player-pause-audio')[0].currentTime = 0;
  693. $('#player-pause-audio-leftfs')[0].pause();
  694. $('#player-pause-audio-leftfs')[0].currentTime = 0;
  695.  
  696. this.set('completedAudio', false);
  697. this.set('completedAttn', false);
  698.  
  699. Ember.run.once(this, () => {
  700. this.set('hasBeenPaused', true);
  701. var wasPaused = this.get('isPaused');
  702. this.set('currentSegment', 'intro');
  703.  
  704. // Currently paused: RESUME
  705. if (wasPaused) {
  706. this.startIntro();
  707. this.set('isPaused', false);
  708. } else { // Not currently paused: PAUSE
  709. window.clearTimeout(this.get('introTimer'));
  710. if (this.checkFullscreen()) {
  711. $('#player-pause-audio')[0].play();
  712. } else {
  713. $('#player-pause-audio-leftfs')[0].play();
  714. }
  715. this.set('isPaused', true);
  716. }
  717. });
  718.  
  719. },
  720.  
  721. didInsertElement() {
  722. this._super(...arguments);
  723.  
  724. // Define basic properties for two triangle shapes used. It would be
  725. // more natural to define these in the template, and then use the
  726. // <use xlink:href="#name" .../> syntax to transform them as
  727. // appropriate, but although this worked fine on experimenter I couldn't
  728. // get the links working on lookit. The code was correctly generated,
  729. // but while a direct use of polygon showed up, nothing that used
  730. // xlink:href showed up at all (even when hard-coded into the template).
  731. // Possibly related to issues like
  732. // https://github.com/emberjs/ember.js/issues/14752.
  733. // --kim
  734.  
  735. this.set('triangleBases', {
  736. 'even': `<polyline stroke="${this.get('triangleColor')}"
  737. stroke-width="${this.get('triangleLineWidth')}"
  738. fill="none"
  739. points="-5.75451015291 , -5.14699035165
  740. -5.75451015291 , 10.2939807033
  741. 11.5090203058 , -5.14699035165"
  742. vector-effect="non-scaling-stroke"
  743. stroke-linejoin="round"`,
  744. 'uneven': `<polyline stroke="${this.get('triangleColor')}"
  745. stroke-width="${this.get('triangleLineWidth')}"
  746. fill="none"
  747. points="-7.19313769114 , 0.0
  748. -7.19313769114 , 9.65060690934
  749. 14.3862753823 , -9.65060690934"
  750. vector-effect="non-scaling-stroke"
  751. stroke-linejoin="round"`
  752.  
  753. });
  754.  
  755. // COUNTERBALANCING (2x2):
  756. // context: whether to use even or uneven triangle as context. If 'even',
  757. // contrasts are small even/big even and small even/big uneven. If 'uneven',
  758. // contrasts are big uneven/small even and big uneven/small uneven.
  759. // altOnLeft: whether to put size-and-shape alteration on left
  760.  
  761. var diffShapes;
  762. var sameShapes;
  763. var shapeSizes;
  764.  
  765. if (this.get('context')) {
  766. sameShapes = ['uneven']; // context: big fat triangle
  767. shapeSizes = [1.6, 1]; // big fat vs. small fat/small skinny
  768. diffShapes = ['uneven', 'even']; // start with context
  769. } else {
  770. sameShapes = ['even']; // context: small skinny triangle
  771. shapeSizes = [1, 1.6]; // small skinny vs. big skinny/big fat
  772. diffShapes = ['even', 'uneven']; // start with context
  773. }
  774.  
  775. var Lshapes;
  776. var Rshapes;
  777. if (this.get('altOnLeft')) {
  778. Lshapes = diffShapes;
  779. Rshapes = sameShapes;
  780. } else {
  781. Lshapes = sameShapes;
  782. Rshapes = diffShapes;
  783. }
  784.  
  785. this.set('settings', {
  786. msBlank: 300,
  787. msTriangles: 500,
  788. LsizeBaseStart: shapeSizes,
  789. RsizeBaseStart: shapeSizes.slice(),
  790. XRange: [-3.125, 3.125],
  791. YRange: [-3.125, 3.125],
  792. rotRange: [0, 360],
  793. flipVals: [-1, 1],
  794. sizeRange: [0.921954, 1.072381], // 15% by AREA: sqrt(0.85), sqrt(1.15)
  795. trialLength: this.get('trialLength'),
  796. LshapesStart: Lshapes,
  797. RshapesStart: Rshapes,
  798. musicFadeLength: 2000,
  799. calLength: this.get('calibrationLength')});
  800.  
  801. this.notifyPropertyChange('readyToStartCalibration');
  802. this.startIntro();
  803. },
  804.  
  805. willDestroyElement() { // remove event handler
  806. $(document).off('keyup.pauser');
  807. window.clearInterval(this.get('introTimer'));
  808. window.clearInterval(this.get('stimTimer'));
  809. this._super(...arguments);
  810. },
  811.  
  812. // Override to do a bit extra when starting recording
  813. onRecordingStarted() {
  814. this.set('recordingStarted', true);
  815. },
  816.  
  817. // Override to do a bit extra when starting session recorder
  818. onSessionRecordingStarted() {
  819. this.set('recordingStarted', true);
  820. }
  821.  
  822. });
  823.