API Docs for: 0.5.1

File: addon/randomizers/pref-phys-pilot.js

  1. // jscs:disable
  2. import Ember from 'ember';
  3.  
  4. // http://stackoverflow.com/a/12646864
  5. function shuffleArray(array) {
  6. for (var i = array.length - 1; i > 0; i--) {
  7. var j = Math.floor(Math.random() * (i + 1));
  8. var temp = array[i];
  9. array[i] = array[j];
  10. array[j] = temp;
  11. }
  12. return array;
  13. }
  14.  
  15. /**
  16. * Select the first matching session from an array of options, according to the specified rules
  17. *
  18. * @method getLastSession
  19. * @param {Session[]} pastSessions An array of session records. This returns the first match, eg assumes newest-first sort order
  20. * @return {Session} The model representing the last session in which the user participated
  21. */
  22. function getLastSession(pastSessions) {
  23. // Base randomization on the newest (last completed) session for which the participant got at
  24. // least as far as recording data for a single video ID.
  25. for (let i = 0; i < pastSessions.length; i++) {
  26. let session = pastSessions[i];
  27. // Frames might be numbered differently in different experiments... rather than check for a frame ID, check that at least one frame referencing the videos exists at all.
  28. let expData = session.get('expData') || {};
  29. let keys = Object.keys(expData);
  30. for (let i = 0; i < keys.length; i++) {
  31. let frameKeyName = keys[i];
  32. let frameData = expData[frameKeyName];
  33. if (frameKeyName.indexOf('pref-phys-videos') !== -1 && frameData && frameData.videoId) {
  34. return session;
  35. }
  36. }
  37. }
  38. // If no match found, explicitly return null
  39. return null;
  40. }
  41.  
  42. function getConditions(lastSession, frameId) {
  43. var startType, showStay, whichObjects;
  44. // The last session payload refers to the frame we want by number (#-frameName), but frames aren't numbered until the sequence
  45. // has been resolved (eg until we expand pref-phys-videos into a series of video frames, we won't know how many
  46. // frames there are or in what order)
  47. // To find the last conditions, we take the last (and presumably only) key of session.conditions that looks like
  48. // the name (without the leading number part)
  49.  
  50. // This works insofar as this function only targets one sort of frame that we expect to occur only once in
  51. // the pref-phys experiment. Otherwise this function would get confused.
  52. let lastConditions = lastSession ? lastSession.get('conditions') : null;
  53. let lastFrameConditions;
  54. Object.keys(lastConditions || {}).forEach((keyName) => {
  55. if (keyName.indexOf(frameId) !== -1) {
  56. lastFrameConditions = lastConditions[keyName];
  57. }
  58. });
  59.  
  60. if (!lastFrameConditions) {
  61. startType = Math.floor(Math.random() * 4);
  62. showStay = Math.floor(Math.random() * 2);
  63. var whichObjectG = Math.floor(Math.random() * 6);
  64. var whichObjectI = Math.floor(Math.random() * 6);
  65. var whichObjectS = Math.floor(Math.random() * 6);
  66. var whichObjectC = Math.floor(Math.random() * 6);
  67. whichObjects = [whichObjectG, whichObjectI, whichObjectS, whichObjectC];
  68. } else {
  69. startType = lastFrameConditions.startType;
  70. startType++;
  71. if (startType > 3) {
  72. startType = 0;
  73. }
  74.  
  75. showStay = lastFrameConditions.showStay;
  76. //parseInt(prompt('Show support-stay (1) or support-fall (0) last session?', '0/1'));
  77. showStay = 1 - showStay;
  78. whichObjects = Ember.copy(lastFrameConditions.whichObjects);
  79. for (var i = 0; i < 4; i++) {
  80. whichObjects[i]++;
  81. if (whichObjects[i] > 5) {
  82. whichObjects[i] = 0;
  83. }
  84. }
  85. }
  86. return {
  87. startType: startType,
  88. showStay: showStay,
  89. whichObjects: whichObjects
  90. };
  91. }
  92.  
/**
 * Assign the full, counterbalanced list of test videos for one session.
 *
 * For each of the four video types, this pairs comparison types with objects, balances which
 * outcome (more vs. less probable) appears on the left, randomizes camera angle, background,
 * and flip variant, shuffles event order within the type, and finally interleaves events
 * across types starting from the type indexed by startType.
 *
 * @method assignVideos
 * @param {Number} startType Index (0-3) of the video type that starts the rotation
 * @param {Number} showStay 0/1 selector for support-fall vs support-stay; not used by the
 *   active pilot code (the branch that reads it is commented out below and both 'stay' and
 *   'fall' types are always included)
 * @param {Number[]} whichObjects Four indices (0-5), one per video type, rotating that
 *   type's object list
 * @param {Number} NPERTYPE Number of events to produce per video type
 * @return {Array} [allEvents, filenames]: event descriptor objects (with fname, altName,
 *   index added) and the corresponding stimulus filenames
 */
function assignVideos(startType, showStay, whichObjects, NPERTYPE) {
    // Types of comparisons for each event type (gravity, inertia, support-fall, support-stay,
    // control). Format [event, outcomeMoreProb, outcomeLessProb]
    const comparisonsG = [
        ['ramp', 'down', 'up'],
        ['ramp', 'down', 'up'],
        ['toss', 'down', 'up']
    ];
    // TODO: Is this one still used?
    const comparisonsI = [ // jshint ignore:line
        ['stop', 'hand', 'nohand'],
        ['reverse', 'barrier', 'nobarrier']
    ];
    const comparisonsSF = [
        ['fall', 'slightly-on', 'mostly-on'],
        ['fall', 'next-to', 'mostly-on'],
        ['fall', 'near', 'mostly-on'],
        ['fall', 'next-to', 'slightly-on'],
        ['fall', 'near', 'slightly-on'],
        ['fall', 'near', 'next-to']
    ];
    const comparisonsSS = [
        ['stay', 'slightly-on', 'mostly-on'],
        ['stay', 'next-to', 'mostly-on'],
        ['stay', 'near', 'mostly-on'],
        ['stay', 'next-to', 'slightly-on'],
        ['stay', 'near', 'slightly-on'],
        ['stay', 'near', 'next-to']
    ];
    const comparisonsC = [
        ['same', 'A', 'B'],
        ['salience', 'interesting', 'boring']
    ];

    // const videotypes = ['gravity', 'inertia', 'support', 'control'];
    // FOR PILOT ONLY:
    const videotypes = ['gravity', 'stay', 'control', 'fall'];
    // compTypes[i] is the comparison table for videotypes[i].
    var compTypes = [comparisonsG, comparisonsSS, comparisonsC, comparisonsSF];
    // how many times does each comparison type listed need to be shown to get to NPERTYPE for that event type?
    var nReps = [2, 1, 3, 1];

    /*
    // Choose which videos to show for support
    if (showStay === 0) {
        videotypes[1] = 'fall';
        compTypes[1] = comparisonsSF;
    } else if (showStay === 1) {
        videotypes[1] = 'stay';
        compTypes[1] = comparisonsSS;
    } /* else {
        alert('invalid value for showStay (should be '0' or '1'), using '0'');
        videotypes[2] = 'fall';
        compTypes[2] = comparisonsSF;
    } */

    // Objects to use: elements correspond to videotypes
    const physicalObjects = [
        ['apple', 'cup', 'whiteball', 'lotion', 'spray', 'whiteball'],
        ['hammer', 'tissues', 'duck', 'book', 'shoe', 'brush'],
        ['box', 'funnel', 'eraser', 'scissors', 'spoon', 'wrench'],
        ['hammer', 'tissues', 'duck', 'book', 'shoe', 'brush']
    ];

    // Options for videos, organized by event
    const cameraAngles = {
        table: ['c1', 'c2'],
        ramp: ['c1', 'c2'],
        toss: ['c1', 'c2'],
        stop: ['c1', 'c2'],
        reverse: ['c1', 'c2'],
        fall: ['c2'],
        stay: ['c2'],
        same: ['c1'],
        salience: ['c1'],
    };
    const backgrounds = {
        table: ['b1', 'b2'],
        ramp: ['b1', 'b2'],
        toss: ['b1'],
        stop: ['b1'],
        reverse: ['b1'],
        fall: ['green'],
        stay: ['green'],
        same: ['b1'],
        salience: ['b1']
    };

    // NN/NR/RN/RR mirror-flip variants available per event.
    const flips = {
        table: ['NR'],
        ramp: ['NN', 'RR', 'NR', 'RN'],
        toss: ['NN', 'RR'],
        stop: ['NR'],
        reverse: ['RN'],
        fall: ['NN', 'NR', 'RN', 'RR'],
        stay: ['NN', 'NR', 'RN', 'RR'],
        same: ['NN', 'RR', 'NR', 'RN'],
        salience: ['NN', 'NR', 'RN', 'RR'],
    };
    // Create list of TYPES (e.g. gravity, inertia, ...), rotated so typeOrder[0] is
    // videotypes[startType].
    var typeOrder = videotypes.slice(startType, videotypes.length);
    typeOrder = typeOrder.concat(videotypes.slice(0, startType));

    // Build one shuffled playlist of events per video type.
    var playlistsByType = {};
    for (var iType = 0; iType < videotypes.length; iType++) { // for each video type

        // make list of objects to use with canonically-ordered comparison types,
        // rotated so objList[0] is the object chosen by whichObjects[iType]
        var objList = physicalObjects[iType].slice(whichObjects[iType], physicalObjects[iType].length);
        objList = objList.concat(physicalObjects[iType].slice(0, whichObjects[iType]));

        // make canonical comparison type list, repeated nReps[iType] times to reach NPERTYPE
        var eventTypeList = compTypes[iType];
        for (var iRep = 1; iRep < nReps[iType]; iRep++) {
            eventTypeList = eventTypeList.concat(compTypes[iType]);
        }

        // choose placement of more/less surprising outcomes (balanced: 3 of each in random order)
        var onLeft = ['moreProb', 'moreProb', 'moreProb', 'lessProb', 'lessProb', 'lessProb'];
        onLeft = shuffleArray(onLeft);

        // pair objects and comparison types
        var events = [];
        for (var iEvent = 0; iEvent < eventTypeList.length; iEvent++) {
            var outcomeL, outcomeR;
            if (onLeft[iEvent] === 'moreProb') {
                outcomeL = eventTypeList[iEvent][1];
                outcomeR = eventTypeList[iEvent][2];
            } else {
                outcomeL = eventTypeList[iEvent][2];
                outcomeR = eventTypeList[iEvent][1];
            }

            // choose camera angle, background, and NN/NR/RN/RR randomly
            // (options depend on the event name, eventTypeList[iEvent][0])
            var iCamera = Math.floor(Math.random() *
                cameraAngles[eventTypeList[iEvent][0]].length);
            var iBackground = Math.floor(Math.random() *
                backgrounds[eventTypeList[iEvent][0]].length);
            var iFlip = Math.floor(Math.random() *
                flips[eventTypeList[iEvent][0]].length);

            events.push({
                compType: eventTypeList[iEvent][0],
                outcomeL: outcomeL,
                outcomeR: outcomeR,
                object: objList[iEvent],
                camera: cameraAngles[eventTypeList[iEvent][0]][iCamera],

                background: backgrounds[eventTypeList[iEvent][0]][iBackground],
                flip: flips[eventTypeList[iEvent][0]][iFlip]
            });
        }

        // choose order of events randomly within this type
        events = shuffleArray(events);
        playlistsByType[videotypes[iType]] = events;
    }

    // Put list together: interleave types (one event of each type per round),
    // assigning 1-based index values and building the sbs_* filename for each event.
    var allEvents = [];
    var filenames = [];
    var eventNum = 1;
    for (var nEvents = 0; nEvents < NPERTYPE; nEvents++) {
        for (iType = 0; iType < typeOrder.length; iType++) {
            var e = playlistsByType[typeOrder[iType]][nEvents];
            var fname = `sbs_${e.compType}_${e.outcomeL}_${e.outcomeR}_${e.object}_${e.camera}_${e.background}_${e.flip}`;
            filenames.push(fname);
            // altName is the same clip with left/right outcomes swapped
            var altName = `sbs_${e.compType}_${e.outcomeR}_${e.outcomeL}_${e.object}_${e.camera}_${e.background}_${e.flip}`;
            e.fname = fname;
            e.altName = altName;
            e.index = eventNum;
            allEvents.push(e);
            eventNum++;
        }
    }

    return [allEvents, filenames];
}
  269.  
  270. function parse_name(fname) {
  271. var pieces = fname.split('_');
  272. var features = {};
  273.  
  274.  
  275. features.eventType = pieces[1];
  276. features.leftEvent = pieces[2];
  277. features.rightEvent = pieces[3];
  278. features.object = pieces[4];
  279. features.camera = pieces[5];
  280. features.bg = pieces[6];
  281. var variantExt = pieces[7];
  282. features.variant = (variantExt.split('.'))[0];
  283.  
  284. //quick hack for dummy clips which have wrong names for some objects
  285. // (so we can get a correct intro name)
  286. switch (features.object) {
  287. case 'A':
  288. features.object = 'box';
  289. break;
  290. case 'B':
  291. features.object = 'eraser';
  292. break;
  293. case 'C':
  294. features.object = 'funnel';
  295. break;
  296. case 'D':
  297. features.object = 'scissors';
  298. break;
  299. case 'E':
  300. features.object = 'spoon';
  301. break;
  302. case 'F':
  303. features.object = 'wrench';
  304. break;
  305. }
  306.  
  307. return features;
  308.  
  309. }
  310.  
  311. function audioSourceObjs(path, shortname) {
  312. return [
  313. {
  314. 'src': path + shortname + '.ogg',
  315. 'type': 'audio/ogg'
  316. },
  317. {
  318. 'src': path + shortname + '.mp3',
  319. 'type': 'audio/mp3'
  320. }
  321. ];
  322. }
  323.  
  324. function videoSourceObjs(path, shortname, organizedByType) {
  325. if (!organizedByType) {
  326. return [
  327. {
  328. 'src': path + shortname + '.webm',
  329. 'type': 'video/webm'
  330. },
  331. {
  332. 'src': path + shortname + '.mp4',
  333. 'type': 'video/mp4'
  334. }
  335. ];
  336. } else {
  337. return [
  338. {
  339. 'src': path + 'webm/' + shortname + '.webm',
  340. 'type': 'video/webm'
  341. },
  342. {
  343. 'src': path + 'mp4/' + shortname + '.mp4',
  344. 'type': 'video/mp4'
  345. }
  346. ];
  347. }
  348. }
  349.  
/**
 * Convert assigned event descriptors into exp-video-physics frame definitions.
 *
 * @method toFrames
 * @param {String} frameId Frame ID used for every generated frame.
 *   NOTE(review): all frames share this same id — confirm downstream resolution
 *   assigns unique numbered IDs.
 * @param {Object[]} eventVideos Events from assignVideos, plus a final sentinel object
 *   holding only {index: nVideos} which becomes the closing "all done" frame
 * @param {String} BASE_DIR Base URL prefix for all audio/video assets
 * @return {Object[]} One frame definition per input event
 */
function toFrames(frameId, eventVideos, BASE_DIR) {
    var nVideos = eventVideos.length;
    return eventVideos.map((e) => {
        // Sentinel entry (highest index): emit the final frame with the "all done"
        // audio and attention-grabber video, but no test stimulus.
        if (e.index === nVideos) {
            return {
                kind: 'exp-video-physics',
                id: `${frameId}`,
                autoplay: true,
                isLast: true,
                audioSources: audioSourceObjs(
                    BASE_DIR + 'audio/',
                    'all_done'),
                attnSources: videoSourceObjs(
                    BASE_DIR + 'stimuli/attention/',
                    'attentiongrabber'),
            };
        }
        // Recover event features (object, event type, ...) from the generated filename.
        var features = parse_name(e.fname);
        // Pick background music at random per trial. Note music_05 and music_08 are
        // absent from this list — presumably intentional; confirm with stimulus set.
        var allMusic = ['music_01', 'music_02', 'music_03', 'music_04', 'music_06', 'music_07', 'music_09', 'music_10'];
        var musicName = allMusic[Math.floor(Math.random() * allMusic.length)];

        var returnFrame = {
            kind: 'exp-video-physics',
            id: `${frameId}`,
            autoplay: true,
            testLength: 20, // TODO: change to 20s for actual testing.
            isLast: false,
            // Per-trial announcement audio: video_01, video_02, ... (zero-padded index)
            audioSources: audioSourceObjs(
                BASE_DIR + 'audio/',
                'video_' + ('00' + (e.index)).slice(-2)),
            musicSources: audioSourceObjs(
                BASE_DIR + 'audio/',
                musicName),
            // Intro clip showing the object for this trial
            introSources: videoSourceObjs(
                BASE_DIR + 'stimuli/intro/',
                `cropped_${features.object}`),
            attnSources: videoSourceObjs(
                BASE_DIR + 'stimuli/attention/',
                'attentiongrabber'),
            // Main stimulus and its left/right-swapped alternate (organized in
            // webm/ and mp4/ subdirectories per event type)
            sources: videoSourceObjs(
                BASE_DIR + 'stimuli/' + features.eventType + '/',
                e.fname, true),
            altSources: videoSourceObjs(
                BASE_DIR + 'stimuli/' + features.eventType + '/',
                e.altName, true)
        };

        // FOR PILOT ONLY: replace fall videos with calibration
        if (e.compType === 'fall') {
            returnFrame.sources = videoSourceObjs(
                BASE_DIR + 'stimuli/attention/',
                'calibration');
            returnFrame.altSources = videoSourceObjs(
                BASE_DIR + 'stimuli/attention/',
                'calibration');
        }

        return returnFrame;

    });
}
  411.  
  412. var randomizer = function (frameId, frameConfig, pastSessions, resolveFrame) {
  413. var MAX_VIDEOS = 24; // limit number of videos. Use 24 for actual study.
  414. var BASE_DIR = 'https://s3.amazonaws.com/lookitcontents/exp-physics/';
  415.  
  416. pastSessions.sort(function (a, b) {
  417. return a.get('createdOn') > b.get('createdOn') ? -1 : 1;
  418. });
  419.  
  420. // TODO: In the future, we may want to identify the specific frame # to fetch instead of generic frame name
  421. pastSessions = pastSessions.filter(function (session) {
  422. return session.get('conditions');
  423. });
  424. let lastSession = getLastSession(pastSessions);
  425. var conditions = getConditions(lastSession, frameId);
  426.  
  427. conditions.NPERTYPE = 6;
  428. var {
  429. startType,
  430. showStay,
  431. whichObjects,
  432. NPERTYPE
  433. } = conditions;
  434.  
  435. var [eventVideos, ] = assignVideos(startType, showStay, whichObjects, NPERTYPE);
  436.  
  437. eventVideos = eventVideos.slice(0, MAX_VIDEOS);
  438. eventVideos.push({index: MAX_VIDEOS + 1});
  439.  
  440. // allEvents and filenames are a function of conditions (no need to store)
  441. var resolved = [];
  442. toFrames(frameId, eventVideos, BASE_DIR).forEach((frame) => {
  443. return resolved.push(...resolveFrame(null, frame)[0]);
  444. });
  445. return [resolved, conditions];
  446. };
  447. export default randomizer;
  448.  
  449. // Export helper functions to support unit testing
  450. export { getConditions, getLastSession };
  451.