API Docs for: 0.5.1

File: addon/randomizers/pref-phys-pilot.js

// jscs:disable
import Ember from 'ember';

// http://stackoverflow.com/a/12646864
function shuffleArray(array) {
    for (var i = array.length - 1; i > 0; i--) {
        var j = Math.floor(Math.random() * (i + 1));
        var temp = array[i];
        array[i] = array[j];
        array[j] = temp;
    }
    return array;
}
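// Example (illustrative): shuffleArray([1, 2, 3, 4]) might return [3, 1, 4, 2].
// Note that the input array is shuffled in place and also returned.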

/**
 * Select the most recent session in which the participant got far enough to record data
 * for at least one video.
 *
 * @method getLastSession
 * @param {Session[]} pastSessions An array of session records, assumed to be sorted newest-first; the first match is returned
 * @return {Session} The model for the last session in which the user participated, or null if no session qualifies
 */
function getLastSession(pastSessions) {
    // Base randomization on the newest (last completed) session for which the participant got at
    // least as far as recording data for a single video ID.
    for (let i = 0; i < pastSessions.length; i++) {
        let session = pastSessions[i];
        // Frames might be numbered differently in different experiments... rather than check for a frame ID, check that at least one frame referencing the videos exists at all.
        let expData = session.get('expData') || {};
        let keys = Object.keys(expData);
        for (let j = 0; j < keys.length; j++) {
            let frameKeyName = keys[j];
            let frameData = expData[frameKeyName];
            if (frameKeyName.indexOf('pref-phys-videos') !== -1 && frameData && frameData.videoId) {
                return session;
            }
        }
    }
    // If no match found, explicitly return null
    return null;
}

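/**
 * Choose condition values for this session: rotate deterministically from the conditions
 * stored in the participant's last session, or draw them at random if there is no usable
 * previous session.
 *
 * @method getConditions
 * @param {Session} lastSession The most recent session with recorded video data, or null
 * @param {String} frameId Name of the frame whose stored conditions should be reused (matched ignoring the leading frame number)
 * @return {Object} Object with keys startType, showStay, and whichObjects
 */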
function getConditions(lastSession, frameId) {
    var startType, showStay, whichObjects;
    // The last session payload refers to the frame we want by number (#-frameName), but frames aren't numbered until
    //   the sequence has been resolved (i.e., until we expand pref-phys-videos into a series of video frames, we won't
    //   know how many frames there are or in what order).
    // To find the last conditions, we take the last (and presumably only) key of session.conditions that looks like
    //   the frame name without the leading number part.

    // This works only because this function targets a single kind of frame that we expect to occur exactly once in
    // the pref-phys experiment; if such a frame appeared more than once, this lookup would be ambiguous.
    let lastConditions = lastSession ? lastSession.get('conditions') : null;
    let lastFrameConditions;
    Object.keys(lastConditions || {}).forEach((keyName) => {
        if (keyName.indexOf(frameId) !== -1) {
            lastFrameConditions = lastConditions[keyName];
        }
    });

    if (!lastFrameConditions) {
        startType = Math.floor(Math.random() * 4);
        showStay = Math.floor(Math.random() * 2);
        var whichObjectG = Math.floor(Math.random() * 6);
        var whichObjectI = Math.floor(Math.random() * 6);
        var whichObjectS = Math.floor(Math.random() * 6);
        var whichObjectC = Math.floor(Math.random() * 6);
        whichObjects = [whichObjectG, whichObjectI, whichObjectS, whichObjectC];
    } else {
        startType = lastFrameConditions.startType;
        startType++;
        if (startType > 3) {
            startType = 0;
        }

        // Flip which support videos are shown relative to the last session (support-stay = 1, support-fall = 0)
        showStay = 1 - lastFrameConditions.showStay;
        whichObjects = Ember.copy(lastFrameConditions.whichObjects);
        for (var i = 0; i < 4; i++) {
            whichObjects[i]++;
            if (whichObjects[i] > 5) {
                whichObjects[i] = 0;
            }
        }
    }
    return {
        startType: startType,
        showStay: showStay,
        whichObjects: whichObjects
    };
}
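
// Example (illustrative): if the previous session stored
//   { startType: 3, showStay: 0, whichObjects: [5, 0, 2, 3] }
// for this frame, the next session is assigned the rotated conditions
//   { startType: 0, showStay: 1, whichObjects: [0, 1, 3, 4] }.
// With no previous session, each value is instead drawn uniformly at random.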

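/**
 * Build the full set of test events to show, based on the assigned conditions.
 *
 * @method assignVideos
 * @param {Number} startType Index into videotypes at which to start the rotation of event types
 * @param {Number} showStay Whether to use support-stay (1) or support-fall (0) comparisons; unused in the pilot, where the video types are hardcoded
 * @param {Number[]} whichObjects Four offsets into the object lists, one per video type
 * @param {Number} NPERTYPE Number of events to generate per video type
 * @return {Array} Two-element array [allEvents, filenames]: event descriptors in presentation order, and the corresponding video filenames
 */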
function assignVideos(startType, showStay, whichObjects, NPERTYPE) {
    // Types of comparisons for each event type (gravity, inertia, support-fall, support-stay,
    // control). Format [event, outcomeMoreProb, outcomeLessProb]
    const comparisonsG = [
        ['ramp', 'down', 'up'],
        ['ramp', 'down', 'up'],
        ['toss', 'down', 'up']
    ];
    // TODO: Is this one still used?
    const comparisonsI = [ // jshint ignore:line
        ['stop', 'hand', 'nohand'],
        ['reverse', 'barrier', 'nobarrier']
    ];
    const comparisonsSF = [
        ['fall', 'slightly-on', 'mostly-on'],
        ['fall', 'next-to', 'mostly-on'],
        ['fall', 'near', 'mostly-on'],
        ['fall', 'next-to', 'slightly-on'],
        ['fall', 'near', 'slightly-on'],
        ['fall', 'near', 'next-to']
    ];
    const comparisonsSS = [
        ['stay', 'slightly-on', 'mostly-on'],
        ['stay', 'next-to', 'mostly-on'],
        ['stay', 'near', 'mostly-on'],
        ['stay', 'next-to', 'slightly-on'],
        ['stay', 'near', 'slightly-on'],
        ['stay', 'near', 'next-to']
    ];
    const comparisonsC = [
        ['same', 'A', 'B'],
        ['salience', 'interesting', 'boring']
    ];

    // const videotypes = ['gravity', 'inertia', 'support', 'control'];
    // FOR PILOT ONLY:
    const videotypes = ['gravity', 'stay', 'control', 'fall'];
    var compTypes = [comparisonsG, comparisonsSS, comparisonsC, comparisonsSF];
    // how many times does each comparison type listed need to be shown to get to NPERTYPE for that event type?
    var nReps = [2, 1, 3, 1];

    /*
    // Choose which support videos to show based on showStay (disabled for the pilot, where
    // videotypes and compTypes are hardcoded above)
    if (showStay === 0) {
        videotypes[1] = 'fall';
        compTypes[1] = comparisonsSF;
    } else if (showStay === 1) {
        videotypes[1] = 'stay';
        compTypes[1] = comparisonsSS;
    } else {
        alert('invalid value for showStay (should be 0 or 1), using 0');
        videotypes[2] = 'fall';
        compTypes[2] = comparisonsSF;
    }
    */

    // Objects to use: elements correspond to videotypes
    const physicalObjects = [
        ['apple', 'cup', 'whiteball', 'lotion', 'spray', 'whiteball'],
        ['hammer', 'tissues', 'duck', 'book', 'shoe', 'brush'],
        ['box', 'funnel', 'eraser', 'scissors', 'spoon', 'wrench'],
        ['hammer', 'tissues', 'duck', 'book', 'shoe', 'brush']
    ];

    // Options for videos, organized by event
    const cameraAngles = {
        table: ['c1', 'c2'],
        ramp: ['c1', 'c2'],
        toss: ['c1', 'c2'],
        stop: ['c1', 'c2'],
        reverse: ['c1', 'c2'],
        fall: ['c2'],
        stay: ['c2'],
        same: ['c1'],
        salience: ['c1'],
    };
    const backgrounds = {
        table: ['b1', 'b2'],
        ramp: ['b1', 'b2'],
        toss: ['b1'],
        stop: ['b1'],
        reverse: ['b1'],
        fall: ['green'],
        stay: ['green'],
        same: ['b1'],
        salience: ['b1']
    };

    const flips = {
        table: ['NR'],
        ramp: ['NN', 'RR', 'NR', 'RN'],
        toss: ['NN', 'RR'],
        stop: ['NR'],
        reverse: ['RN'],
        fall: ['NN', 'NR', 'RN', 'RR'],
        stay: ['NN', 'NR', 'RN', 'RR'],
        same: ['NN', 'RR', 'NR', 'RN'],
        salience: ['NN', 'NR', 'RN', 'RR'],
    };
    // Create list of TYPES (e.g. gravity, inertia, ...)
    var typeOrder = videotypes.slice(startType, videotypes.length);
    typeOrder = typeOrder.concat(videotypes.slice(0, startType));
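    // e.g. with startType = 2 and the pilot videotypes above, typeOrder = ['control', 'fall', 'gravity', 'stay']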

    var playlistsByType = {};
    for (var iType = 0; iType < videotypes.length; iType++) { // for each video type

        // make list of objects to use with canonically-ordered comparison types
        var objList = physicalObjects[iType].slice(whichObjects[iType], physicalObjects[iType].length);
        objList = objList.concat(physicalObjects[iType].slice(0, whichObjects[iType]));

        // make canonical comparison type list
        var eventTypeList = compTypes[iType];
        for (var iRep = 1; iRep < nReps[iType]; iRep++) {
            eventTypeList = eventTypeList.concat(compTypes[iType]);
        }

        // choose placement of more/less surprising outcomes (balanced)
        var onLeft = ['moreProb', 'moreProb', 'moreProb', 'lessProb', 'lessProb', 'lessProb'];
        onLeft = shuffleArray(onLeft);

        // pair objects and comparison types
        var events = [];
        for (var iEvent = 0; iEvent < eventTypeList.length; iEvent++) {
            var outcomeL, outcomeR;
            if (onLeft[iEvent] === 'moreProb') {
                outcomeL = eventTypeList[iEvent][1];
                outcomeR = eventTypeList[iEvent][2];
            } else {
                outcomeL = eventTypeList[iEvent][2];
                outcomeR = eventTypeList[iEvent][1];
            }

            // choose camera angle, background, and NN/NR/RN/RR randomly
            var iCamera = Math.floor(Math.random() *
                cameraAngles[eventTypeList[iEvent][0]].length);
            var iBackground = Math.floor(Math.random() *
                backgrounds[eventTypeList[iEvent][0]].length);
            var iFlip = Math.floor(Math.random() *
                flips[eventTypeList[iEvent][0]].length);

            events.push({
                compType: eventTypeList[iEvent][0],
                outcomeL: outcomeL,
                outcomeR: outcomeR,
                object: objList[iEvent],
                camera: cameraAngles[eventTypeList[iEvent][0]][iCamera],

                background: backgrounds[eventTypeList[iEvent][0]][iBackground],
                flip: flips[eventTypeList[iEvent][0]][iFlip]
            });
        }

        // choose order of events randomly
        events = shuffleArray(events);
        playlistsByType[videotypes[iType]] = events;
    }

    // Put list together
    var allEvents = [];
    var filenames = [];
    var eventNum = 1;
    for (var nEvents = 0; nEvents < NPERTYPE; nEvents++) {
        for (iType = 0; iType < typeOrder.length; iType++) {
            var e = playlistsByType[typeOrder[iType]][nEvents];
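            // Filename format: sbs_<comparison>_<leftOutcome>_<rightOutcome>_<object>_<camera>_<background>_<flip>,
            // e.g. 'sbs_ramp_down_up_apple_c1_b1_NN'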
            var fname = `sbs_${e.compType}_${e.outcomeL}_${e.outcomeR}_${e.object}_${e.camera}_${e.background}_${e.flip}`;
            filenames.push(fname);
            var altName = `sbs_${e.compType}_${e.outcomeR}_${e.outcomeL}_${e.object}_${e.camera}_${e.background}_${e.flip}`;
            e.fname = fname;
            e.altName = altName;
            e.index = eventNum;
            allEvents.push(e);
            eventNum++;
        }
    }

    return [allEvents, filenames];
}

function parse_name(fname) {
    var pieces = fname.split('_');
    var features = {};


    features.eventType = pieces[1];
    features.leftEvent = pieces[2];
    features.rightEvent = pieces[3];
    features.object = pieces[4];
    features.camera = pieces[5];
    features.bg = pieces[6];
    var variantExt = pieces[7];
    features.variant = (variantExt.split('.'))[0];

    // Quick hack for dummy clips, which have wrong names for some objects
    // (so that we can get the correct intro video name)
    switch (features.object) {
        case 'A':
            features.object = 'box';
            break;
        case 'B':
            features.object = 'eraser';
            break;
        case 'C':
            features.object = 'funnel';
            break;
        case 'D':
            features.object = 'scissors';
            break;
        case 'E':
            features.object = 'spoon';
            break;
        case 'F':
            features.object = 'wrench';
            break;
    }

    return features;

}
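// Example (illustrative): parse_name('sbs_ramp_down_up_apple_c1_b1_NN') returns
// { eventType: 'ramp', leftEvent: 'down', rightEvent: 'up', object: 'apple',
//   camera: 'c1', bg: 'b1', variant: 'NN' }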

function audioSourceObjs(path, shortname) {
    return [
        {
            'src': path + shortname + '.ogg',
            'type': 'audio/ogg'
        },
        {
            'src': path + shortname + '.mp3',
            'type': 'audio/mp3'
        }
    ];
}
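// Example (illustrative): audioSourceObjs(BASE_DIR + 'audio/', 'all_done') returns
// [{src: BASE_DIR + 'audio/all_done.ogg', type: 'audio/ogg'},
//  {src: BASE_DIR + 'audio/all_done.mp3', type: 'audio/mp3'}]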

function videoSourceObjs(path, shortname, organizedByType) {
    if (!organizedByType) {
        return [
            {
                'src': path + shortname + '.webm',
                'type': 'video/webm'
            },
            {
                'src': path + shortname + '.mp4',
                'type': 'video/mp4'
            }
        ];
    } else {
        return [
            {
                'src': path + 'webm/' + shortname + '.webm',
                'type': 'video/webm'
            },
            {
                'src': path + 'mp4/' + shortname + '.mp4',
                'type': 'video/mp4'
            }
        ];
    }
}
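// Example (illustrative): with organizedByType = true, a per-format subdirectory is inserted:
// videoSourceObjs(BASE_DIR + 'stimuli/ramp/', 'sbs_ramp_down_up_apple_c1_b1_NN', true) returns
// [{src: BASE_DIR + 'stimuli/ramp/webm/sbs_ramp_down_up_apple_c1_b1_NN.webm', type: 'video/webm'},
//  {src: BASE_DIR + 'stimuli/ramp/mp4/sbs_ramp_down_up_apple_c1_b1_NN.mp4', type: 'video/mp4'}]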

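/**
 * Convert the ordered event descriptors into exp-video-physics frame definitions,
 * ending with an "all done" frame after the last event.
 *
 * @method toFrames
 * @param {String} frameId Frame ID to assign to each generated frame
 * @param {Object[]} eventVideos Event descriptors from assignVideos, plus a final placeholder whose index equals the array length
 * @param {String} BASE_DIR Base URL under which the audio/ and stimuli/ directories live
 * @return {Object[]} Array of frame definitions, one per event
 */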
function toFrames(frameId, eventVideos, BASE_DIR) {
    var nVideos = eventVideos.length;
    return eventVideos.map((e) => {
        if (e.index === nVideos) {
            return {
                kind: 'exp-video-physics',
                id: `${frameId}`,
                autoplay: true,
                isLast: true,
                audioSources: audioSourceObjs(
                    BASE_DIR + 'audio/',
                    'all_done'),
                attnSources: videoSourceObjs(
                    BASE_DIR + 'stimuli/attention/',
                    'attentiongrabber'),
            };
        }
        var features = parse_name(e.fname);
        var allMusic = ['music_01', 'music_02', 'music_03', 'music_04', 'music_06', 'music_07', 'music_09', 'music_10'];
        var musicName = allMusic[Math.floor(Math.random() * allMusic.length)];

        var returnFrame = {
            kind: 'exp-video-physics',
            id: `${frameId}`,
            autoplay: true,
            testLength: 20, // length of the test video in seconds (20 s for actual testing)
            isLast: false,
            audioSources: audioSourceObjs(
                BASE_DIR + 'audio/',
                'video_' + ('00' + (e.index)).slice(-2)),
            musicSources: audioSourceObjs(
                BASE_DIR + 'audio/',
                musicName),
            introSources: videoSourceObjs(
                BASE_DIR + 'stimuli/intro/',
                `cropped_${features.object}`),
            attnSources: videoSourceObjs(
                BASE_DIR + 'stimuli/attention/',
                'attentiongrabber'),
            sources: videoSourceObjs(
                BASE_DIR + 'stimuli/' + features.eventType + '/',
                e.fname, true),
            altSources: videoSourceObjs(
                BASE_DIR + 'stimuli/' + features.eventType + '/',
                e.altName, true)
        };

        // FOR PILOT ONLY: replace fall videos with calibration
        if (e.compType === 'fall') {
            returnFrame.sources = videoSourceObjs(
                BASE_DIR + 'stimuli/attention/',
                'calibration');
            returnFrame.altSources = videoSourceObjs(
                BASE_DIR + 'stimuli/attention/',
                'calibration');
        }

        return returnFrame;

    });
}

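/**
 * Randomizer for the pref-phys pilot: selects conditions (counterbalanced against the
 * participant's most recent usable session), builds the corresponding list of video
 * events, and resolves them into a sequence of exp-video-physics frames.
 *
 * @method randomizer
 * @param {String} frameId ID of the frame being expanded
 * @param {Object} frameConfig Configuration for this frame (not used directly here)
 * @param {Session[]} pastSessions The participant's past sessions for this experiment
 * @param {Function} resolveFrame Callback used to resolve each generated frame definition into actual frames
 * @return {Array} Two-element array [resolvedFrames, conditions]
 */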
var randomizer = function (frameId, frameConfig, pastSessions, resolveFrame) {
    var MAX_VIDEOS = 24; // limit number of videos. Use 24 for actual study.
    var BASE_DIR = 'https://s3.amazonaws.com/lookitcontents/exp-physics/';

    pastSessions.sort(function (a, b) {
        return a.get('createdOn') > b.get('createdOn') ? -1 : 1;
    });

    // TODO: In the future, we may want to identify the specific frame # to fetch instead of generic frame name
    pastSessions = pastSessions.filter(function (session) {
        return session.get('conditions');
    });
    let lastSession = getLastSession(pastSessions);
    var conditions = getConditions(lastSession, frameId);

    conditions.NPERTYPE = 6;
    var {
        startType,
        showStay,
        whichObjects,
        NPERTYPE
    } = conditions;

    var [eventVideos, ] = assignVideos(startType, showStay, whichObjects, NPERTYPE);

    eventVideos = eventVideos.slice(0, MAX_VIDEOS);
    eventVideos.push({index: MAX_VIDEOS + 1});

    // allEvents and filenames are a function of conditions (no need to store)
    var resolved = [];
    toFrames(frameId, eventVideos, BASE_DIR).forEach((frame) => {
        resolved.push(...resolveFrame(null, frame)[0]);
    });
    return [resolved, conditions];
};
export default randomizer;

// Export helper functions to support unit testing
export { getConditions, getLastSession };
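
// Illustrative usage, e.g. from a unit test. `pastSessions` is assumed to be an array of
// Ember models exposing get('createdOn'), get('conditions'), and get('expData'), and
// `resolveFrame` is assumed to return [framesArray, ...] for a given frame definition,
// as used above; both names here are hypothetical placeholders.
//
//   var [frames, conditions] = randomizer('pref-phys-videos', {}, pastSessions, resolveFrame);
//   // frames: resolved exp-video-physics frame definitions, in presentation order
//   // conditions: {startType, showStay, whichObjects, NPERTYPE} to store with this session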