Short video plays very slowly

OS (e.g. Win10): macOS Mojave 10.14.6
PsychoPy version (e.g. 1.84.x): v2020.2.4
Standard Standalone? (y/n) : y
What are you trying to achieve?:
I am simply trying to play a short mp4 video (700 ms) using Builder. The video itself was previously created in PsychoPy (fps=60, codec='libx264') and contains an RDK (random-dot kinematogram). The problem is that the video plays very slowly.

This is a screenshot of my setup:

What did you try to make it work?:
I tried to:

  • run the video in full screen
  • run the video NOT in full screen
  • change the window size of the movie
  • use all three of the available “backend” options (moviepy, avbin, opencv)
  • preload the video
  • read all the related posts on the forum and try all the suggestions

What specifically went wrong when you tried that?:
None of the attempts described above made any difference. The odd thing is that the playback speed doesn’t change with the different screen/window sizes either. I don’t get any error message, but the video is not presented at all when I use “avbin” or “opencv”.
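
For reference, a minimal Coder script along these lines should be enough to reproduce the playback outside Builder (this is only a sketch; the path is a placeholder for one of the generated clips):

from psychopy import core, visual
from psychopy.constants import FINISHED

win = visual.Window(size=[800, 600], fullscr=False, units='pix')
# moviepy backend; replace the placeholder path with one of the generated clips
mov = visual.MovieStim3(win, 'stimuli/RDK_test.mp4', noAudio=True)

clock = core.Clock()
while mov.status != FINISHED:  # draw() starts playback on the first call
    mov.draw()
    win.flip()
print('700 ms clip took %.3f s to play back' % clock.getTime())

win.close()
core.quit()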

This sounds like a frame rate issue. What happens when you play the file in a regular video player? Can you confirm, via whatever the Mac equivalent of right-clicking the file and viewing Properties is (forgive me, I’m a Windows user :stuck_out_tongue: ), that the frame rate of the video is definitely 60 Hz?
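
If it’s easier, the file’s metadata can also be read from Python with moviepy (which PsychoPy already bundles); this is only a sketch, with a placeholder path:

# read the clip's reported frame rate and duration with moviepy
from moviepy.editor import VideoFileClip

clip = VideoFileClip('stimuli/RDK_test.mp4')   # placeholder path
print('fps:', clip.fps)                 # should be 60 if the file was written at 60 Hz
print('duration (s):', clip.duration)   # should be about 0.7 for a 700 ms clip
clip.close()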

Hello there, thanks a lot for your reply.

When I play the file in QuickTime, it plays at the right speed. The frame rate is definitely 60 Hz (see below the properties of the video, as reported by the QuickTime Inspector). I created it with the PsychoPy Coder, with fps=60 and codec='libx264'.

Any suggestion is more than welcome, as I am still stuck. I need to make these videos work for an online experiment in Pavlovia.
You’ll find below the code I used to create the video (it actually creates 8 videos, one for each stimulus level):

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This experiment was created using PsychoPy3 Experiment Builder (v2020.2.4),
    on Fri Sep 25 12:23:11 2020
If you publish work using this script the most relevant publication is:

    Peirce J, Gray JR, Simpson S, MacAskill M, Höchenberger R, Sogo H, Kastman E, Lindeløv JK. (2019) 
        PsychoPy2: Experiments in behavior made easy Behav Res 51: 195. 
        https://doi.org/10.3758/s13428-018-01193-y

"""

from __future__ import absolute_import, division

from psychopy import locale_setup
from psychopy import prefs
from psychopy import sound, gui, visual, core, data, event, logging, clock
from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED,
                                STOPPED, FINISHED, PRESSED, RELEASED, FOREVER)

import numpy as np  # whole numpy lib is available, prepend 'np.'
from numpy import (sin, cos, tan, log, log10, pi, average,
                   sqrt, std, deg2rad, rad2deg, linspace, asarray)
from numpy.random import random, randint, normal, shuffle
import os  # handy system and path functions
import sys  # to get file system encoding

from psychopy.hardware import keyboard


# PARAMETERS ---------------------------------------------------------------
Det_Discr = 1  # 1 = Detection, 2 = Discrimination
#test_Coherence = 0.00

size_mat = 8
#Det_Mat = array([0.0, 0.01, 0.02, 0.04, 0.08, 0.16, 0.32, 0.64])
#Discr_Mat = array([0.30, 0.31, 0.32, 0.34, 0.38, 0.46, 0.62, 0.94])

Det_Mat=[0, 0.01, 0.02, 0.04, 0.08, 0.16, 0.32, 0.64]
Discr_Mat=[0.3, 0.31, 0.32, 0.34, 0.38, 0.46, 0.62, 0.94]

motion_direction = 315  # 0 = RIGHT, 180 = LEFT, 90 = UP, 270 = DOWN
                        # 45 = RU, 135 = LU, 225 = LD, 315 = RD

# Map the motion direction (deg) to the short label used in the output filenames
dir_labels = {0: 'R', 180: 'L', 90: 'UP', 270: 'DOWN',
              45: 'RU', 135: 'LU', 225: 'LD', 315: 'RD'}
m_dir = dir_labels[motion_direction]


int_length_Cross = 0.2
int_length_noCoherence = 0.0  # in seconds
int_length_Coherence = 0.3  # in seconds

# ---------------------------------------------------------------------------


# Ensure that relative paths start from the same directory as this script
_thisDir = os.path.dirname(os.path.abspath(__file__))
os.chdir(_thisDir)

# Store info about the experiment session
psychopyVersion = '2020.2.4'
expName = 'test'  # from the Builder filename that created this script
expInfo = {'participant': '', 'session': '001'}
dlg = gui.DlgFromDict(dictionary=expInfo, sort_keys=False, title=expName)
if dlg.OK == False:
    core.quit()  # user pressed cancel
expInfo['date'] = data.getDateStr()  # add a simple timestamp
expInfo['expName'] = expName
expInfo['psychopyVersion'] = psychopyVersion

# Data file name stem = absolute path + name; later add .psyexp, .csv, .log, etc
filename = _thisDir + os.sep + u'data/%s_%s_%s' % (expInfo['participant'], expName, expInfo['date'])

# An ExperimentHandler isn't essential but helps with data saving
thisExp = data.ExperimentHandler(name=expName, version='',
    extraInfo=expInfo, runtimeInfo=None,
    originPath='/Users/aurelio/Documents/METACOGNITION_BINOCULAR_RIVALRY/MOTION_DISCRIMINATION/RDK_Stim.py',
    savePickle=True, saveWideText=True,
    dataFileName=filename)
# save a log file for detail verbose info
logFile = logging.LogFile(filename+'.log', level=logging.DEBUG)
logging.console.setLevel(logging.WARNING)  # this outputs to the screen, not a file

endExpNow = False  # flag for 'escape' or other condition => quit the exp
frameTolerance = 0.001  # how close to onset before 'same' frame

# Start Code - component code to be run before the window creation

# Setup the Window
win = visual.Window(
    size=[1440, 900], fullscr=True, screen=0, 
    winType='pyglet', allowGUI=False, allowStencil=False,
    monitor='testMonitor', color=[0,0,0], colorSpace='rgb',
    blendMode='avg', useFBO=True, 
    units='deg')
# store frame rate of monitor if we can measure it
expInfo['frameRate'] = win.getActualFrameRate()
if expInfo['frameRate'] != None:
    frameDur = 1.0 / round(expInfo['frameRate'])
else:
    frameDur = 1.0 / 60.0  # could not measure, so guess

int_Cross = round(int_length_Cross/frameDur)
int_noCoherence_length = round(int_length_noCoherence/frameDur)
int_Coherence_length = round(int_length_Coherence/frameDur)

# create a default keyboard (e.g. to check for escape)
defaultKeyboard = keyboard.Keyboard()

# Initialize components for Routine "trial"
trialClock = core.Clock()

# Initialize the bloody central Cross
polygon = visual.ShapeStim(
    win=win, name='polygon', vertices='cross',
    size=(0.6, 0.6),
    ori=0, pos=(0, 0),
    lineWidth=0.6, lineColor=[-1.000,-1.000,-1.000], lineColorSpace='rgb',
    fillColor=[-1.000,-1.000,-1.000], fillColorSpace='rgb',
    opacity=1, depth=-3.0, interpolate=True)


#DETECTION
if Det_Discr == 1:
    
    for tt in range(size_mat):
    
        dots_3 = visual.DotStim(
            win=win, name='dots_3',
            nDots=200, dotSize=4,
            speed=0.0668, dir=motion_direction, coherence=Det_Mat[tt],
            fieldPos=(0.0, 0.0), fieldSize=8, fieldShape='circle',
            signalDots='same', noiseDots='direction', dotLife=4,
            color=[1.0, 1.0, 1.0], colorSpace='rgb', opacity=1,
            depth=-1.0)

        # Pre-stimulus interval: fixation cross only
        for frameN in range(int_Cross):
            polygon.draw()
            win.flip()  # make the drawn things visible
            win.getMovieFrame()
        
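        # Stimulus interval: draw the RDK at the current coherence level over the cross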
        for frameN in range(int_Coherence_length):
            dots_3.draw()
            polygon.draw()
            win.flip()  # make the drawn things visible
            win.getMovieFrame()
            
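        # Post-stimulus interval: fixation cross only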
        for frameN in range(int_Cross):
            polygon.draw()
            win.flip()  # make the drawn things visible
            win.getMovieFrame()
        
        filename = 'RDK_DEG_Coherence_' + str(round(Det_Mat[tt]*100)) + '_SIGNAL_ONLY_' + str(m_dir) + '.mp4'
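        # each frame captured above becomes one video frame; the clip is encoded at the fps given here (60)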
        win.saveMovieFrames(filename, fps=60, codec='libx264', clearFrames=True)
        
elif Det_Discr == 2: # DISCRIMINATION
    for tt in range(size_mat):
    
        dots_3 = visual.DotStim(
            win=win, name='dots_3',
            nDots=200, dotSize=4,
            speed=0.0668, dir=motion_direction, coherence=Discr_Mat[tt],
            fieldPos=(0.0, 0.0), fieldSize=8, fieldShape='circle',
            signalDots='same', noiseDots='direction', dotLife=4,
            color=[1.0, 1.0, 1.0], colorSpace='rgb', opacity=1,
            depth=-1.0)

        # Pre-stimulus interval: fixation cross only
        for frameN in range(int_Cross):
            polygon.draw()
            win.flip()  # make the drawn things visible
            win.getMovieFrame()
        
        for frameN in range(int_Coherence_length):
            dots_3.draw()
            polygon.draw()
            win.flip()  # make the drawn things visible
            win.getMovieFrame()
            
        for frameN in range(int_Cross):
            polygon.draw()
            win.flip()  # make the drawn things visible
            win.getMovieFrame()
        
        filename = 'RDK_DEG_Coherence_' + str(round(Discr_Mat[tt]*100)) + '_SIGNAL_ONLY_' + str(m_dir) + '.mp4'
        win.saveMovieFrames(filename, fps=60, codec='libx264', clearFrames=True)
            
win.close()
core.quit()

And this is the script generated from the .psyexp that I used to play the video:

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This experiment was created using PsychoPy3 Experiment Builder (v2020.2.4),
    on Fri Oct 16 09:32:41 2020
If you publish work using this script the most relevant publication is:

    Peirce J, Gray JR, Simpson S, MacAskill M, Höchenberger R, Sogo H, Kastman E, Lindeløv JK. (2019) 
        PsychoPy2: Experiments in behavior made easy Behav Res 51: 195. 
        https://doi.org/10.3758/s13428-018-01193-y

"""

from __future__ import absolute_import, division

from psychopy import locale_setup
from psychopy import prefs
from psychopy import sound, gui, visual, core, data, event, logging, clock
from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED,
                                STOPPED, FINISHED, PRESSED, RELEASED, FOREVER)

import numpy as np  # whole numpy lib is available, prepend 'np.'
from numpy import (sin, cos, tan, log, log10, pi, average,
                   sqrt, std, deg2rad, rad2deg, linspace, asarray)
from numpy.random import random, randint, normal, shuffle
import os  # handy system and path functions
import sys  # to get file system encoding

from psychopy.hardware import keyboard



# Ensure that relative paths start from the same directory as this script
_thisDir = os.path.dirname(os.path.abspath(__file__))
os.chdir(_thisDir)

# Store info about the experiment session
psychopyVersion = '2020.2.4'
expName = 'test'  # from the Builder filename that created this script
expInfo = {'participant': '', 'session': '001'}
expInfo['date'] = data.getDateStr()  # add a simple timestamp
expInfo['expName'] = expName
expInfo['psychopyVersion'] = psychopyVersion

# Data file name stem = absolute path + name; later add .psyexp, .csv, .log, etc
filename = _thisDir + os.sep + u'data/%s_%s_%s' % (expInfo['participant'], expName, expInfo['date'])

# An ExperimentHandler isn't essential but helps with data saving
thisExp = data.ExperimentHandler(name=expName, version='',
    extraInfo=expInfo, runtimeInfo=None,
    originPath='/Users/aurelio/Documents/METACOGNITION_BINOCULAR_RIVALRY/MOTION_DISCRIMINATION/MOTION_EXP_SONA/test.py',
    savePickle=True, saveWideText=True,
    dataFileName=filename)
# save a log file for detail verbose info
logFile = logging.LogFile(filename+'.log', level=logging.DEBUG)
logging.console.setLevel(logging.WARNING)  # this outputs to the screen, not a file

endExpNow = False  # flag for 'escape' or other condition => quit the exp
frameTolerance = 0.001  # how close to onset before 'same' frame

# Start Code - component code to be run before the window creation

# Setup the Window
win = visual.Window(
    size=[1440, 900], fullscr=True, screen=0, 
    winType='pyglet', allowGUI=False, allowStencil=False,
    monitor='testMonitor', color=[0,0,0], colorSpace='rgb',
    blendMode='avg', useFBO=True, 
    units='pix')
# store frame rate of monitor if we can measure it
expInfo['frameRate'] = win.getActualFrameRate()
if expInfo['frameRate'] != None:
    frameDur = 1.0 / round(expInfo['frameRate'])
else:
    frameDur = 1.0 / 60.0  # could not measure, so guess

# create a default keyboard (e.g. to check for escape)
defaultKeyboard = keyboard.Keyboard()

# Initialize components for Routine "trial"
trialClock = core.Clock()
print(win.monitor.name, win.monitor.getSizePix(), win.getActualFrameRate())
print(win.monitor.getWidth())
print(win.size)
#print(movie.size)
ISI = clock.StaticPeriod(win=win, screenHz=expInfo['frameRate'], name='ISI')

# Create some handy timers
globalClock = core.Clock()  # to track the time since experiment started
routineTimer = core.CountdownTimer()  # to track time remaining of each (non-slip) routine 

# ------Prepare to start Routine "trial"-------
continueRoutine = True
# update component parameters for each repeat
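# MovieStim3 is the moviepy-based movie class (the 'moviepy' backend option in the Builder movie component)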
movie = visual.MovieStim3(
    win=win, name='movie',units='pix', 
    noAudio = True,
    filename='/Users/aurelio/Documents/METACOGNITION_BINOCULAR_RIVALRY/MOTION_DISCRIMINATION/MOTION_EXP_SONA/stimuli/DISC_RDK_DEG_Coherence_94_SIGNAL_ONLY_UP.mp4',
    ori=0, pos=(0, 0), opacity=1,
    loop=False,
    size=(1440,900),
    depth=0.0,
    )
print(win.monitor.name, win.monitor.getSizePix(), win.getActualFrameRate())
print(win.monitor.getWidth())
print(win.size)
print(movie.size)
print(movie.filename)
# keep track of which components have finished
trialComponents = [movie, ISI]
for thisComponent in trialComponents:
    thisComponent.tStart = None
    thisComponent.tStop = None
    thisComponent.tStartRefresh = None
    thisComponent.tStopRefresh = None
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED
# reset timers
t = 0
_timeToFirstFrame = win.getFutureFlipTime(clock="now")
trialClock.reset(-_timeToFirstFrame)  # t0 is time of first possible flip
frameN = -1

# -------Run Routine "trial"-------
while continueRoutine:
    # get current time
    t = trialClock.getTime()
    tThisFlip = win.getFutureFlipTime(clock=trialClock)
    tThisFlipGlobal = win.getFutureFlipTime(clock=None)
    frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
    # update/draw components on each frame
    
    # *movie* updates
    if movie.status == NOT_STARTED and tThisFlip >= 1.5-frameTolerance:
        # keep track of start time/frame for later
        movie.frameNStart = frameN  # exact frame index
        movie.tStart = t  # local t and not account for scr refresh
        movie.tStartRefresh = tThisFlipGlobal  # on global time
        win.timeOnFlip(movie, 'tStartRefresh')  # time at next scr refresh
        movie.setAutoDraw(True)
    # *ISI* period
    if ISI.status == NOT_STARTED and t >= 0.0-frameTolerance:
        # keep track of start time/frame for later
        ISI.frameNStart = frameN  # exact frame index
        ISI.tStart = t  # local t and not account for scr refresh
        ISI.tStartRefresh = tThisFlipGlobal  # on global time
        win.timeOnFlip(ISI, 'tStartRefresh')  # time at next scr refresh
        ISI.start(1.5)
    elif ISI.status == STARTED:  # one frame should pass before updating params and completing
        # updating other components during *ISI*
        movie.setOpacity(1)
        movie.setPos((0, 0))
        movie.setSize((1440,900))
        movie.setOri(0)
        movie.setMovie('/Users/aurelio/Documents/METACOGNITION_BINOCULAR_RIVALRY/MOTION_DISCRIMINATION/MOTION_EXP_SONA/stimuli/DISC_RDK_DEG_Coherence_94_SIGNAL_ONLY_UP.mp4')
        # component updates done
        ISI.complete()  # finish the static period
        ISI.tStop = ISI.tStart + 1.5  # record stop time
    
    # check for quit (typically the Esc key)
    if endExpNow or defaultKeyboard.getKeys(keyList=["escape"]):
        core.quit()
    
    # check if all components have finished
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if at least one component still running
    for thisComponent in trialComponents:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break  # at least one component has not yet finished
    
    # refresh the screen
    if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
        win.flip()

# -------Ending Routine "trial"-------
for thisComponent in trialComponents:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
movie.stop()
thisExp.addData('ISI.started', ISI.tStart)
thisExp.addData('ISI.stopped', ISI.tStop)
# the Routine "trial" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()

# Flip one final time so any remaining win.callOnFlip() 
# and win.timeOnFlip() tasks get executed before quitting
win.flip()

# these shouldn't be strictly necessary (should auto-save)
thisExp.saveAsWideText(filename+'.csv', delim='auto')
thisExp.saveAsPickle(filename)
logging.flush()
# make sure everything is closed down
thisExp.abort()  # or data files will save again on exit
win.close()
core.quit()