Hello,
Thank you very much for responding to me.
So first, I think it’s better to give some context to what I’m trying to accomplish with Psychtoolbox. I want to do a psychophysical experiment where I show the observer two distorted 4K HDR videos on two LG OLED G2 displays (on repeat). I also want to add an option where the observer can switch from viewing the distorted videos to the reference video. So in summary I want to play three 4K HDR videos (at 60 Hz) at the same time. And because the videos have the same content at different distortion levels, I also want to play all three videos in synchronisation.
All videos are between 5 and 10 seconds, with no sound.
The PC specs are: CPU: AMD Ryzen 9 5900X 12-Core processor 3.7GHz, RAM: 32GB, GPU: NVIDIA GeForce RTX 3080, OS: Windows 10 Pro.
I have the code for the whole experiment, but for simplicity, I included a simplified version of it (In this code I am not drawing the textures, just trying to measure how much time the Psychtoolbox takes to decode and give a texture). Also, some sample movie files can be found in this drive folder.
global GL;

% ------------------------------------------------------------------
% Locate all connected 4K (3840x2160) displays.
% Use Screen('Screens') to enumerate only the screens that actually
% exist; the previous hard-coded 0:3 range errors out on systems with
% fewer than four screens.
% ------------------------------------------------------------------
display_indexes = [] ;
j = 1 ;
for i = Screen('Screens')
metadata = Screen('Resolution',i) ;
if metadata.width == 3840 && metadata.height == 2160
display_indexes(j) = i ; %#ok<SAGROW>
j=j+1 ;
end
end
right_index = 1 ;
% Define the screen no (which 4K display to present on)
screen_no = display_indexes(right_index) ;
try
%Define the preferences
Screen('Preference', 'SkipSyncTests', 0);
Screen('Preference', 'Verbosity', 3) ;
PsychGPUControl('SetGPUPerformance', 10) ;
% - Prepare setup of imaging pipeline for onscreen window. This is the
% first step in the sequence of configuration steps.
PsychImaging('PrepareConfiguration');
PsychImaging('AddTask', 'General', 'EnableHDR', 'Nits', 'HDR10');
PsychImaging('AddTask', 'General', 'FloatingPoint32Bit');
% Open the HDR onscreen window on the chosen 4K display
[win, rect] = PsychImaging('OpenWindow', screen_no, 0, [], [], [], [], [], 0);
AssertGLSL;
hdrProperties = PsychHDR('GetHDRProperties', win);
display(hdrProperties);
glActiveTexture(GL.TEXTURE0);
AssertOpenGL;
KbName('UnifyKeyNames');
% OPEN MOVIE PARAMETERS
async = 0 ;
preloadSecs=1 ; % We probably want to load all the video beforehand
specialFlags1=0;
pixelFormat=11; % 11 for HDR
maxNumberThreads=[];
movieOptions=[];
% Get the movies path
pathA='path\DevilMayCry5.mp4';
pathB='path\DevilMayCry5_H_3840x2160.mp4';
pathC='path\DevilMayCry5_M_3840x2160.mp4';
% Open the videos
[movieA, duration, fps] = Screen('OpenMovie', win, pathA, async, preloadSecs, specialFlags1, pixelFormat, maxNumberThreads, movieOptions);
movieB = Screen('OpenMovie', win, pathB, async, preloadSecs, specialFlags1, pixelFormat, maxNumberThreads, movieOptions);
movieC = Screen('OpenMovie', win, pathC, async, preloadSecs, specialFlags1, pixelFormat, maxNumberThreads, movieOptions);
% Seek to start of movies (timeindex 0):
Screen('SetMovieTimeIndex', movieA, 0);
Screen('SetMovieTimeIndex', movieB, 0);
Screen('SetMovieTimeIndex', movieC, 0);
% PLAY MOVIE PARAMETERS
rate=1;
loop=1;
soundvolume=0;
% Start playback of movies.
Screen('PlayMovie', movieA, rate, loop, soundvolume);
Screen('PlayMovie', movieB, rate, loop, soundvolume);
Screen('PlayMovie', movieC, rate, loop, soundvolume);
time_to_get_frame = 0;
frames_read = 0;
while true
% Return next frame in movie, in sync with current playback
% time. We only measure decode/texture-fetch time; textures are
% not drawn in this simplified benchmark.
ttgf = tic;
%GET MOVIE IMAGE PARAMETERS
texA = Screen('GetMovieImage', win, movieA, 1, []);
texB = Screen('GetMovieImage', win, movieB, 1, []);
texC = Screen('GetMovieImage', win, movieC, 1, []);
read_time = toc(ttgf);
time_to_get_frame = time_to_get_frame + read_time;
frames_read = frames_read + 1;
if( mod(frames_read,fps)==0 )
fprintf( 1, 'Time to read a second of the movie is = %g s\n', time_to_get_frame/frames_read*fps )
end
% Release the fetched textures (valid handles are > 0).
if texA>0
Screen('Close', texA);
end
if texB>0
Screen('Close', texB);
end
if texC>0
Screen('Close', texC);
end
[keyIsDown, secs, keyCode, deltaSecs] = KbCheck();
if(keyIsDown)
if all(keyCode(KbName('ESCAPE')))
% Stop playback and release movie resources before aborting.
Screen('PlayMovie', movieA, 0);
Screen('CloseMovie', movieA);
Screen('PlayMovie', movieB, 0);
Screen('CloseMovie', movieB);
Screen('PlayMovie', movieC, 0);
Screen('CloseMovie', movieC);
% BUGFIX: throw() requires an MException object; passing a
% plain char array raises "MException object expected" and the
% later rethrow(ME) would fail too. Construct a real exception.
ME = MException('experiment:userAbort', 'break the experiment');
throw(ME) ;
end
end
end
catch ME
% catch error: This is executed in case something goes wrong in the
% 'try' part due to programming error etc.:
Screen('CloseAll');
fclose('all');
Priority(0);
sca ;
display( 'Exception caught' );
% Output the error message that describes the error:
rethrow(ME);
end
To be able to find the main issue, I have tried different videos. I will summarise my findings here:
- A 4K HDR video at 60 fps (DevilMayCry5.mp4 file in the drive folder) cannot be played in real-time (frames are not dropped, so playback simply falls behind real time). I have also tried the PlayMoviesDemo from Psychtoolbox, and the issue is the same. This is the output of the code:
ITER=1::Movie: path\DevilMayCry5.mp4 : 10.000000 seconds duration, 60.000000 fps, w x h = 3840 x 2160…
Elapsed time 58.436810 seconds, for 2543 frames. Average framerate 43.517091 fps.
- Three 4K HDR videos at 60 fps (DevilMayCry5.mp4, DevilMayCry5_H_3840x2160.mp4, DevilMayCry5_M_3840x2160.mp4 files in the drive folder) can be decoded in real-time using GStreamer (or even opening them on the web browser in full mode). Here is the code I have used to decode them with GStreamer:
#include <gst/gst.h>
#include <time.h>
#include <stdio.h>
int main(int argc, char* argv[]) {
GstElement* pipeline1;
GstElement* pipeline2;
GstElement* pipeline3;
GstBus* bus1;
GstBus* bus2;
GstBus* bus3;
GstMessage* msg1;
GstMessage* msg2;
GstMessage* msg3;
GMainLoop* loop;
double time_init;
double time_diff;
/* Initialize GStreamer */
gst_init(&argc, &argv);
loop = g_main_loop_new(NULL, FALSE);
/* Create the elements */
pipeline1 = gst_parse_launch("playbin uri=file:///path/DevilMayCry5.mp4", NULL);
pipeline2 = gst_parse_launch("playbin uri=file:///path/DevilMayCry5_H_3840x2160.mp4", NULL);
pipeline3 = gst_parse_launch("playbin uri=file:///path/DevilMayCry5_M_3840x2160.mp4", NULL);
/* Start measuring the time*/
time_init = (double) clock() / CLOCKS_PER_SEC;
/* Start playing */
gst_element_set_state(pipeline1, GST_STATE_PLAYING);
gst_element_set_state(pipeline2, GST_STATE_PLAYING);
gst_element_set_state(pipeline3, GST_STATE_PLAYING);
/* Wait until error or EOS */
bus1 = gst_element_get_bus(pipeline1);
bus2 = gst_element_get_bus(pipeline2);
bus3 = gst_element_get_bus(pipeline3);
msg1 = gst_bus_timed_pop_filtered(bus1, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
msg2 = gst_bus_timed_pop_filtered(bus2, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
msg3 = gst_bus_timed_pop_filtered(bus3, GST_CLOCK_TIME_NONE, (GstMessageType)(GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
/* Look for errors */
if (GST_MESSAGE_TYPE(msg1) == GST_MESSAGE_ERROR || GST_MESSAGE_TYPE(msg2) == GST_MESSAGE_ERROR || GST_MESSAGE_TYPE(msg3) == GST_MESSAGE_ERROR) {
g_error("An error occurred! Re-run with the GST_DEBUG=*:WARN environment variable set for more details.");
}
/* Display the time spent to decode the videos*/
time_diff = (double) clock() / CLOCKS_PER_SEC - time_init;
printf("The elapsed time is %f seconds\n", time_diff);
/* Free resources */
gst_message_unref(msg1);
gst_message_unref(msg2);
gst_message_unref(msg3);
gst_object_unref(bus1);
gst_element_set_state(pipeline1, GST_STATE_NULL);
gst_object_unref(pipeline1);
gst_object_unref(bus2);
gst_element_set_state(pipeline2, GST_STATE_NULL);
gst_object_unref(pipeline2);
gst_object_unref(bus3);
gst_element_set_state(pipeline3, GST_STATE_NULL);
gst_object_unref(pipeline3);
return 0;
}
- Three 4K HDR videos at 30fps (Pubg.mp4, Pubg_H_3840x2160.mp4, Pubg_M_3840x2160.mp4 files in the drive folder) can be played in real-time; however, Psychtoolbox could not keep all the framerates and around 2~17 frames were dropped for each file, which may cause a synchronisation problem.
- Three HD HDR videos at 60fps (DevilMayCry5_H_1920x1080.mp4, DevilMayCry5_M_1920x1080.mp4, DevilMayCry5_L_1920x1080.mp4 files in the drive folder) can be played in real-time.
- An HD HDR video at 60fps with a higher complexity of ~ 6.9MB (DevilMayCry5_H_1920x1080.mp4) can be played in real-time, while a 4K HDR video at 60 fps with a lower complexity of ~ 2.53MB (DevilMayCry5_L_3840x2160.mp4) cannot be played in real-time.
So after testing all of these variations, I concluded that it is not an issue with decoding speed, but mostly an issue of the texture size and the number of textures that need to be transferred from GStreamer to Psychtoolbox. This is why I think that videos with high resolutions and framerates cannot be played in real time if we rely on GPU-CPU-GPU memory transfers.
In summary, my main goal is to be able to play three 4K HDR videos at 60 Hz in real time without dropping any frames (to keep the synchronisation of the videos). From the results of GStreamer I can determine that my GPU is capable of it, but I am not sure if such an experiment is possible to do in Psychtoolbox.