I am trying to get video from a CSI-2 interface of a custom iMX8QXP board to be displayed using OpenGL ES (we will need some transformations on the video image). For performance reasons we would like to use glTexDirectMapVIV to directly use the buffers from v4l2 as an OpenGL ES texture. Unfortunately I am not able to get this to work. I stripped our test program down as much as possible, so no v4l2 is involved — just EGL/GLES:
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>

#include <GLES/gl.h>
#include <GLES/glext.h>
#include <vdk.h>
int main(int argc, char *argv[])
{
int xRes = 1920;
int yRes = 720;
int optNumFrames = 600;
bool optVivMap = false;
int c;
while(-1 != (c = getopt(argc, argv, "n:m")))
{
if('n' == c)
{
optNumFrames = strtoul(optarg, 0, 0);
}
else if('m' == c)
{
optVivMap = true;
}
}
vdkEGL egl = {};
EGLint configAttribs[] =
{
EGL_SAMPLES, 0,
EGL_RED_SIZE, 8,
EGL_GREEN_SIZE, 8,
EGL_BLUE_SIZE, 8,
EGL_ALPHA_SIZE, EGL_DONT_CARE,
EGL_DEPTH_SIZE, 16,
EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
EGL_NONE,
};
EGLint attribListContext[] =
{
// Needs to be set for es2.0 as default client version is es1.1.
EGL_CONTEXT_CLIENT_VERSION, 1,
EGL_NONE
};
vdkInitialize();
vdkSetupEGL(0, 0, xRes, yRes, configAttribs, NULL, attribListContext, &egl);
vdkShowWindow(egl.window);
glClearColor(0.2f, 0.0f, 0.0f, 0.0f);
glViewport(0, 0, xRes, yRes);
glMatrixMode(GL_PROJECTION);
glOrthof(-10.0f, 10.0f, -10.0f , 10.0f, -1.0f, 1.0f);
glEnable(GL_TEXTURE_2D);
unsigned int texture;
glGenTextures(1, &texture);
glBindTexture(GL_TEXTURE_2D, texture);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
//Alloc texture data and write some data into it
int texXRes = 64;
int texYRes = 64;
void * videoData = malloc(texXRes*texYRes*4);
//Get some data to the texture
uint8_t * p0 = (uint8_t *)videoData + 0 * texXRes * texYRes;
uint8_t * p1 = (uint8_t *)videoData + 1 * texXRes * texYRes;
uint8_t * p2 = (uint8_t *)videoData + 2 * texXRes * texYRes;
uint8_t * p3 = (uint8_t *)videoData + 3 * texXRes * texYRes;
for(size_t k = 0; k < texXRes * texYRes; k += 4)
{
*p0++ = rand(); *p0++ = rand(); *p0++ = rand(); *p0++ = rand();
*p1++ = 0xFF; *p1++ = 0x00; *p1++ = 0x00; *p1++ = 0x00;
*p2++ = 0x00; *p2++ = 0xFF; *p2++ = 0x00; *p2++ = 0x00;
*p3++ = 0x00; *p3++ = 0x00; *p3++ = 0xFF; *p3++ = 0x00;
}
if(optVivMap)
{
PFNGLTEXDIRECTMAPVIVPROC glTexDirectMapVIV = (PFNGLTEXDIRECTMAPVIVPROC)eglGetProcAddress("glTexDirectMapVIV");
PFNGLTEXDIRECTINVALIDATEVIVPROC glTexDirectInvalidateVIV = (PFNGLTEXDIRECTINVALIDATEVIVPROC)eglGetProcAddress("glTexDirectInvalidateVIV");
GLuint pAddr = ~0U;
glTexDirectMapVIV(GL_TEXTURE_2D, texXRes, texYRes, GL_BGRA_EXT, &videoData, &pAddr);
glTexDirectInvalidateVIV(GL_TEXTURE_2D);
}
else
{
glTexImage2D(GL_TEXTURE_2D, 0, GL_BGRA_EXT, texXRes, texYRes, 0, GL_BGRA_EXT, GL_UNSIGNED_BYTE, videoData);
}
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glEnableClientState(GL_VERTEX_ARRAY);
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
for(int i = 0; i < optNumFrames; i++)
{
glClear(GL_COLOR_BUFFER_BIT);
float vertices[] = {
-8.0f, -8.0f, 0.0f, //lower left
8.0f, -8.0f, 0.0f, //lower right
-8.0f, 8.0f, 0.0f, //upper left
8.0f, 8.0f, 0.0f, //upper right
};
float texCoords[] = {
0.0f, 1.0f, //lower left
1.0f, 1.0f, //lower right
0.0f, 0.0f, //upper left
1.0f, 0.0f, //upper right
};
glVertexPointer(3, GL_FLOAT, 0, vertices);
glTexCoordPointer(2, GL_FLOAT, 0, texCoords);
uint8_t indices[] = { 0, 1, 2, 1, 3, 2 };
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_BYTE, indices);
glFinish();
eglSwapBuffers(egl.eglDisplay, egl.eglSurface);
}
return EXIT_SUCCESS;
}
If started without option -m (optVivMap == false) I can see the prepared texture. When the application is started with option -m I only see a white rectangle instead of the expected texture. Everything is based on rel_imx_5.4.70_2.3.0.
I would be thankful for any hint.
Hello mod42,
From a quick test, the L4.14.98 BSP does not have this issue, but the L5.4.70 BSP does.
Some differences between the two kernels:
L5.4.70's GPU driver will create a DRM device "/dev/dri/card0" and DPU display driver will be "/dev/dri/card1".
But in L4.14.98 kernel, there is only one drm device "/dev/dri/card0" for DPU display.
I tried changing "/etc/xdg/weston/weston.ini" to set "drm-device=card1", but the issue is still there.
Regards
I've built this on my imx8mp with 5.10.72-2.2.0 and the white texture with glTexDirectMapVIV still occurs. When is this going to be fixed?
Hello.
I tried to build your posted example and can't find vdk.h anywhere in /usr/include. I'm using 5.10.72-2.2.0 BSP image. Anyone have ideas?