In the shader, the offset uniform has to be set to a normalized value: 1.0/width. Following the diagram above, in the region where the texture coordinate y <= 2/3, each fragment performs one sample plus three offset samples, i.e. reads 4 RGBA pixels (R,G,B,A), and packs them into one (Y0, Y1, Y2, Y3) output texel; once this region has been rendered, a buffer of width*height bytes has been filled. In the region where y > 2/3, each fragment again reads 4 RGBA pixels (one sample plus three offset samples) and produces one (V0, U0, V1, U1) output texel; because the UV buffer is only height/2 rows tall, the VU plane is sampled every other row in the vertical direction, so once this region has been rendered a buffer of width*height/2 bytes has been filled.
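To make the arithmetic concrete, here is a minimal sketch of the sizes involved. It assumes a hypothetical 1920x1080 input purely for illustration; the numbers are not taken from the demo project.

#include <cstdio>

int main() {
    // Hypothetical example size, not from the demo project.
    int width = 1920, height = 1080;

    // Each fragment packs 4 horizontally adjacent pixels into one RGBA texel,
    // so the FBO is width/4 texels wide and height*3/2 texels tall:
    // the y <= 2/3 region holds the Y plane, the remainder holds the interleaved VU plane.
    int fboWidth  = width / 4;
    int fboHeight = height * 3 / 2;

    int ySize  = width * height;       // Y plane: width*height bytes
    int vuSize = width * height / 2;   // VU plane: width*height/2 bytes (NV21 interleaved)

    printf("FBO: %d x %d texels\n", fboWidth, fboHeight);
    printf("Y: %d bytes, VU: %d bytes, total: %d bytes\n", ySize, vuSize, ySize + vuSize);
    return 0;
}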
The complete RGBtoYUVOpengl.cpp:

#include "../utils/Log.h"
#include "RGBtoYUVOpengl.h"

// Vertex shader
static const char *ver = "#version 300 es\n"
        "in vec4 aPosition;\n"
        "in vec2 aTexCoord;\n"
        "out vec2 v_texCoord;\n"
        "void main() {\n"
        "    v_texCoord = aTexCoord;\n"
        "    gl_Position = aPosition;\n"
        "}";

// Fragment shader
static const char *fragment = "#version 300 es\n"
        "precision mediump float;\n"
        "in vec2 v_texCoord;\n"
        "layout(location = 0) out vec4 outColor;\n"
        "uniform sampler2D s_TextureMap;\n"
        "uniform float u_Offset;\n"
        "const vec3 COEF_Y = vec3(0.299, 0.587, 0.114);\n"
        "const vec3 COEF_U = vec3(-0.147, -0.289, 0.436);\n"
        "const vec3 COEF_V = vec3(0.615, -0.515, -0.100);\n"
        "const float UV_DIVIDE_LINE = 2.0 / 3.0;\n"
        "void main(){\n"
        "    vec2 texelOffset = vec2(u_Offset, 0.0);\n"
        "    if (v_texCoord.y <= UV_DIVIDE_LINE) {\n"
        "        vec2 texCoord = vec2(v_texCoord.x, v_texCoord.y * 3.0 / 2.0);\n"
        "        vec4 color0 = texture(s_TextureMap, texCoord);\n"
        "        vec4 color1 = texture(s_TextureMap, texCoord + texelOffset);\n"
        "        vec4 color2 = texture(s_TextureMap, texCoord + texelOffset * 2.0);\n"
        "        vec4 color3 = texture(s_TextureMap, texCoord + texelOffset * 3.0);\n"
        "        float y0 = dot(color0.rgb, COEF_Y);\n"
        "        float y1 = dot(color1.rgb, COEF_Y);\n"
        "        float y2 = dot(color2.rgb, COEF_Y);\n"
        "        float y3 = dot(color3.rgb, COEF_Y);\n"
        "        outColor = vec4(y0, y1, y2, y3);\n"
        "    } else {\n"
        "        vec2 texCoord = vec2(v_texCoord.x, (v_texCoord.y - UV_DIVIDE_LINE) * 3.0);\n"
        "        vec4 color0 = texture(s_TextureMap, texCoord);\n"
        "        vec4 color1 = texture(s_TextureMap, texCoord + texelOffset);\n"
        "        vec4 color2 = texture(s_TextureMap, texCoord + texelOffset * 2.0);\n"
        "        vec4 color3 = texture(s_TextureMap, texCoord + texelOffset * 3.0);\n"
        "        float v0 = dot(color0.rgb, COEF_V) + 0.5;\n"
        "        float u0 = dot(color1.rgb, COEF_U) + 0.5;\n"
        "        float v1 = dot(color2.rgb, COEF_V) + 0.5;\n"
        "        float u1 = dot(color3.rgb, COEF_U) + 0.5;\n"
        "        outColor = vec4(v0, u0, v1, u1);\n"
        "    }\n"
        "}";

// Draw a rectangle as a triangle strip made of two triangles:
// vertices 1-2-3 form the first triangle, vertices 2-3-4 form the second.
const static GLfloat VERTICES[] = {
        1.0f, -1.0f,   // bottom right
        1.0f,  1.0f,   // top right
       -1.0f, -1.0f,   // bottom left
       -1.0f,  1.0f    // top left
};

// FBO texture coordinates (origin at the bottom left, like the screen coordinate system).
// Keep the order consistent with the vertex array above.
const static GLfloat TEXTURE_COORD[] = {
        1.0f, 0.0f,    // bottom right
        1.0f, 1.0f,    // top right
        0.0f, 0.0f,    // bottom left
        0.0f, 1.0f     // top left
};

RGBtoYUVOpengl::RGBtoYUVOpengl() {
    initGlProgram(ver, fragment);
    positionHandle = glGetAttribLocation(program, "aPosition");
    textureHandle = glGetAttribLocation(program, "aTexCoord");
    textureSampler = glGetUniformLocation(program, "s_TextureMap");
    u_Offset = glGetUniformLocation(program, "u_Offset");
    LOGD("program:%d", program);
    LOGD("positionHandle:%d", positionHandle);
    LOGD("textureHandle:%d", textureHandle);
    LOGD("textureSample:%d", textureSampler);
    LOGD("u_Offset:%d", u_Offset);
}

RGBtoYUVOpengl::~RGBtoYUVOpengl() noexcept {
}

void RGBtoYUVOpengl::fboPrepare() {
    glGenTextures(1, &fboTextureId);
    // Bind the texture
    glBindTexture(GL_TEXTURE_2D, fboTextureId);
    // Set wrap and filter modes for the bound texture
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glBindTexture(GL_TEXTURE_2D, GL_NONE);

    glGenFramebuffers(1, &fboId);
    glBindFramebuffer(GL_FRAMEBUFFER, fboId);
    // Attach the texture to the FBO
    glBindTexture(GL_TEXTURE_2D, fboTextureId);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, fboTextureId, 0);
    // Note the size of this texture: width/4 texels wide, height*3/2 texels tall
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, imageWidth / 4, imageHeight * 1.5, 0, GL_RGBA, GL_UNSIGNED_BYTE, nullptr);
    // Check FBO status
    if (glCheckFramebufferStatus(GL_FRAMEBUFFER) != GL_FRAMEBUFFER_COMPLETE) {
        LOGE("FBOSample::CreateFrameBufferObj glCheckFramebufferStatus status != GL_FRAMEBUFFER_COMPLETE");
    }
    // Unbind
    glBindTexture(GL_TEXTURE_2D, GL_NONE);
    glBindFramebuffer(GL_FRAMEBUFFER, GL_NONE);
}

// Render pass
void RGBtoYUVOpengl::onDraw() {
    // Render into the FBO
    // Bind the FBO
    glBindFramebuffer(GL_FRAMEBUFFER, fboId);
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    // Set the viewport size
    glViewport(0, 0, imageWidth / 4, imageHeight * 1.5);
    glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);
    glUseProgram(program);
    // Activate the texture unit
    glActiveTexture(GL_TEXTURE2);
    glUniform1i(textureSampler, 2);
    // Bind the source RGBA texture
    glBindTexture(GL_TEXTURE_2D, textureId);
    // Set the normalized texel offset
    float texelOffset = (float) (1.f / (float) imageWidth);
    glUniform1f(u_Offset, texelOffset);
    /**
     * size: number of components per vertex; here 2 values describe one point
     * normalized: whether to normalize; not needed, the data is already normalized
     * stride: byte offset between consecutive vertices; 0 means tightly packed
     */
    // Enable the vertex position attribute
    glEnableVertexAttribArray(positionHandle);
    glVertexAttribPointer(positionHandle, 2, GL_FLOAT, GL_FALSE, 0, VERTICES);
    // Texture coordinates
    glEnableVertexAttribArray(textureHandle);
    glVertexAttribPointer(textureHandle, 2, GL_FLOAT, GL_FALSE, 0, TEXTURE_COORD);
    // Draw the rectangle as two triangles from 4 vertices
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    glUseProgram(0);
    // Disable the vertex attribute
    glDisableVertexAttribArray(positionHandle);
    if (nullptr != eglHelper) {
        eglHelper->swapBuffers();
    }
    glBindTexture(GL_TEXTURE_2D, 0);
    // Unbind the FBO
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}

// Upload the RGB image data
void RGBtoYUVOpengl::setPixel(void *data, int width, int height, int length) {
    LOGD("texture setPixel");
    imageWidth = width;
    imageHeight = height;
    // Prepare the FBO
    fboPrepare();

    glGenTextures(1, &textureId);
    // Activate the texture unit. Note that the next two calls go together:
    // whichever unit glActiveTexture activates is the unit the sampler2D uniform must be set to.
    // Unit 0 is the default; if another unit is used, it must be re-activated in onDraw as well.
    // glActiveTexture(GL_TEXTURE0);
    // glUniform1i(textureSampler, 0); // equivalent pairing for unit 0
    glActiveTexture(GL_TEXTURE2);
    glUniform1i(textureSampler, 2);
    // Bind the texture
    glBindTexture(GL_TEXTURE_2D, textureId);
    // Set wrap and filter modes for the bound texture
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_REPEAT);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, width, height, 0, GL_RGBA, GL_UNSIGNED_BYTE, data);
    // Generate mipmaps
    glGenerateMipmap(GL_TEXTURE_2D);
    // Unbind
    glBindTexture(GL_TEXTURE_2D, 0);
}

// Read back the converted YUV data
void RGBtoYUVOpengl::readYUV(uint8_t **data, int *width, int *height) {
    // Read from the FBO
    // Bind the FBO
    *width = imageWidth;
    *height = imageHeight;
    glBindFramebuffer(GL_FRAMEBUFFER, fboId);
    glBindTexture(GL_TEXTURE_2D, fboTextureId);
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, fboTextureId, 0);
    *data = new uint8_t[imageWidth * imageHeight * 3 / 2];
    glReadPixels(0, 0, imageWidth / 4, imageHeight * 1.5, GL_RGBA, GL_UNSIGNED_BYTE, *data);
    glBindTexture(GL_TEXTURE_2D, 0);
    // Unbind the FBO
    glBindFramebuffer(GL_FRAMEBUFFER, 0);
}
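For reference, the sketch below shows one way the class above could be driven and how the returned buffer splits into its planes. It is not code from the project; it only assumes the setPixel / onDraw / readYUV signatures shown above, that rgbaData points to width*height*4 bytes of RGBA data, and that an EGL context is already current on the calling thread.

// Hypothetical usage sketch, not part of the demo project.
void convertExample(RGBtoYUVOpengl *gl, void *rgbaData, int width, int height) {
    gl->setPixel(rgbaData, width, height, width * height * 4); // upload RGBA, prepare FBO
    gl->onDraw();                                              // run the conversion shader

    uint8_t *yuv = nullptr;
    int w = 0, h = 0;
    gl->readYUV(&yuv, &w, &h);         // yuv now holds w*h*3/2 bytes in NV21 order

    uint8_t *yPlane  = yuv;            // w*h bytes of Y
    uint8_t *vuPlane = yuv + w * h;    // w*h/2 bytes of interleaved V,U pairs

    // ... hand the planes to a consumer expecting NV21 (e.g. an encoder) ...

    delete[] yuv;                      // readYUV allocates the buffer with new[]
}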
The main code logic of the Activity is shown below:
public class RGBToYUVActivity extends AppCompatActivity {

    protected MyGLSurfaceView myGLSurfaceView;

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_rgb_to_yuv);
        myGLSurfaceView = findViewById(R.id.my_gl_surface_view);
        myGLSurfaceView.setOpenGlListener(new MyGLSurfaceView.OnOpenGlListener() {
            @Override
            public BaseOpengl onOpenglCreate() {
                return new RGBtoYUVOpengl();
            }

            @Override
            public Bitmap requestBitmap() {
                BitmapFactory.Options options = new BitmapFactory.Options();
                options.inScaled = false;
                return BitmapFactory.decodeResource(getResources(), R.mipmap.ic_smile, options);
            }

            @Override
            public void readPixelResult(byte[] bytes) {
                if (null != bytes) {
                }
            }

            // Callback that receives the result of RGBtoYUVOpengl::readYUV
            @Override
            public void readYUVResult(byte[] bytes) {
                if (null != bytes) {
                    String fileName = System.currentTimeMillis() + ".yuv";
                    File fileParent = getFilesDir();
                    if (!fileParent.exists()) {
                        fileParent.mkdirs();
                    }
                    FileOutputStream fos = null;
                    try {
                        File file = new File(fileParent, fileName);
                        fos = new FileOutputStream(file);
                        fos.write(bytes, 0, bytes.length);
                        fos.flush();
                        fos.close();
                        Toast.makeText(RGBToYUVActivity.this, "YUV file saved to " + file.getAbsolutePath(), Toast.LENGTH_LONG).show();
                    } catch (Exception e) {
                        Log.v("fly_learn_opengl", "Failed to save file: " + e.getMessage());
                        Toast.makeText(RGBToYUVActivity.this, "Failed to save YUV file", Toast.LENGTH_LONG).show();
                    }
                }
            }
        });

        Button button = findViewById(R.id.bt_rgb_to_yuv);
        button.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                myGLSurfaceView.readYuvData();
            }
        });

        ImageView iv_rgb = findViewById(R.id.iv_rgb);
        iv_rgb.setImageResource(R.mipmap.ic_smile);
    }
}
Here is the code of the custom SurfaceView:
public class MyGLSurfaceView extends SurfaceView implements SurfaceHolder.Callback {

    private final static int MSG_CREATE_GL = 101;
    private final static int MSG_CHANGE_GL = 102;
    private final static int MSG_DRAW_GL = 103;
    private final static int MSG_DESTROY_GL = 104;
    private final static int MSG_READ_PIXEL_GL = 105;
    private final static int MSG_UPDATE_BITMAP_GL = 106;
    private final static int MSG_UPDATE_YUV_GL = 107;
    private final static int MSG_READ_YUV_GL = 108;

    public BaseOpengl baseOpengl;
    private OnOpenGlListener onOpenGlListener;
    private HandlerThread handlerThread;
    private Handler renderHandler;
    public int surfaceWidth;
    public int surfaceHeight;

    public MyGLSurfaceView(Context context) {
        this(context, null);
    }

    public MyGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        getHolder().addCallback(this);
        handlerThread = new HandlerThread("RenderHandlerThread");
        handlerThread.start();
        renderHandler = new Handler(handlerThread.getLooper()) {
            @Override
            public void handleMessage(@NonNull Message msg) {
                switch (msg.what) {
                    case MSG_CREATE_GL:
                        baseOpengl = onOpenGlListener.onOpenglCreate();
                        Surface surface = (Surface) msg.obj;
                        if (null != baseOpengl) {
                            baseOpengl.surfaceCreated(surface);
                            Bitmap bitmap = onOpenGlListener.requestBitmap();
                            if (null != bitmap) {
                                baseOpengl.setBitmap(bitmap);
                            }
                        }
                        break;
                    case MSG_CHANGE_GL:
                        if (null != baseOpengl) {
                            Size size = (Size) msg.obj;
                            baseOpengl.surfaceChanged(size.getWidth(), size.getHeight());
                        }
                        break;
                    case MSG_DRAW_GL:
                        if (null != baseOpengl) {
                            baseOpengl.onGlDraw();
                        }
                        break;
                    case MSG_READ_PIXEL_GL:
                        if (null != baseOpengl) {
                            byte[] bytes = baseOpengl.readPixel();
                            if (null != bytes && null != onOpenGlListener) {
                                onOpenGlListener.readPixelResult(bytes);
                            }
                        }
                        break;
                    case MSG_READ_YUV_GL:
                        if (null != baseOpengl) {
                            byte[] bytes = baseOpengl.readYUVResult();
                            if (null != bytes && null != onOpenGlListener) {
                                onOpenGlListener.readYUVResult(bytes);
                            }
                        }
                        break;
                    case MSG_UPDATE_BITMAP_GL:
                        if (null != baseOpengl) {
                            Bitmap bitmap = onOpenGlListener.requestBitmap();
                            if (null != bitmap) {
                                baseOpengl.setBitmap(bitmap);
                                baseOpengl.onGlDraw();
                            }
                        }
                        break;
                    case MSG_UPDATE_YUV_GL:
                        if (null != baseOpengl) {
                            YUVBean yuvBean = (YUVBean) msg.obj;
                            if (null != yuvBean) {
                                baseOpengl.setYuvData(yuvBean.getyData(), yuvBean.getUvData(), yuvBean.getWidth(), yuvBean.getHeight());
                                baseOpengl.onGlDraw();
                            }
                        }
                        break;
                    case MSG_DESTROY_GL:
                        if (null != baseOpengl) {
                            baseOpengl.surfaceDestroyed();
                        }
                        break;
                }
            }
        };
    }

    public void setOpenGlListener(OnOpenGlListener listener) {
        this.onOpenGlListener = listener;
    }

    @Override
    public void surfaceCreated(@NonNull SurfaceHolder surfaceHolder) {
        Message message = Message.obtain();
        message.what = MSG_CREATE_GL;
        message.obj = surfaceHolder.getSurface();
        renderHandler.sendMessage(message);
    }

    @Override
    public void surfaceChanged(@NonNull SurfaceHolder surfaceHolder, int i, int w, int h) {
        Message message = Message.obtain();
        message.what = MSG_CHANGE_GL;
        message.obj = new Size(w, h);
        renderHandler.sendMessage(message);

        Message message1 = Message.obtain();
        message1.what = MSG_DRAW_GL;
        renderHandler.sendMessage(message1);

        surfaceWidth = w;
        surfaceHeight = h;
    }

    @Override
    public void surfaceDestroyed(@NonNull SurfaceHolder surfaceHolder) {
        Message message = Message.obtain();
        message.what = MSG_DESTROY_GL;
        renderHandler.sendMessage(message);
    }

    public void readGlPixel() {
        Message message = Message.obtain();
        message.what = MSG_READ_PIXEL_GL;
        renderHandler.sendMessage(message);
    }

    public void readYuvData() {
        Message message = Message.obtain();
        message.what = MSG_READ_YUV_GL;
        renderHandler.sendMessage(message);
    }

    public void updateBitmap() {
        Message message = Message.obtain();
        message.what = MSG_UPDATE_BITMAP_GL;
        renderHandler.sendMessage(message);
    }

    public void setYuvData(byte[] yData, byte[] uvData, int width, int height) {
        Message message = Message.obtain();
        message.what = MSG_UPDATE_YUV_GL;
        message.obj = new YUVBean(yData, uvData, width, height);
        renderHandler.sendMessage(message);
    }

    public void release() {
        // TODO: watch out for thread synchronization here; if release() runs before
        // surfaceDestroyed has been processed on the render thread, memory may leak.
        if (null != baseOpengl) {
            baseOpengl.release();
        }
    }

    public void requestRender() {
        Message message = Message.obtain();
        message.what = MSG_DRAW_GL;
        renderHandler.sendMessage(message);
    }

    public interface OnOpenGlListener {
        BaseOpengl onOpenglCreate();

        Bitmap requestBitmap();

        void readPixelResult(byte[] bytes);

        void readYUVResult(byte[] bytes);
    }
}
The Java code of BaseOpengl:
public class BaseOpengl {

    public static final int YUV_DATA_TYPE_NV12 = 0;
    public static final int YUV_DATA_TYPE_NV21 = 1;

    // Triangle
    public static final int DRAW_TYPE_TRIANGLE = 0;
    // Rectangle
    public static final int DRAW_TYPE_RECT = 1;
    // Texture mapping
    public static final int DRAW_TYPE_TEXTURE_MAP = 2;
    // Matrix transform
    public static final int DRAW_TYPE_MATRIX_TRANSFORM = 3;
    // VBO/VAO
    public static final int DRAW_TYPE_VBO_VAO = 4;
    // EBO
    public static final int DRAW_TYPE_EBO_IBO = 5;
    // FBO
    public static final int DRAW_TYPE_FBO = 6;
    // PBO
    public static final int DRAW_TYPE_PBO = 7;
    // NV12/NV21 YUV rendering
    public static final int DRAW_YUV_RENDER = 8;
    // Convert an RGB image to NV21
    public static final int DRAW_RGB_TO_YUV = 9;

    public long glNativePtr;
    protected EGLHelper eglHelper;
    protected int drawType;

    public BaseOpengl(int drawType) {
        this.drawType = drawType;
        this.eglHelper = new EGLHelper();
    }

    public void surfaceCreated(Surface surface) {
        Log.v("fly_learn_opengl", "------------surfaceCreated:" + surface);
        eglHelper.surfaceCreated(surface);
    }

    public void surfaceChanged(int width, int height) {
        Log.v("fly_learn_opengl", "------------surfaceChanged:" + Thread.currentThread());
        eglHelper.surfaceChanged(width, height);
    }

    public void surfaceDestroyed() {
        Log.v("fly_learn_opengl", "------------surfaceDestroyed:" + Thread.currentThread());
        eglHelper.surfaceDestroyed();
    }

    public void release() {
        if (glNativePtr != 0) {
            n_free(glNativePtr, drawType);
            glNativePtr = 0;
        }
    }

    public void onGlDraw() {
        Log.v("fly_learn_opengl", "------------onDraw:" + Thread.currentThread());
        if (glNativePtr == 0) {
            glNativePtr = n_gl_nativeInit(eglHelper.nativePtr, drawType);
        }
        if (glNativePtr != 0) {
            n_onGlDraw(glNativePtr, drawType);
        }
    }

    public void setBitmap(Bitmap bitmap) {
        if (glNativePtr == 0) {
            glNativePtr = n_gl_nativeInit(eglHelper.nativePtr, drawType);
        }
        if (glNativePtr != 0) {
            n_setBitmap(glNativePtr, bitmap);
        }
    }

    public void setYuvData(byte[] yData, byte[] uvData, int width, int height) {
        if (glNativePtr != 0) {
            n_setYuvData(glNativePtr, yData, uvData, width, height, drawType);
        }
    }

    public void setMvpMatrix(float[] mvp) {
        if (glNativePtr == 0) {
            glNativePtr = n_gl_nativeInit(eglHelper.nativePtr, drawType);
        }
        if (glNativePtr != 0) {
            n_setMvpMatrix(glNativePtr, mvp);
        }
    }

    public byte[] readPixel() {
        if (glNativePtr != 0) {
            return n_readPixel(glNativePtr, drawType);
        }
        return null;
    }

    public byte[] readYUVResult() {
        if (glNativePtr != 0) {
            return n_readYUV(glNativePtr, drawType);
        }
        return null;
    }

    // Native methods
    private native void n_onGlDraw(long ptr, int drawType);

    private native void n_setMvpMatrix(long ptr, float[] mvp);

    private native void n_setBitmap(long ptr, Bitmap bitmap);

    protected native long n_gl_nativeInit(long eglPtr, int drawType);

    private native void n_free(long ptr, int drawType);

    private native byte[] n_readPixel(long ptr, int drawType);

    private native byte[] n_readYUV(long ptr, int drawType);

    private native void n_setYuvData(long ptr, byte[] yData, byte[] uvData, int width, int height, int drawType);
}
After the converted YUV data has been read back and saved, you can pull the file to a computer and open it with YUVViewer to check whether the conversion really succeeded.
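As an alternative to YUVViewer, ffplay can also preview the raw file from the command line. The line below is a hedged example: out.yuv and 512x512 are placeholders, so substitute the file name saved by the Activity and the actual bitmap dimensions; nv21 matches the V-first interleaving written by the shader above.

ffplay -f rawvideo -pixel_format nv21 -video_size 512x512 out.yuv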
https://juejin.cn/post/7025223104569802789
Follow me and let's keep improving together. Life is about more than coding!