Hi, on iOS I can export the complete video, but on Huawei HarmonyOS devices I can't. The flow is: wx.chooseMedia selects a video, wx.createVideoDecoder decodes it frame by frame, each frame is drawn onto an offscreen WebGL canvas, wx.createMediaRecorder records that canvas, and wx.createMediaContainer finally merges the recording with the source audio before saving to the album. Full page code:
let choosedVideoInfo = {}
const w = 300
let h = 200
const vs = `
attribute vec3 aPos;
attribute vec2 aVertexTextureCoord;
varying highp vec2 vTextureCoord;
void main(void){
gl_Position = vec4(aPos, 1);
vTextureCoord = aVertexTextureCoord;
}
`
const fs = `
varying highp vec2 vTextureCoord;
uniform sampler2D uSampler;
void main(void) {
gl_FragColor = texture2D(uSampler, vTextureCoord);
}
`
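// Quad geometry, drawn as two triangles with the texture coordinates below.
// Note: the quad spans -0.5..0.5 in clip space, so the frame only fills the
// centre half of the canvas rather than the whole surface.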
const vertex = [
-0.5, -0.5, 0.0,
0.5, -0.5, 0.0,
0.5, 0.5, 0.0,
-0.5, 0.5, 0.0
]
const vertexIndice = [
0, 1, 2,
0, 2, 3
]
const texCoords = [
0.0, 0.0,
1.0, 0.0,
1.0, 1.0,
0.0, 1.0
]
// Compile a shader of the given type from source
function createShader(gl, src, type) {
const shader = gl.createShader(type)
gl.shaderSource(shader, src)
gl.compileShader(shader)
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
console.error('Error compiling shader: ' + gl.getShaderInfoLog(shader))
}
return shader
}
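// GL buffers, created once in createRenderer and kept alive for its draw closure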
const buffers = {}
function createRenderer(canvas, width, height) {
const gl = canvas.getContext("webgl")
if (!gl) {
console.error('Unable to get webgl context.')
return
}
// Set the canvas size (pixelRatio scaling left disabled)
const info = wx.getSystemInfoSync() // only needed if pixelRatio scaling is re-enabled
gl.canvas.width = width // info.pixelRatio * width
gl.canvas.height = height // info.pixelRatio * height
// Set the WebGL viewport to the drawing buffer size
// console.log(gl.drawingBufferWidth, gl.drawingBufferHeight)
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight)
// Create the vertex & fragment shaders
const vertexShader = createShader(gl, vs, gl.VERTEX_SHADER)
const fragmentShader = createShader(gl, fs, gl.FRAGMENT_SHADER)
// Create a WebGLProgram and attach the shaders
const program = gl.createProgram()
gl.attachShader(program, vertexShader)
gl.attachShader(program, fragmentShader)
gl.linkProgram(program)
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.error('Unable to initialize the shader program.')
return
}
// Use the shader program
gl.useProgram(program)
// Create a texture to hold decoded frames
const texture = gl.createTexture()
gl.activeTexture(gl.TEXTURE0)
gl.bindTexture(gl.TEXTURE_2D, texture)
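// Flip rows on upload so the decoded frame isn't rendered upside-down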
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE)
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE)
gl.bindTexture(gl.TEXTURE_2D, null)
buffers.vertexBuffer = gl.createBuffer()
gl.bindBuffer(gl.ARRAY_BUFFER, buffers.vertexBuffer)
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(vertex), gl.STATIC_DRAW)
buffers.vertexIndiceBuffer = gl.createBuffer()
gl.bindBuffer(gl.ELEMENT_ARRAY_BUFFER, buffers.vertexIndiceBuffer)
gl.bufferData(gl.ELEMENT_ARRAY_BUFFER, new Uint16Array(vertexIndice), gl.STATIC_DRAW)
const aVertexPosition = gl.getAttribLocation(program, 'aPos')
gl.vertexAttribPointer(aVertexPosition, 3, gl.FLOAT, false, 0, 0)
gl.enableVertexAttribArray(aVertexPosition)
buffers.trianglesTexCoordBuffer = gl.createBuffer()
gl.bindBuffer(gl.ARRAY_BUFFER, buffers.trianglesTexCoordBuffer)
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(texCoords), gl.STATIC_DRAW)
const vertexTexCoordAttribute = gl.getAttribLocation(program, "aVertexTextureCoord")
gl.enableVertexAttribArray(vertexTexCoordAttribute)
gl.vertexAttribPointer(vertexTexCoordAttribute, 2, gl.FLOAT, false, 0, 0)
const samplerUniform = gl.getUniformLocation(program, 'uSampler')
gl.uniform1i(samplerUniform, 0)
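// Return a draw function: upload one decoded RGBA frame and redraw the quad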
return (arrayBuffer, width, height) => {
// gl.clearColor(Math.random(), Math.random(), 0, 1)
gl.clearColor(1, 1, 0, 1)
gl.clear(gl.COLOR_BUFFER_BIT)
// Bind the texture
gl.bindTexture(gl.TEXTURE_2D, texture)
// Upload the decoded frame as a 2D texture image
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, arrayBuffer)
gl.drawElements(gl.TRIANGLES, 6, gl.UNSIGNED_SHORT, 0)
}
}
Page({
/**
* Initial page data
*/
data: {
},
/**
* Export the video
*/
async export() {
let i = 0
await this.initRenderer()
this.getDecoder().then((decoder) => {
let recorder = this.getRecorder()
const self = this
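// Decode-render-record loop: pull one decoded frame, draw it to the
// canvas, then have the recorder capture that frame before looping.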
function loop() {
if (self.stopped) {
return
}
let frameData = decoder.getFrameData()
if (!frameData) {
console.log('No frame available yet')
setTimeout(() => {
loop()
}, 1000/60)
} else {
self.renderFrame(frameData)
recorder.requestFrame(() => {
console.log('Recorded frames:', i++)
loop()
})
}
}
loop()
})
},
/**
* Get the video decoder
*/
getDecoder() {
return new Promise((resolve, reject) => {
let decoder = wx.createVideoDecoder()
decoder.on("start", () => {
console.log('decoder started')
resolve(decoder)
})
decoder.on("ended", () => {
console.log('decoder finished decoding')
// When decoding ends, also stop the canvas recorder
this.recorder.stop()
this.stopped = true
})
let options = {
source: this.choosedFile,
mode: 1, // mode: 0 = normal playback, 1 = fast (usable for export)
abortAudio: true
};
// Start decoding
decoder.start(options)
this.decoder = decoder
})
},
/**
* Get the canvas recorder
*/
getRecorder() {
let canvas = this.getMainCanvasNode()
let recorder = wx.createMediaRecorder(canvas, {
fps: choosedVideoInfo.fps, // fps of the source video
videoBitsPerSecond: choosedVideoInfo.bitrate, // bitrate of the source video
gop: 12
})
// Listen for recorder events
recorder.on("timeupdate", (res) => {
console.log('recorder running, current time:', res.currentTime)
})
recorder.on("stop", (res) => {
console.log('recorder stopped')
this.saveMedia(res.tempFilePath)
})
// Start recording
recorder.start()
this.recorder = recorder
return recorder
},
/**
* Merge the recorded video with the source audio and save locally
* @param {*} videoTempFilePath
*/
saveMedia(videoTempFilePath) {
const self = this
let choosedFile = this.choosedFile
const MediaContainer = wx.createMediaContainer()
// Take the video track from the WebGL recording
console.log('webgl video MediaTrack', videoTempFilePath)
MediaContainer.extractDataSource({
source: videoTempFilePath,
success(res) {
console.log('webgl video MediaTrack done, res:', res)
MediaContainer.addTrack(res.tracks[0])
// Take the audio track from the source video
console.log('source audio MediaTrack', choosedFile)
MediaContainer.extractDataSource({
source: choosedFile,
success(res) {
console.log('source audio MediaTrack done, res:', res)
// Grab the audio track and add it to the container
// (guarded: the source may expose only one track)
res.tracks[0] && res.tracks[0].kind == 'audio' && MediaContainer.addTrack(res.tracks[0])
res.tracks[1] && res.tracks[1].kind == 'audio' && MediaContainer.addTrack(res.tracks[1])
// Compose the tracks and export the video file
MediaContainer.export({
success(res) {
// Save the video to the local album
wx.saveVideoToPhotosAlbum({
filePath: res.tempFilePath,
success() {
wx.showToast({
title: 'Export succeeded',
icon: 'success',
duration: 2000
})
self.destroy()
},
complete (res) {
console.log(res.errMsg)
}
})
},
complete(res) {
console.log('MediaContainer.export', res)
}
})
},
complete(res) {
console.log(res)
}
})
}
})
},
renderFrame(frameData) {
this.render(new Uint8Array(frameData.data), frameData.width, frameData.height);
},
// Initialize the renderer
async initRenderer() {
console.log('initRenderer')
let w = choosedVideoInfo.width || 600;
let h = choosedVideoInfo.height || 400;
console.log('video width & height: ', w , h)
let mainCanvas = this.getMainCanvasNode(w, h);
const render = createRenderer(mainCanvas, w, h);
this.render = render;
},
// Get (or lazily create) the main canvas
getMainCanvasNode(w, h) {
w = w || choosedVideoInfo.width;
h = h || choosedVideoInfo.height;
if (!this.mainCanvas) {
// Create an offscreen canvas
this.mainCanvas = wx.createOffscreenCanvas({
width: w,
height: h
})
}
return this.mainCanvas
},
/**
* Choose a video
*/
choose() {
wx.chooseMedia({
sourceType: ['album'],
mediaType: ['video'],
sizeType: ['compressed'],
count: 1,
success: async (res) => {
let { tempFiles, type } = res;
this.choosedFile = tempFiles[0].tempFilePath
wx.getVideoInfo({
src: this.choosedFile,
success(res) {
console.log(res)
choosedVideoInfo = res
}
})
}
})
},
/**
* Tear down
*/
destroy() {
this.stopped = true
if (this.recorder) {
console.log('recorder destroyed')
this.recorder.destroy()
this.recorder = null
}
if (this.decoder) {
console.log('decoder removed')
this.decoder.remove()
this.decoder = null
}
if (this.mainCanvas) {
console.log('mainCanvas reference released')
this.mainCanvas = null
}
this.stopped = false // reset so a new export can run
}
})
Please describe in detail the steps that lead to the problem, and provide a minimal code snippet that reproduces it (https://developers.weixin.qq.com/miniprogram/dev/devtools/minicode.html).
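For reference, a stripped-down repro of just the decode-and-record loop might look like the sketch below. It is only a sketch: it reuses createRenderer from the code above, videoPath is a placeholder for a file obtained via wx.chooseMedia, and fps: 30 is an assumed value.

// Minimal repro sketch (assumptions: videoPath comes from wx.chooseMedia,
// fps is hard-coded, createRenderer is the function defined above)
function minimalRepro(videoPath, width, height) {
  const canvas = wx.createOffscreenCanvas({ width, height })
  const render = createRenderer(canvas, width, height)
  const decoder = wx.createVideoDecoder()
  const recorder = wx.createMediaRecorder(canvas, { fps: 30 })
  let ended = false
  decoder.on('ended', () => { ended = true })
  recorder.on('stop', (res) => console.log('recorded file:', res.tempFilePath))
  decoder.on('start', () => {
    recorder.start()
    const loop = () => {
      const frame = decoder.getFrameData()
      if (!frame) {
        // No frame ready: stop once decoding has ended, otherwise poll again
        if (ended) return recorder.stop()
        return setTimeout(loop, 1000 / 60)
      }
      render(new Uint8Array(frame.data), frame.width, frame.height)
      recorder.requestFrame(loop)
    }
    loop()
  })
  decoder.start({ source: videoPath, mode: 1, abortAudio: true })
}

One deliberate difference: this sketch stops the recorder only after getFrameData runs dry, whereas the code above stops it as soon as the decoder fires "ended". Whether that ordering matters on HarmonyOS is worth checking.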