I'm trying to convert two lists of bitmap to video. First list are the main bitmaps that make a video and second bitmap list contains the bitmap that would be drawn on the video frame by frame. Both bitmap lists are of same size.
I have tried to do this using the FFMPEG library and gave me my desired results. But the main problem with FFMPEG is that it increase the size of the APK and also it take long time because my code first save those list of bitmaps to the cache directory as a PNG file. Than provide both to the FFMPEG that convert them to video. Here is the code for FFMPEG:
fun convertToVideo(context: Context, bitmapList: List<Bitmap?>, effectedBitmaps: List<Bitmap?>, executed: (Boolean) -> Unit) {
val outputDirCap = File(context.cacheDir, "temp_cap")
val outputDirEff = File(context.cacheDir, "temp_effect")
outputDirCap.mkdirs()
outputDirEff.mkdirs()
val outputFilePath = File(
context.cacheDir,
"output.mp4"
).absolutePath
for (i in bitmapList.indices) {
val bitmap = bitmapList[i]
val outputFile = File(outputDirCap, "frame$i.png")
try {
val fileOutputStream = FileOutputStream(outputFile)
bitmap?.compress(Bitmap.CompressFormat.PNG, 100, fileOutputStream)
fileOutputStream.flush()
fileOutputStream.close()
} catch (e: IOException) {
e.printStackTrace()
}
}
for (i in effectedBitmaps.indices) {
val bitmap = effectedBitmaps[i]
val outputFile = File(outputDirEff, "frame$i.png")
try {
val fileOutputStream = FileOutputStream(outputFile)
bitmap?.compress(Bitmap.CompressFormat.PNG, 100, fileOutputStream)
fileOutputStream.flush()
fileOutputStream.close()
} catch (e: IOException) {
e.printStackTrace()
}
}
val command = arrayOf(
"-i", "${outputDirCap.absolutePath}/frame%d.png",
"-i", "${outputDirEff.absolutePath}/frame%d.png",
"-filter_complex",
"[0:v][1:v]overlay",
outputFilePath
)
try {
FFmpeg.executeAsync(command
) { executionId, returnCode ->
if (returnCode == RETURN_CODE_SUCCESS) {
Log.d(TAG, "Async command execution completed successfully.");
outputDirCap.deleteRecursively()
outputDirEff.deleteRecursively()
executed(true)
} else if (returnCode == RETURN_CODE_CANCEL) {
Log.d(TAG, "Async command execution cancelled by user.");
outputDirCap.deleteRecursively()
outputDirEff.deleteRecursively()
executed(false)
} else {
outputDirCap.deleteRecursively()
outputDirEff.deleteRecursively()
executed(false)
Log.d(
TAG,
String.format(
"Async command execution failed with returnCode=%d.",
returnCode
)
);
}
}
} catch (e: Exception) {
e.printStackTrace()
}
}
}
I have also tried to do this using the MediaCodec but that code doesn't work it just stuck on this line : "var inputBufferIndex = encoder.dequeueInputBuffer(-1)". Here is the code for MediaCodec:
private fun convertBitmapsToVideo(list1: List<Bitmap?>, list2: List<Bitmap?>) {
val OUTPUT_VIDEO_PATH =
requireActivity().cacheDir.absolutePath + File.separator + "output.mp4"
try {
val encoder = MediaCodec.createEncoderByType(OUTPUT_VIDEO_MIME_TYPE)
val format = MediaFormat.createVideoFormat(
OUTPUT_VIDEO_MIME_TYPE,
OUTPUT_VIDEO_WIDTH,
OUTPUT_VIDEO_HEIGHT
)
format.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_VIDEO_BIT_RATE)
format.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE)
format.setInteger(
MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface
)
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1)
encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE)
val muxer = MediaMuxer(OUTPUT_VIDEO_PATH, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4)
encoder.start()
val inputBuffers = encoder.inputBuffers
val outputBuffers = encoder.outputBuffers
val bufferInfo = BufferInfo()
var trackIndex = -1
var isMuxerStarted = false
var presentationTimeUs: Long = 0
val maxFrames = Math.max(list1.size, list2.size)
Log.d(TAG, "convertBitmapsToVideo: Max Frames $maxFrames")
for (i in 0 until maxFrames) {
val bitmap1 = if (i < list1.size) list1[i] else null
val bitmap2 = if (i < list2.size) list2[i] else null
var inputBufferIndex = encoder.dequeueInputBuffer(-1)
if (inputBufferIndex >= 0) {
var inputBuffer = inputBuffers[inputBufferIndex]
inputBuffer.clear()
// Convert bitmaps to YUV420 format
val frameData1: ByteArray? = bitmap1?.let { convertBitmapToYUV(it) }
val frameData2: ByteArray? = bitmap2?.let { convertBitmapToYUV(it) }
if (frameData1 != null) {
inputBuffer.put(frameData1)
encoder.queueInputBuffer(
inputBufferIndex,
0,
frameData1.size,
presentationTimeUs,
0
)
}
if (frameData2 != null) {
inputBufferIndex = encoder.dequeueInputBuffer(-1)
if (inputBufferIndex >= 0) {
inputBuffer = inputBuffers[inputBufferIndex]
inputBuffer.clear()
inputBuffer.put(frameData2)
encoder.queueInputBuffer(
inputBufferIndex,
0,
frameData2.size,
presentationTimeUs,
0
)
}
}
presentationTimeUs += 1000000 / OUTPUT_VIDEO_FRAME_RATE
var outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0)
while (outputBufferIndex >= 0) {
Log.d(TAG, "convertBitmapsToVideo: while")
val outputBuffer = outputBuffers[outputBufferIndex]
if (!isMuxerStarted) {
trackIndex = muxer.addTrack(encoder.outputFormat)
muxer.start()
isMuxerStarted = true
}
outputBuffer.position(bufferInfo.offset)
outputBuffer.limit(bufferInfo.offset + bufferInfo.size)
muxer.writeSampleData(trackIndex, outputBuffer, bufferInfo)
encoder.releaseOutputBuffer(outputBufferIndex, false)
outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0)
}
}
}
encoder.signalEndOfInputStream()
var outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0)
while (outputBufferIndex >= 0) {
Log.d(TAG, "convertBitmapsToVideo: ")
val outputBuffer = outputBuffers[outputBufferIndex]
if (!isMuxerStarted) {
trackIndex = muxer.addTrack(encoder.outputFormat)
muxer.start()
isMuxerStarted = true
}
outputBuffer.position(bufferInfo.offset)
outputBuffer.limit(bufferInfo.offset + bufferInfo.size)
muxer.writeSampleData(trackIndex, outputBuffer, bufferInfo)
encoder.releaseOutputBuffer(outputBufferIndex, false)
outputBufferIndex = encoder.dequeueOutputBuffer(bufferInfo, 0)
}
encoder.stop()
encoder.release()
muxer.stop()
muxer.release()
Log.d(TAG, "Video conversion completed. Output file path: $OUTPUT_VIDEO_PATH")
} catch (e: IOException) {
Log.d(TAG, "Error converting bitmaps to video: " + e.message)
}
}
private fun convertBitmapToYUV(bitmap: Bitmap): ByteArray? {
val width = bitmap.width
val height = bitmap.height
val argb = IntArray(width * height)
val yuv = ByteArray(width * height * 3 / 2) // YUV420 format
bitmap.getPixels(argb, 0, width, 0, 0, width, height)
var inputOffset = 0
var outputOffset = 0
// Convert ARGB to YUV420 format
for (i in 0 until height) {
for (j in 0 until width) {
val rgb = argb[inputOffset++]
val r = rgb shr 16 and 0xFF
val g = rgb shr 8 and 0xFF
val b = rgb and 0xFF
var y = (0.299 * r + 0.587 * g + 0.114 * b).toInt()
var u = (-0.14713 * r - 0.28886 * g + 0.436 * b).toInt()
var v = (0.615 * r - 0.51498 * g - 0.10001 * b).toInt()
// Clamp values
y = Math.max(0, Math.min(y, 255))
u = Math.max(0, Math.min(u, 255))
v = Math.max(0, Math.min(v, 255))
yuv[outputOffset++] = y.toByte()
if (i % 2 == 0 && j % 2 == 0) {
yuv[outputOffset++] = u.toByte()
yuv[outputOffset++] = v.toByte()
}
}
inputOffset += width - width
}
return yuv
}
If anyone can have better solution than these two or make my MediaCodec code work than kindly let me know. Or someone has solution for the large APK size. Help will be much appreciated.