Reputation: 67
I am trying to get a video frame from SurfaceTextureHelper and then process it to create a bitmap, which I later use as input to a canvas where I add some text and paint. To convert the video frame to a bitmap, I am using the YuvFrame class. The problem I am facing is that the app crashes after a few minutes. Even in Logcat, I didn't find anything useful.
@Override
public void startCapture(int width, int height, int fps) {
surTexture.stopListening();
cameraHeight = height;
cameraWidth = width;
releaseBitmap(bitmap);
bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
WeakReference<Bitmap> newBitmap = new WeakReference<>(bitmap.copy(bitmap.getConfig(), true));
releaseBitmap(bitmap);
surTexture.startListening(new VideoSink() {
@Override
public void onFrame(VideoFrame videoFrame) {
if (yuvFrame == null) {
yuvFrame = new YuvFrame(videoFrame, appContext);
} else {
yuvFrame.fromVideoFrame(videoFrame);
}
}
});
captureThread = new Thread(() -> {
try {
long start = System.nanoTime();
capturerObs.onCapturerStarted(true);
int[] textures = new int[1];
GLES20.glGenTextures(1, textures, 0);
YuvConverter yuvConverter = new YuvConverter();
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[0]);
Log.d("InitialsClass", "camera start capturer width- " + cameraWidth + " height- " + cameraHeight);
//The bitmap is drawn on the GPU at this point.
TextureBufferImpl buffer = new TextureBufferImpl(cameraWidth, cameraHeight - 3, VideoFrame.TextureBuffer.Type.RGB, textures[0], new Matrix(), surTexture.getHandler(), yuvConverter, null);
while (true) {
if (yuvFrame != null) {
cameraBitmap = yuvFrame.getBitmap();
}
if (cameraBitmap != null) {
Log.d("InitialsClass", "cameraBitmap start capturer width- " + cameraBitmap.getWidth() + " height- " + cameraBitmap.getHeight());
Resources resources = appContext.getResources();
float scale = resources.getDisplayMetrics().density;
if (canvas == null) {
canvas = new Canvas(newBitmap.get());
}
if (matrix == null) {
matrix = new Matrix();
}
matrix.setScale(-1, 1);
matrix.postTranslate(newBitmap.get().getWidth(), 0);
int startX = (newBitmap.get().getWidth() - cameraBitmap.getWidth()) / 2;
int startY = (newBitmap.get().getHeight() - cameraBitmap.getHeight()) / 2;
canvas.drawBitmap(cameraBitmap, startX, startY, null);
matrix.setScale(1, -1);
matrix.postTranslate(0, newBitmap.get().getHeight());
canvas.setMatrix(matrix);
if (textPaint == null) {
textPaint = new TextPaint();
}
textPaint.setColor(Color.WHITE);
textPaint.setTypeface(Typeface.create(typeFace, Typeface.BOLD));
textPaint.setTextSize((int) (5 * scale));
if (textBounds == null) {
textBounds = new Rect();
}
textPaint.getTextBounds(userName, 0, userName.length(), textBounds);
int horizontalSpacing = 8;
int verticalSpacing = 16;
int x = horizontalSpacing;
int y = cameraHeight - verticalSpacing;
textPaint.setTextAlign(Paint.Align.LEFT);
textPaint.setAntiAlias(true);
canvas.drawText(userName, x, y, textPaint);
if (paint == null) {
paint = new Paint();
}
if (isLocalCandidate) {
paint.setColor(Color.GREEN);
paint.setStrokeWidth(8);
paint.setStyle(Paint.Style.STROKE);
canvas.drawRect(0, 8, cameraWidth - 8, cameraHeight - 8, paint);
} else {
paint.setColor(Color.TRANSPARENT);
paint.setStrokeWidth(2);
paint.setStyle(Paint.Style.STROKE);
canvas.drawRect(0, 2, cameraWidth - 2, cameraHeight - 2, paint);
}
if (surTexture != null && surTexture.getHandler() != null && surTexture.getHandler().getLooper().getThread().isAlive()) {
surTexture.getHandler().post(() -> {
// Set filtering
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
// Load the bitmap into the bound texture.
GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, newBitmap.get(), 0);
//We transfer it to the VideoFrame
VideoFrame.I420Buffer i420Buf = yuvConverter.convert(buffer);
long frameTime = System.nanoTime() - start;
VideoFrame videoFrame = new VideoFrame(i420Buf, 0, frameTime);
capturerObs.onFrameCaptured(videoFrame);
});
}
}
Thread.sleep(100);
}
} catch (InterruptedException ex) {
Log.d("InitialsClass camera", ex.toString());
dispose();
}
});
captureThread.start();
}
Upvotes: 0
Views: 733
Reputation: 121
I also faced this issue. If you look at the profiler, you can see that memory is being allocated on every frame. Basically, you need to create the YuvConverter and the buffer only once and reuse them for the rest of the frames.
here is the code that I have used:
class CustomVideoCapturer(private val getVideoFrameBitmap: () -> Bitmap?) : VideoCapturer {
private val textures by lazy { IntArray(1) }
private val yuvConverter by lazy { YuvConverter() }
private val matrix by lazy { Matrix() }
private val buffer by lazy {
getVideoFrameBitmap()?.let {
TextureBufferImpl(
it.width,
it.height,
VideoFrame.TextureBuffer.Type.RGB,
textures[0],
matrix,
textureHelper!!.handler,
yuvConverter,
null
)
}
}
init {
GLES31.glGenTextures(1, textures, 0);
GLES31.glBindTexture(GLES31.GL_TEXTURE_2D, textures[0])
}
private var capturerObserver: CapturerObserver? = null
private var textureHelper: SurfaceTextureHelper? = null
private var timer = Timer()
private var tickTask: TimerTask = object : TimerTask() {
override fun run() {
try {
tick()
} catch (e: Exception) {
"Exception ${e.message}".logE()
}
}
}
@Synchronized
fun tick() {
try {
getVideoFrameBitmap.invoke()?.let { bitmap ->
/*sendToServer(bitmap, YuvConverter())*/
sendToServer(bitmap)
}
} catch (e: Exception) {
"Exception ${e.message}".logE()
}
}
@Synchronized
private fun sendToServer(bitmap: Bitmap) {
try {
textureHelper?.handler?.post {
// Exception Framebuffer not complete, status: 0
try {
GLES31.glTexParameteri(
GLES31.GL_TEXTURE_2D,
GLES31.GL_TEXTURE_MIN_FILTER,
GLES31.GL_NEAREST
)
GLES31.glTexParameteri(
GLES31.GL_TEXTURE_2D,
GLES31.GL_TEXTURE_MAG_FILTER,
GLES31.GL_NEAREST
)
GLUtils.texImage2D(
GLES31.GL_TEXTURE_2D,
0,
bitmap,
0
)
val frameTime: Long =
TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime())
val videoFrame = VideoFrame(buffer?.toI420(), 180, frameTime)
capturerObserver?.onFrameCaptured(videoFrame)
videoFrame.release()
GlideBitmapPool.putBitmap(bitmap)
} catch (e: GLException) {
"Exception OpenGl : ${e.message}".logE()
} catch (e: Exception) {
"Exception ${e.message}".logE()
}
}
} catch (e: Exception) {
"Exception ${e.message}".logE()
}
}
override fun initialize(
textureHelper: SurfaceTextureHelper?,
context: Context?,
capturerObserver: CapturerObserver?
) {
this.textureHelper = textureHelper
this.capturerObserver = capturerObserver
}
override fun startCapture(width: Int, height: Int, framerate: Int) {
this.timer.schedule(this.tickTask, 0L, (1000 / framerate).toLong())
}
override fun stopCapture() {
this.timer.cancel()
this.timer = Timer()
this.tickTask = object : TimerTask() {
override fun run() {
try {
tick()
} catch (e: Exception) {
}
}
}
}
override fun changeCaptureFormat(p0: Int, p1: Int, p2: Int) {}
override fun dispose() {
this.timer.cancel()
}
override fun isScreencast(): Boolean {
return false
}
}
This will reduce the memory consumption on every frame.
Upvotes: 1