- MediaCodec 初始化（创建、配置并启动 H.264 解码器）
// MediaCodec decoder initialization: create, configure and start a decoder for mimeType.
try {
    mCodec = MediaCodec.createDecoderByType(mimeType);
    // Capabilities are queried but not otherwise used here; kept for future
    // format/profile validation. NOTE(review): confirm whether this is still needed.
    MediaCodecInfo codecInfo = mCodec.getCodecInfo();
    MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
    MediaFormat format = MediaFormat.createVideoFormat(mimeType, width, height);
    // Surface is null: decoded frames are read back as ByteBuffer/Image for CPU-side
    // conversion instead of being rendered directly.
    mCodec.configure(format, null, null, 0);
    mCodec.start();
} catch (Exception ex) {
    ex.printStackTrace();
    // Fix: don't leak a half-initialized codec. If configure()/start() failed,
    // release the instance so mCodec is never left in an unusable error state.
    if (mCodec != null) {
        mCodec.release();
        mCodec = null;
    }
}
- H.264 码流的输入与解码输出
/**
 * Feeds one H.264 access unit to the decoder and drains all currently
 * available decoded frames, converting each to a Bitmap via imageToBitmap().
 *
 * @param frame a complete H.264 access unit, as required by queueInputBuffer
 * @param ts    presentation timestamp for this frame (microseconds)
 */
public void inputBytes(byte[] frame, long ts) {
    try {
        // Non-blocking request for an input buffer; if none is free the frame
        // is dropped. Fix: log the drop instead of failing silently.
        int inputBufferIndex = mCodec.dequeueInputBuffer(0);
        if (inputBufferIndex >= 0) {
            ByteBuffer buffer;
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                // Fix: getInputBuffers() is deprecated on API 21+; this code
                // already requires API 21 for getOutputImage() below.
                buffer = mCodec.getInputBuffer(inputBufferIndex);
            } else {
                buffer = mCodec.getInputBuffers()[inputBufferIndex];
            }
            buffer.clear();
            buffer.put(frame);
            mCodec.queueInputBuffer(inputBufferIndex, 0, frame.length, ts, 0);
        } else {
            Log.d(TAG, "inputBytes: no free input buffer, frame dropped");
        }
        // Drain every decoded frame that is ready right now (timeout 0).
        MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
        int outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0);
        while (outputBufferIndex >= 0) {
            if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
                Image image = mCodec.getOutputImage(outputBufferIndex);
                // Fix: getOutputImage() may return null; guard before use so we
                // neither NPE in imageToBitmap() nor in image.close().
                if (image != null) {
                    try {
                        imageToBitmap(image);
                    } catch (Exception ex) {
                        ex.printStackTrace();
                    } finally {
                        // Image must be closed before the buffer is released.
                        image.close();
                    }
                }
            }
            mCodec.releaseOutputBuffer(outputBufferIndex, false);
            outputBufferIndex = mCodec.dequeueOutputBuffer(bufferInfo, 0);
        }
        // Informational statuses from the last dequeue attempt.
        if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "inputBytes: try later ..");
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            Log.d(TAG, "inputBytes: BUFFERS_CHANGED");
        } else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            Log.d(TAG, "inputBytes: outputFormat " + mCodec.getOutputFormat());
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
- getOutputImage 返回的 Image 为 YUV_420_888 格式，需要先转为 NV21，再通过 YuvImage 压缩为 JPEG。
/**
 * Packs the three planes of a YUV_420_888 Image into a single NV21 byte array
 * (full-resolution Y plane followed by interleaved V/U at half resolution),
 * honoring the image's crop rect and per-plane row/pixel strides.
 *
 * NOTE(review): rowData is sized from plane 0's rowStride and reused for the
 * chroma planes — assumes chroma rowStride <= luma rowStride (typically true
 * for 4:2:0; confirm on target devices).
 */
private static byte[] getDataFromImage(Image image) {
Rect crop = image.getCropRect();
int format = image.getFormat();
int width = crop.width();
int height = crop.height();
Image.Plane[] planes = image.getPlanes();
// For YUV_420_888 getBitsPerPixel() is 12, so this is width*height*3/2.
byte[] data = new byte[width * height * ImageFormat.getBitsPerPixel(format) / 8];
// Scratch row for planes whose pixelStride > 1 (semi-planar chroma).
byte[] rowData = new byte[planes[0].getRowStride()];
int channelOffset = 0;
int outputStride = 1;
for (int i = 0; i < planes.length; i++) {
switch (i) {
case 0:
// Y plane: packed tightly at the start of the output.
channelOffset = 0;
outputStride = 1;
break;
case 1:
// U plane: written at odd positions after the Y block (NV21 = V,U,V,U...).
channelOffset = width * height + 1;
outputStride = 2;
break;
case 2:
// V plane: written at even positions after the Y block.
channelOffset = width * height;
outputStride = 2;
break;
}
ByteBuffer buffer = planes[i].getBuffer();
int rowStride = planes[i].getRowStride();
int pixelStride = planes[i].getPixelStride();
// Chroma planes are subsampled by 2 in both dimensions.
int shift = (i == 0) ? 0 : 1;
int w = width >> shift;
int h = height >> shift;
// Seek to the first pixel of the crop region within this plane.
buffer.position(rowStride * (crop.top >> shift) + pixelStride * (crop.left >> shift));
for (int row = 0; row < h; row++) {
int length;
if (pixelStride == 1 && outputStride == 1) {
// Fast path: contiguous source and destination — bulk-copy the row.
length = w;
buffer.get(data, channelOffset, length);
channelOffset += length;
} else {
// Slow path: read the strided row into scratch, then pick every
// pixelStride-th byte and write with the NV21 interleave stride.
length = (w - 1) * pixelStride + 1;
buffer.get(rowData, 0, length);
for (int col = 0; col < w; col++) {
data[channelOffset] = rowData[col * pixelStride];
channelOffset += outputStride;
}
}
// Skip row padding — except after the last row, where the remaining
// bytes may not exist in the buffer.
if (row < h - 1) {
buffer.position(buffer.position() + rowStride - length);
}
}
}
return data;
}
/**
 * Converts a decoded YUV_420_888 Image to a Bitmap: packs its planes into an
 * NV21 buffer (getDataFromImage), JPEG-compresses it with YuvImage, then
 * decodes the JPEG bytes into a Bitmap.
 *
 * @param image decoded frame from MediaCodec.getOutputImage(); caller closes it
 * @return the decoded Bitmap, or null if BitmapFactory fails to decode
 */
Bitmap imageToBitmap(Image image) {
    Rect rect = image.getCropRect();
    YuvImage yuvImage = new YuvImage(getDataFromImage(image), ImageFormat.NV21, rect.width(), rect.height(), null);
    ByteArrayOutputStream stream = new ByteArrayOutputStream();
    // Fix: compress the crop-sized region the NV21 buffer was built for; the
    // outer width/height fields may differ from the crop rect and would make
    // YuvImage read past the end of the buffer (or compress the wrong region).
    yuvImage.compressToJpeg(new Rect(0, 0, rect.width(), rect.height()), 100, stream);
    Bitmap bitmap = BitmapFactory.decodeByteArray(stream.toByteArray(), 0, stream.size());
    try {
        stream.close();
    } catch (IOException e) {
        // Closing a ByteArrayOutputStream cannot actually fail; log defensively.
        e.printStackTrace();
    }
    return bitmap;
}