I used Bitmap-to-YUV conversion in a video-call project a while back; this post collects the relevant code.
References:
http://blog.csdn.net/lancees/article/details/7686046
http://www.cnblogs.com/leaven/archive/2012/09/06/2672830.html
The conversions below are provided for reference only.
private static Bitmap getAssetFile() {
    // Load a test bitmap from the app's assets.
    Bitmap bitmap = null;
    try {
        bitmap = BitmapFactory.decodeStream(NgnApplication.getContext()
                .getAssets().open("chat_default_bg.png"));
    } catch (IOException e) {
        e.printStackTrace();
    }
    return bitmap;
}
public static byte[] rgb2YCbCr420(int[] pixels, int width, int height) {
    int len = width * height;
    // YUV420 buffer layout: Y takes len bytes; U and V take len/4 bytes each.
    byte[] yuv = new byte[len * 3 / 2];
    int y, u, v;
    for (int i = 0; i < height; i++) {
        for (int j = 0; j < width; j++) {
            // Mask off the alpha channel of the ARGB value.
            int rgb = pixels[i * width + j] & 0x00FFFFFF;
            // getPixels() returns ARGB color ints packed as 0xAARRGGBB.
            int r = (rgb >> 16) & 0xFF;
            int g = (rgb >> 8) & 0xFF;
            int b = rgb & 0xFF;
            // Integer BT.601 RGB-to-YUV formula.
            y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
            u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
            v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
            // Floating-point rgb2yuv variant:
            // y = (int) (0.299 * r + 0.587 * g + 0.114 * b);
            // u = (int) (-0.147 * r - 0.289 * g + 0.437 * b);
            // v = (int) (0.615 * r - 0.515 * g - 0.1 * b);
            // Floating-point RGB-to-YCbCr variant:
            // y = (int) (0.299 * r + 0.587 * g + 0.114 * b);
            // u = (int) (-0.1687 * r - 0.3313 * g + 0.5 * b + 128);
            // if (u > 255)
            //     u = 255;
            // v = (int) (0.5 * r - 0.4187 * g - 0.0813 * b + 128);
            // if (v > 255)
            //     v = 255;
            // Clamp to the legal ranges.
            y = y < 16 ? 16 : (y > 255 ? 255 : y);
            u = u < 0 ? 0 : (u > 255 ? 255 : u);
            v = v < 0 ? 0 : (v > 255 ? 255 : v);
            // Store Y, then the interleaved U/V pair. Each 2x2 block shares
            // one U/V sample; the last pixel written wins (no averaging).
            yuv[i * width + j] = (byte) y;
            yuv[len + (i >> 1) * width + (j & ~1) + 0] = (byte) u;
            yuv[len + (i >> 1) * width + (j & ~1) + 1] = (byte) v;
        }
    }
    return yuv;
}
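As a quick sanity check on the integer BT.601 formula above: a pure white pixel (r = g = b = 255) should land exactly on the nominal video-range values Y = 235, U = V = 128, since the chroma coefficients each sum to zero. A throwaway snippet (not from the original post) to confirm:

int r = 255, g = 255, b = 255;
int y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;   // 235
int u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128; // 128
int v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;  // 128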
/*
 * Decode an NV21 (YUV420SP) buffer into packed RGB888 bytes.
 */
public static void decodeYUV420SP(byte[] rgbBuf, byte[] yuv420sp,
        int width, int height) {
    final int frameSize = width * height;
    if (rgbBuf == null)
        throw new NullPointerException("buffer 'rgbBuf' is null");
    if (rgbBuf.length < frameSize * 3)
        throw new IllegalArgumentException("buffer 'rgbBuf' size "
                + rgbBuf.length + " < minimum " + frameSize * 3);
    if (yuv420sp == null)
        throw new NullPointerException("buffer 'yuv420sp' is null");
    if (yuv420sp.length < frameSize * 3 / 2)
        throw new IllegalArgumentException("buffer 'yuv420sp' size "
                + yuv420sp.length + " < minimum " + frameSize * 3 / 2);
    int i = 0, y = 0;
    int uvp = 0, u = 0, v = 0;
    int y1192 = 0, r = 0, g = 0, b = 0;
    for (int j = 0, yp = 0; j < height; j++) {
        uvp = frameSize + (j >> 1) * width;
        u = 0;
        v = 0;
        for (i = 0; i < width; i++, yp++) {
            y = (0xff & ((int) yuv420sp[yp])) - 16;
            if (y < 0)
                y = 0;
            // NV21 interleaves one V/U pair per two pixels.
            if ((i & 1) == 0) {
                v = (0xff & yuv420sp[uvp++]) - 128;
                u = (0xff & yuv420sp[uvp++]) - 128;
            }
            // Fixed-point YUV-to-RGB with a 10-bit fractional part.
            y1192 = 1192 * y;
            r = (y1192 + 1634 * v);
            g = (y1192 - 833 * v - 400 * u);
            b = (y1192 + 2066 * u);
            if (r < 0)
                r = 0;
            else if (r > 262143)
                r = 262143;
            if (g < 0)
                g = 0;
            else if (g > 262143)
                g = 262143;
            if (b < 0)
                b = 0;
            else if (b > 262143)
                b = 262143;
            rgbBuf[yp * 3] = (byte) (r >> 10);
            rgbBuf[yp * 3 + 1] = (byte) (g >> 10);
            rgbBuf[yp * 3 + 2] = (byte) (b >> 10);
        }
    }
}
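decodeYUV420SP emits packed RGB888 bytes, which Bitmap cannot consume directly. Below is a minimal sketch of repacking the output into an ARGB_8888 Bitmap; the helper name rgbBufToBitmap is mine, not from the original post.

// Hypothetical helper: repack the RGB888 output of decodeYUV420SP into a Bitmap.
public static Bitmap rgbBufToBitmap(byte[] rgbBuf, int width, int height) {
    int[] colors = new int[width * height];
    for (int i = 0; i < colors.length; i++) {
        int r = rgbBuf[i * 3] & 0xff;
        int g = rgbBuf[i * 3 + 1] & 0xff;
        int b = rgbBuf[i * 3 + 2] & 0xff;
        colors[i] = 0xff000000 | (r << 16) | (g << 8) | b; // opaque ARGB
    }
    return Bitmap.createBitmap(colors, width, height, Bitmap.Config.ARGB_8888);
}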
/*
 * Get the RGB888 bytes of a bitmap.
 */
public static byte[] getRGBByBitmap(Bitmap bitmap) {
    if (bitmap == null) {
        return null;
    }
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    int size = width * height;
    int[] pixels = new int[size];
    bitmap.getPixels(pixels, 0, width, 0, 0, width, height);
    byte[] data = convertColorToByte(pixels);
    return data;
}
/*
 * Get the YUV420 bytes of a bitmap.
 */
public static byte[] getYUVByBitmap(Bitmap bitmap) {
    if (bitmap == null) {
        return null;
    }
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    int size = width * height;
    int[] pixels = new int[size];
    bitmap.getPixels(pixels, 0, width, 0, 0, width, height);
    // byte[] data = convertColorToByte(pixels);
    byte[] data = rgb2YCbCr420(pixels, width, height);
    return data;
}
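Putting the helpers together, a typical call site (assuming the asset loaded by getAssetFile above) might look like this:

Bitmap bitmap = getAssetFile();
if (bitmap != null) {
    byte[] yuv = getYUVByBitmap(bitmap); // width * height * 3 / 2 bytes
    byte[] rgb = getRGBByBitmap(bitmap); // width * height * 3 bytes
    bitmap.recycle();
}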
/*
 * Convert an array of color ints into a packed RGB888 byte array.
 */
public static byte[] convertColorToByte(int[] color) {
    if (color == null) {
        return null;
    }
    byte[] data = new byte[color.length * 3];
    for (int i = 0; i < color.length; i++) {
        data[i * 3] = (byte) (color[i] >> 16 & 0xff);
        data[i * 3 + 1] = (byte) (color[i] >> 8 & 0xff);
        data[i * 3 + 2] = (byte) (color[i] & 0xff);
    }
    return data;
}
// untested function
byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {
    int[] argb = new int[inputWidth * inputHeight];
    scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
    byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
    encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
    scaled.recycle();
    return yuv;
}
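One easy way to eyeball whether the NV21 buffer from getNV21 is sane is to run it through the platform's YuvImage class, which accepts NV21 directly. A minimal sketch, assuming yuv, inputWidth and inputHeight from the function above are in scope:

// Compress the NV21 frame to JPEG and inspect the result.
YuvImage image = new YuvImage(yuv, ImageFormat.NV21, inputWidth, inputHeight, null);
ByteArrayOutputStream out = new ByteArrayOutputStream();
image.compressToJpeg(new Rect(0, 0, inputWidth, inputHeight), 80, out);
byte[] jpeg = out.toByteArray(); // decode with BitmapFactory to view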
void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
    final int frameSize = width * height;
    int yIndex = 0;
    int uvIndex = frameSize;
    int a, R, G, B, Y, U, V;
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            a = (argb[index] & 0xff000000) >>> 24; // a is not used obviously
            R = (argb[index] & 0xff0000) >> 16;
            G = (argb[index] & 0xff00) >> 8;
            B = (argb[index] & 0xff);
            // well known RGB to YUV algorithm
            Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;
            // NV21 has a plane of Y and interleaved planes of VU each
            // sampled by a factor of 2, meaning for every 4 Y pixels there
            // are 1 V and 1 U. Note the sampling is every other pixel AND
            // every other scanline.
            yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0 : ((Y > 255) ? 255 : Y));
            if (j % 2 == 0 && index % 2 == 0) {
                yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0 : ((V > 255) ? 255 : V));
                yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0 : ((U > 255) ? 255 : U));
            }
            index++;
        }
    }
}
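encodeYUV420SP writes V before U and decodeYUV420SP reads the pair back in the same order, so the two functions round-trip NV21 data. A quick check, assuming a bitmap plus its width and height are in scope:

int[] argb = new int[width * height];
bitmap.getPixels(argb, 0, width, 0, 0, width, height);
byte[] nv21 = new byte[width * height * 3 / 2];
encodeYUV420SP(nv21, argb, width, height);
byte[] rgb = new byte[width * height * 3];
decodeYUV420SP(rgb, nv21, width, height);
// rgb now holds the (lossy, chroma-subsampled) reconstruction.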
// Alternative encoder: buffers full-resolution chroma, then subsamples it
// into the interleaved VU plane in a second pass. Assumes even dimensions.
public static void encodeYUV420SP(byte[] yuv420sp, int[] rgba, int width,
        int height) {
    final int frameSize = width * height;
    int[] U = new int[frameSize];
    int[] V = new int[frameSize];
    int r, g, b, y, u, v;
    for (int j = 0; j < height; j++) {
        int index = width * j;
        for (int i = 0; i < width; i++) {
            // This variant assumes pixels packed as 0xRRGGBBAA.
            r = (rgba[index] & 0xff000000) >>> 24;
            g = (rgba[index] & 0xff0000) >> 16;
            b = (rgba[index] & 0xff00) >> 8;
            // rgb to yuv
            y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
            u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
            v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;
            // clip y; keep full-resolution chroma for the second pass
            yuv420sp[index] = (byte) ((y < 0) ? 0 : ((y > 255) ? 255 : y));
            U[index] = u;
            V[index] = v;
            index++;
        }
    }
    // Subsample the chroma 2x2 into the interleaved VU plane (NV21).
    int uvIndex = frameSize;
    for (int j = 0; j < height; j += 2) {
        for (int i = 0; i < width; i += 2) {
            v = V[j * width + i];
            u = U[j * width + i];
            yuv420sp[uvIndex++] = (byte) ((v < 0) ? 0 : ((v > 255) ? 255 : v));
            yuv420sp[uvIndex++] = (byte) ((u < 0) ? 0 : ((u > 255) ? 255 : u));
        }
    }
}