CameraDecoder.c — JNI implementation of the native method declared in the Java class LiveCameraActivity:

public native void decode(Bitmap pTarget, byte[] pSource, int pFilter);

 

#include <jni.h>
#include <android/bitmap.h>
#include <stdint.h>
#include <stdlib.h>
 3 
 4 #define NULL 0
 5 
 6 #define toInt(pValue)     (0xff & (int32_t) pValue)
 7 
 8 #define max(pValue1, pValue2)     (pValue1 < pValue2) ? pValue2 : pValue1
 9 
10 #define clamp(pValue, pLowest, pHighest)     ((pValue < 0) ? pLowest : (pValue > pHighest) ? pHighest : pValue)
11 
12 #define color(pColorR, pColorG, pColorB) \
13     (0xFF000000 | ((pColorB << 6)  & 0x00FF0000) \
14                 | ((pColorG >> 2)  & 0x0000FF00) \
15                 | ((pColorR >> 10) & 0x000000FF))
16 
17 void JNICALL decode(JNIEnv * pEnv, jclass pClass, jobject pTarget, jbyteArray pSource, jint pFilter) {
18 
19     // Retrieves bitmap information and locks it for drawing.
20     AndroidBitmapInfo bitmapInfo;
21     uint32_t* bitmapContent;
22     if (AndroidBitmap_getInfo(pEnv,pTarget, &bitmapInfo) < 0) abort();
23     if (bitmapInfo.format != ANDROID_BITMAP_FORMAT_RGBA_8888) abort();
24     if (AndroidBitmap_lockPixels(pEnv, pTarget, (void**)&bitmapContent) < 0) abort();
25 
26     // Accesses source array data.
27     jbyte* source = (*pEnv)->GetPrimitiveArrayCritical(pEnv, pSource, 0);
28     if (source == NULL) abort();
29 
30     int32_t frameSize = bitmapInfo.width * bitmapInfo.height;
31     int32_t yIndex, uvIndex, x, y;
32     int32_t colorY, colorU, colorV;
33     int32_t colorR, colorG, colorB;
34     int32_t y1192;
35 
36     // Processes each pixel and converts YUV to RGB color.
37     // Algorithm originates from the Ketai open source project.
38     // See http://ketai.googlecode.com/.
39     for (y = 0, yIndex = 0; y < bitmapInfo.height; ++y) {
40         colorU = 0; colorV = 0;
41         // Y is divided by 2 because UVs are subsampled vertically.
42         // This means that two consecutives iterations refer to the
43         // same UV line (e.g when Y=0 and Y=1).
44         uvIndex = frameSize + (y >> 1) * bitmapInfo.width;
45 
46         for (x = 0; x < bitmapInfo.width; ++x, ++yIndex) {
47             // Retrieves YUV components. UVs are subsampled
48             // horizontally too, hence %2 (1 UV for 2 Y).
49             colorY = max(toInt(source[yIndex]) - 16, 0);
50             if (!(x % 2)) {
51                 colorV = toInt(source[uvIndex++]) - 128;
52                 colorU = toInt(source[uvIndex++]) - 128;
53             }
54 
55             // Computes R, G and B from Y, U and V.
56             y1192 = 1192 * colorY;
57             colorR = (y1192 + 1634 * colorV);
58             colorG = (y1192 - 833  * colorV - 400 * colorU);
59             colorB = (y1192 + 2066 * colorU);
60 
61             colorR = clamp(colorR, 0, 262143);
62             colorG = clamp(colorG, 0, 262143);
63             colorB = clamp(colorB, 0, 262143);
64 
65             // Combines R, G, B and A into the final pixel color.
66             bitmapContent[yIndex] = color(colorR,colorG,colorB);
67             bitmapContent[yIndex] &= pFilter;
68         }
69     }
70 
71     // Unlocks the bitmap and releases the Java array when finished.
72     (*pEnv)-> ReleasePrimitiveArrayCritical(pEnv,pSource,source, 0);
73     if (AndroidBitmap_unlockPixels(pEnv, pTarget) < 0) abort();
74 }
75 
76 static JNINativeMethod gMethodRegistry[] = {
77   { "decode", "(Landroid/graphics/Bitmap;[BI)V", (void *) decode }
78 };
79 
80 static int gMethodRegistrySize = sizeof(gMethodRegistry) / sizeof(gMethodRegistry[0]);
81 
82 JNIEXPORT jint JNI_OnLoad(JavaVM* pVM, void* reserved) {
83 
84     JNIEnv *env;
85     if ((*pVM)->GetEnv(pVM, (void**) &env, JNI_VERSION_1_6) != JNI_OK)
86     { abort(); }
87 
88     jclass LiveCameraActivity = (*env)->FindClass(env, "com/packtpub/livecamera/LiveCameraActivity");
89     if (LiveCameraActivity == NULL) abort();
90 
91     (*env)->RegisterNatives(env, LiveCameraActivity, gMethodRegistry, 1);
92     (*env)->DeleteLocalRef(env, LiveCameraActivity);
93 
94     return JNI_VERSION_1_6;
95 }

 

posted @ 2016-04-02 22:24  壬子木  阅读(147)  评论(0)    收藏  举报