Android OpenCV NDK face detection and comparison

  • Face comparison

/**
 * Face comparison: compare the HSV histograms of two bitmaps.
 */
extern "C"
JNIEXPORT jdouble JNICALL
Java_com_hxg_ndkface_FaceDetection_histogramMatch(JNIEnv *env, jobject instance, jobject bitmap1,
                                                  jobject bitmap2) {
    // 1. Convert the Bitmaps into cv::Mat objects that OpenCV can work with
    Mat mat, mat1;
    bitmap2Mat(env, mat, bitmap1);
    bitmap2Mat(env, mat1, bitmap2);

    // Convert to HSV so the histogram covers hue and saturation
    // (the mats from bitmap2Mat are 4-channel; the alpha channel is ignored by the conversion)
    cvtColor(mat, mat, COLOR_BGR2HSV);
    cvtColor(mat1, mat1, COLOR_BGR2HSV);

    int channels[] = {0, 1};          // hue and saturation channels
    int histsize[] = {180, 255};      // bins per channel
    float r1[] = {0, 180};            // hue range
    float r2[] = {0, 256};            // saturation range (upper bound exclusive)
    const float *ranges[] = {r1, r2};

    Mat hist1, hist2;
    // calcHist takes the number of source images (1 here), not the channel count
    calcHist(&mat, 1, channels, Mat(), hist1, 2, histsize, ranges, true);
    // https://www.cnblogs.com/bjxqmy/p/12292421.html
    normalize(hist1, hist1, 1, 0, NORM_L1);
    calcHist(&mat1, 1, channels, Mat(), hist2, 2, histsize, ranges, true);
    normalize(hist2, hist2, 1, 0, NORM_L1);

    double similarity = compareHist(hist1, hist2, HISTCMP_CORREL);
    __android_log_print(ANDROID_LOG_ERROR, "TTTTT", "similarity: %f", similarity);
    return similarity;
}
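
For context, the Java peer of these JNI functions is the FaceDetection class, which the post itself does not list. The sketch below is an assumption reconstructed from the method names used in the Activities further down and from the JNI signatures in this article; the library name passed to loadLibrary is also an assumption.

// Hypothetical FaceDetection.java matching the JNI functions in this post.
// Method names come from the Activity code below; everything else is assumed.
package com.hxg.ndkface;

import android.graphics.Bitmap;

public class FaceDetection {

    static {
        System.loadLibrary("native-lib"); // assumed library name (default NDK template)
    }

    // Cascade-based detection (used by MainNorFaceActivity)
    public native void loadCascade(String cascadeFilePath);
    public native boolean faceDetection(Bitmap bitmap);

    // HSV-histogram comparison; returns compareHist(..., HISTCMP_CORREL)
    public native double histogramMatch(Bitmap bitmap1, Bitmap bitmap2);

    // DNN-based detection: crops the detected face from checkPath into resultPath
    public native boolean faceDnnDetection(String modelBinary, String modelDesc,
                                           String checkPath, String resultPath);
}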

  • DNN-based face detection with face cropping

private void copyCaseCadeFilePbtxt() {
    InputStream is = null;
    FileOutputStream os = null;
    try {
        // Copy the model description file from application resources
        is = getResources().openRawResource(R.raw.opencv_face_detector);
        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
        mCascadeFile = new File(cascadeDir, "opencv_face_detector.pbtxt");
        if (mCascadeFile.exists()) return;
        os = new FileOutputStream(mCascadeFile);
        byte[] buffer = new byte[1024 * 1024];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
        is.close();
        os.close();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        try {
            if (is != null) {
                is.close();
            }
            if (os != null) {
                os.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

private void copyCaseCadeFileUint8() {
    InputStream is = null;
    FileOutputStream os = null;
    try {
        // Copy the frozen TensorFlow model from application resources
        is = getResources().openRawResource(R.raw.opencv_face_detector_uint8);
        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
        mCascadeFile = new File(cascadeDir, "opencv_face_detector_uint8.pb");
        if (mCascadeFile.exists()) return;
        os = new FileOutputStream(mCascadeFile);
        byte[] buffer = new byte[1024 * 1024];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
        is.close();
        os.close();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        try {
            if (is != null) {
                is.close();
            }
            if (os != null) {
                os.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}
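
These two copy methods (and copyCaseCadeFileTest in the DNN Activity below) are identical except for the raw resource id and the output file name, so they could be collapsed into one utility. A possible consolidation; copyRawToFile is a name introduced here for illustration, not part of the original project:

// Hypothetical helper replacing the near-identical copyCaseCadeFile* methods.
private File copyRawToFile(int rawResId, String fileName) {
    File outFile = new File(getDir("cascade", Context.MODE_PRIVATE), fileName);
    if (outFile.exists()) {
        return outFile; // already copied on a previous run
    }
    try (InputStream is = getResources().openRawResource(rawResId);
         FileOutputStream os = new FileOutputStream(outFile)) {
        byte[] buffer = new byte[1024 * 1024];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
            os.write(buffer, 0, bytesRead);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return outFile;
}

// Usage, mirroring the methods above:
// mCascadeFile = copyRawToFile(R.raw.opencv_face_detector, "opencv_face_detector.pbtxt");
// mCascadeFile = copyRawToFile(R.raw.opencv_face_detector_uint8, "opencv_face_detector_uint8.pb");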

/**
 * DNN-based face detection: crop the detected face and save it to resultPath.
 */
extern "C"
JNIEXPORT jboolean JNICALL
Java_com_hxg_ndkface_FaceDetection_faceDnnDetection(JNIEnv *env, jobject instance,
                                                    jstring model_binary,
                                                    jstring model_desc,
                                                    jstring checkPath,
                                                    jstring resultPath) {
    const char *model_binary_path = env->GetStringUTFChars(model_binary, 0);
    const char *model_desc_path = env->GetStringUTFChars(model_desc, 0);
    const char *check_path = env->GetStringUTFChars(checkPath, 0);
    const char *result_path = env->GetStringUTFChars(resultPath, 0);

    Net net = readNetFromTensorflow(model_binary_path, model_desc_path);
    net.setPreferableBackend(DNN_BACKEND_OPENCV);
    net.setPreferableTarget(DNN_TARGET_CPU);
    if (net.empty()) {
        __android_log_print(ANDROID_LOG_ERROR, "TTTTT", "%s", "could not load net...");
        // Release the JNI strings before bailing out
        env->ReleaseStringUTFChars(model_binary, model_binary_path);
        env->ReleaseStringUTFChars(model_desc, model_desc_path);
        env->ReleaseStringUTFChars(checkPath, check_path);
        env->ReleaseStringUTFChars(resultPath, result_path);
        return false;
    }

    Mat frame = imread(check_path); // read the image to run detection on
    __android_log_print(ANDROID_LOG_ERROR, "TTTTT", "%s", "preparing input data");
    // Prepare the network input: 300x300, mean-subtracted
    Mat inputBlob = blobFromImage(frame, 1.0,
                                  Size(300, 300), Scalar(104.0, 177.0, 123.0), false, false);
    net.setInput(inputBlob, "data");

    // Run face detection
    Mat detection = net.forward("detection_out");
    Mat detectionMat(detection.size[2], detection.size[3], CV_32F, detection.ptr());
    Mat face_area;
    for (int i = 0; i < detectionMat.rows; i++) {
        // Confidence is between 0 and 1
        float confidence = detectionMat.at<float>(i, 2);
        if (confidence > 0.7) {
            // count++;
            int xLeftBottom = static_cast<int>(detectionMat.at<float>(i, 3) * frame.cols);
            int yLeftBottom = static_cast<int>(detectionMat.at<float>(i, 4) * frame.rows);
            int xRightTop = static_cast<int>(detectionMat.at<float>(i, 5) * frame.cols);
            int yRightTop = static_cast<int>(detectionMat.at<float>(i, 6) * frame.rows);
            Rect object(xLeftBottom, yLeftBottom,
                        xRightTop - xLeftBottom,
                        yRightTop - yLeftBottom);
            face_area = frame(object);                   // crop the face region
            rectangle(frame, object, Scalar(0, 255, 0)); // draw the bounding box
        }
    }
    bool found = !face_area.empty();
    if (found) {
        imwrite(result_path, face_area); // write out the cropped face
    }

    env->ReleaseStringUTFChars(model_binary, model_binary_path);
    env->ReleaseStringUTFChars(model_desc, model_desc_path);
    env->ReleaseStringUTFChars(checkPath, check_path);
    env->ReleaseStringUTFChars(resultPath, result_path);
    return found;
}

  • Converting between Bitmap and Mat

/**
 * Convert a Bitmap into a cv::Mat that OpenCV can work with.
 *
 * @param env
 * @param mat
 * @param bitmap
 */

void bitmap2Mat(JNIEnv *env, Mat &mat, jobject bitmap) {
    // Mat type CV_8UC4 maps to Bitmap ARGB_8888, and CV_8UC2 maps to Bitmap RGB_565
    // Read the bitmap info
    AndroidBitmapInfo info;
    void *pixels;
    try {
        // AndroidBitmap_getInfo(env, bitmap, &info);
        // Lock the Bitmap pixel buffer
        // AndroidBitmap_lockPixels(env, bitmap, &pixels);
        CV_Assert(AndroidBitmap_getInfo(env, bitmap, &info) >= 0);
        CV_Assert(info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 ||
                  info.format == ANDROID_BITMAP_FORMAT_RGB_565);
        CV_Assert(AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0);
        CV_Assert(pixels);
        // Allocate the output mat with the bitmap's size; it is always 4-channel
        mat.create(info.height, info.width, CV_8UC4);
        if (info.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
            // RGBA_8888 corresponds to CV_8UC4
            Mat temp(info.height, info.width, CV_8UC4, pixels);
            // Copy the pixel data into mat
            temp.copyTo(mat);
        } else if (info.format == ANDROID_BITMAP_FORMAT_RGB_565) {
            // RGB_565 corresponds to CV_8UC2
            Mat temp(info.height, info.width, CV_8UC2, pixels);
            // mat is CV_8UC4, so expand the CV_8UC2 data to 4 channels
            cvtColor(temp, mat, COLOR_BGR5652BGRA);
        }
        // Unlock the Bitmap pixel buffer
        AndroidBitmap_unlockPixels(env, bitmap);
        return;
    } catch (const cv::Exception &e) {
        AndroidBitmap_unlockPixels(env, bitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, e.what());
        return;
    } catch (...) {
        AndroidBitmap_unlockPixels(env, bitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, "Unknown exception in JNI code {nBitmapToMat}");
        return;
    }
}

/**
 * Convert a Mat back into a Bitmap.
 *
 * @param env
 * @param mat
 * @param bitmap
 */

void mat2Bitmap(JNIEnv *env, Mat mat, jobject bitmap) {
    // Mat type CV_8UC4 maps to Bitmap ARGB_8888, and CV_8UC2 maps to Bitmap RGB_565
    // Read the bitmap info
    AndroidBitmapInfo info;
    void *pixels;
    try {
        // AndroidBitmap_getInfo(env, bitmap, &info);
        // Lock the Bitmap pixel buffer
        // AndroidBitmap_lockPixels(env, bitmap, &pixels);
        CV_Assert(AndroidBitmap_getInfo(env, bitmap, &info) >= 0);
        CV_Assert(info.format == ANDROID_BITMAP_FORMAT_RGBA_8888 ||
                  info.format == ANDROID_BITMAP_FORMAT_RGB_565);
        CV_Assert(mat.dims == 2 && info.height == (uint32_t) mat.rows &&
                  info.width == (uint32_t) mat.cols);
        CV_Assert(mat.type() == CV_8UC1 || mat.type() == CV_8UC3 || mat.type() == CV_8UC4);
        CV_Assert(AndroidBitmap_lockPixels(env, bitmap, &pixels) >= 0);
        CV_Assert(pixels);
        if (info.format == ANDROID_BITMAP_FORMAT_RGBA_8888) {
            // The target bitmap buffer is CV_8UC4
            Mat temp(info.height, info.width, CV_8UC4, pixels);
            if (mat.type() == CV_8UC4) {
                mat.copyTo(temp);
            } else if (mat.type() == CV_8UC2) {
                cvtColor(mat, temp, COLOR_BGR5652BGRA);
            } else if (mat.type() == CV_8UC1) { // grayscale mat
                cvtColor(mat, temp, COLOR_GRAY2BGRA);
            } else if (mat.type() == CV_8UC3) {
                cvtColor(mat, temp, COLOR_RGB2BGRA);
            }
        } else if (info.format == ANDROID_BITMAP_FORMAT_RGB_565) {
            // The target bitmap buffer is CV_8UC2
            Mat temp(info.height, info.width, CV_8UC2, pixels);
            if (mat.type() == CV_8UC4) {
                cvtColor(mat, temp, COLOR_BGRA2BGR565);
            } else if (mat.type() == CV_8UC2) {
                mat.copyTo(temp);
            } else if (mat.type() == CV_8UC1) { // grayscale mat
                cvtColor(mat, temp, COLOR_GRAY2BGR565);
            } else if (mat.type() == CV_8UC3) {
                cvtColor(mat, temp, COLOR_RGB2BGR565);
            }
        }
        // Unlock the Bitmap pixel buffer
        AndroidBitmap_unlockPixels(env, bitmap);
        return;
    } catch (const cv::Exception &e) {
        AndroidBitmap_unlockPixels(env, bitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, e.what());
        return;
    } catch (...) {
        AndroidBitmap_unlockPixels(env, bitmap);
        jclass je = env->FindClass("java/lang/Exception");
        env->ThrowNew(je, "Unknown exception in JNI code {nMatToBitmap}");
        return;
    }
}
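
Because bitmap2Mat only accepts ARGB_8888 or RGB_565 bitmaps (and CV_8UC4 corresponds to ARGB_8888), it can help to normalize the Bitmap config on the Java side before crossing into native code. A small sketch; copyIfNeeded is a name introduced here for illustration and is not part of the original project:

// Hypothetical guard: make sure a Bitmap is ARGB_8888 before passing it to the JNI code,
// since bitmap2Mat asserts on the format and CV_8UC4 maps to ARGB_8888.
private Bitmap copyIfNeeded(Bitmap src) {
    if (src.getConfig() == Bitmap.Config.ARGB_8888) {
        return src;
    }
    return src.copy(Bitmap.Config.ARGB_8888, /* isMutable = */ true);
}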

  • Face detection Activity

package com.hxg.ndkface;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.hardware.Camera;
import android.os.Bundle;

import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.AppCompatImageView;
import androidx.appcompat.widget.AppCompatTextView;
import androidx.arch.core.executor.ArchTaskExecutor;

import com.hxg.ndkface.camera.AutoTexturePreviewView;
import com.hxg.ndkface.manager.CameraPreviewManager;
import com.hxg.ndkface.model.SingleBaseConfig;
import com.hxg.ndkface.utils.CornerUtil;
import com.hxg.ndkface.utils.FileUtils;
import com.tbruyelle.rxpermissions3.RxPermissions;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;

public class MainNorFaceActivity extends AppCompatActivity {

    private Bitmap mFaceBitmap;
    private FaceDetection mFaceDetection;
    private File mCascadeFile;
    private AppCompatTextView mTextView;
    private RxPermissions rxPermissions;
    private AutoTexturePreviewView mAutoCameraPreviewView;

    // Larger frames cost more to process; 640x480 or 1280x720 are also reasonable choices
    private static final int PREFER_WIDTH = SingleBaseConfig.getBaseConfig().getRgbAndNirWidth();
    private static final int PERFER_HEIGH = SingleBaseConfig.getBaseConfig().getRgbAndNirHeight();

    @Override
    public void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        CameraPreviewManager.getInstance().stopPreview();
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        rxPermissions = new RxPermissions(this);
        mTextView = findViewById(R.id.note);
        mAutoCameraPreviewView = findViewById(R.id.auto_camera_preview_view);
        mFaceBitmap = BitmapFactory.decodeResource(getResources(), R.drawable.face);
        copyCaseCadeFile();
        mFaceDetection = new FaceDetection();
        mFaceDetection.loadCascade(mCascadeFile.getAbsolutePath());
    }

    /**
     * Copy the face-detection cascade classifier file to local storage.
     */
    private void copyCaseCadeFile() {
        try {
            // load cascade file from application resources
            InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
            File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
            mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
            if (mCascadeFile.exists()) return;
            FileOutputStream os = new FileOutputStream(mCascadeFile);
            byte[] buffer = new byte[4096];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            is.close();
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();
        CornerUtil.clipViewCircle(mAutoCameraPreviewView);
        rxPermissions.request(
                Manifest.permission.WRITE_EXTERNAL_STORAGE,
                Manifest.permission.READ_EXTERNAL_STORAGE,
                Manifest.permission.CAMERA)
                .subscribe(aBoolean -> {
                    startTestOpenDebugRegisterFunction();
                });
    }

    @SuppressLint("RestrictedApi")
    private void startTestOpenDebugRegisterFunction() {
        CameraPreviewManager.getInstance().setCameraFacing(CameraPreviewManager.CAMERA_FACING_FRONT);
        CameraPreviewManager.getInstance().startPreview(this, mAutoCameraPreviewView,
                PREFER_WIDTH, PERFER_HEIGH, (byte[] data, Camera camera, int width, int height) -> {
                    // Detect the face and save the face feature information
                    // String name = FileUtils.createFile(this) + "/test.png";
                    // int type = mFaceDetection.faceDetectionSaveInfo(name, mFaceBitmap);
                    ArchTaskExecutor.getIOThreadExecutor().execute(() -> {
                        Bitmap bitmap = FileUtils.decodeToBitMap(data, camera);
                        boolean haveFace = mFaceDetection.faceDetection(bitmap);
                        runOnUiThread(() -> {
                            ((AppCompatImageView) findViewById(R.id.tv_img)).setImageBitmap(bitmap);
                        });
                        if (haveFace) {
                            double similarity = mFaceDetection.histogramMatch(mFaceBitmap, bitmap);
                            String str = "Similarity: ";
                            runOnUiThread(() -> {
                                mTextView.setText(str + similarity);
                            });
                        }
                    });
                });
    }
}

  • DNN face detection Activity

package com.hxg.ndkface;

import android.Manifest;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.util.Log;

import androidx.appcompat.app.AppCompatActivity;
import androidx.appcompat.widget.AppCompatImageView;
import androidx.arch.core.executor.ArchTaskExecutor;

import com.hxg.ndkface.utils.FileUtils;
import com.tbruyelle.rxpermissions3.RxPermissions;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;

public class MainDnnFaceActivity extends AppCompatActivity {

    private FaceDetection mFaceDetection;
    private String mModelBinary;
    private String mModelDesc;
    private String mCheckPath;
    private RxPermissions rxPermissions;
    private File mCascadeFile;
    private AppCompatImageView mIvHeader;
    private AppCompatImageView mFace;
    private AppCompatImageView mFace2;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main_dnn_face);
        mIvHeader = findViewById(R.id.iv_header);
        mFace = findViewById(R.id.iv_header_face);
        mFace2 = findViewById(R.id.iv_header_face2);
        mFaceDetection = new FaceDetection();
        copyCaseCadeFileUint8();
        copyCaseCadeFilePbtxt();
        copyCaseCadeFileTest();
        rxPermissions = new RxPermissions(this);
    }

    private void copyCaseCadeFileTest() {
        InputStream is = null;
        FileOutputStream os = null;
        try {
            // Copy the test image from application resources
            is = getResources().openRawResource(R.raw.test1);
            File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
            mCascadeFile = new File(cascadeDir, "test1.jpg");
            if (mCascadeFile.exists()) return;
            os = new FileOutputStream(mCascadeFile);
            byte[] buffer = new byte[1024 * 1024];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            is.close();
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (is != null) {
                    is.close();
                }
                if (os != null) {
                    os.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    private void copyCaseCadeFilePbtxt() {
        InputStream is = null;
        FileOutputStream os = null;
        try {
            // Copy the model description file from application resources
            is = getResources().openRawResource(R.raw.opencv_face_detector);
            File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
            mCascadeFile = new File(cascadeDir, "opencv_face_detector.pbtxt");
            if (mCascadeFile.exists()) return;
            os = new FileOutputStream(mCascadeFile);
            byte[] buffer = new byte[1024 * 1024];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            is.close();
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (is != null) {
                    is.close();
                }
                if (os != null) {
                    os.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    private void copyCaseCadeFileUint8() {
        InputStream is = null;
        FileOutputStream os = null;
        try {
            // Copy the frozen TensorFlow model from application resources
            is = getResources().openRawResource(R.raw.opencv_face_detector_uint8);
            File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
            mCascadeFile = new File(cascadeDir, "opencv_face_detector_uint8.pb");
            if (mCascadeFile.exists()) return;
            os = new FileOutputStream(mCascadeFile);
            byte[] buffer = new byte[1024 * 1024];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            is.close();
            os.close();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            try {
                if (is != null) {
                    is.close();
                }
                if (os != null) {
                    os.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    @Override
    public void onAttachedToWindow() {
        super.onAttachedToWindow();
        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
        mModelBinary = cascadeDir + "/opencv_face_detector_uint8.pb";
        mModelDesc = cascadeDir + "/opencv_face_detector.pbtxt";
        mCheckPath = cascadeDir + "/test1.jpg";
        mIvHeader.setImageBitmap(BitmapFactory.decodeFile(mCheckPath));
        rxPermissions.request(
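                // NOTE: the remainder of this method is not included in the original post.
                // The completion below is an assumption that mirrors MainNorFaceActivity:
                // request the same permissions, then run the DNN detection off the UI thread.
                // The faceDnnDetection Java declaration is the one sketched earlier, and the
                // "result.jpg" output path is hypothetical.
                Manifest.permission.WRITE_EXTERNAL_STORAGE,
                Manifest.permission.READ_EXTERNAL_STORAGE,
                Manifest.permission.CAMERA)
                .subscribe(aBoolean -> {
                    new Thread(() -> {
                        // Hypothetical output path for the cropped face
                        String resultPath = getDir("cascade", Context.MODE_PRIVATE) + "/result.jpg";
                        boolean ok = mFaceDetection.faceDnnDetection(
                                mModelBinary, mModelDesc, mCheckPath, resultPath);
                        if (ok) {
                            Bitmap face = BitmapFactory.decodeFile(resultPath);
                            runOnUiThread(() -> mFace.setImageBitmap(face));
                        }
                    }).start();
                });
    }
}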
