ffmpeg Componentized Re-encapsulation (Part 5)

6 Saving an AVFrame as an Image

/*
 * When done with the returned AVFrame, the caller must free it
 * with av_frame_free().
 */
AVFrame *ConvertVideoFrameTo(AVFrame *piFrame,uint32_t nWidth, uint32_t nHeight,enum AVPixelFormat sPixFmt)
{
    int nRet;
    if(NULL == piFrame)
        return NULL;

    VideoFrameBaseParam sSrcParam = CreateVideoFrameBaseParamByAVFrame(piFrame);
    VideoFrameBaseParam sDstParam = CreateVideoFrameBaseParam(nWidth,nHeight,sPixFmt);
    // Fields left as 0/unset in sDstParam are presumably filled in from
    // sSrcParam, which is why callers may pass 0 for nWidth/nHeight.
    sDstParam = GetRealVideoParamByFirst(sSrcParam,sDstParam);
    VideoFrameScaler *piScale = CreateVideoFrameScaler(sSrcParam,sDstParam);
    if(piScale)
    {
        AVFrame *piDstFrame = CreateVideoFrameAndBuffered(sDstParam);
        nRet = ScaleVideo(piScale,piFrame,piDstFrame);
        FreeVideoFrameScaler(&piScale);
        if(nRet <= 0)
        {
            av_frame_free(&piDstFrame);
            return NULL;
        }
        return piDstFrame;
    }

    return NULL;
}

/*
 * This function does not free piFrame; the caller remains responsible for it.
 *
 * Supported: *.jpg *.jpeg *.png *.tiff
 *
 * Other formats are untested; feel free to try them.
 */
int SaveAVFrameToImageFile(AVFrame *piFrame, const std::string &strFilePath)
{
    if(NULL == piFrame || strFilePath.empty())
        return -1;

    if(piFrame->format != AV_PIX_FMT_YUV420P)
    {
        AVFrame *piYUV420Frame = ConvertVideoFrameTo(piFrame,0,0,AV_PIX_FMT_YUV420P);
        int nRet = SaveAVFrameToImageFile(piYUV420Frame,strFilePath);
        av_frame_free(&piYUV420Frame);
        return nRet;
    }

    OutputFileHandler *piOutput = OutputFileHandler::CreateOutputFileHandler(strFilePath,NULL);
    if(nullptr == piOutput)
        return -2;

    OutputStreamHandler *piOutStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);

    EncoderSetting fnSetting = [piFrame](AVCodecContext *piCodecCxt){
        piCodecCxt->width = piFrame->width;
        piCodecCxt->height = piFrame->height;
        piCodecCxt->pix_fmt = AV_PIX_FMT_YUVJ420P;   // full-range 4:2:0 variant traditionally used by the JPEG encoder
        piCodecCxt->time_base = {1, 25};
    };

    EncodeHandler *piEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->video_codec,
                                                                  piOutput->GetAVFormatContext(),NULL,fnSetting);

    // false: ownership of piFrame stays with the caller (see the notes below)
    FrameListWrapperHandler *piFrameListHandler = new FrameListWrapperHandler(false);
    piFrameListHandler->AddFrame(piFrame);
    piFrameListHandler->AddFinished();   // no more frames will be added

    piOutStream->AddSourceReadPoint(piEncoder->GetOutputReadPoints()[0]);
    piEncoder->AddSourceReadPoint(piFrameListHandler->GetOutputReadPoints()[0]);

    piOutput->StartSaveToFile();

    delete piOutput;
    delete piOutStream;
    delete piEncoder;
    delete piFrameListHandler;
    return 0;
}

        As you can see, saving an image is no different from saving any other common format; the differences are only in the details.

        This example uses the FrameListWrapperHandler type, which wraps AVFrames into a Handler. It is similar in function to VideoSimulateHandler, except that here every AVFrame is added manually. That makes it interoperable with all the other Handlers.

        Note: the FrameListWrapperHandler constructor takes a parameter indicating whether ownership of the added AVFrames is transferred to the FrameListWrapperHandler. If true, the FrameListWrapperHandler frees each AVFrame after using it; if false, the user is responsible for freeing the AVFrames passed in.

        Note: AddFrame() does not check the format of the AVFrame it receives; keep this in mind.

        Note: as long as AddFinished() has not been called, more data is assumed to be coming, so even when the frame list wrapper is empty it returns AE_READ_AGAIN. Once AddFinished() has been called, no more data is expected, and after all the added frames have been consumed it returns AE_READ_EOF.
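
        To make the last two notes concrete, below is a minimal consumer-side sketch, assembled only from the types and return codes shown in this article; it has not been checked against the library's actual headers, so treat it as pseudocode.

// A minimal sketch, assuming the API behaves as the notes above describe.
// piFrame is some AVFrame the caller already owns.
FrameListWrapperHandler *piList = new FrameListWrapperHandler(false); // false: we keep ownership of the frames we add
piList->AddFrame(piFrame);   // note: the frame's format is NOT validated here
piList->AddFinished();       // promise that no more frames will be added

ReadPoint *piPoint = piList->GetOutputReadPoints()[0];
while(1)
{
    AVFrame *piOutFrame;
    AVPacket *piOutPacket;
    AVIOHandlerError nErr = piPoint->ReadData(piOutFrame,piOutPacket);
    if(nErr == AE_READ_AGAIN)
        continue;            // nothing available yet, but more may still arrive
    else if(nErr == AE_READ_EOF)
        break;               // AddFinished() was called and every added frame is consumed
    else if(nErr != AE_NONE)
        break;               // any other error

    // ... use piOutFrame ...
}
delete piList;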

7 Reading an Image File into an AVFrame

AVFrame *ReadImageFileToAVFrame(const std::string &strFileName)
{
    AVIOHandlerError nErr;

    InputFileHandler *inputFile = InputFileHandler::CreateInputFileHandlerByFile(strFileName);
    if(nullptr != inputFile)
    {
        InputStreamHandler *inputstream = InputStreamHandler::CreateInputStreamHandlerFromFileByMediaType(inputFile,AVMEDIA_TYPE_VIDEO);
        DecodeHandler *piDecoder = DecodeHandler::CreateDecodeHandlerFromAVStream(inputstream->GetAVStream(),NULL);

        piDecoder->AddSourceReadPoint(inputstream->GetOutputReadPoints()[0]);
        ReadPoint *piPoint = piDecoder->GetOutputReadPoints()[0];

        AVFrame *piDstFrame = NULL;
        while(1)
        {
            AVFrame *piFrame;
            AVPacket *piPacket;
            nErr = piPoint->ReadData(piFrame,piPacket);
            if(nErr == AE_READ_AGAIN)
                continue;
            else if(nErr != AE_NONE)
                break;

            // Deep-copy the first decoded frame so it stays valid after the
            // handlers are destroyed below.
            VideoFrameBaseParam sParam = CreateVideoFrameBaseParam(piFrame->width,piFrame->height,(enum AVPixelFormat)piFrame->format);
            piDstFrame = CreateVideoFrameAndBuffered(sParam);
            av_frame_copy(piDstFrame,piFrame);
            break;
        }

        // Release the handlers on both the success and the failure path.
        delete piDecoder;
        delete inputstream;
        delete inputFile;

        return piDstFrame;
    }

    return NULL;
}

        This is the inverse operation of the previous example.

        With the previous example and this one in hand, reading GIFs and generating GIFs becomes simple; just follow the same pattern, as sketched below.
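
        For example, a hypothetical ReadGifFileToAVFrames could collect every decoded frame instead of returning only the first one. The sketch below is assembled from the same calls as the example above (ReadGifFileToAVFrames and vFrames are illustrative names, and the code is untested against the library):

/*
 * Hypothetical sketch: read every frame of a GIF. The caller must free
 * each returned AVFrame with av_frame_free().
 */
std::vector<AVFrame*> ReadGifFileToAVFrames(const std::string &strFileName)
{
    std::vector<AVFrame*> vFrames;
    InputFileHandler *inputFile = InputFileHandler::CreateInputFileHandlerByFile(strFileName);
    if(nullptr == inputFile)
        return vFrames;

    InputStreamHandler *inputstream = InputStreamHandler::CreateInputStreamHandlerFromFileByMediaType(inputFile,AVMEDIA_TYPE_VIDEO);
    DecodeHandler *piDecoder = DecodeHandler::CreateDecodeHandlerFromAVStream(inputstream->GetAVStream(),NULL);
    piDecoder->AddSourceReadPoint(inputstream->GetOutputReadPoints()[0]);
    ReadPoint *piPoint = piDecoder->GetOutputReadPoints()[0];

    while(1)
    {
        AVFrame *piFrame;
        AVPacket *piPacket;
        AVIOHandlerError nErr = piPoint->ReadData(piFrame,piPacket);
        if(nErr == AE_READ_AGAIN)
            continue;
        else if(nErr != AE_NONE)   // AE_READ_EOF ends the loop
            break;

        // Deep-copy each decoded frame so it outlives the decoder.
        VideoFrameBaseParam sParam = CreateVideoFrameBaseParam(piFrame->width,piFrame->height,(enum AVPixelFormat)piFrame->format);
        AVFrame *piDstFrame = CreateVideoFrameAndBuffered(sParam);
        av_frame_copy(piDstFrame,piFrame);
        vFrames.push_back(piDstFrame);
    }

    delete piDecoder;
    delete inputstream;
    delete inputFile;
    return vFrames;
}

        Generating a GIF would go the other way: add the frames to a FrameListWrapperHandler and save through an OutputFileHandler whose file name ends in .gif, exactly as in SaveAVFrameToImageFile.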

8 Using ReadPoint

void SaveVideoFrameToJPGFile()
{
    VideoSimulateHandler *piVideoSimulator = new VideoSimulateHandler(640,480);
    piVideoSimulator->SetFunctionForEOF([](int nIndex,AVFrame *piFrame){return nIndex > 5;});   // signal EOF once the frame index exceeds 5
    
    ReadPoint *piPoint = piVideoSimulator->GetOutputReadPoints()[0];
    AVIOHandlerError nErr;
    
    int index = 0;
    while(1)
    {
        AVFrame *piFrame;
        AVPacket *piPacket;
        nErr = piPoint->ReadData(piFrame,piPacket);
        if(nErr == AE_READ_AGAIN)
            continue;
        else if(nErr != AE_NONE)
            break;
        
        index++;
        std::string str = "frame" + std::to_string(index) + ".jpg";
        SaveAVFrameToImageFile(piFrame,str);
    }
    
    delete piVideoSimulator;
}

        This example simply shows how to use a ReadPoint: the Handlers read from ReadPoints internally, and we can read from one directly in exactly the same way.

9 Transport

void Transport()
{
    OutputFileHandler *piOutput = OutputFileHandler::CreateOutputFileHandler("sio.avi");
    if(piOutput)
    {
        InputFileHandler *inputFile = InputFileHandler::CreateInputFileHandlerByFile("03.mp4");
        InputStreamHandler *inputAudioStream = InputStreamHandler::CreateInputStreamHandlerFromFileByMediaType(inputFile,AVMEDIA_TYPE_AUDIO);
        InputStreamHandler *inputVideoStream = InputStreamHandler::CreateInputStreamHandlerFromFileByMediaType(inputFile,AVMEDIA_TYPE_VIDEO);
        
        DecodeHandler *piAudioDecoder = DecodeHandler::CreateDecodeHandlerFromAVStream(inputAudioStream->GetAVStream(),NULL);
        DecodeHandler *piVideoDecoder = DecodeHandler::CreateDecodeHandlerFromAVStream(inputVideoStream->GetAVStream(),NULL);
        
        EncodeHandler *piAudioEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->audio_codec,
                                                                           piOutput->GetAVFormatContext(),NULL,NULL);
        
        EncoderSetting setting = [=](AVCodecContext *piCodecCxt){
            piCodecCxt->framerate = {40,1};
            piCodecCxt->time_base = {1,40};
            piCodecCxt->bit_rate = 5*1024*1024;
            piCodecCxt->width = 640;
            piCodecCxt->height = 480;
        };
        EncodeHandler *piVideoEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->video_codec,
                                                                           piOutput->GetAVFormatContext(),NULL,setting);
        
        AudioResampleHandler *piResampler = new AudioResampleHandler(piAudioEncoder->GetAudioFrameBaseParam());
        
        VideoScaleHandler *piScaler = new VideoScaleHandler(piVideoEncoder->GetVideoFrameBaseParam());
        
        OutputStreamHandler *piAudioOutStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);
        OutputStreamHandler *piVideoOutStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);
        
        FilterGraphHandler *piFilter = new FilterGraphHandler;
        piFilter->CreateFilterGraph();
        piFilter->AddVideoBufferSource("in",CreateBufferSrcParametersByDecoder(piVideoDecoder->GetCodecContext(),
                                                                               inputVideoStream->GetAVStream()->time_base));
        piFilter->AddVideoBufferSink("out");
        
        std::stringstream ss;
        ss << "[in]fps=fps=" << av_q2d(piVideoEncoder->GetAVCodecContext()->framerate) << "[out]";
        std::cout << ss.str() << std::endl;
        std::cout << piFilter->GraphParseAndConfig(ss.str()) << std::endl;
        
        
        piAudioOutStream->AddSourceReadPoint(piAudioEncoder->GetOutputReadPoints()[0]);
        piAudioEncoder->AddSourceReadPoint(piResampler->GetOutputReadPoints()[0]);
        piResampler->AddSourceReadPoint(piAudioDecoder->GetOutputReadPoints()[0]);
        piAudioDecoder->AddSourceReadPoint(inputAudioStream->GetOutputReadPoints()[0]);
        
        piVideoOutStream->AddSourceReadPoint(piVideoEncoder->GetOutputReadPoints()[0]);
        piVideoEncoder->AddSourceReadPoint(piScaler->GetOutputReadPoints()[0]);
        piScaler->AddSourceReadPoint(piFilter->GetOutputReadPoints()[0]);
        piFilter->AddSourceReadPoint(piVideoDecoder->GetOutputReadPoints()[0]);
        piVideoDecoder->AddSourceReadPoint(inputVideoStream->GetOutputReadPoints()[0]);
        
        AVIOHandler::SetLogSwitch(false);
        piOutput->SetHandlerName("piOutput");
        piAudioOutStream->SetHandlerName("piAudioOutStream");
        piAudioEncoder->SetHandlerName("piAudioEncoder");
        piResampler->SetHandlerName("piResampler");
        piAudioDecoder->SetHandlerName("piAudioDecoder");
        inputAudioStream->SetHandlerName("inputAudioStream");
        piVideoOutStream->SetHandlerName("piVideoOutStream");
        piVideoEncoder->SetHandlerName("piVideoEncoder");
        piScaler->SetHandlerName("piScaler");
        piVideoDecoder->SetHandlerName("piVideoDecoder");
        inputVideoStream->SetHandlerName("inputVideoStream");
        inputFile->SetHandlerName("inputFile");
        piFilter->SetHandlerName("filter");
        
        piOutput->StartSaveToFile();
        
        delete piOutput;
        delete piAudioOutStream;
        delete piVideoOutStream;
        delete piAudioEncoder;
        delete piAudioDecoder;
        delete piVideoEncoder;
        delete piVideoDecoder;
        delete piResampler;
        delete piScaler;
        delete inputAudioStream;
        delete inputVideoStream;
        delete inputFile;
    }
}

        Transcoding a file is the most classic and most common ffmpeg operation. Note the fps filter in the video chain: it converts the decoded frames to the encoder's 40 fps frame rate before scaling and re-encoding.
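
        The AddSourceReadPoint calls wire up the following two chains, with data flowing left to right:

inputAudioStream -> piAudioDecoder -> piResampler -> piAudioEncoder -> piAudioOutStream -> piOutput
inputVideoStream -> piVideoDecoder -> piFilter -> piScaler -> piVideoEncoder -> piVideoOutStream -> piOutput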

10 Cutting and Concatenating

void Concat()
{
    OutputFileHandler *piOutput = OutputFileHandler::CreateOutputFileHandler("concat.mp4");
    if(piOutput)
    {
        OutputStreamHandler *piOutVideoStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);
        OutputStreamHandler *piOutAudioStream = OutputStreamHandler::CreateOutputStreamHandlerToFile(piOutput);
        

        EncoderSetting fnVideoSetting = [=](AVCodecContext *piCodecCxt){
            piCodecCxt->width = 800;
            piCodecCxt->height = 600;
            piCodecCxt->framerate = {40,1};
            piCodecCxt->time_base = {1,40};
            piCodecCxt->bit_rate = 3 * 1024 * 1024;
        };
        EncodeHandler *piVideoEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->video_codec,
                                                                           piOutput->GetAVFormatContext(),NULL,fnVideoSetting);
        EncodeHandler *piAudioEncoder = EncodeHandler::CreateEncodeHandler(piOutput->GetAVFormatContext()->oformat->audio_codec,
                                                                           piOutput->GetAVFormatContext(),NULL,NULL);
        
        InputFileCutterHandler *piCut1 = InputFileCutterHandler::CreateInputFileCutterHandlerByFile("03.mp4");
        ReadPoint *piVideoPoint1 = piCut1->AddCutterByAVMediaType(AVMEDIA_TYPE_VIDEO,"in1v");
        ReadPoint *piAudioPoint1 = piCut1->AddCutterByAVMediaType(AVMEDIA_TYPE_AUDIO,"in1a");
        piCut1->SetRange(60*1000,100*1000);   // keep 60s-100s (arguments appear to be in milliseconds)
        
        InputFileCutterHandler *piCut2 = InputFileCutterHandler::CreateInputFileCutterHandlerByFile("01.mp4");
        ReadPoint *piVideoPoint2 = piCut2->AddCutterByAVMediaType(AVMEDIA_TYPE_VIDEO,"in2v");
        ReadPoint *piAudioPoint2 = piCut2->AddCutterByAVMediaType(AVMEDIA_TYPE_AUDIO,"in2a");
        piCut2->SetRange(140*1000,200*1000);  // keep 140s-200s

        FilterGraphHandler *piFilter1 = new FilterGraphHandler;
        piFilter1->CreateFilterGraph();
        piFilter1->AddVideoBufferSource("in",CreateBufferSrcParametersByDecoder(piCut1->GetDecoderHandlerByReadPoint(piVideoPoint1)->GetCodecContext(),
                                                                               piCut1->GetInputStreamHandlerByReadPoint(piVideoPoint1)->GetAVStream()->time_base));
        piFilter1->AddVideoBufferSink("out");

        std::stringstream ss;
        ss << "[in]fps=fps=" << av_q2d(piVideoEncoder->GetAVCodecContext()->framerate) << "[out]";
        piFilter1->GraphParseAndConfig(ss.str());

        FilterGraphHandler *piFilter2 = new FilterGraphHandler;
        piFilter2->CreateFilterGraph();
        piFilter2->AddVideoBufferSource("in",CreateBufferSrcParametersByDecoder(piCut2->GetDecoderHandlerByReadPoint(piVideoPoint2)->GetCodecContext(),
                                                                               piCut2->GetInputStreamHandlerByReadPoint(piVideoPoint2)->GetAVStream()->time_base));
        piFilter2->AddVideoBufferSink("out");
        piFilter2->GraphParseAndConfig(ss.str());

        VideoScaleHandler *piScaler1 = new VideoScaleHandler(piVideoEncoder->GetVideoFrameBaseParam());
        AudioResampleHandler *piResample1 = new AudioResampleHandler(piAudioEncoder->GetAudioFrameBaseParam());

        VideoScaleHandler *piScaler2 = new VideoScaleHandler(piVideoEncoder->GetVideoFrameBaseParam());
        AudioResampleHandler *piResample2 = new AudioResampleHandler(piAudioEncoder->GetAudioFrameBaseParam());
        
        FrameConcatHandler *piVideoConcat = new FrameConcatHandler;
        FrameConcatHandler *piAudioConcat = new FrameConcatHandler;

        piOutVideoStream->AddSourceReadPoint(piVideoEncoder->GetOutputReadPoints()[0]);
        piOutAudioStream->AddSourceReadPoint(piAudioEncoder->GetOutputReadPoints()[0]);
        piVideoEncoder->AddSourceReadPoint(piVideoConcat->GetOutputReadPoints()[0]);
        piAudioEncoder->AddSourceReadPoint(piAudioConcat->GetOutputReadPoints()[0]);

        piVideoConcat->AddSourceReadPoint(piScaler1->GetOutputReadPoints()[0]);
        piVideoConcat->AddSourceReadPoint(piScaler2->GetOutputReadPoints()[0]);
        piAudioConcat->AddSourceReadPoint(piResample1->GetOutputReadPoints()[0]);
        piAudioConcat->AddSourceReadPoint(piResample2->GetOutputReadPoints()[0]);

        piScaler1->AddSourceReadPoint(piFilter1->GetOutputReadPoints()[0]);
        piFilter1->AddSourceReadPoint(piVideoPoint1);
        piResample1->AddSourceReadPoint(piAudioPoint1);

        piScaler2->AddSourceReadPoint(piFilter2->GetOutputReadPoints()[0]);
        piFilter2->AddSourceReadPoint(piVideoPoint2);
        piResample2->AddSourceReadPoint(piAudioPoint2);
        
        AVIOHandler::SetLogSwitch(true);
        piOutput->SetHandlerName("output");
        piOutAudioStream->SetHandlerName("output_audio_stream");
        piOutVideoStream->SetHandlerName("output_video_stream");
        piVideoEncoder->SetHandlerName("video_encoder");
        piAudioEncoder->SetHandlerName("audio_encoder");
        piVideoConcat->SetHandlerName("videoconcat");
        piAudioConcat->SetHandlerName("audioconcat");
        piFilter1->SetHandlerName("filter1");
        piFilter2->SetHandlerName("filter2");


        piOutput->StartSaveToFile();
        
        delete piOutput;
        delete piOutVideoStream;
        delete piOutAudioStream;
        delete piVideoEncoder;
        delete piAudioEncoder;

        delete piVideoConcat;
        delete piAudioConcat;

        delete piScaler1;
        delete piResample1;
        delete piScaler2;
        delete piResample2;

        delete piFilter1;
        delete piFilter2;
        
        delete piCut1;
        delete piCut2;
    }
}

        This example cuts a segment out of each of two inputs, then concatenates the two segments into a single file.
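
        Since each input contributes one decode chain per media type, extending this to a third clip is just more of the same. A hypothetical sketch that would slot into Concat() above (the file name, range, and variable names are illustrative):

// Hypothetical: append a third segment by repeating the per-input chain.
InputFileCutterHandler *piCut3 = InputFileCutterHandler::CreateInputFileCutterHandlerByFile("02.mp4");
ReadPoint *piVideoPoint3 = piCut3->AddCutterByAVMediaType(AVMEDIA_TYPE_VIDEO,"in3v");
ReadPoint *piAudioPoint3 = piCut3->AddCutterByAVMediaType(AVMEDIA_TYPE_AUDIO,"in3a");
piCut3->SetRange(0,30*1000);

// ...build piFilter3, piScaler3, piResample3 exactly as for the first two inputs...

// Segments are presumably concatenated in the order the read points are added.
piVideoConcat->AddSourceReadPoint(piScaler3->GetOutputReadPoints()[0]);
piAudioConcat->AddSourceReadPoint(piResample3->GetOutputReadPoints()[0]);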
