CURLcode的定义

经常性遇到libcurl的问题,而且都特别奇怪,记录一下CURLcode的定义:

https://curl.se/libcurl/c/libcurl-errors.html

 

#include <curl/curl.h>

/*
 * CURLcode: return codes for libcurl's easy-interface functions
 * (curl_easy_perform() and friends).
 *
 * Declared as a typedef'd anonymous enum — exactly as in <curl/curl.h> —
 * so that `CURLcode res;` compiles; a bare `enum CURLcode` tag would
 * require writing `enum CURLcode res;` everywhere, which is not how the
 * libcurl API is used.
 *
 * Use curl_easy_strerror() to get a human-readable message for any code.
 * Values marked OBSOLETE are retired and kept only to preserve numbering.
 */
typedef enum
{
    CURLE_OK = 0,
    CURLE_UNSUPPORTED_PROTOCOL, /* 1 */
    CURLE_FAILED_INIT, /* 2 */
    CURLE_URL_MALFORMAT, /* 3 */
    CURLE_OBSOLETE4, /* 4 - NOT USED */
    CURLE_COULDNT_RESOLVE_PROXY, /* 5 */
    CURLE_COULDNT_RESOLVE_HOST, /* 6 */
    CURLE_COULDNT_CONNECT, /* 7 */
    CURLE_FTP_WEIRD_SERVER_REPLY, /* 8 */
    CURLE_REMOTE_ACCESS_DENIED, /* 9 a service was denied by the server
                                        due to lack of access - when login fails
                                        this is not returned. */
    CURLE_OBSOLETE10, /* 10 - NOT USED */
    CURLE_FTP_WEIRD_PASS_REPLY, /* 11 */
    CURLE_OBSOLETE12, /* 12 - NOT USED */
    CURLE_FTP_WEIRD_PASV_REPLY, /* 13 */
    CURLE_FTP_WEIRD_227_FORMAT, /* 14 */
    CURLE_FTP_CANT_GET_HOST, /* 15 */
    CURLE_OBSOLETE16, /* 16 - NOT USED */
    CURLE_FTP_COULDNT_SET_TYPE, /* 17 */
    CURLE_PARTIAL_FILE, /* 18 */
    CURLE_FTP_COULDNT_RETR_FILE, /* 19 */
    CURLE_OBSOLETE20, /* 20 - NOT USED */
    CURLE_QUOTE_ERROR, /* 21 - quote command failure */
    CURLE_HTTP_RETURNED_ERROR, /* 22 */
    CURLE_WRITE_ERROR, /* 23 */
    CURLE_OBSOLETE24, /* 24 - NOT USED */
    CURLE_UPLOAD_FAILED, /* 25 - failed upload "command" */
    CURLE_READ_ERROR, /* 26 - couldn't open/read from file */
    CURLE_OUT_OF_MEMORY, /* 27 */
    /* Note: CURLE_OUT_OF_MEMORY may sometimes indicate a conversion error
               instead of a memory allocation error if CURL_DOES_CONVERSIONS
               is defined
      */
    CURLE_OPERATION_TIMEDOUT, /* 28 - the timeout time was reached */
    CURLE_OBSOLETE29, /* 29 - NOT USED */
    CURLE_FTP_PORT_FAILED, /* 30 - FTP PORT operation failed */
    CURLE_FTP_COULDNT_USE_REST, /* 31 - the REST command failed */
    CURLE_OBSOLETE32, /* 32 - NOT USED */
    CURLE_RANGE_ERROR, /* 33 - RANGE "command" didn't work */
    CURLE_HTTP_POST_ERROR, /* 34 */
    CURLE_SSL_CONNECT_ERROR, /* 35 - wrong when connecting with SSL */
    CURLE_BAD_DOWNLOAD_RESUME, /* 36 - couldn't resume download */
    CURLE_FILE_COULDNT_READ_FILE, /* 37 */
    CURLE_LDAP_CANNOT_BIND, /* 38 */
    CURLE_LDAP_SEARCH_FAILED, /* 39 */
    CURLE_OBSOLETE40, /* 40 - NOT USED */
    CURLE_FUNCTION_NOT_FOUND, /* 41 */
    CURLE_ABORTED_BY_CALLBACK, /* 42 */
    CURLE_BAD_FUNCTION_ARGUMENT, /* 43 */
    CURLE_OBSOLETE44, /* 44 - NOT USED */
    CURLE_INTERFACE_FAILED, /* 45 - CURLOPT_INTERFACE failed */
    CURLE_OBSOLETE46, /* 46 - NOT USED */
    CURLE_TOO_MANY_REDIRECTS, /* 47 - catch endless re-direct loops */
    CURLE_UNKNOWN_TELNET_OPTION, /* 48 - User specified an unknown option */
    CURLE_TELNET_OPTION_SYNTAX, /* 49 - Malformed telnet option */
    CURLE_OBSOLETE50, /* 50 - NOT USED */
    CURLE_PEER_FAILED_VERIFICATION, /* 51 - peer's certificate or fingerprint
                                         wasn't verified fine */
    CURLE_GOT_NOTHING, /* 52 - when this is a specific error */
    CURLE_SSL_ENGINE_NOTFOUND, /* 53 - SSL crypto engine not found */
    CURLE_SSL_ENGINE_SETFAILED, /* 54 - can not set SSL crypto engine as
                                        default */
    CURLE_SEND_ERROR, /* 55 - failed sending network data */
    CURLE_RECV_ERROR, /* 56 - failure in receiving network data */
    CURLE_OBSOLETE57, /* 57 - NOT IN USE */
    CURLE_SSL_CERTPROBLEM, /* 58 - problem with the local certificate */
    CURLE_SSL_CIPHER, /* 59 - couldn't use specified cipher */
    CURLE_SSL_CACERT, /* 60 - problem with the CA cert (path?) */
    CURLE_BAD_CONTENT_ENCODING, /* 61 - Unrecognized transfer encoding */
    CURLE_LDAP_INVALID_URL, /* 62 - Invalid LDAP URL */
    CURLE_FILESIZE_EXCEEDED, /* 63 - Maximum file size exceeded */
    CURLE_USE_SSL_FAILED, /* 64 - Requested FTP SSL level failed */
    CURLE_SEND_FAIL_REWIND, /* 65 - Sending the data requires a rewind
                                        that failed */
    CURLE_SSL_ENGINE_INITFAILED, /* 66 - failed to initialise ENGINE */
    CURLE_LOGIN_DENIED, /* 67 - user, password or similar was not
                                        accepted and we failed to login */
    CURLE_TFTP_NOTFOUND, /* 68 - file not found on server */
    CURLE_TFTP_PERM, /* 69 - permission problem on server */
    CURLE_REMOTE_DISK_FULL, /* 70 - out of disk space on server */
    CURLE_TFTP_ILLEGAL, /* 71 - Illegal TFTP operation */
    CURLE_TFTP_UNKNOWNID, /* 72 - Unknown transfer ID */
    CURLE_REMOTE_FILE_EXISTS, /* 73 - File already exists */
    CURLE_TFTP_NOSUCHUSER, /* 74 - No such user */
    CURLE_CONV_FAILED, /* 75 - conversion failed */
    CURLE_CONV_REQD, /* 76 - caller must register conversion
                                        callbacks using curl_easy_setopt options
                                        CURLOPT_CONV_FROM_NETWORK_FUNCTION,
                                        CURLOPT_CONV_TO_NETWORK_FUNCTION, and
                                        CURLOPT_CONV_FROM_UTF8_FUNCTION */
    CURLE_SSL_CACERT_BADFILE, /* 77 - could not load CACERT file, missing
                                        or wrong format */
    CURLE_REMOTE_FILE_NOT_FOUND, /* 78 - remote file not found */
    CURLE_SSH, /* 79 - error from the SSH layer, somewhat
                                        generic so the error message will be of
                                        interest when this has happened */

    CURLE_SSL_SHUTDOWN_FAILED, /* 80 - Failed to shut down the SSL
                                        connection */
    CURLE_AGAIN, /* 81 - socket is not ready for send/recv,
                                        wait till it's ready and try again (Added
                                        in 7.18.2) */
    CURLE_SSL_CRL_BADFILE, /* 82 - could not load CRL file, missing or
                                        wrong format (Added in 7.19.0) */
    CURLE_SSL_ISSUER_ERROR, /* 83 - Issuer check failed.  (Added in
                                        7.19.0) */
    CURL_LAST /* never use! - sentinel: one past the highest valid code */
} CURLcode;

  • 1
    点赞
  • 4
    收藏
    觉得还不错? 一键收藏
  • 0
    评论
### 回答1: 好的,下面是一个使用 C 语言写的爬虫的示例代码。这个爬虫可以在命令行中输入网站地址,然后爬取该网站上的图片,并将图片保存在 C 盘中。 #include <stdio.h> #include <stdlib.h> #include <string.h> #include <curl/curl.h> // 需要安装 libcurl 库 // 回调函数,用于将爬取到的图片写入文件 size_t write_data(void *ptr, size_t size, size_t nmemb, FILE *stream) { size_t written = fwrite(ptr, size, nmemb, stream); return written; } int main(void) { char url[100]; // 存储网站地址 char filename[100]; // 存储图片文件名 int i = 0; printf("请输入网站地址:"); scanf("%s", url); CURL *curl; // 初始化 CURL 结构体 CURLcode res; curl = curl_easy_init(); // 初始化 CURL 库 if (curl) { curl_easy_setopt(curl, CURLOPT_URL, url); // 设置网站地址 curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data); // 设置回调函数 // 循环爬取网站上的图片 while (1) { sprintf(filename, "C:/%d.jpg", i++); // 设置图片文件名 FILE *fp = fopen(filename, "wb"); // 以二进制写入模式打开文件 if (fp == NULL) { printf("无法打开文件\n"); return 1; } curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp); // 设置写入文件的句柄 res = curl_easy_perform(curl); // 执行爬取 if (res != CURLE_OK) { // 如果发生错误 ### 回答2: 在C语言中实现一个爬虫可以较为复杂,但是可以通过库的引入来简化开发过程。以下是一个简单的C语言爬虫示例,可以在窗口界面上输入网站地址,然后通过爬取网站上的图片,并将其保存在C盘中: ```c #include <stdio.h> #include <stdlib.h> #include <string.h> #include <curl/curl.h> // 定义回调函数用于处理爬取到的数据 size_t write_data(void *ptr, size_t size, size_t nmemb, FILE *stream) { return fwrite(ptr, size, nmemb, stream); } int main() { CURL *curl; CURLcode res; char url[100]; char filename[50] = "C:\\"; char filepath[100]; // 初始化libcurl curl_global_init(CURL_GLOBAL_DEFAULT); curl = curl_easy_init(); if (curl) { // 获取用户输入的网站URL printf("请输入需要爬取的网站地址:"); scanf("%s", url); // 设置URL curl_easy_setopt(curl, CURLOPT_URL, url); // 将爬取到的数据写入文件 sprintf(filepath, "%s\\%s.html", filename, url); FILE *file = fopen(filepath, "w"); curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_data); curl_easy_setopt(curl, CURLOPT_WRITEDATA, file); // 执行爬取 res = curl_easy_perform(curl); if (res != CURLE_OK) { fprintf(stderr, "curl_easy_perform() failed: %s\n", curl_easy_strerror(res)); } // 关闭文件和连接 fclose(file); 
curl_easy_cleanup(curl); curl_global_cleanup(); } printf("已爬取网站内容并保存在文件中。\n"); return 0; } ``` 请注意,上述代码需要安装libcurl库来进行编译。并且只是简单实现爬取网站内容并保存在文件中,尚未包含解析网页中的图片链接以及下载图片保存到C盘的功能。如需完整实现这些功能,需要进一步解析HTML页面,并通过相应的HTTP请求获取指定图片并保存。这超出了300字的篇幅,代码也会更加复杂。希望上述示例能够帮助您理解如何用C语言实现一个简单的爬虫。

“相关推荐”对你有帮助么?

  • 非常没帮助
  • 没帮助
  • 一般
  • 有帮助
  • 非常有帮助
提交
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值