libcurl抓取网页并保存cookie


在Vi编辑器下打开get.cpp

#include <iostream>
#include <string>

#include <curl/curl.h>
using namespace std;

// libcurl write callback: appends one received chunk to the caller's string.
//
// data    - pointer to the chunk delivered by libcurl (not NUL-terminated)
// size    - size of one member (libcurl always passes 1 in practice)
// nmemb   - number of members; payload length is size * nmemb
// strData - destination buffer, installed via CURLOPT_WRITEDATA
//
// Returns the number of bytes consumed. libcurl treats any other return
// value as an error and aborts the transfer.
//
// NOTE: libcurl's write-callback contract passes both size arguments as
// size_t; the original declared nmemb as `int`, a signature mismatch that
// is undefined behavior when installed via CURLOPT_WRITEFUNCTION.
size_t CallBackWrite(const char *data, size_t size, size_t nmemb, std::string *strData)
{
    size_t realSize = size * nmemb;
    strData->append(data, realSize);
    return realSize;
}
int main( int argc, char **argv)
{
    CURLcode res;
    CURL * pCurl;
    string strUrl = "www.baidu.com";
    const char * pszUrl = strUrl.c_str();
    string strCookie;                //存放cookie
    string strRecvData;             //存放抓取到的html网页
    struct curl_slist *cookies;
    struct curl_slist * nc;

    pCurl = curl_easy_init();
    if( pCurl == NULL )
    {
        cout<<"pCurl init failed"<
     
     
      
      data);
         nc = nc->next;
    }
    curl_slist_free_all(cookies);

    cout<<" cookie:    "<
      
      
        < 
        
      
     
     
    
    
   
   
  
  

用 g++ get.cpp -lcurl 编译

执行./a.out
就得到了网页和cookie信息
posted @ 2014-06-13 09:52  SandKing  阅读(28)  评论(0)    收藏  举报  来源