Scrapy: simulate login with FormRequest, then continue crawling

1. References

https://doc.scrapy.org/en/latest/topics/spiders.html#scrapy.spiders.Spider.start_requests

Automatically submitting the form returned by login.php:

https://doc.scrapy.org/en/latest/topics/request-response.html#using-formrequest-from-response-to-simulate-a-user-login
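
The second link documents FormRequest.from_response(), which pre-populates the fields of the form found in the login page and only overrides the values you pass in. Below is a minimal sketch in the spirit of that docs example; the site, form field names, and the failure check are placeholders to adapt to the actual login page:

import scrapy

class DocsLoginSpider(scrapy.Spider):
    # hypothetical names; adjust the URL and form field names to the real login page
    name = 'docs_login'
    start_urls = ['http://www.example.com/users/login.php']

    def parse(self, response):
        # from_response() copies the <form> fields returned by login.php
        # and only overrides the values supplied in formdata
        return scrapy.FormRequest.from_response(
            response,
            formdata={'username': 'john', 'password': 'secret'},
            callback=self.after_login,
        )

    def after_login(self, response):
        # check for a login failure before continuing to crawl
        if b'authentication failed' in response.body:
            self.logger.error('Login failed')
            return
        # continue with further requests using the authenticated session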

 

2. Simulating login to Xueqiu

# -*- coding: utf-8 -*-
import os
import scrapy
from scrapy.shell import inspect_response

# see the start_requests() section of https://doc.scrapy.org/en/latest/topics/spiders.html

class LoginSpider(scrapy.Spider):
    name = 'login'
    allowed_domains = ['xueqiu.com']
    # start_urls = ['http://xueqiu.com/']  #The default implementation generates Request(url, dont_filter=True) for each url in start_urls.
    
    url_login = 'https://xueqiu.com/snowman/login'
    url_somebody = 'https://xueqiu.com/u/6146070786'
    data_dict = {
        'remember_me': 'true',
        # 'username': 'fake',  # a wrong username still returns 200 with {"error_description":"用户名或密码错误","error_uri":"/provider/oauth/token","error_code":"20082"}
        'username': os.getenv('xueqiu_username'),
        'password': os.getenv('xueqiu_password'),
    }
    
    def start_requests(self):
        return [scrapy.FormRequest(url=self.url_login,
                                   headers={'X-Requested-With': 'XMLHttpRequest'},  # without this header the login request gets a 404 and the spider stops, even though the captured page shows a successful login
                                   meta={'proxy': 'http://127.0.0.1:8888'},  # route through the Fiddler proxy; otherwise Fiddler makes responses come back slowly
                                   formdata=self.data_dict,
                                   callback=self.logged_in)]

    def logged_in(self, response):
        # inspect_response(response, self)
        assert os.getenv('xueqiu_nickname') in response.text  # an AssertionError here stops the spider
        return scrapy.Request(self.url_somebody, dont_filter=True, meta={'proxy': 'http://127.0.0.1:8888'})
        
    def parse(self, response):
        # inspect_response(response, self)
        self.log(os.getenv('xueqiu_nickname') in response.text)
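
For reference, the spider reads its credentials from the environment variables used above (xueqiu_username, xueqiu_password, xueqiu_nickname), so they must be set before the crawl starts. A minimal standalone runner sketch, assuming the LoginSpider class above is importable and Fiddler is listening on 127.0.0.1:8888 as configured in meta:

import os
from scrapy.crawler import CrawlerProcess

# placeholder values; use your real Xueqiu account and nickname
os.environ.setdefault('xueqiu_username', 'your-account')
os.environ.setdefault('xueqiu_password', 'your-password')
os.environ.setdefault('xueqiu_nickname', 'your-nickname')

process = CrawlerProcess(settings={'USER_AGENT': 'Mozilla/5.0'})
process.crawl(LoginSpider)  # the LoginSpider class defined above
process.start()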

 

posted @ 2017-12-27 16:14  my8100