1.urlopen函数

from urllib import request

# urlopen() sends a GET request and returns an http.client.HTTPResponse.
resp = request.urlopen('http://www.google.com')
# Bug fix: the original line was missing the closing parenthesis on print().
# readline() returns the first line of the response body as bytes.
print(resp.readline())

2.urlretrieve函数

from urllib import request

# urlretrieve() downloads the resource at the URL straight into a local file.
img_url = 'http://www.huacolor.com/article/UploadPic/2016-8/201684181124814.jpg'
request.urlretrieve(img_url, '鲁班.jpg')

3.urlencode函数

 

from urllib import parse

# urlencode() turns a dict into an application/x-www-form-urlencoded query
# string: non-ASCII characters are percent-encoded, spaces become '+'.
params = dict(name='张三', age=18, greet='hello world')
result = parse.urlencode(params)
print(result)

4.request.Request类

 1 from urllib import request
 2 
 3 url = 'https://www.lagou.com/jobs/list_python?labelWords=&fromSearch=true&suginput='
 4 headers = {
 5     'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36'
 6 }
 7 req = request.Request(url,headers=headers)
 8 resp = request.urlopen(req)
 9 print(resp.read())
10 
from urllib import request
from urllib import parse

# NOTE(review): the URL points at douban's movie chart while the payload
# ('first'/'pn'/'kd') looks like lagou's positionAjax form — confirm the
# intended endpoint before relying on this example.
url = 'https://movie.douban.com/chart'

headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36',
    'Referer': 'https://movie.douban.com/chart',
}
data = {
    'first': 'true',
    'pn': 1,
    'kd': 'python',
}

# A POST body must be bytes: urlencode() serialises the form fields to a
# str, then encode('utf-8') converts it.
form_body = parse.urlencode(data).encode('utf-8')
req = request.Request(url, headers=headers, data=form_body, method='POST')
resp = request.urlopen(req)
print(resp.read().decode('utf-8'))

5.ProxyHandler处理器

 1 from urllib import request
 2 
 3 #没有使用代理的
 4 url = 'http://httpbin.org/ip'
 5 resp = request.urlopen(url)
 6 print(resp.read())
 7 
 8 #使用代理的
 9 from urllib import request
10 #1.使用PronxyHandler,传入代理构建一个handler
11 url = 'http://httpbin.org/ip'
12 handler = request.ProxyHandler({'http':'60.179.237.106:6666'})
13 #2.使用上面创建的handler构建一个opener
14 opener = request.build_opener(handler)
15 
16 #3.使用open去发送一个请求
17 resp = opener.open(url)
18 print(resp.read())
19 '''
20 1:代理的原理:在请求目标网站之前,先请求服务器,然后让代理服务器代理服务器去请求目的网站,
21     代理服务器达到目的网站数据后,再转发给我们代码。
22 2:http://httpbin.org:这个网站可以方便的查看http请求的一些参数。
23 3:在代码中使用代理:
24         *使用'urllib.request.ProxyHandler',传入一个代理。这个代理是一个字典,字典的key
25         依赖于代理服务器能够接受的类型,一般是'http'或者‘https’,值是‘ip:port'。
26         
27         *使用上一步创建的'handler',以及'request.build_opener'创建一个'opener'对象
28         
29         *使用上一部创建的'opener',调用’open‘函数,发起请求
30 '''

6.cookie库和HTTPCookieProcessor模拟登录

 

 1 from urllib import request
 2 dapeng_url = 'http://www.renren.com/880151247/profile'
 3 headers = {
 4     'User-Agent':'ozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) 
Chrome/68.0.3440.75 Safari/537.36
', 5 # 'Referer':'http://browse.renren.com/s/all?from=opensearch&q=%E5%A4%A7%E9%B9%8F%E8%91%A3%E6%88%90%E9%B9%8F' 6 'Cookie':'anonymid=jl6nqsuu44h803; depovince=GW; _r01_=1; JSESSIONID=abc_hQ0r79a4SbMk3kNvw; ick_login=8189308
    9-8787-4d1b-8d91-093a5608ae53; ick=948391ec-cf0c-4e1a-b488-65a5fdadccec; t=92ad1c02b8570a1c8c691a5d2f4628eb9; soci
    etyguester=92ad1c02b8570a1c8c691a5d2f4628eb9; id=967709729; xnsid=98d2e93c; XNESSESSIONID=1706376df76c; wp_fold=0;
    jebecookies=454f9f2b-7352-46de-a69e-5ee9ffbb1e34|||||; jebe_key=c4f55e14-a6fe-45f6-a80e-13d17265ba82%7C1f4466a4eb
    a75f8ec6ca90b9b13e94ae%7C1535035343188%7C1; BAIDU_SSP_lcr=http://localhost:63342/S1/renren.html?_ijt=3shk5scmj5l77
    d13j4mu526npu
' 7 } 8 req = request.Request(url=dapeng_url,headers=headers) 9 resp = request.urlopen(req) 10 with open('renren.html','w',encoding='utf-8') as fp: 11 ''' 12 write函数必须写入一个str数据类型 13 resp.read()读出来是一个bytes数据类型 14 bytes --> decode --> str 15 str --> encode -->bytes 16 ''' 17 fp.write(resp.read().decode('utf-8'))

7.http.cookiejar模块:

 

 1 from urllib import request
 2 from urllib import parse
 3 from http.cookiejar import CookieJar
 4 
 5 headers ={
 6 'User-Agent':'ozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.75 Safari/537.36'
 7 }
 8 
 9 #1:登录
10 def get_opener():
11     # 1.1创建一个cookiejar对象
12     cookiejar = CookieJar()
13     # 1.2使用cookiejar创建一个HTTPCookieProcessor对象
14     handler = request.HTTPCookieProcessor(cookiejar)
15     #1.3使用上一步创建的handler创建一个opener
16     opener = request.build_opener(handler)
17     return opener
18 
def login_renren(opener):
    """POST the Renren e-mail/password to the login endpoint.

    1.4: the opener's CookieJar captures the session cookies returned by
    this request, so later calls through the same opener stay logged in.
    """
    login_url = 'http://www.renren.com/PLogin.do'
    credentials = {
        'email': '970138074@qq.com',
        'password': 'pythonspider'
    }
    body = parse.urlencode(credentials).encode('utf-8')
    opener.open(request.Request(login_url, data=body, headers=headers))
28 
# 2. Visit the personal profile page
def visit_profile(opener):
    """Fetch the profile page and save it to renren.html.

    Deliberately reuses the SAME opener that performed the login: its
    CookieJar already holds the session cookies, so no new opener is
    needed (a fresh one would be logged out).
    """
    dapeng_url = 'http://www.renren.com/880151247/profile'
    resp = opener.open(request.Request(dapeng_url, headers=headers))
    with open('renren.html', 'w', encoding='utf-8') as f:
        # resp.read() is bytes; decode to str before writing text.
        f.write(resp.read().decode('utf-8'))
39 
40 
# Driver: build one cookie-aware opener, log in with it, then fetch the
# profile page through the same opener so the login cookies are sent.
opener = get_opener()
login_renren(opener)
visit_profile(opener)

 

 
 
 
 
 
posted on 2018-08-24 01:03  铁子  阅读(699)  评论(0编辑  收藏  举报