import requests
import re
import json
from requests.exceptions import RequestException
import time
def get_one_page(url):
    """Fetch *url* and return its HTML body, or None on any failure.

    Sends a desktop-browser User-Agent (the site blocks default
    python-requests clients). Returns None for non-200 responses and
    for any network error, so callers must check before parsing.
    """
    try:
        headers = {
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36"
        }
        # timeout prevents the scraper from hanging forever on a stalled
        # connection; the original call had no timeout at all.
        response = requests.get(url, headers=headers, timeout=10)
        if response.status_code == 200:
            return response.text
        return None
    except RequestException:
        # Best-effort fetch: swallow network errors and signal with None.
        return None
def parse_one_page(html):
    """Yield one dict per movie entry found in a Maoyan board page.

    Each yielded dict carries rank, poster URL, title, actors, release
    date and score (integer part + fraction part concatenated).
    The '演员' field drops the leading '主演：' prefix (3 chars) and the
    '时间' field drops the leading '上映时间：' prefix (5 chars).
    """
    # Raw string so \d is a regex token, not a (deprecated) string escape.
    pattern = re.compile(
        r'<dd>.*?board-index.*?>(\d+)</i>.*?data-src="(.*?)".*?name"><a'
        r'.*?>(.*?)</a>.*?star">(.*?)</p>.*?releasetime">(.*?)</p>'
        r'.*?integer">(.*?)</i>.*?fraction">(.*?)</i>.*?</dd>', re.S)
    # Loop variable renamed: the original `for items in items` shadowed
    # the result list with each tuple.
    for item in re.findall(pattern, html):
        yield {
            '排名': item[0],
            '图片地址': item[1],
            '电影名称': item[2],
            '演员': item[3].strip()[3:],
            '时间': item[4].strip()[5:],
            '评分': item[5] + item[6]
        }
def write_to_file(content):
    """Append *content* to maoyan.txt as one JSON line (UTF-8, non-ASCII kept)."""
    line = json.dumps(content, ensure_ascii=False)
    with open("maoyan.txt", mode='a', encoding='utf-8') as out:
        out.write(line + '\n')
def main(page):
    """Scrape one page of the Maoyan top-100 board and append it to maoyan.txt.

    *page* is the board offset (0, 10, 20, ...). The original code
    accepted *page* but never used it, so every call re-scraped the
    same first page; the board paginates via the ?offset= query param.
    """
    url = "https://maoyan.com/board/4?offset=" + str(page)
    html = get_one_page(url)
    if html is None:
        # Fetch failed (non-200 or network error) — nothing to parse.
        return
    for item in parse_one_page(html):
        print(item)
        write_to_file(item)
if __name__ == '__main__':
    # Walk the ten board pages (offsets 0, 10, ..., 90), pausing one
    # second between requests to stay polite to the server.
    for page_index in range(10):
        main(page=page_index * 10)
        time.sleep(1)