Scraping Pinduoduo product listings, day three

# -*- coding: utf-8 -*-
"""
Project: spider_project
Creator: turing
Create time: 2019-11-07 11:08
IDE: PyCharm
Introduction: CryptUnprotectData
"""
import time
import os
import json
import re
import sys
import csv
import random
import requests
from fake_useragent import UserAgent
from requests.adapters import HTTPAdapter
from requests.exceptions import (ConnectionError, ConnectTimeout,
								 ChunkedEncodingError, ReadTimeout, SSLError)
from json.decoder import JSONDecodeError

# Suppress the InsecureRequestWarning triggered by the verify=False requests below.
requests.packages.urllib3.disable_warnings()

# Configure the shared requests Session: retry failed connections up to
# three times and avoid holding idle connections open.
requests.adapters.DEFAULT_RETRIES = 5
ses = requests.Session()
ses.mount('http://', HTTPAdapter(max_retries=3))
ses.mount('https://', HTTPAdapter(max_retries=3))
ses.keep_alive = False  # NB: Session has no keep_alive attribute; this line has no effect
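

# A hedged alternative (a sketch, not used below): urllib3's Retry policy gives
# finer control than a bare retry count, e.g. exponential backoff and retrying
# only on selected status codes; the values here are illustrative assumptions.
def make_session_with_backoff():
	from urllib3.util.retry import Retry
	s = requests.Session()
	retry = Retry(total=3, backoff_factor=0.5,
				  status_forcelist=[429, 500, 502, 503, 504])
	s.mount('http://', HTTPAdapter(max_retries=retry))
	s.mount('https://', HTTPAdapter(max_retries=retry))
	return s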


def ipp():
	"""Fetch a single proxy address ("host:port") from the liziip proxy API."""
	res = requests.get(url="http://webapi.liziip.cn/userip?appid=7e2ed874980cf44d74dc9a640640fada&diqv=2&geshi=2&num=1&order=568&qc=-1&sheng_id=7%2C11%2C12%2C13%2C14%2C15%2C21%2C24%2C25%2C26%2C27%2C28%2C29%2C30%2C32%2C35%2C36%2C37&user=420&xieyi=1")
	return res.text.strip()
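

# A hedged helper (an assumption, not part of the original flow): verify a
# freshly fetched proxy against httpbin's IP echo before trusting it.
def check_proxy(proxy):
	try:
		r = requests.get('https://httpbin.org/ip', proxies={'https': proxy}, timeout=8)
		return r.ok
	except requests.RequestException:
		return False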

# Return a random Chrome User-Agent string via fake_useragent. If its online
# data source is unreachable, a local copy can be loaded instead, e.g.
# UserAgent(path=os.getcwd() + '/fake_useragent.json').
def get_header():
	ua = UserAgent()
	return ua.chrome  # random Chrome UA (ua.ie, ua.random, ... also available)
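

# A hedged fallback sketch (not used by the script): fake_useragent's data
# download is a known flaky point, so fall back to a static UA string when it
# fails. The single-entry fallback below is illustrative.
def get_header_safe():
	try:
		return UserAgent().chrome
	except Exception:
		return ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
				'(KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36')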


def sousuo(url_, hea, count=0, ip_=''):
	"""Fetch url_ and return [final_url, decoded_html, error_flag].

	hea picks the header set ('pc' or anything else for the UA pool below);
	count tracks retries; ip_ is an optional "host:port" https proxy.
	"""
	print(url_)
	headers_pc = {
		'Accept-Language': 'zh-CN,zh;q=0.9',
		'User-Agent': get_header(),
		'cookie': 'api_uid=rBQpml5cpnxkZgBQCWEiAg==; _nano_fp=XpdJXqgqX5Cql0dal9_Wy4pttcO5PXavI0Sibuj5; ua=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36; webp=1; chat_list_rec_list=chat_list_rec_list_Qvk4ry; msec=1800000; PDDAccessToken=B63WUI4YPGZIPPHFZVB3UFZ2EEQUMWZFJM6PHHSAMNIMFHUAXYPA113a3b3; pdd_user_id=8812576675; pdd_user_uin=LL2G5VDUDT6SL3FFRDUXEPUI3I_GEXDA'
	}
	# NOTE: despite the name, these are desktop-browser User-Agent strings.
	Android_USER_AGENT = [
		"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; AcooBrowser; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
		"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.0; Acoo Browser; SLCC1; .NET CLR 2.0.50727; Media Center PC 5.0; .NET CLR 3.0.04506)",
		"Mozilla/4.0 (compatible; MSIE 7.0; AOL 9.5; AOLBuild 4337.35; Windows NT 5.1; .NET CLR 1.1.4322; .NET CLR 2.0.50727)",
		"Mozilla/5.0 (Windows; U; MSIE 9.0; Windows NT 9.0; en-US)",
		"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 2.0.50727; Media Center PC 6.0)",
		"Mozilla/5.0 (compatible; MSIE 8.0; Windows NT 6.0; Trident/4.0; WOW64; Trident/4.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; .NET CLR 1.0.3705; .NET CLR 1.1.4322)",
		"Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.2; .NET CLR 1.1.4322; .NET CLR 2.0.50727; InfoPath.2; .NET CLR 3.0.04506.30)",
		"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN) AppleWebKit/523.15 (KHTML, like Gecko, Safari/419.3) Arora/0.3 (Change: 287 c9dfb30)",
		"Mozilla/5.0 (X11; U; Linux; en-US) AppleWebKit/527+ (KHTML, like Gecko, Safari/419.3) Arora/0.6",
		"Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.2pre) Gecko/20070215 K-Ninja/2.1.1",
		"Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-CN; rv:1.9) Gecko/20080705 Firefox/3.0 Kapiko/3.0",
		"Mozilla/5.0 (X11; Linux i686; U;) Gecko/20070322 Kazehakase/0.4.5",
		"Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.8) Gecko Fedora/1.9.0.8-1.fc10 Kazehakase/0.5.6",
		"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11",
		"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.20 (KHTML, like Gecko) Chrome/19.0.1036.7 Safari/535.20",
		"Opera/9.80 (Macintosh; Intel Mac OS X 10.6.8; U; fr) Presto/2.9.168 Version/11.52",
		"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11",
		"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER",
		"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; LBBROWSER)",
		"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E; LBBROWSER)",
		"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 LBBROWSER",
		"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
		"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; QQBrowser/7.0.3698.400)",
		"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
		"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; 360SE)",
		"Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)",
		"Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; WOW64; Trident/5.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E)",
		"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.89 Safari/537.1",
		"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.89 Safari/537.1",
		"Mozilla/5.0 (iPad; U; CPU OS 4_2_1 like Mac OS X; zh-cn) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5",
		"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:2.0b13pre) Gecko/20110307 Firefox/4.0b13pre",
		"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:16.0) Gecko/20100101 Firefox/16.0",
		"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11",
		"Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10"
	]

	headers_android = {
		'Upgrade-Insecure-Requests': '1',
		'User-Agent': random.choice(Android_USER_AGENT),
		'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
		'Accept-Encoding': 'gzip, deflate',
		'Accept-Language': 'zh-CN,zh;q=0.9',

	}
	dom = '<!DOCTYPE html><html lang="en"><head><meta charset="UTF-8"><title>Title</title></head><body><p>nothing</p></body></html>'
	if hea == "pc":
		headers = headers_pc
	else:
		headers = headers_android
	try:
		# Fetch the page through the shared session.
		res_content = ses.get(url=url_, headers=headers, stream=True, verify=False, allow_redirects=True, timeout=15,
							  proxies={'https': ip_})
		# requests falls back to ISO-8859-1 when the server sends no charset;
		# in that case, sniff the real encoding from the page content itself.
		if res_content.encoding == 'ISO-8859-1':
			encodings = requests.utils.get_encodings_from_content(res_content.text)
			if encodings:
				encoding = encodings[0]
			else:
				encoding = res_content.apparent_encoding
		else:
			encoding = res_content.encoding
		encode_content = res_content.content.decode(encoding, 'replace').encode('utf-8', 'replace').decode('utf-8')
		if res_content.status_code == 404 or "Bad Request" in res_content.text:
			print("Got a 404/Bad Request page; retrying...")
			count += 1
			if count == 3:
				return ["https://www.nothing.com", dom, 1]
			return sousuo(url_, hea, count, ip_)
		elif len(res_content.text) == 0:
			# Empty body: back off briefly and retry. Counting here too, so an
			# always-empty response cannot recurse forever.
			print(url_, "..................response body is empty.....................")
			count += 1
			if count == 3:
				return ["https://www.nothing.com", dom, 1]
			time.sleep(random.uniform(1.5, 3))
			return sousuo(url_, hea, count, ip_)
		print("res_content.text:", len(res_content.text))
		return [res_content.url, encode_content, 0]
	except (ConnectionError, ReadTimeout, ConnectTimeout, SSLError, ChunkedEncodingError) as e:
		print(str(e), ": ...........request failed............. here:", __file__, sys._getframe().f_lineno)
		count += 1
		if count > 1:
			print("Too many retries; giving up on this URL.")
			time.sleep(2)
			return ["https://www.nothing.com", dom, 1]
		time.sleep(2)
		# Retry with a freshly fetched proxy.
		return sousuo(url_, hea, count, ipp())
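

# A minimal, hedged self-check of the window.rawData extraction used in the
# main block below; the sample HTML is fabricated for illustration (real
# Pinduoduo pages embed a much larger object under the same key).
def _demo_rawdata_extraction():
	sample = '<script>window.rawData={"store":{"goods":{"minGroupPrice":199}}};</script>'
	m = re.compile(r"<script>.*?window.rawData=(.*?}});.*?</script>", re.S).search(sample)
	assert m is not None
	print(json.loads(m.group(1))["store"]["goods"]["minGroupPrice"])  # -> 199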


if __name__ == '__main__':

	# The output CSV ("pdd_goods最终.csv", written in the loop below) has the
	# columns dp_id (shop id), goods_name (product model), goods_id (product
	# name), sp_id (product id), goods_price, goods_num (sales tip),
	# goods_link, mall_id, pic_url; write its header row once with
	# DictWriter.writeheader() when starting a fresh file.

	with open(r"nike AJ数据2020.4.3.csv", 'r', encoding="gb18030", newline='') as csvfile_r:
		reader = csv.reader(csvfile_r)
		next(reader)  # skip the header row
		# Input columns used below (by index): 0 dp_id, 1 goods_name,
		# 2 goods_id, 3 sp_id, 6 goods_link, 7 mall_id, 8 pic_url.
		for index, row in enumerate(list(reader)[:], 1):
			print(f'Item {index}')
			good_url = row[6].strip()
			ip_ = ipp()
			res = sousuo(good_url, "pc", count=0, ip_=ip_)
			try:
				# The product data sits in an inline <script> as window.rawData = {...};
				data1 = re.compile(r"<script>.*?window.rawData=(.*?}});.*?</script>", re.S).search(res[1])
				if data1:
					data = json.loads(data1.group(1))
					print(data1.group(1)[:100])
					# Cache the raw blob once per product id for later inspection.
					if not os.path.isfile(f'll/{row[3].strip()}.text'):
						with open(f'll/{row[3].strip()}.text', 'w', encoding='utf-8') as f:
							json.dump(data, f, ensure_ascii=False)
					real_data_price = ''
					real_xl = ''
					try:
						# Two page variants: goods lives either directly under
						# "store" or nested under "store.initDataObj".
						if 'goods' in data["store"]:
							real_data_price = data["store"]["goods"]['minGroupPrice']
							real_xl = data["store"]["goods"]['sideSalesTip']
						else:
							real_data_price = data["store"]['initDataObj']['goods']['minGroupPrice']
							real_xl = data["store"]['initDataObj']['goods']['sideSalesTip']
						print(real_data_price)
						print(real_xl)
					except KeyError:
						print('Key not found')
					row = {"dp_id": row[0], "goods_name": row[1], "goods_id": row[2], "sp_id": row[3],
						   "goods_price": real_data_price, "goods_num": real_xl, "goods_link": row[6],
						   "mall_id": row[7], "pic_url": row[8]}
					with open("pdd_goods最终.csv", 'a', encoding="gb18030", newline='') as csvfile:
						fieldnames = ['dp_id', 'goods_name', 'goods_id', 'sp_id',
									  'goods_price', 'goods_num', 'goods_link', 'mall_id', 'pic_url']
						writer = csv.DictWriter(csvfile, fieldnames=fieldnames, dialect='excel')
						# writer.writeheader()  # uncomment once for a fresh file
						writer.writerow(row)
			except JSONDecodeError as e:
				print(e)
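
# Note: the slice in `list(reader)[:]` makes partial re-runs easy, e.g.
# `list(reader)[120:]` to resume from item 121 (the per-item index printed
# above); since the output CSV is opened in append mode, resuming will not
# clobber rows already written.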

  
