import random
import time

import requests

from libs.proxy import Proxy
from log.print_log import PrintLog


class LiveCommodityDetailV1:
    @staticmethod
    def get_data(product_id):
        # Build the static item detail URL for the given product id.
        url = 'http://ec.snssdk.com/product/lubanajaxstaticitem?id=' + product_id \
              + '&page_id=&scope_type=5&item_id=&b_type_new=0'

        # Rotate through a small pool of desktop and mobile User-Agent strings.
        user_agent_list = [
            'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.81 Safari/537.36 SE 2.X MetaSr 1.0',
            'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3908.2 Mobile Safari/537.36',
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.116',
        ]
        headers = {
            'Host': 'ec.snssdk.com',
            'Connection': 'keep-alive',
            'Cache-Control': 'max-age=0',
            'User-Agent': random.choice(user_agent_list),
            'Referer': 'https://haohuo.jinritemai.com/',
            'Accept': 'application/json, text/plain, */*',
        }

        # Keep retrying with a fresh proxy until a non-empty response body is received.
        while True:
            proxy = Proxy.get()
            proxies = {
                "http": "http://" + proxy,
                "https": "http://" + proxy
            }
            try:
                response = requests.get(
                    url,
                    headers=headers,
                    proxies=proxies,
                    timeout=10
                )
                if response.text:
                    break
                else:
                    print(response)
                    print('Crawl failed: empty HTTP response!')
                    time.sleep(1)
            except requests.exceptions.ProxyError as e:
                PrintLog.print(
                    time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                    + ' Proxy expired! ' + str(e) + '\n'
                    + product_id + '\n'
                    + Proxy.proxy_info
                )
                # Drop the dead proxy so it is not handed out again.
                Proxy.del_proxy(proxy)
            except requests.exceptions.ConnectTimeout as e:
                PrintLog.print(
                    time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                    + ' ConnectTimeout! ' + str(e) + '\n'
                    + product_id + '\n'
                    + Proxy.proxy_info
                )
                Proxy.del_proxy(proxy)
            except Exception as e:
                PrintLog.print(
                    time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
                    + ' Request raised an exception! ' + str(e) + '\n'
                    + product_id + '\n'
                    + Proxy.proxy_info
                )

        return response.text
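

# Usage sketch, not part of the original module: assumes the script is run from the
# project root so that libs.proxy.Proxy and log.print_log.PrintLog resolve, and that
# the proxy pool is already populated. The product id below is a hypothetical
# placeholder for illustration only, not a real item id from the source.
if __name__ == '__main__':
    detail_text = LiveCommodityDetailV1.get_data('1234567890')  # hypothetical id
    PrintLog.print(detail_text)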