本文主要介绍小程序端的懂车帝二手车数据采集,希望能为大家解决相关编程问题提供一定的参考价值,需要的开发者们可以随小编一起学习!
import datetime
import random
import string
import time
import pymysql
import requests
import json
def mysql_sql(conn, cur):
    """Create the car-data table and the crawled-URL table if they do not exist.

    Args:
        conn: open pymysql connection (used only to commit).
        cur: cursor on `conn` that executes the DDL.
    """
    ddl_statements = (
        # Main data table: one row per used-car listing.
        '''
CREATE TABLE if not exists dcd_xcx_car (
sku_id VARCHAR(200) primary key COMMENT 'id',
car_img_url VARCHAR(2000) COMMENT '车辆照片链接',
car_name VARCHAR(200) COMMENT '汽车发动机名称',
series_name VARCHAR(200) COMMENT '汽车名称',
brand_name VARCHAR(200) comment '品牌名称',
mileage VARCHAR(200) comment '里程数',
first_registration_time VARCHAR(200) comment '汽车第一次注时间',
car_age VARCHAR(200) comment '汽车年龄',
sh_price VARCHAR(200) comment '价格',
title VARCHAR(200) comment '标题',
sub_title VARCHAR(200) comment '副标题',
link_url VARCHAR(2000) comment '详情链接',
car_year VARCHAR(200) comment '汽车生产年份',
car_source_city_name VARCHAR(200) comment '车源城市名称',
fixed_price VARCHAR(200) comment '固定价格',
origin_sh_price VARCHAR(200) comment '原产地价格'
) COMMENT='微信小程序端表数据';
''',
        # Bookkeeping table: every API URL that has already been requested.
        '''
CREATE TABLE if not exists xcx_dcd_url (
url VARCHAR(2000) COMMENT '链接信息'
) COMMENT='异常存储微信小程序端链接';
''',
    )
    for ddl in ddl_statements:
        cur.execute(ddl)
    # One commit covers both CREATE TABLE statements.
    conn.commit()
def generate_random_string(length):
    """Return `length` random ASCII letters, used as a primary-key prefix."""
    # random.choices samples with replacement, matching the original
    # per-character random.choice loop.
    return ''.join(random.choices(string.ascii_letters, k=length))
def start_load(requests, city, pagenum, visited_urls, conn, cur):
    """Fetch one page of used-car listings from the Dongchedi mini-program
    API and insert them into the dcd_xcx_car table.

    Args:
        requests: the `requests` module, injected by the caller.
        city: city name used in the API query parameters.
        pagenum: result offset, in multiples of 10 (the page size).
        visited_urls: set of URLs already crawled by previous runs.
        conn: open pymysql connection (used to commit).
        cur: cursor on `conn`.

    Returns:
        0 when the API reports no more data (caller should stop paging),
        otherwise None.
    """
    cookies = {
        'tt_webid': '7365037271119562292',
    }
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/107.0.0.0 Safari/537.36 MicroMessenger/7.0.20.1781(0x6700143B) NetType/WIFI MiniProgramEnv/Windows WindowsWechat/WMPF WindowsWechat(0x63090a13) XWEB/8555',
        'xweb_xhr': '1',
        'sec-fetch-site': 'cross-site',
        'sec-fetch-mode': 'cors',
        'sec-fetch-dest': 'empty',
        'referer': 'https://servicewechat.com/wx0688e7bcdd17106e/308/page-frame.html',
        'accept-language': 'zh-CN,zh;q=0.9',
    }
    url = f'https://m.dcdapp.com/motor/sh_go/api/shop/sku_search?aid=1556&device_id=7365037271119562292&master_aid&user_unique_id=7365037271119562292&os_version=Windows 10 x64&ma_version=5.10.429&app_name=wechat&data_from=tt_mp&device_platform=windows&device_type=microsoft&device_brand=microsoft&sdk_verison=3.2.4&api_version=2&version_code=0&city_name={city}&limit=10&offset={pagenum}&sort=4&shop_type=9&entry=main_page&sh_city_name={city}&selected_city_name={city}'
    # Skip pages that were already crawled in a previous run.
    if url in visited_urls:
        return None
    try:
        response = requests.get(
            url=url,
            cookies=cookies,
            headers=headers,
        )
    except Exception as e:
        # BUG FIX: the original printed the error and fell through, then
        # crashed with NameError on the undefined `response`. Skip the page
        # instead; the URL is NOT recorded so a later run retries it.
        print(e)
        return None
    # BUG FIX: record the URL only after a successful request. The original
    # inserted it unconditionally, before the visited check, so duplicates
    # accumulated and failed pages were permanently marked as crawled.
    # Also pass a real one-element tuple `(url,)` — `(url)` is just a string.
    cur.execute("INSERT INTO xcx_dcd_url (url) VALUES (%s)", (url,))
    conn.commit()
    # Random pause between requests to avoid hammering the API.
    time.sleep(random.uniform(3, 5))
    df = json.loads(response.text)
    data = df['data']
    # 1-based page number; pagenum is a 0-based offset in steps of 10
    # (the original printed a float like "第0.0页").
    print(f"第{pagenum // 10 + 1}页", url)
    for item in data['sku_list']:
        # Prefix sku_id with 5 random letters so repeated listings (e.g.
        # the same car crawled for another city) don't collide on the PK.
        sku_id = generate_random_string(5) + str(item['sku_id'])
        # Epoch seconds -> datetime for the first-registration timestamp.
        first_registration_time = datetime.datetime.fromtimestamp(item['first_registration_time'])
        row = (
            sku_id,
            item['car_img_url'],
            item['car_name'],                 # engine/trim name
            item['series_name'],              # series name
            item['brand_name'],               # brand name
            item['mileage'],                  # mileage
            first_registration_time,
            item['car_age'],                  # car age
            item['sh_price'] + '万',          # second-hand price
            item['title'],
            item['sub_title'],
            item['link_url'],                 # detail page link
            item['car_year'],                 # production year
            item['car_source_city_name'],     # source city
            item['fixed_price'],              # fixed price
            item['origin_sh_price'] + '万',   # original price
        )
        cur.execute(
            "INSERT INTO dcd_xcx_car (sku_id, car_img_url, car_name, series_name, brand_name, mileage, first_registration_time, car_age, sh_price, title, sub_title, link_url, car_year, car_source_city_name, fixed_price, origin_sh_price) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)",
            row,
        )
    # One commit per page instead of per row (faster, same end state).
    conn.commit()
    # BUG FIX: the original dropped the final page's listings; process the
    # items first, THEN signal exhaustion to the caller.
    if not data['has_more']:
        print("结束咯,往下面也没有数据啦")
        return 0
    return None
if __name__ == '__main__':
    # One connection/cursor pair is shared by the whole crawl session.
    conn = pymysql.connect(host='localhost', user='admin', password='admin147', database='demo', port=3306)
    cur = conn.cursor()
    try:
        mysql_sql(conn, cur)
        # Load every URL recorded by earlier runs so pages are never re-crawled.
        cur.execute("SELECT url FROM xcx_dcd_url")
        visited_urls = {row[0] for row in cur.fetchall()}
        city = input("请输入要爬取的城市:")
        # Page through results 10 at a time; start_load returns 0 when the
        # API reports no more data.
        for pagenum in range(0, 6000, 10):
            if start_load(requests, city, pagenum, visited_urls, conn, cur) == 0:
                break
    except Exception as e:
        print(e)
    finally:
        # Always release the cursor and connection, even on failure.
        cur.close()
        conn.close()
这篇关于小程序端的懂车帝二手车数据采集的文章就介绍到这里,希望我们推荐的文章对程序员们有所帮助!