fix: resolve occasional date-matching failures in the 中国经济导报 (China Economic Herald) crawler

皓月归尘 2024-11-11 20:23:21 +08:00
parent 4cb5d4a5d3
commit b86a7737cb

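The whole fix hinges on the one added line, current_date = current_date.replace(day=1): every entry in months is now anchored to the first day of its month before the crawl starts. The sketch below shows why that anchor matters, assuming the loop advances current_date by bumping its month field (the actual increment line sits just past the first hunk and is not shown); add_month is a hypothetical stand-in for that step, not code from this repository.

from datetime import datetime

def add_month(d: datetime) -> datetime:
    # hypothetical month step, assuming the crawler advances current_date
    # by bumping the month and rolling the year over at December
    if d.month == 12:
        return d.replace(year=d.year + 1, month=1)
    return d.replace(month=d.month + 1)

start_date = datetime(2024, 1, 31)
end_date = datetime(2024, 4, 10)

# without the fix: replace(month=2) on Jan 31 raises ValueError
# ("day is out of range for month"), so the month walk can die mid-range
# add_month(start_date)

# with the fix: anchor to the 1st before walking the months
current_date = start_date.replace(day=1)
months = []
while current_date <= end_date:
    months.append(current_date)            # every entry is the 1st of a month
    current_date = add_month(current_date)
print([m.strftime("%Y-%m") for m in months])  # ['2024-01', '2024-02', '2024-03', '2024-04']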

@@ -66,6 +66,7 @@ async def getData(start_date: datetime, end_date: datetime):
     months = []
     # add every month from the start date through the end date to the list
     current_date = start_date
+    current_date = current_date.replace(day=1)
     while current_date <= end_date:
         months.append(current_date)
         # advance by one month
@@ -78,23 +79,23 @@ async def getData(start_date: datetime, end_date: datetime):
         # build the URL
         url = f'http://www.ceh.com.cn/epaper/uniflows/html/{month.strftime("%Y/%m")}/date.txt'
         """http://www.ceh.com.cn/epaper/uniflows/html/2012/09/date.txt"""
-        print(url)
-        async with AsyncClient(headers=headers, timeout=60) as client:
-            # send the GET request
-            response = await client.get(url)
-            response.encoding = "gb2312"
-            print(f"一级连接状态:{response.status_code}")
-            if response.status_code == 200:
-                # parse the pipe-delimited date index
-                soup = response.text.split("|")
-                for period in soup:
-                    period_id, element = period.split(",")
-                    if len(element) < 5:
-                        continue
-                    try:
+        print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), url)
+        try:
+            async with AsyncClient(headers=headers, timeout=60) as client:
+                # send the GET request
+                response = await client.get(url)
+                response.encoding = "gb2312"
+                print(f"一级连接状态:{response.status_code}")
+                if response.status_code == 200:
+                    # parse the pipe-delimited date index
+                    soup = response.text.split("|")
+                    for period in soup:
+                        period_id, element = period.split(",")
+                        if len(element) < 5:
+                            continue
                         url1 = f"http://www.ceh.com.cn/epaper/uniflows/html/{month.strftime('%Y/%m')}/{period_id}/boardurl.htm"
                         """http://www.ceh.com.cn/epaper/uniflows/html/2012/09/01/boardurl.htm"""
-                        print(url1)
+                        print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), url1)
                         response2 = await client.get(url1)
                         response2.encoding = "gb2312"
                         print(f"二级连接状态:{response2.status_code}")
@@ -106,7 +107,7 @@ async def getData(start_date: datetime, end_date: datetime):
                                 """http://www.ceh.com.cn/epaper/uniflows/html/2024/11/07/01/default.htm"""
                                 banmianming = item.text.split("")[-1].strip()
                                 banmianhao = item.text.split("")[0].replace("&nbsp;", "").replace(" ", "").strip()
-                                print(url2)
+                                print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), url2)
                                 response3 = await client.get(url2)
                                 response3.encoding = "gb2312"
                                 print(f"三级连接状态:{response3.status_code}")
@@ -118,7 +119,7 @@ async def getData(start_date: datetime, end_date: datetime):
                                         if await collection.find_one({"detail_url": url3}, {"_id": False}):
                                             continue
                                         title = item2.text.strip()
-                                        print(url3)
+                                        print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), url3)
                                         response4 = await client.get(url3)
                                         response4.encoding = "gb2312"
                                         print(f"四级连接状态:{response4.status_code}")
@@ -161,24 +162,23 @@ async def getData(start_date: datetime, end_date: datetime):
                                 print(
                                     f"中国经济导报---{month.strftime('%Y-%m')}-{period_id}---{banmianming}---{banmianhao}----采集完成!")
                                 await asyncio.sleep(random.randint(5, 15))
-                        print(
-                            f"中国经济导报---{month.strftime('%Y-%m')}-{period_id}-----采集完成!")
+                        print(f"中国经济导报---{month.strftime('%Y-%m')}-{period_id}-----采集完成!")
                         await asyncio.sleep(random.randint(5, 15))
-                    except Exception as e:
-                        await collection.insert_one(
-                            {'banmianhao': 'empty',
-                             'banmianming': 'empty',
-                             'preTitle': 'empty',
-                             'title': 'empty',
-                             'subtitle': 'empty',
-                             'author': 'empty',
-                             'keywordlist': 'empty',
-                             'detail_url': url,
-                             'release_time': month + timedelta(days=int(period_id)),
-                             'insert_timestamp': datetime.today(),
-                             'content': 'empty'}
-                        )
-                        print(e)
+        except Exception as e:
+            await collection.insert_one(
+                {'banmianhao': 'empty',
+                 'banmianming': 'empty',
+                 'preTitle': 'empty',
+                 'title': 'empty',
+                 'subtitle': 'empty',
+                 'author': 'empty',
+                 'keywordlist': 'empty',
+                 'detail_url': url,
+                 'release_time': month + timedelta(days=int(period_id)),
+                 'insert_timestamp': datetime.today(),
+                 'content': 'empty'}
+            )
+            print(e)
     print(f"中国经济导报采集完毕,共采集{crawl_num}条数据!")