From 5950005bac76ecdb2f78ae8fca603647cca2de90 Mon Sep 17 00:00:00 2001
From: 皓月归尘
Date: Mon, 11 Nov 2024 20:30:02 +0800
Subject: [PATCH] fix: resolve the occasional date-matching failure in the
 Science and Technology Daily crawler
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 国内党媒/CrawlKejiribao.py | 59 +++++++++++++++++++-------------------
 1 file changed, 30 insertions(+), 29 deletions(-)

diff --git a/国内党媒/CrawlKejiribao.py b/国内党媒/CrawlKejiribao.py
index 7c99046..3f2b5f9 100644
--- a/国内党媒/CrawlKejiribao.py
+++ b/国内党媒/CrawlKejiribao.py
@@ -68,6 +68,7 @@ async def getData(start_date: datetime, end_date: datetime):
     months = []
     # Add every month from the start date through the end date to the list
     current_date = start_date
+    current_date = current_date.replace(day=1)
     while current_date <= end_date:
         months.append(current_date)
         # Advance by one month
@@ -80,20 +81,20 @@ async def getData(start_date: datetime, end_date: datetime):
         # Build the URL
         url = f'https://digitalpaper.stdaily.com/http_www.kjrb.com/kjrb/html/{month.strftime("%Y-%m")}/period.xml'
         """https://digitalpaper.stdaily.com/http_www.kjrb.com/kjrb/html/2011-10/period.xml"""
-        print(url)
-        async with AsyncClient(headers=headers, timeout=60) as client:
-            response = await client.get(url)
-            response.encoding = response.charset_encoding
-            print(f"Level-1 connection status: {response.status_code}")
-            if response.status_code == 200:
-                soup = BeautifulSoup(response.text, 'xml')
-                for period in soup.select("period"):
-                    period_name = datetime.strptime(period.select_one("period_name").text.strip(), "%Y-%m-%d")
-                    front_page = period.select_one("front_page").text.strip()
-                    try:
+        print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), url)
+        try:
+            async with AsyncClient(headers=headers, timeout=60) as client:
+                response = await client.get(url)
+                response.encoding = response.charset_encoding
+                print(f"Level-1 connection status: {response.status_code}")
+                if response.status_code == 200:
+                    soup = BeautifulSoup(response.text, 'xml')
+                    for period in soup.select("period"):
+                        period_name = datetime.strptime(period.select_one("period_name").text.strip(), "%Y-%m-%d")
+                        front_page = period.select_one("front_page").text.strip()
                         url1 = f"https://digitalpaper.stdaily.com/http_www.kjrb.com/kjrb/html/{period_name.strftime('%Y-%m/%d')}/{front_page}"
                         """https://digitalpaper.stdaily.com/http_www.kjrb.com/kjrb/html/2024-10/30/node_2.htm"""
-                        print(url1)
+                        print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), url1)
                         response2 = await client.get(url1)
                         response2.encoding = response2.charset_encoding
                         print(f"Level-2 connection status: {response2.status_code}")
@@ -104,7 +105,7 @@ async def getData(start_date: datetime, end_date: datetime):
                                 banmianhao = item.text.split(":")[0]
                                 url2 = f"https://digitalpaper.stdaily.com/http_www.kjrb.com/kjrb/html/{period_name.strftime('%Y-%m/%d')}/" + item.get(
                                     "href").replace("./", "").strip()
-                                print(url2)
+                                print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), url2)
                                 response3 = await client.get(url2)
                                 response3.encoding = response3.charset_encoding
                                 print(f"Level-3 connection status: {response3.status_code}")
@@ -129,7 +130,7 @@ async def getData(start_date: datetime, end_date: datetime):
                                         if await collection.find_one({"detail_url": url3}, {"_id": False}):
                                             continue
                                         title = item2.text.strip()
-                                        print(url3)
+                                        print(datetime.now().strftime("%Y-%m-%d %H:%M:%S"), url3)
                                         response4 = await client.get(url3)
                                         response4.encoding = response4.charset_encoding
                                         print(f"Level-4 connection status: {response4.status_code}")
@@ -178,21 +179,21 @@ async def getData(start_date: datetime, end_date: datetime):
                                 await asyncio.sleep(random.randint(5, 15))
                     print(f"Science and Technology Daily---{period_name.strftime('%Y-%m-%d')}-----crawl complete!")
                     await asyncio.sleep(random.randint(5, 15))
-                    except Exception as e:
-                        print(e)
-                        await collection.insert_one(
-                            {'banmianhao': 'empty',
-                             'banmianming': 'empty',
-                             'preTitle': 'empty',
-                             'title': 'empty',
-                             'subtitle': 'empty',
-                             'author': 'empty',
-                             'keywordlist': 'empty',
-                             'detail_url': url,
-                             'release_time': period_name,
-                             'insert_timestamp': datetime.today(),
-                             'content': 'empty'}
-                        )
+        except Exception as e:
+            print(e)
+            await collection.insert_one(
+                {'banmianhao': 'empty',
+                 'banmianming': 'empty',
+                 'preTitle': 'empty',
+                 'title': 'empty',
+                 'subtitle': 'empty',
+                 'author': 'empty',
+                 'keywordlist': 'empty',
+                 'detail_url': url,
+                 'release_time': period_name,
+                 'insert_timestamp': datetime.today(),
+                 'content': 'empty'}
+            )
 
 
     print(f"Science and Technology Daily crawl finished: {crawl_num} items collected in total!")
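
Why the one-line replace(day=1) addresses the intermittent failure: months is built by stepping current_date forward one month at a time, and a start date falling on day 29, 30, or 31 can make a replace-based month step land on a day that does not exist in the target month, raising ValueError for some start dates and not others, which matches the occasional failure the commit message describes. Pinning the cursor to the first of the month makes every step valid. Below is a minimal sketch of the idea; the patched hunk cuts off before the original increment line, so the iter_months helper and the replace-based increment are illustrative assumptions, not the script's exact code.

    from datetime import datetime

    def iter_months(start_date: datetime, end_date: datetime):
        # Hypothetical reconstruction of the crawler's month loop.
        # Normalizing to day 1 is the fix this patch adds: without it,
        # e.g. datetime(2024, 1, 31).replace(month=2) raises ValueError
        # because February 31 does not exist.
        current_date = start_date.replace(day=1)
        while current_date <= end_date:
            yield current_date
            # Advance by one month (assumed increment)
            if current_date.month == 12:
                current_date = current_date.replace(year=current_date.year + 1, month=1)
            else:
                current_date = current_date.replace(month=current_date.month + 1)

    # A start date on the 31st now iterates cleanly:
    assert [d.strftime("%Y-%m") for d in
            iter_months(datetime(2024, 1, 31), datetime(2024, 3, 15))] == \
           ["2024-01", "2024-02", "2024-03"]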
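
The other structural change widens the try from the per-period block to the whole month, so a single failed request or parse now logs the exception and writes one sentinel record for the month, with the month's period.xml URL in detail_url, instead of one record per period. A sketch of that placeholder-on-failure pattern under stated assumptions: crawl_month and fetch_month are hypothetical names (the script inlines this logic), collection is the async MongoDB collection the script already holds (insert_one is awaited, consistent with a motor collection), and the sentinel dict is trimmed to a few representative keys.

    from datetime import datetime

    async def crawl_month(collection, url: str, fetch_month) -> None:
        # Sketch of the widened exception scope, not the script's actual code
        try:
            await fetch_month(url)  # stands in for the nested period/page/article crawl
        except Exception as e:
            print(e)
            # One sentinel document per failed month; detail_url keeps the
            # month's period.xml URL so the gap can be found and re-crawled
            await collection.insert_one({
                'banmianhao': 'empty',
                'title': 'empty',
                'detail_url': url,
                'insert_timestamp': datetime.today(),
                'content': 'empty',
            })

The broad except Exception keeps the month loop alive after one bad request or parse, and the timestamped prints added throughout the patch make it possible to correlate a sentinel record with when the failure happened.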