import requests
import re, os
from bs4 import BeautifulSoup

# Fetch the WeChat article and parse out the article body.
url = 'https://mp.weixin.qq.com/s/K0u_qPFQtWuH4hk5K2xWfQ'
response = requests.get(url)
response.encoding = 'utf-8'  # WeChat article pages are served as UTF-8
html = response.text

soup = BeautifulSoup(html, 'html.parser')
ans = soup.select('div.rich_media > div.rich_media_inner')
ans1 = ans[0].text.encode()  # article text, encoded to UTF-8 bytes


def openreadtxt(file_name):
    """Read a text file and return a list of rows, each row split on spaces."""
    data = []
    with open(file_name, 'r', encoding='utf-8') as file:   # open the file
        for row in file.readlines():                        # read every line
            tmp_list = row.split(' ')                       # split the line on spaces
            tmp_list[-1] = tmp_list[-1].replace('\n', '')   # strip the trailing newline
            data.append(tmp_list)                           # collect the row
    return data


def updateFile(file, old_str, new_str):
    """Replace every regex match of old_str with new_str in file, in place."""
    with open(file, "r", encoding="utf-8") as f1, \
         open("%s.bak" % file, "w", encoding="utf-8") as f2:
        for line in f1:
            f2.write(re.sub(old_str, new_str, line))
    os.remove(file)                    # drop the original file
    os.rename("%s.bak" % file, file)   # promote the .bak copy to the original name
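

# --- Usage sketch (not part of the original script) ---
# A minimal illustration of how the pieces above could be wired together:
# the file name 'article.txt' and the 'foo' -> 'bar' replacement are
# hypothetical placeholders chosen only for this demo.
with open('article.txt', 'w', encoding='utf-8') as f:
    f.write(ans1.decode('utf-8'))        # ans1 holds UTF-8 bytes, so decode before writing

updateFile('article.txt', 'foo', 'bar')  # regex-replace 'foo' with 'bar' in place
rows = openreadtxt('article.txt')        # re-read the file as space-split rows
print(rows[:3])                          # inspect the first few rows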