parent 168635dc69
commit 689c821842
@@ -1,98 +0,0 @@
from bs4 import BeautifulSoup
import requests
import csv

def ask_url(url):
    # Fetch a page with a desktop-browser User-Agent and return its HTML text.
    head = {
        "User-Agent":
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36 Edg/122.0.0.0"
    }
    try:
        r = requests.get(url, headers=head, timeout=30)
        r.raise_for_status()
        r.encoding = 'utf-8'
        return r.text
    except requests.RequestException:
        # On any network or HTTP error, return "" so callers can skip this page.
        return ""

def get_data(base_url):
    # Crawl 10 listing pages, follow each disease link, and collect its details.
    data_list = []

    for i in range(0, 10):
        url = base_url + str(i + 1)
        html = ask_url(url)
        if html == "":
            continue
        soup = BeautifulSoup(html, 'html.parser')
        for item in soup.find_all('div', class_="result_item"):
            data = {}

            # Only process result items labelled "疾病" (disease).
            if item.div.p.span.string == "疾病":
                data['diseaseName'] = item.div.p.a.string

                # Symptoms listed directly on the search-result card.
                symptoms = []
                p = item.find('p', class_='result_item_content_label')
                for symptom in p.find_all('a'):
                    symptoms.append(symptom.string)

                # Fetch the disease's own detail page.
                sub_url = item.div.p.a.attrs["href"]
                sub_html = ask_url(sub_url)
                if sub_html == "":
                    continue
                sub_soup = BeautifulSoup(sub_html, 'html.parser')

                # Basic attributes from the first information list.
                information_ul = sub_soup.find('ul', class_="information_ul")
                for detail in information_ul.find_all('li'):
                    if detail.i.string == '别名:':  # alias
                        data['diseaseAlias'] = detail.span.string
                    elif detail.i.string == '发病部位:':  # site of onset
                        data['siteOfOnset'] = []
                        for site in detail.span.find_all('a'):
                            data['siteOfOnset'].append(site.string)
                    elif detail.i.string == '传染性:':  # infectivity
                        data['infectivity'] = detail.span.string
                    elif detail.i.string == '多发人群:':  # susceptible population
                        data['multiplePopulation'] = detail.span.string
                    elif detail.i.string == '并发症:':  # complications
                        data['complication'] = []
                        for complication in detail.span.find_all('a'):
                            data['complication'].append(complication.string)
                    elif detail.i.string == '挂号科室:':  # registration department
                        data['registrationDepartment'] = []
                        for department in detail.span.find_all('a'):
                            data['registrationDepartment'].append(department.string)
                    elif detail.i.string == '临床检查:':  # clinical examinations
                        data['clinicalExamination'] = []
                        for examination in detail.span.find_all('a'):
                            data['clinicalExamination'].append(examination.string)
                    elif detail.i.string == '典型症状:':  # typical symptoms
                        for symptom in detail.span.find_all('a'):
                            symptoms.append(symptom.string)
                        # The symptom list stands in for commonDrugs and may be
                        # overwritten by the drug list below.
                        data['commonDrugs'] = symptoms

                # Common drugs from the second information list.
                information_ul1 = sub_soup.find('ul', class_="information_ul information_ul_bottom")
                for detail in information_ul1.find_all('li'):
                    if detail.i.string == '常用药品:':  # common drugs
                        data['commonDrugs'] = []
                        for drug in detail.span.find_all('a'):
                            data['commonDrugs'].append(drug.string)

                data_list.append(data)

    return data_list

def save_data(data_list, save_path):
    # Write the collected records to a UTF-8 CSV file.
    with open(save_path, 'w', newline='', encoding='utf-8') as csvfile:
        fieldnames = ["diseaseName", "diseaseAlias", "siteOfOnset", "infectivity", "multiplePopulation", "complication", "registrationDepartment", "clinicalExamination", "commonDrugs"]
        writer = csv.DictWriter(csvfile, fieldnames=fieldnames)

        writer.writeheader()
        for data in data_list:
            writer.writerow(data)
    return ""

if __name__ == "__main__":
    base_url = "https://jbk.39.net/bw/jizhenke_p"
    # Raw string so the backslashes in the Windows path are not treated as escapes.
    save_path = r"D:\数据采集\PycharmProjects\pythonProject\数据采集\文件\疾病.csv"

    data_list = get_data(base_url)
    save_data(data_list, save_path)
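
Note that csv.DictWriter stringifies list-valued fields (siteOfOnset, complication, registrationDepartment, clinicalExamination, commonDrugs) as their Python repr, so anything reading 疾病.csv back has to parse them again. A minimal sketch of such a reader, assuming the file produced above; load_diseases is a hypothetical helper, not part of the original script:

import ast
import csv

def load_diseases(csv_path):
    # Hypothetical reader: list columns were written as Python reprs
    # (e.g. "['发热', '咳嗽']"), so literal_eval turns them back into lists;
    # plain string columns are left untouched.
    rows = []
    with open(csv_path, newline='', encoding='utf-8') as csvfile:
        for row in csv.DictReader(csvfile):
            for key, value in row.items():
                if value.startswith('['):
                    try:
                        row[key] = ast.literal_eval(value)
                    except (ValueError, SyntaxError):
                        pass
            rows.append(row)
    return rows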