Epidemic Visualization -- 1. Scraper -- Tencent epidemic data (all provinces and cities, all time periods), complete code included
First, flatten the province-level history records (the all_data dict, keyed by the names in province, built earlier in the post) into one table and save it as CSV:

import pandas as pd

lists = []
flag = 0
for pro in province:
    for i in all_data[pro]:
        if flag == 0:
            name = list(i.keys())        # use the first record's keys as the column names
            flag += 1
        lists.append(list(i.values()))

test = pd.DataFrame(columns=name, data=lists)
print(test)
test.to_csv('./数据可视化/课设/data/province_history_', encoding='utf-8')
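As an aside, each record returned by the API is already a dict, so pandas can build the same table without the flag/name bookkeeping. A minimal sketch, assuming the province and all_data objects from the block above (records and history are illustrative names):

import pandas as pd

records = []
for pro in province:
    records.extend(all_data[pro])    # each element is one day's record (a dict)

history = pd.DataFrame(records)      # column names are inferred from the dict keys
print(history.head())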
Next, scrape the city-level data, starting with a single province (province[1]) and its city list children[1]:

import requests
import pandas as pd

header = {
    'Cookie': 'RK=KzwZgVEeXg; ptcz=157737b47be19a589c1f11e7d5ea356f466d6f619b5db6525e3e4e9ea568b156; pgv_pvid=8404792176; o_cookie=1332806659; pac_uid=1_1332806659; luin=o1332806659; lskey=00010000f347903107135dfbd497aa640c9344fe129f3f881877cca68a1e46b4821572bebdf89eb35565f4e6; _qpsvr_localtk=0.24584627136079518; uin=o1332806659; skey=@twSZvHSZD; qzone_check=1332806659_1624245080',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.106 Safari/537.36'
}

all_data = {}
every_province = province[1]
for i in children[1]:
    # API path as given in the post; the Tencent news API host (e.g. https://api.inews.qq.com) goes in front of it
    url = '/newsqa/v1/query/pubished/daily/list?province={0}&city={1}'.format(every_province, i)
    # print(url)
    response = requests.get(url, headers=header)
    data = response.json()['data']
    all_data[i] = data

lists = []
flag = 0
for child in children[1]:
    for i in all_data[child]:
        if flag == 0:
            name = list(i.keys())
            flag += 1
        lists.append(list(i.values()))

test = pd.DataFrame(columns=name, data=lists)
print(test)
# test.to_csv('./数据可视化/课设/data/' + every_province + 'province_history_', encoding='utf-8')

test = test.sort_values(['city', 'y', 'date']).reset_index(drop=True)
print(test)
# test.to_csv('D:/学习资料/数据可视化/课设/data/' + every_province + 'province_history_', encoding='utf-8')
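Before flattening, it is worth checking which fields the per-city records actually carry, since the sort above assumes 'city', 'y' (year), and 'date' columns exist. A small inspection sketch, assuming the all_data and children objects from the block above:

# Peek at the first city's first record to see the available fields.
first_city = children[1][0]
sample = all_data[first_city]
if sample:                                   # the API can return None/empty for some entries
    print(first_city, '->', list(sample[0].keys()))
else:
    print(first_city, 'has no daily records')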
Finally, loop over every province and write one CSV per province. Entries named '地区待确认' (region to be confirmed) and cities with no data are skipped:

header = {
    'Cookie': 'RK=KzwZgVEeXg; ptcz=157737b47be19a589c1f11e7d5ea356f466d6f619b5db6525e3e4e9ea568b156; pgv_pvid=8404792176; o_cookie=1332806659; pac_uid=1_1332806659; luin=o1332806659; lskey=00010000f347903107135dfbd497aa640c9344fe129f3f881877cca68a1e46b4821572bebdf89eb35565f4e6; _qpsvr_localtk=0.24584627136079518; uin=o1332806659; skey=@twSZvHSZD; qzone_check=1332806659_1624245080',
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.106 Safari/537.36'
}

index = 0    # controls which province's city list (children[index]) is being scraped
for every_province in province:
    all_data = {}
    for i in children[index]:
        # API path as given in the post; prepend the Tencent news API host when running
        url = '/newsqa/v1/query/pubished/daily/list?province={0}&city={1}'.format(every_province, i)
        print(url)
        response = requests.get(url, headers=header)
        data = response.json()['data']
        all_data[i] = data

    lists = []
    flag = 0
    for child in children[index]:
        if child == '地区待确认':           # skip the "region to be confirmed" placeholder
            continue
        print(child)
        if all_data[child] is None:          # some cities have no daily records
            continue
        for i in all_data[child]:
            if flag == 0:
                name = list(i.keys())
                flag += 1
            lists.append(list(i.values()))
    index += 1

    test = pd.DataFrame(columns=name, data=lists)
    print(test.head())
    test = test.sort_values(['city', 'y', 'date'])
    test = test.reset_index(drop=True)
    print(test)
    test.to_csv('./数据可视化/课设/data/' + every_province + 'province_history_', encoding='utf-8')
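For the visualization steps that follow, the per-province CSVs can be read back and stitched into a single frame. A sketch, assuming the files were written to the data directory used above (the glob pattern and the combined output name are illustrative):

import glob
import pandas as pd

frames = []
for path in glob.glob('./数据可视化/课设/data/*province_history_*'):
    # note: this pattern also picks up the province-level file saved earlier; adjust it if needed
    frames.append(pd.read_csv(path, encoding='utf-8', index_col=0))

combined = pd.concat(frames, ignore_index=True)
print(combined.shape)
# combined.to_csv('./数据可视化/课设/data/all_city_history.csv', encoding='utf-8')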