以上代码就是爬虫获取排行

时间:2019-09-26 14:23来源:快三平台下载编程
#!/usr/bin/env python
# -*- coding:utf-8 -*-
"""Crawl Baidu Zhidao's hot-search ranking and save it into an Excel file.

Fixes over the pasted original: `urlopen` is actually called with the
request, `decode()` / `__str__()` / `json.loads(...)` are real calls instead
of bare attribute references, `os.path.isdir` / `os.makedirs` receive the
path, and the broken `path + '\'` concatenation is replaced with
`os.path.join`, so the script runs instead of raising at import time.
"""
import json
import os
import urllib.request

import xlwt  # third-party, already used by the original script

# Number of ranking entries to fetch; the API only accepts 1-50.
CNT = 50
URL = ('https://zhidao.baidu.com/question/api/hotword?rn='
       + str(CNT) + '&t=1535421904906')
USER_AGENT = ('Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US; rv:1.9.1.6) '
              'Gecko/20091201 Firefox/3.5.6')


def fetch_hotwords():
    """Download the hot-word JSON and return the parsed ``data`` list.

    Returns:
        list[dict]: entries with keys ``keyword``, ``searches``,
        ``changeRate``, ``isNew`` and ``trend`` (per the observed API payload).
    """
    req = urllib.request.Request(URL, headers={'User-Agent': USER_AGENT})
    # Context manager guarantees the HTTP response is closed (the original
    # never called urlopen with the request at all).
    with urllib.request.urlopen(req) as response:
        content = response.read().decode()
    return json.loads(content)['data']


def build_workbook(data_list):
    """Write the hot-word entries into a new xlwt workbook and return it."""
    workbook = xlwt.Workbook()
    sheet1 = workbook.add_sheet('sheet1', cell_overwrite_ok=True)
    # Header row; column 0 holds the rank number.
    sheet1.write(0, 0, '排名')
    sheet1.write(0, 1, '新闻名称')
    sheet1.write(0, 2, '搜索人数')
    sheet1.write(0, 3, '变化数量')
    sheet1.write(0, 4, '新的新闻')
    sheet1.write(0, 5, '热度上升')

    for j, data in enumerate(data_list, start=1):
        sheet1.write(j, 0, j)
        sheet1.write(j, 1, data['keyword'])
        sheet1.write(j, 2, data['searches'])
        sheet1.write(j, 3, data['changeRate'])

        # isNew comes back as 0/1; render it as a human-readable flag.
        is_new = data['isNew']
        if is_new == 0:
            is_new = '否'
        elif is_new == 1:
            is_new = '是'
        sheet1.write(j, 4, str(is_new))

        # Color the trend cell: colour_index 3 for a fall, 2 for a rise
        # (indices kept exactly as in the original script).
        trend = data['trend']
        style5 = xlwt.XFStyle()
        font = xlwt.Font()
        style5.font = font
        if trend == 'fall':
            font.colour_index = 3
            trend = '下降'
        elif trend == 'rise':
            font.colour_index = 2
            trend = '上升'
        sheet1.write(j, 5, trend, style5)
    return workbook


def main():
    """Fetch the ranking and save it; overwrites any same-named file."""
    data_list = fetch_hotwords()
    workbook = build_workbook(data_list)

    # Raw string so the backslash is literal; the original `path + '\'`
    # escaped the closing quote and was a syntax error.
    path = r'D:\Python'
    if not os.path.isdir(path):
        os.makedirs(path)
    filename = 'test1'
    workbook.save(os.path.join(path, filename + '.xls'))
    print('创建excel文件完成!')


if __name__ == '__main__':
    main()

  用百度搜索的时候总能看到热搜排行,以上代码就是用爬虫获取该排行的示例

图片 1

编辑:快三平台下载编程 本文来源:以上代码就是爬虫获取排行

关键词: