Python | Crawling the campus timetable: web page to PDF, PDF to image

```python
import pdfkit
import requests
from bs4 import BeautifulSoup
from PIL import Image
from pdf2image import convert_from_path


def main():
    header = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
        "Referer": "http://192.168.10.10/kb/",
        "Accept-Language": "zh-CN,zh;q=0.9",
        "Content-Type": "application/x-www-form-urlencoded",
        "Accept-Encoding": "gzip, deflate",
        "Connection": "Keep-Alive",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.169 Safari/537.36",
        "Origin": "http://192.168.10.10",
        "Upgrade-Insecure-Requests": "1",
        "Cache-Control": "max-age=0",
        "Content-Length": "113"
    }

    url = 'http://192.168.10.10/kb/index.php/kcb/kcb/submit'  # the school's timetable query endpoint; not reachable from outside the LAN

    yx = ["1院信息工程学院", "2院智能制造与控制术学院", "3院外国语学院", "4院经济与管理学院", "5院艺术与设计学院"]
    ulist = []
    n = 0

    # fetch the class numbers automatically
    kburl = 'http://192.168.10.10/kb/'  # the school's timetable query page; not reachable from outside the LAN
    r = requests.get(kburl)
    r.encoding = r.apparent_encoding
    soup2 = BeautifulSoup(r.text, 'html.parser')
    script = soup2.find('script', {'language': "JavaScript", 'type': "text/javascript"})  # grab the embedded JS snippet
    bjhs = script.text[13:-287].split(',\r\n\r\n')  # slice out the needed part of the inline JS, splitting on blank lines
    bjh = []
    for bjhx in range(5):
        a = bjhs[bjhx][1:-1].replace('"', '')  # strip the surplus quotes
        bjh.append(a.split(','))  # convert the string into a list and append it

    # start crawling the timetables
    path = input('请粘贴存储地址:')  # prompt for the directory to save the files in
    for i, j in zip(yx, bjh):  # outer loop over departments
        for g in range(len(j)):  # inner loop over class numbers
            data = {"province": i,
                    "bjh": j[g],
                    "Submit": "查 询"}  # POST parameters for the query

            Gg = path + '\\' + str(j[g]) + '.html'  # temporary path for the crawled page
            Pp = path + '\\' + str(j[g]) + '.pdf'   # temporary path for the HTML-to-PDF result
            Pu = path + '\\' + str(j[g]) + '.jpeg'  # temporary path for the PDF-to-image result
            r = requests.post(url, data=data, headers=header)  # send the query and get the response page
            soup = BeautifulSoup(r.content, 'html.parser')  # parse the response
            body = soup.find_all(name='body')  # keep only the timetable part of the response
            html = str(body)  # convert to a string for the following steps (added while debugging)
            with open(Gg, 'w', encoding='utf-8') as f:  # save the crawled timetable as HTML
                f.write(html)

            # the crawl itself ends here with an HTML file; below is the format conversion (HTML -> PDF, PDF -> JPEG)
            Pppath_wk = r'D:\wkhtmltopdf\bin\wkhtmltopdf.exe'  # wkhtmltopdf install location
            # Pupath_wk = r'D:\wkhtmltopdf\bin\wkhtmltoimage.exe'  # originally intended for converting to images directly
            Ppconfig = pdfkit.configuration(wkhtmltopdf=Pppath_wk)  # point pdfkit at the executable
            # Puconfig = pdfkit.configuration(wkhtmltopdf=Pupath_wk)

            options1 = {
                'page-size': 'Letter',
                'encoding': 'UTF-8',
                'custom-header': [('Accept-Encoding', 'gzip')]
            }  # options1: output settings for the PDF
            '''options2 = {
                'page-size': 'Letter',
                'encoding': 'base64',
                'custom-header': [('Accept-Encoding', 'gzip')]
            }'''  # options2: output settings for the image; unused, kept commented out for later study
            pdfkit.from_file(Gg, Pp, options=options1, configuration=Ppconfig)  # convert the HTML file to PDF
            # pdfkit.from_file(Gg, Pu, options=options2, configuration=Puconfig)

            try:
                convert_from_path(Pp, 300, path, fmt="JPEG", output_file=str(j[g]), thread_count=1)  # convert the PDF to JPEG; mind the output path
            except (OSError, NameError):
                pass

            n += 1
            print('正在打印第%s张课表!' % n)
        print("*" * 100)
        print('%s打印完毕!' % str(i))


main()

'''
********** In the first version the class lists had to be entered by hand; the format is kept below for reference **********
bjh = [
    ["10111501","10111502","10111503","10111504","10121501","10121502","10121503","10131501","10141501","10111503","10111504","10121503","ZB0111501","ZB0131501","ZB0141501","10111601","10111602","10111603","10121601","10121602","10131601","10141601","10161601","ZB0111601","ZB0121601","ZB0131601","10111701","10111702","10111703","10111704","10111705","10121701","10121702","10121703","10131701","10141701","10161701","ZB0111701","10211501","10211502","10211503","10211504","10211505","10221501","10221502","10221503","10231501","10231502","10241501","10241502","ZB0211501","ZB0221501","10211601","10211602","10221601","10231601","10241601","ZB0211601","ZB0221601","ZB0231601","10211701","10211702","10221701","10231701","10241701","ZB0211701","101011801","101011802","101011803","101011804","101021801","101021802","101021803","101031801","101041801","101051801","101051802","101061801","101071801","201011801","201051801"],

    ["10611501","10611502","10611503","10611504","10621501","10641501","10641502","10641503","ZB0641501","ZB0611501","10611601","10611602","10611603","10621601","10641601","10641602","ZB0611601","ZB0641601","10611701","10611702","10621701","10641701","10641702","ZB0611701","10911501","10911502","10921501","10921502","10931501","10931502","ZB0911501","ZB0921501","10911601","10921601","10931601","10911701","10931701","102011801","102011802","102021801","102031801","102041801","102041802","102051801","202011801","202051801"],

    ["10311501","10311502","10311503","10331501","10341501","ZB0311501","10311601","10311602","10311603","10311604","10311605","10311606","10321501","10321601","10331601","10331602","10341601","10351601","ZB0311601","10311701","10311702","10311703","10311704","10311705","10311706","10311707","10321701","10331701","10331702","10341701","10351701","ZB0311701","SX0341701","103011801","103011802","103011803","103011804","103011805","103011806","103011807","103011808","103011809","103031801","103031802","103041801","103051801","203011801"],

    ["10411501","10411502","10421501","10451501","10451502","10451503","10451504","10451505","10451506","ZB0451501","ZB0411501","10411601","10411602","10421601","10451601","10451602","10451603","10451604","10451605","ZB0411601","ZB0451601","10411701","10411702","10421701","10451701","10451702","10451703","ZB0411701","ZB0451701","ZB0451702","SX0411701","10711501","10731501","10731502","10731503","10731504","10731505","10731506","10731507","10731508","10731509","ZB0711501","ZB0731501","10711601","10731601","10731602","10731603","10731604","10731605","10731606","10731607","10731608","10731609","10731610","10731611","10731612","10741601","10741602","ZB0711601","ZB0731601","ZB0731602","ZB0731603","10711701","10731701","10731702","10731703","10731704","10731705","10731706","10731707","10741701","10741702","ZB0711701","ZB0731701","ZB0731702","ZB0731703","SX0711701","104011801","104011802","104021801","104021802","104021803","104031801","104031802","104041801","104051801","104051802","104051803","104051804","104051805","104051806","104051807","104051808","104051809","104061801","104061802","204021801","204021802","204031801","204041801","204051801","204051802","204051803","204051804"],

    ["10511501","10511502","10521501","10521502","10521503","10531501","10531502","10531503","10541501","10541502","10541503","ZB0521501","ZB0521502","ZB0511501","10511601","10511602","10511603","10521601","10521602","10521603","10521604","10531601","10531602","10531603","10531604","10541601","ZB0511601","ZB0521601","10511701","10511702","10521701","10521702","10521703","10521704","10531701","10531702","10531703","10531704","10541701","ZB0511701","ZB0521701","105011801","105011802","105011803","105021801","105021802","105021803","105021804","105021805","105031801","105031802","105031803","105031804","105031805","105041801","205011801","205021801"]
]

********** Author: 秦小道 **********
********** Version: 2 **********
********** Release date: 2019.6.21 **********
'''
```

A preview of the crawl results:
[screenshot of a generated timetable image]

I ran into plenty of errors along the way. For example, poppler and wkhtmltopdf are external programs, so their bin directories have to be added to the PATH environment variable.
The whole script lives in a single main() function, which is a bad habit I still need to break; a sketch of how it could be split into smaller functions follows below.
Every step of the script is commented. The crawl target is a LAN-only address, so if you use this as a reference, change it to fit your own situation.
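As a nudge in that direction, here is a minimal sketch, not the author's actual refactor, of how the crawling half could be split out of main() into small functions. The URLs and slicing indices come from the script above; the function names and the use of requests.Session are my own choices.

```python
import requests
from bs4 import BeautifulSoup

BASE = 'http://192.168.10.10/kb/'              # LAN-only; replace with your own site
QUERY_URL = BASE + 'index.php/kcb/kcb/submit'


def fetch_class_numbers(session):
    """Scrape the class numbers embedded in the query page's inline JavaScript."""
    r = session.get(BASE)
    r.encoding = r.apparent_encoding
    soup = BeautifulSoup(r.text, 'html.parser')
    script = soup.find('script', {'language': 'JavaScript', 'type': 'text/javascript'})
    chunks = script.text[13:-287].split(',\r\n\r\n')   # same slicing as the original script
    return [c[1:-1].replace('"', '').split(',') for c in chunks[:5]]


def fetch_timetable_html(session, department, class_no):
    """POST one query and return the <body> of the response page as an HTML string."""
    data = {"province": department, "bjh": class_no, "Submit": "查 询"}
    r = session.post(QUERY_URL, data=data)
    return str(BeautifulSoup(r.content, 'html.parser').find('body'))
```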

The script was packaged into an .exe with PyInstaller, but poppler and wkhtmltopdf still have to be added alongside the packaged file by hand, and on a new machine the environment variables for these two programs must be set manually.
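One possible way around that, an assumption of mine rather than what the original build does, is to bundle the two tools into the executable with PyInstaller's --add-binary option and resolve their locations at run time from the extraction directory:

```python
import os
import sys


def bundled_path(relative):
    """Locate a bundled file both when running from source and from a PyInstaller onefile exe."""
    # A onefile build extracts its bundle to a temporary directory recorded in sys._MEIPASS;
    # when running from source that attribute does not exist, so fall back to the script's folder.
    base = getattr(sys, '_MEIPASS', os.path.dirname(os.path.abspath(__file__)))
    return os.path.join(base, relative)


# Hypothetical bundle layout: wkhtmltopdf.exe placed under 'wkhtmltopdf' and the
# poppler bin directory under 'poppler', added at build time with --add-binary.
WKHTMLTOPDF_EXE = bundled_path(os.path.join('wkhtmltopdf', 'wkhtmltopdf.exe'))
POPPLER_BIN = bundled_path('poppler')
```

These two values could then be passed to pdfkit.configuration(wkhtmltopdf=...) and to convert_from_path(..., poppler_path=...), so nothing has to be set in the environment variables of the target machine.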
Reposted from: https://www.cnblogs.com/huhahuhahu/p/QinXD.html
