Scraping workflow:
1. Capture the captcha image and recognize it with the Yundama (云打码) service
2. Simulate the login
3. Click through to the required data with webdriver
4. Save and parse the HTML, clean the data
5. Write the data into the database
6. A database trigger updates the database automatically (a sketch follows this list)
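Step 6 is not shown in the scripts below. As a rough illustration, assuming MySQL, a trigger can be registered once so that every batch appended to the staging table leaves a timestamp in a log table. Only the staging table ptztsj_数据池 and the connection details come from Part III; the log table 更新日志 and its columns are hypothetical placeholders.

# Hedged sketch of step 6 (MySQL assumed): an AFTER INSERT trigger on the
# staging table ptztsj_数据池 that records each new row in a hypothetical log
# table 更新日志. Run once; adjust the names to the real schema.
import pymysql

conn = pymysql.connect(host='localhost', user='root', password='1qaz',
                       database='基地电竞风暴', charset='utf8')
trigger_sql = """
CREATE TRIGGER trg_ptztsj_insert
AFTER INSERT ON `ptztsj_数据池`
FOR EACH ROW
    INSERT INTO `更新日志` (`表名`, `更新时间`) VALUES ('ptztsj_数据池', NOW());
"""
with conn.cursor() as cur:
    cur.execute("DROP TRIGGER IF EXISTS trg_ptztsj_insert")
    cur.execute(trigger_sql)
conn.commit()
conn.close()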
I. Downloading and cropping the captcha image
# coding=utf-8
import time
from selenium import webdriver
from PIL import Image

# driver = webdriver.Firefox(executable_path=r'C:\Program Files\Mozilla Firefox\geckodriver.exe')
driver = webdriver.Chrome(executable_path=r'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe')
driver.maximize_window()
driver.implicitly_wait(6)
driver.get("********************************************")  # URL of the login page
time.sleep(1)
driver.get_screenshot_as_file('截图.png')  # full-page screenshot
imgelement = driver.find_element_by_id('codeImage')  # locate the captcha element
# /html/body/form/div[3]/div[3]/div/ul/li[4]/img
location = imgelement.location
# location = imgelement.location_once_scrolled_into_view
size = imgelement.size
a1 = tuple(location.values())  # (x, y)
a2 = tuple(size.values())      # (height, width)
print(location)
print(size)
driver.close()
# Crop box is (x, y, x+width, y+height); this machine's display scaling is set
# to 125%, so every coordinate is multiplied by 1.25.
rangle = (a1[0]*1.25, a1[1]*1.25, a1[0]*1.25 + a2[1]*1.25, a1[1]*1.25 + a2[0]*1.25)
print(rangle)
i = Image.open('截图.png')
frame4 = i.crop(rangle)
frame4.save('验证码.png')
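A caveat on the hard-coded 1.25: it only holds on a machine whose display scaling is set to 125%. One way to drop the constant is to derive the factor at runtime from the screenshot width and the viewport width reported by the page. The helper below is a sketch of that idea, not part of the original script; the name get_scale_factor is illustrative, and it assumes the screenshot covers exactly the browser viewport.

# Sketch: compute the display scale factor at runtime instead of hardcoding 1.25.
from PIL import Image

def get_scale_factor(driver, screenshot_path='截图.png'):
    viewport_width = driver.execute_script("return window.innerWidth")  # CSS pixels
    screenshot_width = Image.open(screenshot_path).size[0]              # physical pixels
    return screenshot_width / viewport_width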
II. Captcha recognition with the Yundama API
# Yundama (云打码) platform: recognize the captcha image
from ctypes import *


def ydm_func(ydm_appid, ydm_appkey, ydm_username, ydm_password, filename, type1):
    """
    Wrapper around the Yundama DLL.
    """
    YDMApi = windll.LoadLibrary('yundamaAPI-x64')
    # All captcha type codes: http://www.yundama.com/price.html
    # Reserve 30 bytes to hold the recognition result
    result = c_char_p(b" " * 30)
    timeout = 30
    Id = YDMApi.YDM_EasyDecodeByPath(ydm_username, ydm_password, ydm_appid, ydm_appkey,
                                     filename, type1, timeout, result)
    print("One-shot recognition: captcha ID: %d, result: %s" % (Id, result.value))
    print(result.value)
    return result.value


YDM_APPID = 10723  # Software ID, required for the developer revenue share; found under "My software" in the developer console
YDM_APPKEY = b'**********************************'  # Software key, obtained in the same place as the software ID
YDM_USERNAME = b'jinghua'
YDM_PASSWORD = b'******'
FILE_NAME = b'min.png'
ydm_func(YDM_APPID, YDM_APPKEY, YDM_USERNAME, YDM_PASSWORD, FILE_NAME, 1004)
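For reference, the crop from Part I and ydm_func can be chained into one helper that returns the recognized text, ready to be typed into the login form in Part III. This is only a sketch: the helper name get_captcha_text and the file name captcha.png are illustrative, the Yundama constants are the ones defined above, and the 1.25 factor again assumes 125% display scaling.

# Glue sketch: crop the captcha as in Part I and pass it straight to ydm_func.
from PIL import Image

def get_captcha_text(browser):
    browser.get_screenshot_as_file('截图.png')
    img = browser.find_element_by_id('codeImage')
    x, y = img.location['x'], img.location['y']
    w, h = img.size['width'], img.size['height']
    box = (x * 1.25, y * 1.25, (x + w) * 1.25, (y + h) * 1.25)
    Image.open('截图.png').crop(box).save('captcha.png')
    # Same Yundama constants and captcha type code as above
    return ydm_func(YDM_APPID, YDM_APPKEY, YDM_USERNAME, YDM_PASSWORD,
                    b'captcha.png', 1004)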
III. Simulating the login, navigating to the target page, and the scraping code
import os
import time
import datetime
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from bs4 import BeautifulSoup
import pymysql
import pandas as pd
from sqlalchemy import create_engine
class yd_djfb_Spyder():
    def __init__(self):
        # Login credentials and query date range
        today = datetime.date.today()
        startdate = today - datetime.timedelta(days=8)  # start date: 8 days before today
        # enddate = today - datetime.timedelta(days=0)
        startdate = str(startdate) + ' 00:00:00'  # formatted start date, for later use
        enddate = str(today) + ' 00:00:00'        # formatted end date, for later use
        self.username_ = "******"
        self.password_ = "******"
        self.href = '***********************************************'  # URL of the back-end being scraped
        self.date1 = startdate
        self.date2 = enddate
        # Create the browser object (Firefox or Chrome); the matching driver binary
        # must be placed in the folder given below beforehand
        self.browser = webdriver.Firefox(executable_path=r'C:\Program Files\Mozilla Firefox\geckodriver.exe')
        # self.browser = webdriver.Chrome(executable_path=r'C:\Program Files (x86)\Google\Chrome\Application\chromedriver.exe')
        # Request the page
        self.browser.get(self.href)
        time.sleep(2)
        # Locate the username and password inputs and fill them in
        username = self.browser.find_element_by_xpath('//input[@id="txtName"]')      # username input, by XPath
        password = self.browser.find_element_by_xpath('//input[@id="txtPassword"]')  # password input, by XPath
        username.clear()
        username.send_keys(self.username_)  # type the username
        password.clear()
        password.send_keys(self.password_)  # type the password
        # Captcha handling, in case the page asks for one
        while True:
            try:
                verify_img = self.browser.find_element_by_xpath('//img[@id="codeImage"]')
            except NoSuchElementException:
                break
            if verify_img:
                # Type in the captcha
                verify_code = self.browser.find_element_by_xpath('//input[@name="checkCode"]')
                verify_code_ = input('verify_code > ')
                verify_code.clear()
                verify_code.send_keys(verify_code_)
                # Locate the submit button and click it
                sub_btn = self.browser.find_element_by_xpath('//input[@name="btnSubmit"]')
                sub_btn.click()
                time.sleep(5)
            else:
                break
        menu2 = self.browser.find_element_by_xpath('//a/span[@id="SpanChannelMenu_Menu3"]')  # locate the query menu button
        menu2.click()  # click it
        time.sleep(2)  # wait 2 seconds
        self.browser.switch_to.frame('left')  # switch to the "left" frame
        time.sleep(5)  # wait 5 seconds
        main_left1 = self.browser.find_element_by_link_text('平台整体数据汇总')  # locate the "平台整体数据汇总" link
        main_left1.click()  # click it
        time.sleep(5)  # wait 5 seconds
        self.browser.switch_to.default_content()    # back to the top-level document
        self.browser.switch_to.frame('main_right')  # enter the frame containing the target elements; frames can be located by id or name
        start_date_select = self.browser.find_element_by_xpath('//input[@id="txtStartDate"]')
        start_date_select.clear()
        start_date_select.send_keys(self.date1)  # fill in the start date
        start_date_select = self.browser.find_element_by_xpath('//input[@id="txtEndDate"]')
        start_date_select.clear()
        start_date_select.send_keys(self.date2)  # fill in the end date
        quiry = self.browser.find_element_by_xpath('//input[@id="btnDaySearch"]')  # locate the search button
        quiry.click()
        time.sleep(15)
        os_path = os.getcwd()  # current working directory
        print(os_path)
        dst_dir = os.path.join(os_path, '平台整体数据汇总')  # folder where the HTML is saved
        if not os.path.isdir(dst_dir):  # create the folder if it does not exist
            os.mkdir(dst_dir)
        file_name = os.path.join(dst_dir, '{}.html'.format('平台整体数据汇总'))
        # The encoding must be specified explicitly
        with open(file_name, 'w', encoding='utf-8') as f:
            f.write(self.browser.page_source)
        print('Page {} download finish!'.format('平台整体数据汇总'))
        time.sleep(3)
def get_report_text(file_name):
    """Parse the saved HTML report and rewrite it as jd_djfb.csv."""
    soup = BeautifulSoup(open(file_name, encoding='utf-8'), 'lxml')
    items = soup.find_all("tr")
    if not items:
        return
    # Build the CSV header from the <th> cells of the first row
    e = ""
    bt = items[0]
    bt1 = bt.find_all("th")
    for x in range(len(bt1)):
        m = bt1[x].get_text()
        e = e + m + ","
    e = e[0:-1].encode('utf-8')
    # print(e)
    # Collect the data rows, de-duplicated through a set
    set1 = set()
    for item in items:
        try:
            a = ""
            sj = item.find_all("td")
            if not sj:  # skip rows without <td> cells (e.g. the header row)
                continue
            for i in range(len(sj)):
                b = sj[i].get_text()
                a = a + b + ","
            a = a[0:-1].encode('utf-8')
            set1.add(a)
        except AttributeError:
            pass
    l1 = list(set1)
    path1 = os.getcwd()
    path = os.path.join(path1, 'jd_djfb.csv')
    if os.path.exists(path):
        os.remove(path)
    # Write the header line, then one line per data row, as UTF-8 bytes
    with open("jd_djfb.csv", "ab+") as f:
        f.truncate()
        f.write(e)
        f.write('\n'.encode('utf-8'))
        for i in range(len(l1)):
            f.write(l1[i])
            f.write('\n'.encode('utf-8'))
def data_to_sql():
    """Load jd_djfb.csv and append it to the MySQL staging table."""
    path = "D:/Program Files/PyCharm Community Edition 2018.2.4/sj_data_spider/jd_djfb.csv"
    d1 = pd.read_csv(path, sep=',', encoding='utf-8')
    engine = create_engine("mysql+pymysql://root:1qaz@localhost:3306/基地电竞风暴?charset=utf8")
    d1.to_sql(name='ptztsj_数据池', con=engine, if_exists='append', index=False, index_label=False)
if __name__ == '__main__':
    yd_djfb_Spyder()
    print(os.getcwd())
    get_report_text("C:/Users/82122/Desktop/办公/合并文件等/平台整体数据汇总/平台整体数据汇总.html")
    data_to_sql()
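As a side note, the manual tr/td walk plus the intermediate CSV can be collapsed into pandas.read_html, which the dependencies already used here (pandas plus lxml) support. The sketch below is an alternative, not the original pipeline; it assumes the saved report page contains exactly one table and reuses the connection string from data_to_sql, and the function name html_report_to_sql is illustrative.

# Alternative sketch: parse the saved HTML report directly with pandas and
# append it to the same MySQL table, skipping jd_djfb.csv entirely.
import pandas as pd
from sqlalchemy import create_engine

def html_report_to_sql(file_name):
    tables = pd.read_html(file_name, encoding='utf-8')  # one DataFrame per <table>
    df = tables[0].drop_duplicates()
    engine = create_engine("mysql+pymysql://root:1qaz@localhost:3306/基地电竞风暴?charset=utf8")
    df.to_sql(name='ptztsj_数据池', con=engine, if_exists='append', index=False)
    return df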