Fix incorrect account-error detection (修复账户错误判断)

Author: RainSun
Date: 2020-10-09 16:09:02 +08:00
parent 27222f6722
commit 7ec91c5073
2 changed files with 20 additions and 24 deletions
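What changed: the crawler no longer scrapes a proxied backend IP out of the portal index page (the old try/except around soup.findAll(name='a')[7] treated any parse failure as wrong credentials) and instead decides whether the login succeeded by checking that the second-to-last link on the page is the 'logout' link, with the jwgls1.cust.edu.cn backend now hard-coded into the webvpn URLs. A minimal standalone sketch of the new check, assuming only what the diff shows (a logged-in portal page whose second-to-last <a> points at 'logout'); the function name is illustrative and not part of the repository:

from bs4 import BeautifulSoup

def looks_logged_in(portal_html: str) -> bool:
    # Mirrors the new check: after a successful CAS login the portal index
    # carries a 'logout' link as its second-to-last anchor.
    anchors = BeautifulSoup(portal_html, 'html.parser').findAll(name='a')
    return len(anchors) >= 2 and anchors[-2].get('href') == 'logout'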

@@ -13,7 +13,6 @@ class Crawler(object):
         self.__password = password
         self.__phone = phone
         self.__session = None
-        self.__ip = None
         self.__student_id = None
         self.__student_name = None
         self.__grade_data = ''
@@ -54,21 +53,19 @@ class Crawler(object):
         r = self.__session.get(
             url='http://portal-cust-edu-cn-s.webvpn.cust.edu.cn:8118/custp/index')
         soup = BeautifulSoup(r.text, 'html.parser')
-        try:
-            self.__ip = soup.findAll(name='a')[7]['href'][7:].split("-")
-        except:
+        if soup.findAll(name='a')[-2]['href'] != 'logout':
             return ('账号或者密码错误', 510)
-        r = self.__session.get(url='http://mysso-cust-edu-cn-s.webvpn.cust.edu.cn:8118/cas/login?service=http://' +
-                               self.__ip[0] + '.' + self.__ip[1] + '.' + self.__ip[2] + '.' + self.__ip[3] + ':8080/welcome', allow_redirects=False)
+        r = self.__session.get(
+            url='http://mysso-cust-edu-cn-s.webvpn.cust.edu.cn:8118/cas/login?service=http://jwgls1.cust.edu.cn:8080/welcome', allow_redirects=False)
         ticket = r.headers['Location'][72:]
-        asp_net_sessionid_param = {'Ticket': ticket, 'Url': 'http://' +
-                                   self.__ip[0] + '.' + self.__ip[1] + '.' + self.__ip[2] + '.' + self.__ip[3] + ':8080/welcome'}
+        asp_net_sessionid_param = {
+            'Ticket': ticket, 'Url': 'http://jwgls1.cust.edu.cn:8080/welcome'}
         asp_net_sessionid_param = base64.b64encode(
             quote(json.dumps(asp_net_sessionid_param)).encode('utf-8')).decode('utf-8')
         asp_net_sessionid_param = {'param': asp_net_sessionid_param}
         headers = {'Content-Type': 'application/json'}
-        r = self.__session.post(url='http://' + self.__ip[0] + '-' + self.__ip[1] + '-' + self.__ip[2] + '-' + self.__ip[3] +
-                                '-8080-p.webvpn.cust.edu.cn:8118/api/LoginApi/LGSSOLocalLogin?sf_request_type=ajax', data=json.dumps(asp_net_sessionid_param), headers=headers)
+        r = self.__session.post(url='http://jwgls1-cust-edu-cn-8080-p.webvpn.cust.edu.cn:8118/api/LoginApi/LGSSOLocalLogin?sf_request_type=ajax',
+                                data=json.dumps(asp_net_sessionid_param), headers=headers)
         data = json.loads(r.content.decode('utf-8'))
         # 提示未建立教务信息
         if data['state'] == 1:
@@ -84,13 +81,13 @@ class Crawler(object):
     def getGrade(self):
         headers = {'Content-Type': 'application/json'}
         r = self.__session.post(
-            url='http://' + self.__ip[0] + '-' + self.__ip[1] + '-' + self.__ip[2] + '-' + self.__ip[3] +
-            '-8080-p.webvpn.cust.edu.cn:8118/api/ClientStudent/QueryService/GradeQueryApi/GetDataByStudent?sf_request_type=ajax',
+            url='http://jwgls1-cust-edu-cn-8080-p.webvpn.cust.edu.cn:8118/api/ClientStudent/QueryService/GradeQueryApi/GetDataByStudent?sf_request_type=ajax',
             data=json.dumps({"param": "JTdCJTIyU2hvd0dyYWRlVHlwZSUyMiUzQTElN0Q=", "__permission": {"MenuID": "4443798E-EB6E-4D88-BFBD-BB0A76FF6BD5",
                              "Operation": 0}, "__log": {"MenuID": "4443798E-EB6E-4D88-BFBD-BB0A76FF6BD5", "Logtype": 6, "Context": "查询"}}),
             headers=headers
         )
         data = json.loads(r.content.decode('utf-8'))
-        print(data)
         if data['state'] != 0:
             return ('教务挂了', 512)
         # 分解数据并重命名
@@ -237,16 +234,14 @@ class Crawler(object):
     def getSchedule(self):
         headers = {'Content-Type': 'application/json'}
         r = self.__session.post(
-            url='http://'+self.__ip[0]+'-'+self.__ip[1]+'-'+self.__ip[2]+'-'+self.__ip[3] +
-            '-8080-p.webvpn.cust.edu.cn:8118/api/ClientStudent/Home/StudentHomeApi/GetHomeCurWeekTime?sf_request_type=ajax',
+            url='http://jwgls1-cust-edu-cn-8080-p.webvpn.cust.edu.cn:8118/api/ClientStudent/Home/StudentHomeApi/GetHomeCurWeekTime?sf_request_type=ajax',
             data=json.dumps({"param": "JTdCJTdE", "__permission": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E",
                              "Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}),
             headers=headers
         )
         CurWeek = json.loads(r.content.decode('utf-8'))['data']['CurWeek']
         r = self.__session.post(
-            url='http://'+self.__ip[0]+'-'+self.__ip[1]+'-'+self.__ip[2]+'-'+self.__ip[3] +
-            '-8080-p.webvpn.cust.edu.cn:8118/api/ClientStudent/Home/StudentHomeApi/QueryStudentScheduleData?sf_request_type=ajax',
+            url='http://jwgls1-cust-edu-cn-8080-p.webvpn.cust.edu.cn:8118/api/ClientStudent/Home/StudentHomeApi/QueryStudentScheduleData?sf_request_type=ajax',
             data=json.dumps({"param": "JTdCJTdE", "__permission": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E",
                              "Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}),
             headers=headers
@@ -336,5 +331,5 @@ class Crawler(object):
                 'grade': self.__grade_data,
                 'schedule': self.__schedule_data
             },
             200
         )
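For context, the LGSSOLocalLogin payload wrapping that this commit keeps unchanged works as follows: the {'Ticket': ..., 'Url': ...} dict is JSON-encoded, percent-quoted, base64-encoded, and posted as {'param': ...} with a JSON content type. A small self-contained sketch of that wrapping (the ticket value is a placeholder, not a real CAS ticket):

import base64
import json
from urllib.parse import quote

def build_login_param(ticket: str, url: str) -> dict:
    # JSON -> percent-encode -> base64, then wrap in {'param': ...},
    # matching the asp_net_sessionid_param handling in the hunk above.
    raw = json.dumps({'Ticket': ticket, 'Url': url})
    encoded = base64.b64encode(quote(raw).encode('utf-8')).decode('utf-8')
    return {'param': encoded}

print(build_login_param('ST-placeholder', 'http://jwgls1.cust.edu.cn:8080/welcome'))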

@@ -9,17 +9,18 @@ class TestCrawler(unittest.TestCase):
         self.assertEqual(c.connection(), ('ok', 200))
     # 测试获取成绩
-    # def test_grade(self):
-    #     self.assertEqual(c.getGrade(), ('ok', 200))
+    def test_grade(self):
+        self.assertEqual(c.getGrade(), ('ok', 200))
     # 测试获取课表
-    # def test_schedule(self):
-    #     self.assertEqual(c.getSchedule(), ('ok', 200))
+    def test_schedule(self):
+        self.assertEqual(c.getSchedule(), ('ok', 200))
     # 测试返回信息
-    # def test_getData(self):
-    #     get_res = c.getData()
-    #     self.assertEqual(get_res['errcode'], '200')
+    def test_getData(self):
+        get_res = c.getData()
+        print(get_res)
+        self.assertEqual(get_res[-1], 200)

 if __name__ == '__main__':
     unittest.main()
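The re-enabled test_getData asserts on get_res[-1] because getData() returns a (payload, status) pair, as the @@ -336,5 +331,5 @@ hunk above shows; roughly:

get_res = ({'grade': '...', 'schedule': '...'}, 200)  # shape returned by getData()
assert get_res[-1] == 200                             # last element is the status code

The suite itself still runs through the standard runner, e.g. python -m unittest from the repository root (the test file's name is not shown in this diff).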