From cc33a6a8d565e8e756bcc5974e6ca7abc7c3c0a1 Mon Sep 17 00:00:00 2001 From: RainSun Date: Sat, 6 Feb 2021 20:04:05 +0800 Subject: [PATCH 1/5] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E6=95=B0=E6=8D=AE?= =?UTF-8?q?=E5=BA=93=E4=BB=A5=E5=8F=8A=E7=88=AC=E8=99=AB?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/db.py | 42 ++++++++++++++++++++++ test/kong.py | 97 ++++++++++++++++++++++++++++++++++++++++++++++++++ test/lesson.py | 86 ++++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 225 insertions(+) create mode 100644 test/db.py create mode 100644 test/kong.py create mode 100644 test/lesson.py diff --git a/test/db.py b/test/db.py new file mode 100644 index 0000000..24cd63e --- /dev/null +++ b/test/db.py @@ -0,0 +1,42 @@ +from pymongo import MongoClient +from bson import ObjectId, json_util + +# 主环境 (生产环境为production,开发环境为development) +ENV = 'development' + +def col(arg): + """ + 获取数据集 + """ + # 链接数据库 + conn = MongoClient('mongodb://192.168.2:27017/cherry') + # 判断环境 + if ENV == 'development': + arg += '_test' + return conn.cherry[arg] + +col('user_y').insert({'cid': 1, 'sid': 2}) +col('group_y').insert([ + { + 'group_id': 3, + 'avatar': 4 + }, + { + 'group_id': 5, + 'avatar': 6 + } +]) +col('link_y').insert([ + { + 'sid': 2, + 'group_id': 3 + }, + { + 'sid': 2, + 'group_id': 5 + } +]) + +print(col('user_y').find({})) +print(col('link_y').find({})) +print(col('group_y').find({})) \ No newline at end of file diff --git a/test/kong.py b/test/kong.py new file mode 100644 index 0000000..94ad5c7 --- /dev/null +++ b/test/kong.py @@ -0,0 +1,97 @@ +from urllib.request import quote, unquote +import base64 +import json + +# 西区第一教学楼 +WESTID = { + # 西区第一教学楼 + 'teaching_building_1': 'f74dac26-c58a-4eae-bc46-ff2055b2de19', + # 西区第二教学楼 + 'teaching_building_2': '201a429b-df6d-489e-9a1d-de7c85f0081e', + # 西区图书馆 + 'library': '3adb80f2-7e27-4058-a60c-fbb32cb36587' +} +EASTID = { + # 东区第一教学楼 + 'teaching_building_1': 
'd91cc53c-a9ad-4be3-becf-7f3ed62e8762', + # 东区第二教学楼 + 'teaching_building_2': 'e14b90bd-0c92-422e-b299-7009118104b9', + # 东区第三教学楼 + 'teaching_building_3': '3534f8ce-f10b-4058-a818-95a116d9bca4', + # 东区前楼 + 'front_building': '6accca4d-b092-4bdc-b2e0-0c1941782eec' +} +SOUTHID = { + # 南区研究生教学楼 + 'graduate_building': '20a207f7-65ef-4ae4-9286-2a2b5a73e1c9', + # 南区实训楼 + 'practical_training_building': 'cb5265e8-84a1-41ed-985b-3920449738aa' +} + +IDLIST = { + 'wtb1': WESTID['teaching_building_1'], + 'wtb2': WESTID['teaching_building_2'], + 'wl': WESTID['library'], + 'etb1': EASTID['teaching_building_1'], + 'etb2': EASTID['teaching_building_2'], + 'etb3': EASTID['teaching_building_3'], + 'efb': EASTID['front_building'], + 'sgb': SOUTHID['graduate_building'], + 'sptb': SOUTHID['practical_training_building'], +} + +COURSELIST = ['0102', '0304', '0506', '0708', '0910', '1112'] + +# JSON转base64 +def btoa(content): + return base64.b64encode(quote(content).encode()) + +# base64转JSON +def atob(content): + return unquote(base64.b64decode(content).decode()) + +def getParam(SJ, JSs, Addr): + try: + checkData(SJ, JSs, Addr) + except Exception as e: + print(e) + raise Exception(str(e)) + param = { + "EmptyRoomParam": { + "SJ": SJ, + "JCs": JSs + }, + "PagingParam": { + "IsPaging": 1, + "Offset": 0, + "Limit": 500, + "Conditions": { + "PropertyParams": [ + { + "Field": "BDJXLXXID", + "Value": IDLIST[Addr] + } + ] + } + } + } + return str(btoa(json.dumps(param)))[2:-1] + +# 校验数据 +def checkData(sj, jss, addr): + # 校验sj + sj_split = sj.split('-') + if len(sj_split) != 3: + raise Exception('SJ错误') + for content in sj_split: + if len(content) == 0: + raise Exception('SJ错误') + # 校验jss + if not isinstance(jss,list): + raise Exception('JSs错误') + for content in jss: + if content not in COURSELIST: + raise Exception('JSs错误') + # 校验addr + if addr not in IDLIST: + raise Exception('Addr错误') \ No newline at end of file diff --git a/test/lesson.py b/test/lesson.py new file mode 100644 index 
0000000..01f94ef --- /dev/null +++ b/test/lesson.py @@ -0,0 +1,86 @@ +from urllib.request import quote, unquote +import base64 +import json + + +def btoa(content): + """ + JSON转base64 + """ + return base64.b64encode(quote(content).encode()) + + +def atob(content): + """ + base64转JSON + """ + return unquote(base64.b64decode(content).decode()) + +print(atob("JTdCJTIyS0JMWCUyMiUzQSUyMjIlMjIlMkMlMjJDWExYJTIyJTNBJTIyMCUyMiUyQyUyMlhOWFElMjIlM0ElMjIyMDIwMiUyMiUyQyUyMkNYSUQlMjIlM0ElMjJkZDcwOWU3Ny0zNGY4LTQzZjctOGVmYS0wODM4ZmQxMzg0MzAlMjIlMkMlMjJDWFpDJTIyJTNBJTIyMCUyMiUyQyUyMkpYQkxYJTIyJTNBJTIyJTIyJTdE")) + +""" +param = { + "pagingParam": { + "IsPaging": false, + "Offset": 0, + "Limit": 50, + "Orders": { + "PropertyParams": [ + { + "Field": "ClassInfo.ProfessionInfoYear.NF", + "IsDesc": true + }, + { + "Field": "ClassInfo.ProfessionInfoYear.Department.DWBH", + "IsDesc": false + }, + { + "Field": "ClassInfo.ProfessionInfoYear.XNZYBH", + "IsDesc": false + }, + { + "Field": "ClassInfo.BJBH", + "IsDesc": false + }, + { + "Field": "XH", + "IsDesc": false + } + ] + }, + "Searchs": { + "PropertyParams": [ + + ] + }, + "Conditions": { + "PropertyParams": [ + { + "Field": "ClassInfo.ProfessionInfoYear.NF", + "Value": "2019", + "Operation": 3, + "Logic": 0 + }, + { + "Field": "ClassInfo.ProfessionInfoYear.Department.BDDWXXID", + "Value": "3c438133-0ede-443f-8fb7-1a7a58c7e104", + "Operation": 3, + "Logic": 0 + }, + { + "Field": "ClassInfo.ProfessionInfoYear.BDZYXXNDID", + "Value": "d70fd0d5-7521-4a31-a068-70f484daa8ef", + "Operation": 3, + "Logic": 0 + }, + { + "Field": "ClassInfo.BDBJXXID", + "Value": "33eb5cf7-1688-44e6-bd09-fc08a2b3e8a5", + "Operation": 3, + "Logic": 0 + } + ] + } + } +} +""" \ No newline at end of file From 3c99c1bf785da817e3ede55d80ec781083572164 Mon Sep 17 00:00:00 2001 From: RainSun Date: Sat, 6 Feb 2021 20:06:07 +0800 Subject: [PATCH 2/5] =?UTF-8?q?=E4=BF=AE=E6=94=B9=E6=95=B0=E6=8D=AE?= =?UTF-8?q?=E5=BA=93=E4=BB=A5=E5=8F=8A=E7=88=AC=E8=99=AB?= MIME-Version: 
1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 104 +++++++++++- lib/allFunction.py | 4 +- lib/crawler.py | 273 ++++++++++++++++++-------------- lib/crawler_test.py | 57 ++++--- lib/db.py | 373 +++++++++++++++++++++++++++++++++++++++++--- lib/utils.py | 28 ++++ 6 files changed, 672 insertions(+), 167 deletions(-) create mode 100644 lib/utils.py diff --git a/README.md b/README.md index 5a8b2ea..ae00130 100644 --- a/README.md +++ b/README.md @@ -41,15 +41,16 @@ deactivate 510:账号或密码错误 511:请填写手机号 512:教务挂了 +513:教务报错 200:OK /api/game/schedule/upload 400:数据校验失败 -401:排名表修改失败 +510:排名表修改失败 /api/game/schedule/get -410: 数据校验失败 -411:排名表获取失败 +400: 数据校验失败 +510:排名表获取失败 # game * /api/game/schedule/upload @@ -60,4 +61,99 @@ deactivate * sign * /api/game/schedule/get * data - * sign \ No newline at end of file + * sign + +# 新版改造 +## 用户组功能 +* [ ] 主进程挂一个我的账号用来进行课表的爬取 +* [ ] 第一次登录要进行账号密码的校验,然后对称加密进数据库 +* [ ] 每天0点开始刷新所有人的课表,以及所有人的成绩 +* [ ] 每个用户组的id是五位数字加字母的组合 +* [ ] 每个用户组成员不设上限 +* [ ] 提供每个用户组下的个人课表以及个人总体课表 +* [ ] 允许设置组级别的课程(全组个人课表及组课表可见) +* [ ] 数据库按课程存储,字段:sid, real_name, course, weeks, weeks_split, teacher, room, is_personal, day, period +## 背景图片上传 +* [ ] 上传图片,id保存在数据库,和个人信息一起 + +## 数据库字段 +### config +* key +* value +### user +* invite_list 邀请内容 +* cid 一卡通号 +* sid 学号 +* real_name 真实姓名 +* pwd 加密后的密码 +* setting 用户设置 + * bg 课表背景图片 +* avatar 头像 +* last_update 最后课程更新时间 +### group +* group_id 唯一标识 +* group_name 用户组名称 +* log_list 操作记录 +* admin_list 管理列表 +* creater_sid 创建者学号 +* avatar 头像 +### link +* cid 一卡通号 +* group_id 用户组id +### user_crouse +* crouse_id 课程id +* sid 学号 +* real_name 真实姓名 +* crouse 课程名 +* weeks 中文周数 +* weeks_split 渲染用周数列表 +* teacher 教师名 +* room 教室名 +* is_personal 是否是自定义课程 +* day 星期 +* period 上课时间 + +### group_crouse +* crouse_id 课程id +* group_id 用户组id +* sid 学号 +* real_name 真实姓名 +* crouse 课程名 +* weeks 中文周数 +* weeks_split 渲染用周数列表 +* teacher 教师名 +* room 教室名 +* day 星期 +* period 上课时间 +## 数据库错误码 +### OK 200 +### insert 1 +insertUser 
'学生信息数据库插入失败', 100 +insertInvite '用户组邀请数据库插入失败', 101 +bindUserGroup '用户与用户组绑定数据库插入失败', 102 +insertGroup '用户组数据库插入失败', 103 +addLog '操作记录数据库插入失败', 104 +groupInsertAdmin '用户组侧加入管理数据库插入失败', 106 +userInsertCrouse '用户自定义课程数据库插入失败', 107 +userInsertAllCrouse '用户所有课程数据库插入失败', 108 +insertRank '排名表数据库插入失败', 109 +groupInsertCrouse '用户组课程数据库插入失败', 110 +### find 3 +findCookie 'cookie数据库查询失败', 300 +findUser '学生信息数据库查询失败', 301 +findGroup '用户组信息数据库查询失败', 302 +findRank '排名表数据库查询失败', 303 +findUserCrouse '用户课程数据库查询失败', 304 +findGroupCrouse '用户组课程数据库查询失败', 305 +### update 4 +updateCookie 'cookie数据库更新失败', 400 +updateAvatar '头像数据库更新失败', 401 +updateBg '背景图数据库更新失败', 402 +### delete 5 +deleteInvite '用户组邀请数据库删除失败', 500 +unbindUserGroup '用户与用户组解绑数据库删除失败', 501 +userDeleteAllCrouse '用户所有课程数据库删除失败', 502 +groupDeleteAdmin '用户组侧移除管理数据库删除失败', 503 +deleteGroup '解散用户组数据库删除失败', 504 +userDeleteCrouse '用户自定义课程数据库删除失败', 505 +groupDeleteCrouse '用户组课程数据库删除失败', 506 \ No newline at end of file diff --git a/lib/allFunction.py b/lib/allFunction.py index c23b21c..4db5c6b 100644 --- a/lib/allFunction.py +++ b/lib/allFunction.py @@ -40,7 +40,7 @@ def manageScheduleUpload(request): add_res = addRank( data_cache['nick'], data_cache['count'], data_cache['time']) return add_res else: - return {'errcode': 400, 'errmsg': '数据校验失败'} + return '数据校验失败', 400 # 处理获取课表游戏排名信息 def manageScheduleGet(request): @@ -54,7 +54,7 @@ def manageScheduleGet(request): get_res = getRank() return get_res else: - return {'errcode': 400, 'errmsg': '数据校验失败'} + return '数据校验失败', 400 # 工具函数 diff --git a/lib/crawler.py b/lib/crawler.py index 7d7cb5c..27b4310 100644 --- a/lib/crawler.py +++ b/lib/crawler.py @@ -5,31 +5,39 @@ import base64 from bs4 import BeautifulSoup import random import sys - +from utils import btoa, signCode class Crawler(object): - def __init__(self, username, password, phone): - self.__username = username - self.__password = password - self.__phone = phone + def __init__(self): self.__session = None - self.__student_id = None 
- self.__student_name = None - self.__grade_data = '' - self.__schedule_data = '' + self.__pwd = None + self.__phone = None + self.cid = None + self.sid = None + self.uid = None + self.real_name = None + # 获取用户基本信息 + def getUserInfo(self): + return { + 'cid': self.cid, + 'pwd': signCode(self.__pwd), + 'sid': self.sid, + 'uid': self.uid, + 'real_name': self.real_name, + } + # 链接教务 ----------------------------------------------------------------------------- def connection(self): try: - self.__session = requests.Session() # 获取统一身份系统的网页 r = self.__session.get( url='https://mysso.cust.edu.cn/cas/login?service=https://jwgls1.cust.edu.cn/welcome') soup = BeautifulSoup(r.text, 'html.parser') execution = soup.find_all(name='input')[6]['value'] formdata = { - 'username': self.__username, - 'password': self.__password, + 'username': self.cid, + 'password': self.__pwd, 'execution': execution, '_eventId': 'submit', 'geolocation': '' @@ -40,7 +48,7 @@ class Crawler(object): flag = soup.find(name='title') if(flag.text == "手机号设置"): if self.__phone == '': - return ('请填写手机号', 511) + return '请填写手机号', 511 execution = soup.find_all(name='input')[1]['value'] formdata = { 'phone': self.__phone, @@ -57,25 +65,29 @@ class Crawler(object): if soup.findAll(name='a')[4]['href'] != 'logout': raise('账号或密码错误') except: - return ('账号或者密码错误', 510) - r = self.__session.get(url='https://mysso.cust.edu.cn/cas/login?service=https://jwgls1.cust.edu.cn/welcome', allow_redirects=False) + return '账号或者密码错误', 510 + r = self.__session.get( + url='https://mysso.cust.edu.cn/cas/login?service=https://jwgls1.cust.edu.cn/welcome', allow_redirects=False) ticket = r.headers['Location'][42:] - asp_net_sessionid_param = {'Ticket': ticket, 'Url': 'https://jwgls1.cust.edu.cn/welcome'} + asp_net_sessionid_param = { + 'Ticket': ticket, 'Url': 'https://jwgls1.cust.edu.cn/welcome'} asp_net_sessionid_param = base64.b64encode( quote(json.dumps(asp_net_sessionid_param)).encode('utf-8')).decode('utf-8') asp_net_sessionid_param 
= {'param': asp_net_sessionid_param} headers = {'Content-Type': 'application/json'} - r = self.__session.post(url='https://jwgls1.cust.edu.cn/api/LoginApi/LGSSOLocalLogin?sf_request_type=ajax', data=json.dumps(asp_net_sessionid_param), headers=headers) + r = self.__session.post(url='https://jwgls1.cust.edu.cn/api/LoginApi/LGSSOLocalLogin?sf_request_type=ajax', + data=json.dumps(asp_net_sessionid_param), headers=headers) data = json.loads(r.content.decode('utf-8')) # 提示未建立教务信息 if data['state'] == 1: - return (data['message'], 513) - self.__student_name = data['data']['StudentDto']['XM'] - self.__student_id = data['data']['StudentDto']['XH'] - return ('ok', 200) + return data['message'], 513 + self.real_name = data['data']['StudentDto']['XM'] + self.sid = data['data']['StudentDto']['XH'] + self.uid = data['data']['StudentDto']['SMXSJBXXID'] + return self.getUserInfo(), 200 except Exception as e: print(e) - return ('教务挂了', 512) + return '教务挂了', 512 # 获取成绩 ----------------------------------------------------------------------------- def getGrade(self): @@ -88,7 +100,7 @@ class Crawler(object): ) data = json.loads(r.content.decode('utf-8')) if data['state'] != 0: - return ('教务挂了', 512) + return '教务挂了', 512 # 分解数据并重命名 total = data['data']['GradeStatistics'] split = data['data']['GradeList'] @@ -222,113 +234,142 @@ class Crawler(object): }) total_grade['total_bixiu_GPA'] = total_bixiu_c_x_g / \ total_bixiu_credit - # 合并数据 - self.__grade_data = { - 'total': total_grade, - 'split': grade_list - } - return ('ok', 200) + return { + 'total_grade': total_grade, + 'grade_list': grade_list + }, 200 - # 获取课表 ----------------------------------------------------------------------------- - def getSchedule(self): + # 获取当前周数 + def getCurWeek(self): headers = {'Content-Type': 'application/json'} r = self.__session.post( url='https://jwgls1.cust.edu.cn/api/ClientStudent/Home/StudentHomeApi/GetHomeCurWeekTime?sf_request_type=ajax', data=json.dumps({"param": "JTdCJTdE", "__permission": 
{"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", - "Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}), + "Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}), headers=headers ) - CurWeek = json.loads(r.content.decode('utf-8'))['data']['CurWeek'] + return json.loads( + r.content.decode('utf-8'))['data']['CurWeek'], 200 + + # 处理课表信息 + def manageSchedule(self, data): + time = ['AM__TimePieces', 'PM__TimePieces', 'EV__TimePieces'] + data = data['data']['AdjustDays'] + lessons = [] + for i in range(7): + for j in range(3): + for k in range(2): + if(data[i][time[j]][k]['Dtos']): + for l in data[i][time[j]][k]['Dtos']: + temp_lesson = { + 'sid': self.sid, + 'real_name': self.real_name, + 'is_personal': False, + 'day': i, + 'period': j*2+k, + 'is_groups_course': False, + } + weeks_split = [0] * 23 + mod = '' + for m in l['Content']: + key = m['Key'] + if m['Key'] == 'Teacher': + key = 'teacher' + elif m['Key'] == 'Lesson': + key = 'course' + elif m['Key'] == 'Room': + key = 'room' + elif m['Key'] == 'Time': + key = 'weeks' + if temp_lesson.get(key): + temp_lesson[key] += ','+m['Name'] + else: + temp_lesson[key] = m['Name'] + temp_weeks = temp_lesson['weeks'] + temp_lesson['weeks'] = temp_weeks[0:int( + temp_weeks.find('周') + 1)] + if '单周' in temp_weeks: + mod = 'single' + elif '双周' in temp_weeks: + mod = 'double' + else: + mod = 'all' + zhou_pos = temp_weeks.find('周') + temp_weeks = temp_weeks[0:zhou_pos] + temp_weeks = temp_weeks.split(',') + index = 0 + for n in temp_weeks: + temp_weeks[index] = n.split('-') + index += 1 + index = 0 + for n in temp_weeks: + if len(n) > 1: + for o in range(int(n[0]), int(n[1]) + 1): + if (o % 2 == 0 and mod == 'double') or (o % 2 == 1 and mod == 'single') or (mod == 'all'): + weeks_split[o] = 1 + else: + weeks_split[o] = 0 + else: + weeks_split[int(n[0])] = 1 + index += 1 + temp_lesson['weeks_split'] = weeks_split + 
lessons.append(temp_lesson) + return lessons, 200 + + # 获取个人课表 + def getOwnSchedule(self): + headers = {'Content-Type': 'application/json'} r = self.__session.post( url='https://jwgls1.cust.edu.cn/api/ClientStudent/Home/StudentHomeApi/QueryStudentScheduleData?sf_request_type=ajax', data=json.dumps({"param": "JTdCJTdE", "__permission": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", - "Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}), + "Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}), headers=headers ) data = json.loads(r.content.decode('utf-8')) if data['state'] != 0: return ('教务挂了', 512) - time = ['AM__TimePieces', 'PM__TimePieces', 'EV__TimePieces'] - data = data['data']['AdjustDays'] - days_per_week = [0] * 23 - lesson = [[0] * 6 for _ in range(7)] - lesson_set = {} - color_set = [0] * 9 - color_used = 9 - for i in range(7): - for j in range(3): - for k in range(2): - if(data[i][time[j]][k]['Dtos']): - lesson[i][j*2+k] = [] - for l in data[i][time[j]][k]['Dtos']: - temp_lesson = {} - Time = [0] * 23 - mod = '' - for m in l['Content']: - if temp_lesson.get(m['Key']): - temp_lesson[m['Key']] += ','+m['Name'] - else: - temp_lesson[m['Key']] = m['Name'] - if lesson_set.get(l['Content'][0]['Name']): - temp_lesson['color'] = lesson_set[l['Content'][0]['Name']] - else: - color = random.randint(0, 8) - while color_set[color]: - if color_used <= 0: - break - color = random.randint(0, 8) - temp_lesson['color'] = color - lesson_set[l['Content'][0]['Name']] = color - color_used -= 1 - color_set[color] = 1 - temp_Time = temp_lesson['Time'] - temp_lesson['Time'] = temp_Time[0:int( - temp_Time.find('周') + 1)] - if '单周' in temp_Time: - mod = 'single' - # temp_Time = temp_Time[0:len(temp_Time)-5] - elif '双周' in temp_Time: - mod = 'double' - # temp_Time = temp_Time[0:len(temp_Time)-5] - else: - mod = 'all' - # temp_Time = temp_Time[0:-1] - zhou_pos = 
temp_Time.find('周') - temp_Time = temp_Time[0:zhou_pos] - temp_Time = temp_Time.split(',') - index = 0 - for n in temp_Time: - temp_Time[index] = n.split('-') - index += 1 - index = 0 - for n in temp_Time: - if len(n) > 1: - for o in range(int(n[0]), int(n[1]) + 1): - if (o % 2 == 0 and mod == 'double') or (o % 2 == 1 and mod == 'single') or (mod == 'all'): - days_per_week[o] = max( - days_per_week[o], i+1) - Time[o] = 1 - else: - Time[o] = 0 - else: - days_per_week[int(n[0])] = max( - days_per_week[int(n[0])], i+1) - Time[int(n[0])] = 1 - index += 1 - temp_lesson['Time_split'] = Time - lesson[i][j*2+k].append(temp_lesson) - self.__schedule_data = {'lesson': lesson, - 'days_per_week': days_per_week, 'cur_week': CurWeek} - return ('ok', 200) + return self.manageSchedule(data) - # 获取信息 ----------------------------------------------------------------------------- - def getData(self): - return ( - { - 'student_id': self.__student_id, - 'student_name': self.__student_name, - 'grade': self.__grade_data, - 'schedule': self.__schedule_data - }, - 200 + # 获取他人课表 + def getOtherschedule(self): + headers = {'Content-Type': 'application/json'} + params = {"KBLX":"2","CXLX":"0","XNXQ":"20202","CXID":self.uid,"CXZC":"0","JXBLX":""} + params = str(btoa(json.dumps(params)))[2:-1] + r = self.__session.post( + url='https://jwgls1.cust.edu.cn/api/ClientStudent/QueryService/OccupyQueryApi/QueryScheduleData?sf_request_type=ajax', + data=json.dumps({"param": params, "__permission": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", + "Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}), + headers=headers ) + data = json.loads(r.content.decode('utf-8')) + if data['state'] != 0: + return ('教务挂了', 512) + return self.manageSchedule(data) + + # 获取cookie + def getCookie(self): + return self.__session.cookies.items(), 200 + + # 设置cookie + def setCookie(self, cookies): + requests.utils.add_dict_to_cookiejar( + self.__session.cookies, 
dict(cookies)) + return 'OK', 200 + + # 默认初始化 + def defaultInit(self, cid, pwd, phone): + self.cid = cid + self.__pwd = pwd + self.__phone = phone + self.__session = requests.Session() + return self.connection() + + # 使用我的cookie初始化,用于快速刷新课表 + def cookieInit(self, cookies, uid, cid, sid, real_name): + self.cid = cid + self.sid = sid + self.uid = uid + self.real_name = real_name + self.__session = requests.Session() + self.setCookie(cookies) + return self.getOtherschedule() \ No newline at end of file diff --git a/lib/crawler_test.py b/lib/crawler_test.py index dfad67b..d6bd7e7 100644 --- a/lib/crawler_test.py +++ b/lib/crawler_test.py @@ -1,25 +1,42 @@ import unittest from crawler import Crawler +import time +# time_start=time.time() +c = Crawler() +userinfo = c.defaultInit('2019002380', '@yuning20010329', '15143211127') +c.getOwnSchedule() +# time_end=time.time() +# print('time cost',time_end-time_start,'s') +userinfo = userinfo[0] +# print(userinfo) +# print(c.getOwnSchedule()) +# print(c.getGrade()) +cookies = c.getCookie() +cookies = cookies[0] +print(cookies) +print(str(cookies)) +cookies = str(cookies) +cookies = eval(cookies) +# time_start=time.time() +print(c.cookieInit(cookies, userinfo['uid'], userinfo['cid'], userinfo['sid'], userinfo['real_name'])) +# time_end=time.time() +# print('time cost',time_end-time_start,'s') +# c = Crawler('2017002372', '623910ert&', '15143211127') +# c = Crawler('2019002380', '@yuning20010329', '15143211127') +# c.connection() +# c.getOtherschedule("dd709e77-34f8-43f7-8efa-0838fd138430") +# class TestCrawler(unittest.TestCase): +# # 测试链接 +# def test_connection(self): +# self.assertEqual(c.connection(), ('ok', 200)) -c = Crawler('2017002372', '623910ert&', '15143211127') +# #测试获取成绩 +# def test_grade(self): +# self.assertEqual(c.getGrade(), ('ok', 200)) -class TestCrawler(unittest.TestCase): - # 测试链接 - def test_connection(self): - self.assertEqual(c.connection(), ('ok', 200)) +# #测试获取课表 +# def test_schedule(self): +# 
self.assertEqual(c.getSchedule(), ('ok', 200)) - #测试获取成绩 - def test_grade(self): - self.assertEqual(c.getGrade(), ('ok', 200)) - - #测试获取课表 - def test_schedule(self): - self.assertEqual(c.getSchedule(), ('ok', 200)) - - #测试返回信息 - def test_getData(self): - get_res = c.getData() - self.assertEqual(get_res[1], 200) - -if __name__ == '__main__': - unittest.main() +# if __name__ == '__main__': +# unittest.main() \ No newline at end of file diff --git a/lib/db.py b/lib/db.py index 53dccbb..e66e500 100644 --- a/lib/db.py +++ b/lib/db.py @@ -2,42 +2,317 @@ from pymongo import MongoClient from bson import ObjectId, json_util # 主环境 (生产环境为production,开发环境为development) -setting = 'production' -env = 'ali' - -# 获取数据集 - +ENV = 'production' def col(arg): - if env == 'coc': - conn = MongoClient('mongodb://coc:qlSfefSor5@0.0.0.0:12236/coc') - else: - conn = MongoClient('mongodb://cherry:fR1jW2xG3bE9@mongo:27017/cherry') - - if setting == 'development': + """ + 获取数据集 + """ + # 链接数据库 + conn = MongoClient('mongodb://cherry:fR1jW2xG3bE9@39.96.28.83:27017/cherry') + # 判断环境 + if ENV == 'development': arg += '_test' - if arg == 'rank': - return conn[env].rank - elif arg == 'rank_test': - return conn[env].rank_test - else: - return False + return conn.cherry[arg] -# 向排名表里增加或覆写数据 +def updateCookie(new_cookie): + """ + 更新cookie + """ + # 字符串化 + new_cookie = str(new_cookie) + try: + col('config').update({'key': 'cookie'}, {'$set': {'value': new_cookie}}, {'upsert': 'true'}) + except Exception as e: + print(e) + return 'cookie数据库更新失败', 400 +def findCookie(): + """ + 获取cookie + """ + try: + res = col('config').find_one({'key': 'cookie'}, {'_id': 0}) + value = res['value'] + cookie = eval(value) + return cookie, 200 + except Exception as e: + print(e) + return 'cookie数据库查询失败', 300 -def addRank(nick, count, time): +def insertUser(userinfo): + """" + 插入新学生信息 + """ + try: + col('user').insert_one(userinfo) + return 'OK', 200 + except Exception as e: + print(e) + return '学生信息数据库插入失败', 100 + +def 
findUser(cid): + """ + 获取学生信息 + 在group_list里边存放了[{'group_id'}] + 取出之后需要遍历组的信息 + """ + try: + userinfo = col('user').aggregate([ + { + '$match': { + 'cid': cid + } + }, + { + '$lookup': { + 'from': 'link', + 'localField': 'sid', + 'foreignField': 'sid', + 'as': 'group_list' + } + }, + { + '$project': { + '_id': 0, + 'group_list._id': 0, + 'group_list.sid': 0 + } + } + ]) + return userinfo, 200 + except Exception as e: + print(e) + return '学生信息数据库查询失败', 301 + +def insertInvite(cid, group_id): + """ + 对用户添加用户组邀请 + 测试:多次重复邀请,用户接受或者删除的时候是否能删掉所有,前端Set去重 + """ + try: + col('user').update({'cid': cid}, {'$push': {'invite_list': group_id}}) + return 'OK', 200 + except Exception as e: + print(e) + return '用户组邀请数据库插入失败', 101 + +def deleteInvite(cid, group_id): + """ + 用户或者管理员删除用户组邀请 + """ + try: + col('user').update({'cid': cid}, {'$pull': {'invite': {'group_id': group_id}}}) + return 'OK', 200 + except Exception as e: + print(e) + return '用户组邀请数据库删除失败', 500 + +def bindUserGroup(sid, group_id): + """ + 绑定用户以及用户组 + """ + try: + col('link').insert_one({'sid': sid, 'group_id': group_id}) + return 'OK', 200 + except Exception as e: + print(e) + return '用户与用户组绑定数据库插入失败', 102 + +def unbindUserGroup(sid, group_id): + """ + 解绑用户以及用户组 + """ + try: + col('link').remove({'sid': sid, 'group_id': group_id}) + return 'OK', 200 + except Exception as e: + print(e) + return '用户与用户组解绑数据库删除失败', 501 + +def updateAvatar(cid, img_id): + """ + 用户更新头像 + """ + try: + col('user').update({'cid': cid}, {'$set': {'avatar': img_id}}) + return 'OK', 200 + except Exception as e: + print(e) + return '头像数据库更新失败', 401 + +def updateBg(cid, img_id): + """ + 用户更新背景图片 + """ + try: + col('user').update({'cid': cid}, {'$set': {'setting.bg': img_id}}) + return 'OK', 200 + except Exception as e: + print(e) + return '背景图数据库更新失败', 402 + +def insertGroup(group_info): + """ + 用户创建用户组 + """ + try: + col('group').insert_one(group_info) + return 'OK', 200 + except Exception as e: + print(e) + return '用户组数据库插入失败', 103 + +def 
findGroup(group_id): + """ + 查询用户组信息,附带用户组中用户信息 + """ + try: + groupinfo = col('group').aggregate([ + { + '$match': { + 'group_id': group_id + } + }, + { + '$lookup': { + 'from': 'link', + 'localField': 'group_id', + 'foreignField': 'group_id', + 'as': 'user_list' + } + }, + { + '$lookup': { + 'from': 'user', + 'localField': 'cid', + 'foreignField': 'cid', + 'as': 'user_list' + } + }, + { + '$project': { + '_id': 0, + 'user_list._id': 0, + 'user_list.invite_list': 0, + 'user_list.cid':0, + 'user_list.pwd':0, + 'user_list.setting':0, + } + } + ]) + return groupinfo, 200 + except Exception as e: + print(e) + return '用户组信息数据库查询失败', 302 + +def deleteGroup(group_id): + """ + 解散用户组 + """ + try: + col('group').remove({'group_id': group_id}) + return 'OK', 200 + except Exception as e: + print(e) + return '解散用户组数据库删除失败', 504 + +def addLog(group_id, log): + """ + 向用户组中添加操作记录 + """ + try: + col('group').update({'group_id': group_id}, {'$push': {'log_list': log}}) + return 'OK', 200 + except Exception as e: + print(e) + return '操作记录数据库插入失败', 104 + +def groupInsertAdmin(cid, group_id): + """ + 向管理组中添加管理 + """ + try: + col('group').update({'group_id': group_id}, {'$push': {'admin_list': cid}}) + return 'OK', 200 + except Exception as e: + print(e) + return '用户组侧加入管理数据库插入失败', 106 + +def groupDeleteAdmin(cid, group_id): + """ + 从管理组中删除管理 + """ + try: + col('group').update({'group_id': group_id}, {'$pull': {'admin_list': cid}}) + return 'OK', 200 + except Exception as e: + print(e) + return '用户组侧移除管理数据库删除失败', 503 + +def userInsertCrouse(crouse): + """ + 用户添加自定义课程 + """ + try: + col('user_crouse').insert_one(crouse) + return 'OK', 200 + except Exception as e: + print(e) + return '用户自定义课程数据库插入失败', 107 + +def userDeleteCrouse(crouse_id): + """ + 用户删除自定义课程 + 加入是否自定义的判断,防止用户改前端导致删除非定义课程 + """ + try: + col('user_crouse').remove({'crouse_id': crouse_id, 'is_personal': False}) + return 'OK', 200 + except Exception as e: + print(e) + return '用户自定义课程数据库删除失败', 505 + +def 
userDeleteAllCrouse(sid): + """ + 用户删除所有课程(刷新课表用) + 不删除自定义课程 + """ + try: + col('user_crouse').remove({'sid': sid, 'is_personal': False}) + return 'OK', 200 + except Exception as e: + print(e) + return '用户所有课程数据库删除失败', 502 + +def userInsertAllCrouse(crouses): + """ + 用户批量添加课表(刷新课表用) + """ + try: + col('user_crouse').insert_many(crouses) + return 'OK', 200 + except Exception as e: + print(e) + return '用户所有课程数据库插入失败', 108 + +def insertRank(nick, count, time): + """ + 向排名表里增加或者覆写数据 + """ try: col('rank').update({"cid": cid}, {'$setOnInsert': {"nick": nick}, '$set': { "count": count, "time": time}} , {'upsert': 'true'}) # col('rank').insert_one({"count":count,"time":time,"nick":nick}) + return 'OK', 200 except Exception as e: # 失败 - return {'errcode': 401, 'errmsg': '排名表修改失败'} - return {'errcode': 200, 'errmsg': 'ok'} + return '排名表数据库插入失败', 109 -# 获取排名表所有信息(除了id) -def getRank(): +def findRank(): + """ + 获取所有排名信息 + """ time_rank = [] count_rank = [] try: @@ -45,7 +320,55 @@ def getRank(): time_rank.append(i) for i in col('rank').find({}, {"_id": 0}).sort([("count", 1), ("time", 1)]).limit(10): count_rank.append(i) + return {'time_rank': time_rank, 'count_rank': count_rank}, 200 except Exception as e: print(e) - return {'errcode': 411, 'errmsg': '排名表获取失败'} - return {'errcode': 200, 'time_rank': time_rank, 'count_rank': count_rank, 'errmsg': 'ok'} + +def findUserCrouse(sid): + """ + 获取所有用户课程 + """ + crouse_list = [] + try: + for i in col('user_crouse').find({'sid': sid}, {'_id': 0}): + crouse_list.append(i) + return crouse_list, 200 + except Exception as e: + print(e) + return '用户课程数据库查询失败', 304 + +def groupInsertCrouse(crouse): + """ + 用户组添加课程 + """ + try: + col('group_crouse').insert_one(crouse) + return 'OK', 200 + except Exception as e: + print(e) + return '用户组课程数据库插入失败', 110 + +def groupDeleteCrouse(crouse_id): + """ + 用户组删除课程 + """ + try: + col('group_crouse').remove({'crouse_id': crouse_id}) + return 'OK', 200 + except Exception as e: + 
print(e) + return '用户组课程数据库删除失败', 506 + +def findGroupCrouse(group_id): + """ + 获取所有指定用户组课程 + """ + crouse_list = [] + try: + for i in col('group_crouse').find({'group_id': group_id}): + crouse_list.append(i) + return crouse_list, 200 + except Exception as e: + print(e) + return '用户组课程数据库查询失败', 305 \ No newline at end of file diff --git a/lib/utils.py b/lib/utils.py new file mode 100644 index 0000000..4471f7d --- /dev/null +++ b/lib/utils.py @@ -0,0 +1,28 @@ +from urllib.request import quote, unquote +import base64 +import json +from hashlib import md5 + +def btoa(content): + """ + JSON转base64 + """ + return base64.b64encode(quote(content).encode()) + + +def atob(content): + """ + base64转JSON + """ + return unquote(base64.b64decode(content).decode()) + +def signCode(code): + """ + 给str签名,用于加密密码 + """ + d = str(code) + d = d.replace(' ', '') + md = md5() + md.update(d.encode('utf-8')) + r = md.hexdigest().upper() + return r \ No newline at end of file From d79dec478b699767cee3833cf80509515d0c98cd Mon Sep 17 00:00:00 2001 From: RainSun Date: Mon, 8 Feb 2021 21:09:02 +0800 Subject: [PATCH 3/5] =?UTF-8?q?=E6=9B=B4=E6=96=B0=E7=99=BB=E5=BD=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 23 +++++++- lib/functions.py | 0 lib/process/__init__.py | 0 lib/process/login.py | 101 ++++++++++++++++++++++++++++++++++ lib/public/__init__.py | 0 lib/{ => public}/crawler.py | 14 ++--- lib/{ => public}/db.py | 28 +++++++++- lib/{ => public}/utils.py | 13 ++++- {lib => test}/cache.py | 0 {lib => test}/crawler_test.py | 0 10 files changed, 169 insertions(+), 10 deletions(-) create mode 100644 lib/functions.py create mode 100644 lib/process/__init__.py create mode 100644 lib/process/login.py create mode 100644 lib/public/__init__.py rename lib/{ => public}/crawler.py (98%) rename lib/{ => public}/db.py (93%) rename lib/{ => public}/utils.py (72%) rename {lib => test}/cache.py (100%) rename {lib => test}/crawler_test.py (100%) 
diff --git a/README.md b/README.md index ae00130..cbd9ae4 100644 --- a/README.md +++ b/README.md @@ -73,14 +73,23 @@ deactivate * [ ] 提供每个用户组下的个人课表以及个人总体课表 * [ ] 允许设置组级别的课程(全组个人课表及组课表可见) * [ ] 数据库按课程存储,字段:sid, real_name, course, weeks, weeks_split, teacher, room, is_personal, day, period +* [ ] 登录的时候返回所有必要信息,剩下信息单独返回 +* [ ] 组信息 +* [ ] 刷新课表(使用cookie加速 +* [ ] 刷新成绩(使用账号密码 ## 背景图片上传 * [ ] 上传图片,id保存在数据库,和个人信息一起 ## 数据库字段 +## log +* time +* type +* value ### config * key * value ### user +* uid 用户唯一识别码,教务下发 * invite_list 邀请内容 * cid 一卡通号 * sid 学号 @@ -138,6 +147,7 @@ userInsertCrouse '用户自定义课程数据库插入失败', 107 userInsertAllCrouse '用户所有课程数据库插入失败', 108 insertRank '排名表数据库插入失败', 109 groupInsertCrouse '用户组课程数据库插入失败', 110 +insertLog '系统操作记录数据库插入失败', 111 ### find 3 findCookie 'cookie数据库查询失败', 300 findUser '学生信息数据库查询失败', 301 @@ -145,6 +155,7 @@ findGroup '用户组信息数据库查询失败', 302 findRank '排名表数据库查询失败', 303 findUserCrouse '用户课程数据库查询失败', 304 findGroupCrouse '用户组课程数据库查询失败', 305 +findLog '系统操作记录数据库查询失败', 306 ### update 4 updateCookie 'cookie数据库更新失败', 400 updateAvatar '头像数据库更新失败', 401 @@ -156,4 +167,14 @@ userDeleteAllCrouse '用户所有课程数据库删除失败', 502 groupDeleteAdmin '用户组侧移除管理数据库删除失败', 503 deleteGroup '解散用户组数据库删除失败', 504 userDeleteCrouse '用户自定义课程数据库删除失败', 505 -groupDeleteCrouse '用户组课程数据库删除失败', 506 \ No newline at end of file +groupDeleteCrouse '用户组课程数据库删除失败', 506 + +## 接口错误代码 +### /login +400 数据不合法 +510 学生信息数据库查询失败 +511 账号或密码错误 +512 用户组信息数据库查询失败 +513 请填写手机号 +514 教务返回的错误 +515 教务挂了 \ No newline at end of file diff --git a/lib/functions.py b/lib/functions.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/process/__init__.py b/lib/process/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/process/login.py b/lib/process/login.py new file mode 100644 index 0000000..6995149 --- /dev/null +++ b/lib/process/login.py @@ -0,0 +1,101 @@ +from ..public.db import findUser +from ..public.utils import checkData, signCode, findGroup +from ..public.crawler import Crawler +import json 
+ +def check(request): + """ + 校验数据 + 目标内容 cid,pwd,phone + """ + try: + data = request.json + if not checkData(data): + raise Exception + if not data.__contains__('cid'): + raise Exception + if not data.__contains__('pwd'): + raise Exception + return data, 200 + except Exception as e: + print(e) + return '数据不合法', 400 + +def manageLogin(request): + """ + 用户登录 + """ + # 校验数据 + check_res = check(request) + # 抛出错误 + if check_res[-1] != 200: + return check_res + data = check_res[0] + # 查找用户 + find_res = findUser(data['cid']) + # 抛出错误 + if find_res[-1] != 200: + return find_res[0], 510 + user_info = find_res[0] + # 无用户进行注册 + if not user_info: + return sign(data['cid'], data['pwd']) + # 校验密码 + if user_info['pwd'] != signCode(data['pwd']): + return '账号或密码错误', 511 + # 接下来需要返回组信息,课表信息以及成绩信息 + + # 组信息 + find_res = manageFindGroup(user_info['group_list']) + if find_res[-1] != 200: + return find_res + user_info['group_list'] = find_res[0] + + # 课表信息以及成绩信息 + crawler_res = manageCrawler(user_info['cid'], user_info['pwd'], user_info.get('phone')) + if crawler_res[-1] != 200: + return crawler_res + + +def manageFindGroup(group_list): + """ + 根据组id查询组信息 + """ + list = [] + for group_id in group_list: + find_res = findGroup(group_id) + if find_res[-1] != 200: + return find_res[0], 512 + list.append(find_res[0]) + return list, 200 + +def manageCrawler(cid, pwd, phone): + """ + 处理爬虫,返回课表和成绩 + """ + try: + c = Crawler() + init_res = c.defaultInit(cid, pwd, phone) + if init_res[-1] != 200: + return init_res + get_res = c.getOwnSchedule() + if get_res[-1] != 200: + return get_res + schedule = get_res[0] + get_res = c.getGrade() + if get_res[-1] != 200: + return get_res + grade = get_res[0] + return { + 'grade': grade, + 'schedule': schedule, + }, 200 + except Exception as e: + print(e) + return '教务挂了', 515 + +def manageSign(cid, pwd, phone): + """ + 用户注册 + """ + pass \ No newline at end of file diff --git a/lib/public/__init__.py b/lib/public/__init__.py new file mode 100644 index 
0000000..e69de29 diff --git a/lib/crawler.py b/lib/public/crawler.py similarity index 98% rename from lib/crawler.py rename to lib/public/crawler.py index 27b4310..694988e 100644 --- a/lib/crawler.py +++ b/lib/public/crawler.py @@ -48,7 +48,7 @@ class Crawler(object): flag = soup.find(name='title') if(flag.text == "手机号设置"): if self.__phone == '': - return '请填写手机号', 511 + return '请填写手机号', 513 execution = soup.find_all(name='input')[1]['value'] formdata = { 'phone': self.__phone, @@ -65,7 +65,7 @@ class Crawler(object): if soup.findAll(name='a')[4]['href'] != 'logout': raise('账号或密码错误') except: - return '账号或者密码错误', 510 + return '账号或者密码错误', 511 r = self.__session.get( url='https://mysso.cust.edu.cn/cas/login?service=https://jwgls1.cust.edu.cn/welcome', allow_redirects=False) ticket = r.headers['Location'][42:] @@ -80,14 +80,14 @@ class Crawler(object): data = json.loads(r.content.decode('utf-8')) # 提示未建立教务信息 if data['state'] == 1: - return data['message'], 513 + return data['message'], 514 self.real_name = data['data']['StudentDto']['XM'] self.sid = data['data']['StudentDto']['XH'] self.uid = data['data']['StudentDto']['SMXSJBXXID'] return self.getUserInfo(), 200 except Exception as e: print(e) - return '教务挂了', 512 + return '教务挂了', 515 # 获取成绩 ----------------------------------------------------------------------------- def getGrade(self): @@ -100,7 +100,7 @@ class Crawler(object): ) data = json.loads(r.content.decode('utf-8')) if data['state'] != 0: - return '教务挂了', 512 + return '教务挂了', 515 # 分解数据并重命名 total = data['data']['GradeStatistics'] split = data['data']['GradeList'] @@ -327,7 +327,7 @@ class Crawler(object): ) data = json.loads(r.content.decode('utf-8')) if data['state'] != 0: - return ('教务挂了', 512) + return ('教务挂了', 515) return self.manageSchedule(data) # 获取他人课表 @@ -343,7 +343,7 @@ class Crawler(object): ) data = json.loads(r.content.decode('utf-8')) if data['state'] != 0: - return ('教务挂了', 512) + return ('教务挂了', 515) return self.manageSchedule(data) # 
获取cookie diff --git a/lib/db.py b/lib/public/db.py similarity index 93% rename from lib/db.py rename to lib/public/db.py index e66e500..8e8d71a 100644 --- a/lib/db.py +++ b/lib/public/db.py @@ -198,6 +198,7 @@ def findGroup(group_id): 'user_list.cid':0, 'user_list.pwd':0, 'user_list.setting':0, + 'user_list.last_update': 0, } } ]) @@ -371,4 +372,29 @@ def findGroupCrouse(group_id): return crouse_list, 200 except Exception as e: print(e) - return '用户组课程数据库查询失败', 305 \ No newline at end of file + return '用户组课程数据库查询失败', 305 + +def insertLog(log): + """ + 插入操作记录 + """ + try: + col('log').insert_one(log) + return 'OK', 200 + except Exception as e: + print(e) + return '系统操作记录数据库插入失败', 111 + +def findLog(has_read_page): + """ + 查询操作记录,默认50条分割 + """ + skip = 50 * (has_read_page - 1) + log_list = [] + try: + for i in col('log').find({}).limit(50).skip(skip): + log_list.append(i) + return log_list, 200 + except Exception as e: + print(e) + return '系统操作记录数据库查询失败', 306 \ No newline at end of file diff --git a/lib/utils.py b/lib/public/utils.py similarity index 72% rename from lib/utils.py rename to lib/public/utils.py index 4471f7d..3fcbd2f 100644 --- a/lib/utils.py +++ b/lib/public/utils.py @@ -25,4 +25,15 @@ def signCode(code): md = md5() md.update(d.encode('utf-8')) r = md.hexdigest().upper() - return r \ No newline at end of file + return r + +def checkData(data): + """ + MD5校验数据 + """ + d = data.copy() + try: + d.pop('sign') + except KeyError: + pass + return data['sign'] == signCode(d) \ No newline at end of file diff --git a/lib/cache.py b/test/cache.py similarity index 100% rename from lib/cache.py rename to test/cache.py diff --git a/lib/crawler_test.py b/test/crawler_test.py similarity index 100% rename from lib/crawler_test.py rename to test/crawler_test.py From 6c6fd026573f8fbee9463cd6b0476be21ddd1320 Mon Sep 17 00:00:00 2001 From: RainSun Date: Wed, 17 Feb 2021 15:28:07 +0800 Subject: [PATCH 4/5] =?UTF-8?q?=E5=AE=8C=E6=88=90=E7=99=BB=E5=BD=95?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 7 +- lib/process/login.py | 150 ++++++++++++++++++++++++--- {test => lib/public}/crawler_test.py | 19 ++-- lib/public/db.py | 12 ++- test/some.py | 4 + 5 files changed, 163 insertions(+), 29 deletions(-) rename {test => lib/public}/crawler_test.py (75%) create mode 100644 test/some.py diff --git a/README.md b/README.md index cbd9ae4..070b2a3 100644 --- a/README.md +++ b/README.md @@ -160,6 +160,7 @@ findLog '系统操作记录数据库查询失败', 306 updateCookie 'cookie数据库更新失败', 400 updateAvatar '头像数据库更新失败', 401 updateBg '背景图数据库更新失败', 402 +updateLastUpdate '数据更新时间数据库更新失败', 403 ### delete 5 deleteInvite '用户组邀请数据库删除失败', 500 unbindUserGroup '用户与用户组解绑数据库删除失败', 501 @@ -177,4 +178,8 @@ groupDeleteCrouse '用户组课程数据库删除失败', 506 512 用户组信息数据库查询失败 513 请填写手机号 514 教务返回的错误 -515 教务挂了 \ No newline at end of file +515 教务挂了 +516 用户所有课程数据库删除失败 +517 用户所有课程数据库插入失败 +518 用户课程数据库查询失败 +519 用户组课程数据库查询失败 \ No newline at end of file diff --git a/lib/process/login.py b/lib/process/login.py index 6995149..d5a3110 100644 --- a/lib/process/login.py +++ b/lib/process/login.py @@ -1,7 +1,9 @@ -from ..public.db import findUser +from ..public.db import updateLastUpdate, insertUser, findUser, userDeleteAllCrouse, userInsertAllCrouse, findUserCrouse, findGroupCrouse from ..public.utils import checkData, signCode, findGroup from ..public.crawler import Crawler import json +import time + def check(request): """ @@ -21,6 +23,7 @@ def check(request): print(e) return '数据不合法', 400 + def manageLogin(request): """ 用户登录 @@ -40,21 +43,8 @@ def manageLogin(request): # 无用户进行注册 if not user_info: return sign(data['cid'], data['pwd']) - # 校验密码 - if user_info['pwd'] != signCode(data['pwd']): - return '账号或密码错误', 511 - # 接下来需要返回组信息,课表信息以及成绩信息 - - # 组信息 - find_res = manageFindGroup(user_info['group_list']) - if find_res[-1] != 200: - return find_res - user_info['group_list'] = find_res[0] - - # 课表信息以及成绩信息 - crawler_res = manageCrawler(user_info['cid'], 
+            'user_info': init_res[0],
+    user_info['group_list'] = group_id_list = []
+        col('user').update({'sid': sid}, {'$set': {'last_update': now}}); return 'OK', 200
b/lib/process/login.py @@ -168,7 +168,7 @@ def manageSign(cid, pwd, phone): 'user_info': user_info, 'schedule': schedule, 'grade': grade, - } + }, 200 def login(data, user_info): @@ -214,4 +214,4 @@ def login(data, user_info): 'user_info': user_info, 'schedule': schedule, 'grade': grade, - } + }, 200