完成登录

This commit is contained in:
RainSun 2021-02-17 15:28:07 +08:00
parent d79dec478b
commit 6c6fd02657
5 changed files with 163 additions and 29 deletions

View File

@ -160,6 +160,7 @@ findLog '系统操作记录数据库查询失败', 306
updateCookie 'cookie数据库更新失败', 400 updateCookie 'cookie数据库更新失败', 400
updateAvatar '头像数据库更新失败', 401 updateAvatar '头像数据库更新失败', 401
updateBg '背景图数据库更新失败', 402 updateBg '背景图数据库更新失败', 402
updateLastUpdate '数据更新时间数据库更新失败', 403
### delete 5 ### delete 5
deleteInvite '用户组邀请数据库删除失败', 500 deleteInvite '用户组邀请数据库删除失败', 500
unbindUserGroup '用户与用户组解绑数据库删除失败', 501 unbindUserGroup '用户与用户组解绑数据库删除失败', 501
@ -177,4 +178,8 @@ groupDeleteCrouse '用户组课程数据库删除失败', 506
512 用户组信息数据库查询失败 512 用户组信息数据库查询失败
513 请填写手机号 513 请填写手机号
514 教务返回的错误 514 教务返回的错误
515 教务挂了 515 教务挂了
516 用户所有课程数据库删除失败
517 用户所有课程数据库插入失败
518 用户课程数据库查询失败
519 用户组课程数据库查询失败

View File

@ -1,7 +1,9 @@
from ..public.db import findUser from ..public.db import updateLastUpdate, insertUser, findUser, userDeleteAllCrouse, userInsertAllCrouse, findUserCrouse, findGroupCrouse
from ..public.utils import checkData, signCode, findGroup from ..public.utils import checkData, signCode, findGroup
from ..public.crawler import Crawler from ..public.crawler import Crawler
import json import json
import time
def check(request): def check(request):
""" """
@ -21,6 +23,7 @@ def check(request):
print(e) print(e)
return '数据不合法', 400 return '数据不合法', 400
def manageLogin(request): def manageLogin(request):
""" """
用户登录 用户登录
@ -40,21 +43,8 @@ def manageLogin(request):
# 无用户进行注册 # 无用户进行注册
if not user_info: if not user_info:
return sign(data['cid'], data['pwd']) return sign(data['cid'], data['pwd'])
# 校验密码 # 存在用户进行登录
if user_info['pwd'] != signCode(data['pwd']): return login(data, user_info)
return '账号或密码错误', 511
# 接下来需要返回组信息,课表信息以及成绩信息
# 组信息
find_res = manageFindGroup(user_info['group_list'])
if find_res[-1] != 200:
return find_res
user_info['group_list'] = find_res[0]
# 课表信息以及成绩信息
crawler_res = manageCrawler(user_info['cid'], user_info['pwd'], user_info.get('phone'))
if crawler_res[-1] != 200:
return crawler_res
def manageFindGroup(group_list): def manageFindGroup(group_list):
@ -69,6 +59,7 @@ def manageFindGroup(group_list):
list.append(find_res[0]) list.append(find_res[0])
return list, 200 return list, 200
def manageCrawler(cid, pwd, phone): def manageCrawler(cid, pwd, phone):
""" """
处理爬虫返回课表和成绩 处理爬虫返回课表和成绩
@ -87,6 +78,7 @@ def manageCrawler(cid, pwd, phone):
return get_res return get_res
grade = get_res[0] grade = get_res[0]
return { return {
'user_info': init_res[0]
'grade': grade, 'grade': grade,
'schedule': schedule, 'schedule': schedule,
}, 200 }, 200
@ -94,8 +86,132 @@ def manageCrawler(cid, pwd, phone):
print(e) print(e)
return '教务挂了', 515 return '教务挂了', 515
def manageNewCrouse(sid, schedule):
    """
    Replace all of a user's stored courses with a freshly crawled schedule.

    :param sid: student id identifying the user
    :param schedule: iterable of course documents to insert
    :return: ('OK', 200) on success, or (error_message, error_code) on failure
    """
    # Drop every existing course for this user before re-inserting,
    # so the stored schedule exactly mirrors the crawled one.
    del_res = userDeleteAllCrouse(sid)
    if del_res[-1] != 200:
        return '用户所有课程数据库删除失败', 516
    # Insert the freshly crawled schedule.
    ins_res = userInsertAllCrouse(schedule)
    if ins_res[-1] != 200:
        return '用户所有课程数据库插入失败', 517
    # Record the refresh time. updateLastUpdate yields an (error, code)
    # tuple on failure; propagate that instead of silently ignoring it.
    upd_res = updateLastUpdate(sid, time.time())
    if upd_res is not None and upd_res[-1] != 200:
        return upd_res
    return 'OK', 200
def getAllUserCrouse(group_id_list, sid):
    """
    Collect every course visible to a user: their personal courses plus
    the courses of each group they belong to.

    :param group_id_list: ids of the groups the user is a member of
    :param sid: student id identifying the user
    :return: ({'group_crouse': {...}, 'user_crouse': [...]}, 200) on
        success, or (error_message, error_code) on failure
    """
    personal_res = findUserCrouse(sid)
    if personal_res[-1] != 200:
        return '用户课程数据库查询失败', 518
    # Look up each group's courses, keyed by group id.
    crouse_by_group = {}
    for gid in group_id_list:
        group_res = findGroupCrouse(gid)
        if group_res[-1] != 200:
            return '用户组课程数据库查询失败', 519
        crouse_by_group[gid] = group_res[0]
    return {
        'group_crouse': crouse_by_group,
        'user_crouse': personal_res[0],
    }, 200
def manageSign(cid, pwd, phone):
    """
    Register a new user: crawl their info, schedule and grades, persist the
    user record and courses, and return the same payload as a login.

    :param cid: campus account id
    :param pwd: raw (un-hashed) password
    :param phone: phone number used by the crawler
    :return: (payload_dict, 200) on success, or (error_message, code)
    """
    # Crawl schedule, grades and basic user info with the raw credentials.
    # (The original used an undefined `user_info` here; the function's own
    # parameters are the only credentials available at registration time.)
    crawler_res = manageCrawler(cid, pwd, phone)
    if crawler_res[-1] != 200:
        return crawler_res
    schedule = crawler_res[0]['schedule']
    grade = crawler_res[0]['grade']
    user_info = crawler_res[0]['user_info']
    # Fill in the remaining account fields before insertion.
    user_info['pwd'] = signCode(pwd)
    user_info['invite_list'] = []
    user_info['setting'] = {
        'bg': ''
    }
    user_info['avatar'] = 'default'
    user_info['last_update'] = time.time()
    # NOTE(review): insertUser's result is not checked here, matching the
    # original; confirm whether it returns a status tuple on success.
    ins_res = insertUser(user_info)
    # Strip fields that must not be sent back to the client.
    user_info.pop('_id')
    user_info.pop('pwd')
    user_info['group_list'] = []
    # Persist the crawled courses.
    crouse_res = manageNewCrouse(user_info['sid'], schedule)
    if crouse_res[-1] != 200:
        return crouse_res
    # A brand-new user belongs to no groups yet, so query with an empty
    # group list (the original referenced an undefined `group_id_list`).
    find_res = getAllUserCrouse([], user_info['sid'])
    if find_res[-1] != 200:
        return find_res
    # Success payload carries the 200 status code, like every other path.
    return {
        'user_info': user_info,
        'schedule': find_res[0],
        'grade': grade,
    }, 200
def login(data, user_info):
    """
    Log an existing user in and assemble the response payload.

    :param data: request data containing at least 'pwd'
    :param user_info: the user's stored document (contains hashed 'pwd',
        'cid', 'sid', 'group_list', optionally 'phone')
    :return: (payload_dict, 200) on success, or (error_message, code)
    """
    # Verify the supplied password against the stored hash.
    if user_info['pwd'] != signCode(data['pwd']):
        return '账号或密码错误', 511
    # Expand the stored group ids into full group documents.
    group_id_list = user_info['group_list']
    find_res = manageFindGroup(group_id_list)
    if find_res[-1] != 200:
        return find_res
    user_info['group_list'] = find_res[0]
    # Re-crawl the schedule and grades.
    # NOTE(review): user_info['pwd'] holds the signCode() hash, not the raw
    # password — confirm the crawler accepts the hashed value; otherwise
    # data['pwd'] should be passed here.
    crawler_res = manageCrawler(
        user_info['cid'], user_info['pwd'], user_info.get('phone'))
    if crawler_res[-1] != 200:
        return crawler_res
    schedule = crawler_res[0]['schedule']
    grade = crawler_res[0]['grade']
    # Replace the stored courses with the fresh schedule.
    ins_res = manageNewCrouse(user_info['sid'], schedule)
    if ins_res[-1] != 200:
        return ins_res
    # Gather personal + group courses for the response.
    find_res = getAllUserCrouse(group_id_list, user_info['sid'])
    if find_res[-1] != 200:
        return find_res
    # Never leak the password hash to the client.
    user_info.pop('pwd')
    # Success payload must carry the 200 code — callers check result[-1].
    return {
        'user_info': user_info,
        'schedule': find_res[0],
        'grade': grade,
    }, 200

View File

@ -4,21 +4,22 @@ import time
# time_start=time.time() # time_start=time.time()
c = Crawler() c = Crawler()
userinfo = c.defaultInit('2019002380', '@yuning20010329', '15143211127') userinfo = c.defaultInit('2019002380', '@yuning20010329', '15143211127')
c.getOwnSchedule() print(userinfo[-1])
print(c.getOwnSchedule())
# time_end=time.time() # time_end=time.time()
# print('time cost',time_end-time_start,'s') # print('time cost',time_end-time_start,'s')
userinfo = userinfo[0] # userinfo = userinfo[0]
# print(userinfo) # print(userinfo)
# print(c.getOwnSchedule()) # print(c.getOwnSchedule())
# print(c.getGrade()) # print(c.getGrade())
cookies = c.getCookie() # cookies = c.getCookie()
cookies = cookies[0] # cookies = cookies[0]
print(cookies) # print(cookies)
print(str(cookies)) # print(str(cookies))
cookies = str(cookies) # cookies = str(cookies)
cookies = eval(cookies) # cookies = eval(cookies)
# time_start=time.time() # time_start=time.time()
print(c.cookieInit(cookies, userinfo['uid'], userinfo['cid'], userinfo['sid'], userinfo['real_name'])) # print(c.cookieInit(cookies, userinfo['uid'], userinfo['cid'], userinfo['sid'], userinfo['real_name']))
# time_end=time.time() # time_end=time.time()
# print('time cost',time_end-time_start,'s') # print('time cost',time_end-time_start,'s')
# c = Crawler('2017002372', '623910ert&', '15143211127') # c = Crawler('2017002372', '623910ert&', '15143211127')

View File

@ -1,6 +1,5 @@
from pymongo import MongoClient from pymongo import MongoClient
from bson import ObjectId, json_util from bson import ObjectId, json_util
# 主环境 (生产环境为production开发环境为development) # 主环境 (生产环境为production开发环境为development)
ENV = 'production' ENV = 'production'
@ -40,6 +39,15 @@ def findCookie():
print(e) print(e)
return 'cookie数据库查询失败', 300 return 'cookie数据库查询失败', 300
def updateLastUpdate(sid, now):
    """
    Persist the timestamp of the user's last course refresh.

    :param sid: student id identifying the user document
    :param now: unix timestamp (seconds) to store in ``last_update``
    :return: ('ok', 200) on success, or (error_message, 403) on failure —
        previously this returned None on success, which callers using the
        project's ``result[-1] != 200`` convention could not check
    """
    try:
        # NOTE(review): Collection.update is deprecated in pymongo 3 and
        # removed in pymongo 4 — migrate to update_one when possible.
        col('user').update({'sid': sid}, {'$set': {'last_update': now}})
        return 'ok', 200
    except Exception as e:
        print(e)
        return '数据更新时间数据库更新失败', 403
def insertUser(userinfo): def insertUser(userinfo):
"""" """"
插入新学生信息 插入新学生信息
@ -330,7 +338,7 @@ def findUserCrouse(sid):
""" """
获取所有用户课程 获取所有用户课程
""" """
crouse_list - [] crouse_list = []
try: try:
for i in col('user_crouse').find({'sid': sid}, {'_id': 0}): for i in col('user_crouse').find({'sid': sid}, {'_id': 0}):
crouse_list.append(i) crouse_list.append(i)

4
test/some.py Normal file
View File

@ -0,0 +1,4 @@
# Demonstrate list aliasing: `b` keeps a reference to the original list
# object, so rebinding `a` to an int afterwards does not affect what `b`
# prints — the output is still [1, 2, 3].
a = list(range(1, 4))
b = a
a = 1
print(b)