完成登录
This commit is contained in:
parent
d79dec478b
commit
6c6fd02657
@ -160,6 +160,7 @@ findLog '系统操作记录数据库查询失败', 306
|
||||
updateCookie 'cookie数据库更新失败', 400
|
||||
updateAvatar '头像数据库更新失败', 401
|
||||
updateBg '背景图数据库更新失败', 402
|
||||
updateLastUpdate '数据更新时间数据库更新失败', 403
|
||||
### delete 5
|
||||
deleteInvite '用户组邀请数据库删除失败', 500
|
||||
unbindUserGroup '用户与用户组解绑数据库删除失败', 501
|
||||
@ -177,4 +178,8 @@ groupDeleteCrouse '用户组课程数据库删除失败', 506
|
||||
512 用户组信息数据库查询失败
|
||||
513 请填写手机号
|
||||
514 教务返回的错误
|
||||
515 教务挂了
|
||||
516 用户所有课程数据库删除失败
|
||||
517 用户所有课程数据库插入失败
|
||||
518 用户课程数据库查询失败
|
||||
519 用户组课程数据库查询失败
|
@ -1,7 +1,9 @@
|
||||
from ..public.db import findUser
|
||||
from ..public.db import updateLastUpdate, insertUser, findUser, userDeleteAllCrouse, userInsertAllCrouse, findUserCrouse, findGroupCrouse
|
||||
from ..public.utils import checkData, signCode, findGroup
|
||||
from ..public.crawler import Crawler
|
||||
import json
|
||||
import time
|
||||
|
||||
|
||||
def check(request):
|
||||
"""
|
||||
@ -21,6 +23,7 @@ def check(request):
|
||||
print(e)
|
||||
return '数据不合法', 400
|
||||
|
||||
|
||||
def manageLogin(request):
|
||||
"""
|
||||
用户登录
|
||||
@ -40,21 +43,8 @@ def manageLogin(request):
|
||||
# 无用户进行注册
|
||||
if not user_info:
|
||||
return sign(data['cid'], data['pwd'])
|
||||
# 校验密码
|
||||
if user_info['pwd'] != signCode(data['pwd']):
|
||||
return '账号或密码错误', 511
|
||||
# 接下来需要返回组信息,课表信息以及成绩信息
|
||||
|
||||
# 组信息
|
||||
find_res = manageFindGroup(user_info['group_list'])
|
||||
if find_res[-1] != 200:
|
||||
return find_res
|
||||
user_info['group_list'] = find_res[0]
|
||||
|
||||
# 课表信息以及成绩信息
|
||||
crawler_res = manageCrawler(user_info['cid'], user_info['pwd'], user_info.get('phone'))
|
||||
if crawler_res[-1] != 200:
|
||||
return crawler_res
|
||||
# 存在用户进行登录
|
||||
return login(data, user_info)
|
||||
|
||||
|
||||
def manageFindGroup(group_list):
|
||||
@ -69,6 +59,7 @@ def manageFindGroup(group_list):
|
||||
list.append(find_res[0])
|
||||
return list, 200
|
||||
|
||||
|
||||
def manageCrawler(cid, pwd, phone):
|
||||
"""
|
||||
处理爬虫,返回课表和成绩
|
||||
@ -87,6 +78,7 @@ def manageCrawler(cid, pwd, phone):
|
||||
return get_res
|
||||
grade = get_res[0]
|
||||
return {
|
||||
'user_info': init_res[0]
|
||||
'grade': grade,
|
||||
'schedule': schedule,
|
||||
}, 200
|
||||
@ -94,8 +86,132 @@ def manageCrawler(cid, pwd, phone):
|
||||
print(e)
|
||||
return '教务挂了', 515
|
||||
|
||||
|
||||
def manageNewCrouse(sid, schedule):
    """
    Replace the user's stored courses with a freshly crawled schedule.

    Deletes every existing course row for `sid`, inserts the new
    `schedule`, then stamps the last-update time.

    Returns ('OK', 200) on success, or an (error message, code) tuple
    when one of the database steps fails.
    """
    # Wipe the old course rows first.
    if userDeleteAllCrouse(sid)[-1] != 200:
        return '用户所有课程数据库删除失败', 516
    # Insert the complete new schedule.
    if userInsertAllCrouse(schedule)[-1] != 200:
        return '用户所有课程数据库插入失败', 517
    # Record when the courses were last refreshed; the result is
    # deliberately not checked (best-effort, as in the original code).
    updateLastUpdate(sid, time.time())
    return 'OK', 200
|
||||
|
||||
|
||||
def getAllUserCrouse(group_id_list, sid):
    """
    Collect every course visible to a user: personal courses plus the
    courses of each group the user belongs to.

    Returns ({'group_crouse': {group_id: [...]}, 'user_crouse': [...]},
    200) on success, or an (error message, code) tuple on a database
    failure.
    """
    personal = findUserCrouse(sid)
    if personal[-1] != 200:
        return '用户课程数据库查询失败', 518

    by_group = {}
    for gid in group_id_list:
        found = findGroupCrouse(gid)
        if found[-1] != 200:
            return '用户组课程数据库查询失败', 519
        by_group[gid] = found[0]

    return {
        'group_crouse': by_group,
        'user_crouse': personal[0]
    }, 200
|
||||
|
||||
|
||||
def manageSign(cid, pwd, phone):
    """
    Register a new user.

    Crawls the academic system with the raw credentials, builds the
    initial user document, inserts it, stores the crawled schedule via
    manageNewCrouse, and returns the user info together with the merged
    schedule and grades.

    Returns an (error message, code) tuple on any failure, otherwise
    {'user_info': ..., 'schedule': ..., 'grade': ...} (no status code,
    matching `login`).
    """
    # Schedule and grade info. Fix: crawl with the raw parameters — the
    # original read `user_info['cid']` / `user_info['pwd']` before
    # `user_info` was ever assigned (NameError).
    crawler_res = manageCrawler(cid, pwd, phone)
    if crawler_res[-1] != 200:
        return crawler_res

    schedule = crawler_res[0]['schedule']
    grade = crawler_res[0]['grade']
    user_info = crawler_res[0]['user_info']
    user_info['pwd'] = signCode(pwd)
    user_info['invite_list'] = []
    user_info['setting'] = {
        'bg': ''
    }
    user_info['avatar'] = 'default'
    user_info['last_update'] = time.time()

    # Insert the new user.
    # NOTE(review): the insert result is not checked, matching the
    # original code — confirm insertUser's failure contract.
    ins_res = insertUser(user_info)

    # Drop fields that must not leak back to the client; '_id' is
    # presumably added to the dict by the insert — verify against
    # insertUser.
    user_info.pop('_id')
    user_info.pop('pwd')
    user_info['group_list'] = []

    # Persist the freshly crawled courses.
    ins_res = manageNewCrouse(user_info['sid'], schedule)
    if ins_res[-1] != 200:
        return ins_res

    # Fetch all of the user's courses. Fix: a brand-new user belongs to
    # no groups, so query with an empty list — the original referenced
    # an undefined `group_id_list` (NameError).
    find_res = getAllUserCrouse([], user_info['sid'])
    if find_res[-1] != 200:
        return find_res

    schedule = find_res[0]

    return {
        'user_info': user_info,
        'schedule': schedule,
        'grade': grade,
    }
|
||||
|
||||
|
||||
def login(data, user_info):
    """
    Handle the login flow for an existing user.

    Verifies the password, resolves the user's groups, crawls the
    current schedule and grades, persists the courses, and returns the
    merged view.

    Returns an (error message, code) tuple on failure, otherwise
    {'user_info': ..., 'schedule': ..., 'grade': ...}.
    """
    # Verify the password against the stored digest.
    if user_info['pwd'] != signCode(data['pwd']):
        return '账号或密码错误', 511
    # From here on: gather group info, the schedule and the grades.

    # Group info.
    group_id_list = user_info['group_list']
    find_res = manageFindGroup(group_id_list)
    if find_res[-1] != 200:
        return find_res
    user_info['group_list'] = find_res[0]

    # Schedule and grades. Fix: crawl with the plaintext password just
    # verified above — `user_info['pwd']` holds signCode(pwd) (a digest;
    # registration stores it that way after crawling with the raw pwd),
    # so the academic system cannot authenticate with it.
    crawler_res = manageCrawler(
        user_info['cid'], data['pwd'], user_info.get('phone'))
    if crawler_res[-1] != 200:
        return crawler_res

    schedule = crawler_res[0]['schedule']
    grade = crawler_res[0]['grade']

    # Persist the freshly crawled courses.
    ins_res = manageNewCrouse(user_info['sid'], schedule)
    if ins_res[-1] != 200:
        return ins_res

    # Read back the merged (group + personal) course view.
    find_res = getAllUserCrouse(group_id_list, user_info['sid'])
    if find_res[-1] != 200:
        return find_res

    schedule = find_res[0]

    # Never return the password digest to the client.
    user_info.pop('pwd')

    return {
        'user_info': user_info,
        'schedule': schedule,
        'grade': grade,
    }
|
||||
|
@ -4,21 +4,22 @@ import time
|
||||
# time_start=time.time()
|
||||
c = Crawler()
|
||||
userinfo = c.defaultInit('2019002380', '@yuning20010329', '15143211127')
|
||||
c.getOwnSchedule()
|
||||
print(userinfo[-1])
|
||||
print(c.getOwnSchedule())
|
||||
# time_end=time.time()
|
||||
# print('time cost',time_end-time_start,'s')
|
||||
userinfo = userinfo[0]
|
||||
# userinfo = userinfo[0]
|
||||
# print(userinfo)
|
||||
# print(c.getOwnSchedule())
|
||||
# print(c.getGrade())
|
||||
cookies = c.getCookie()
|
||||
cookies = cookies[0]
|
||||
print(cookies)
|
||||
print(str(cookies))
|
||||
cookies = str(cookies)
|
||||
cookies = eval(cookies)
|
||||
# cookies = c.getCookie()
|
||||
# cookies = cookies[0]
|
||||
# print(cookies)
|
||||
# print(str(cookies))
|
||||
# cookies = str(cookies)
|
||||
# cookies = eval(cookies)
|
||||
# time_start=time.time()
|
||||
print(c.cookieInit(cookies, userinfo['uid'], userinfo['cid'], userinfo['sid'], userinfo['real_name']))
|
||||
# print(c.cookieInit(cookies, userinfo['uid'], userinfo['cid'], userinfo['sid'], userinfo['real_name']))
|
||||
# time_end=time.time()
|
||||
# print('time cost',time_end-time_start,'s')
|
||||
# c = Crawler('2017002372', '623910ert&', '15143211127')
|
@ -1,6 +1,5 @@
|
||||
from pymongo import MongoClient
|
||||
from bson import ObjectId, json_util
|
||||
|
||||
# 主环境 (生产环境为production,开发环境为development)
|
||||
ENV = 'production'
|
||||
|
||||
@ -40,6 +39,15 @@ def findCookie():
|
||||
print(e)
|
||||
return 'cookie数据库查询失败', 300
|
||||
|
||||
def updateLastUpdate(sid, now):
    """
    Update the user's last course-refresh timestamp.

    Sets `last_update` to `now` on the user document matched by `sid`.

    Returns ('OK', 200) on success, or
    ('数据更新时间数据库更新失败', 403) on a database error.
    """
    try:
        # NOTE(review): pymongo's Collection.update is deprecated in
        # favour of update_one — kept as-is to match this module.
        col('user').update({'sid': sid}, {'$set': {'last_update': now}})
        # Fix: the original fell off the end and returned None on
        # success, breaking callers that use this module's
        # `res[-1] != 200` convention.
        return 'OK', 200
    except Exception as e:
        print(e)
        return '数据更新时间数据库更新失败', 403
|
||||
def insertUser(userinfo):
|
||||
""""
|
||||
插入新学生信息
|
||||
@ -330,7 +338,7 @@ def findUserCrouse(sid):
|
||||
"""
|
||||
获取所有用户课程
|
||||
"""
|
||||
crouse_list - []
|
||||
crouse_list = []
|
||||
try:
|
||||
for i in col('user_crouse').find({'sid': sid}, {'_id': 0}):
|
||||
crouse_list.append(i)
|
||||
|
4
test/some.py
Normal file
4
test/some.py
Normal file
@ -0,0 +1,4 @@
|
||||
# Demonstrates list aliasing: `b` keeps referencing the original list
# object even after `a` is rebound to a new value.
a = [1, 2, 3]
b = a      # b is an alias of the same list object
a = 1      # rebinds only the name `a`; the list is untouched
print(b)   # -> [1, 2, 3]
|
Loading…
x
Reference in New Issue
Block a user