修改数据库以及爬虫

This commit is contained in:
RainSun 2021-02-06 20:06:07 +08:00
parent cc33a6a8d5
commit 3c99c1bf78
6 changed files with 672 additions and 167 deletions

104
README.md
View File

@ -41,15 +41,16 @@ deactivate
510账号或密码错误
511请填写手机号
512教务挂了
513教务报错
200OK
/api/game/schedule/upload
400数据校验失败
401:排名表修改失败
510:排名表修改失败
/api/game/schedule/get
410: 数据校验失败
411:排名表获取失败
400: 数据校验失败
510:排名表获取失败
# game
* /api/game/schedule/upload
@ -60,4 +61,99 @@ deactivate
* sign
* /api/game/schedule/get
* data
* sign
* sign
# 新版改造
## 用户组功能
* [ ] 主进程挂一个我的账号用来进行课表的爬取
* [ ] 第一次登录要进行账号密码的校验,然后对称加密进数据库
* [ ] 每天0点开始刷新所有人的课表以及所有人的成绩
* [ ] 每个用户组的id是五位数字加字母的组合
* [ ] 每个用户组成员不设上限
* [ ] 提供每个用户组下的个人课表以及个人总体课表
* [ ] 允许设置组级别的课程(全组个人课表及组课表可见)
* [ ] 数据库按课程存储字段sid, real_name, course, weeks, weeks_split, teacher, room, is_personal, day, period
## 背景图片上传
* [ ] 上传图片id保存在数据库和个人信息一起
## 数据库字段
### config
* key
* value
### user
* invite_list 邀请内容
* cid 一卡通号
* sid 学号
* real_name 真实姓名
* pwd 加密后的密码
* setting 用户设置
* bg 课表背景图片
* avatar 头像
* last_update 最后课程更新时间
### group
* group_id 唯一标识
* group_name 用户组名称
* log_list 操作记录
* admin_list 管理列表
* creater_sid 创建者学号
* avatar 头像
### link
* cid 一卡通号
* group_id 用户组id
### user_crouse
* crouse_id 课程id
* sid 学号
* real_name 真实姓名
* crouse 课程名
* weeks 中文周数
* weeks_split 渲染用周数列表
* teacher 教师名
* room 教室名
* is_personal 是否是自定义课程
* day 星期
* period 上课时间
### group_crouse
* crouse_id 课程id
* group_id 用户组id
* sid 学号
* real_name 真实姓名
* crouse 课程名
* weeks 中文周数
* weeks_split 渲染用周数列表
* teacher 教师名
* room 教室名
* day 星期
* period 上课时间
## 数据库错误码
### OK 200
### insert 1
insertUser '学生信息数据库插入失败', 100
insertInvite '用户组邀请数据库插入失败', 101
bindUserGroup '用户与用户组绑定数据库插入失败', 102
insertGroup '用户组数据库插入失败', 103
addLog '操作记录数据库插入失败', 104
groupInsertAdmin '用户组侧加入管理数据库插入失败', 106
userInsertCrouse '用户自定义课程数据库插入失败', 107
userInsertAllCrouse '用户所有课程数据库插入失败', 108
insertRank '排名表数据库插入失败', 109
groupInsertCrouse '用户组课程数据库插入失败', 110
### find 3
findCookie 'cookie数据库查询失败', 300
findUser '学生信息数据库查询失败', 301
findGroup '用户组信息数据库查询失败', 302
findRank '排名表数据库查询失败', 303
findUserCrouse '用户课程数据库查询失败', 304
findGroupCrouse '用户组课程数据库查询失败', 305
### update 4
updateCookie 'cookie数据库更新失败', 400
updateAvatar '头像数据库更新失败', 401
updateBg '背景图数据库更新失败', 402
### delete 5
deleteInvite '用户组邀请数据库删除失败', 500
unbindUserGroup '用户与用户组解绑数据库删除失败', 501
userDeleteAllCrouse '用户所有课程数据库删除失败', 502
groupDeleteAdmin '用户组侧移除管理数据库删除失败', 503
deleteGroup '解散用户组数据库删除失败', 504
userDeleteCrouse '用户自定义课程数据库删除失败', 505
groupDeleteCrouse '用户组课程数据库删除失败', 506

View File

@ -40,7 +40,7 @@ def manageScheduleUpload(request):
add_res = addRank( data_cache['nick'], data_cache['count'], data_cache['time'])
return add_res
else:
return {'errcode': 400, 'errmsg': '数据校验失败'}
return '数据校验失败', 400
# 处理获取课表游戏排名信息
def manageScheduleGet(request):
@ -54,7 +54,7 @@ def manageScheduleGet(request):
get_res = getRank()
return get_res
else:
return {'errcode': 400, 'errmsg': '数据校验失败'}
return '数据校验失败', 400
# 工具函数

View File

@ -5,31 +5,39 @@ import base64
from bs4 import BeautifulSoup
import random
import sys
from utils import btoa, signCode
class Crawler(object):
def __init__(self, username, password, phone):
self.__username = username
self.__password = password
self.__phone = phone
def __init__(self):
self.__session = None
self.__student_id = None
self.__student_name = None
self.__grade_data = ''
self.__schedule_data = ''
self.__pwd = None
self.__phone = None
self.cid = None
self.sid = None
self.uid = None
self.real_name = None
# Bundle the logged-in user's basic account info
def getUserInfo(self):
    """Return the crawled user's identifiers for storage.

    Returns:
        dict with cid (campus card number), pwd (password signed via
        utils.signCode — MD5 hex, per that helper), sid (student number),
        uid (jiaowu-internal student id), real_name.
    """
    return {
        'cid': self.cid,
        'pwd': signCode(self.__pwd),
        'sid': self.sid,
        'uid': self.uid,
        'real_name': self.real_name,
    }
# 链接教务 -----------------------------------------------------------------------------
def connection(self):
try:
self.__session = requests.Session()
# 获取统一身份系统的网页
r = self.__session.get(
url='https://mysso.cust.edu.cn/cas/login?service=https://jwgls1.cust.edu.cn/welcome')
soup = BeautifulSoup(r.text, 'html.parser')
execution = soup.find_all(name='input')[6]['value']
formdata = {
'username': self.__username,
'password': self.__password,
'username': self.cid,
'password': self.__pwd,
'execution': execution,
'_eventId': 'submit',
'geolocation': ''
@ -40,7 +48,7 @@ class Crawler(object):
flag = soup.find(name='title')
if(flag.text == "手机号设置"):
if self.__phone == '':
return ('请填写手机号', 511)
return '请填写手机号', 511
execution = soup.find_all(name='input')[1]['value']
formdata = {
'phone': self.__phone,
@ -57,25 +65,29 @@ class Crawler(object):
if soup.findAll(name='a')[4]['href'] != 'logout':
raise('账号或密码错误')
except:
return ('账号或者密码错误', 510)
r = self.__session.get(url='https://mysso.cust.edu.cn/cas/login?service=https://jwgls1.cust.edu.cn/welcome', allow_redirects=False)
return '账号或者密码错误', 510
r = self.__session.get(
url='https://mysso.cust.edu.cn/cas/login?service=https://jwgls1.cust.edu.cn/welcome', allow_redirects=False)
ticket = r.headers['Location'][42:]
asp_net_sessionid_param = {'Ticket': ticket, 'Url': 'https://jwgls1.cust.edu.cn/welcome'}
asp_net_sessionid_param = {
'Ticket': ticket, 'Url': 'https://jwgls1.cust.edu.cn/welcome'}
asp_net_sessionid_param = base64.b64encode(
quote(json.dumps(asp_net_sessionid_param)).encode('utf-8')).decode('utf-8')
asp_net_sessionid_param = {'param': asp_net_sessionid_param}
headers = {'Content-Type': 'application/json'}
r = self.__session.post(url='https://jwgls1.cust.edu.cn/api/LoginApi/LGSSOLocalLogin?sf_request_type=ajax', data=json.dumps(asp_net_sessionid_param), headers=headers)
r = self.__session.post(url='https://jwgls1.cust.edu.cn/api/LoginApi/LGSSOLocalLogin?sf_request_type=ajax',
data=json.dumps(asp_net_sessionid_param), headers=headers)
data = json.loads(r.content.decode('utf-8'))
# 提示未建立教务信息
if data['state'] == 1:
return (data['message'], 513)
self.__student_name = data['data']['StudentDto']['XM']
self.__student_id = data['data']['StudentDto']['XH']
return ('ok', 200)
return data['message'], 513
self.real_name = data['data']['StudentDto']['XM']
self.sid = data['data']['StudentDto']['XH']
self.uid = data['data']['StudentDto']['SMXSJBXXID']
return self.getUserInfo(), 200
except Exception as e:
print(e)
return ('教务挂了', 512)
return '教务挂了', 512
# 获取成绩 -----------------------------------------------------------------------------
def getGrade(self):
@ -88,7 +100,7 @@ class Crawler(object):
)
data = json.loads(r.content.decode('utf-8'))
if data['state'] != 0:
return ('教务挂了', 512)
return '教务挂了', 512
# 分解数据并重命名
total = data['data']['GradeStatistics']
split = data['data']['GradeList']
@ -222,113 +234,142 @@ class Crawler(object):
})
total_grade['total_bixiu_GPA'] = total_bixiu_c_x_g / \
total_bixiu_credit
# 合并数据
self.__grade_data = {
'total': total_grade,
'split': grade_list
}
return ('ok', 200)
return {
'total_grade': total_grade,
'grade_list': grade_list
}, 200
# 获取课表 -----------------------------------------------------------------------------
def getSchedule(self):
# 获取当前周数
def getCurWeek(self):
headers = {'Content-Type': 'application/json'}
r = self.__session.post(
url='https://jwgls1.cust.edu.cn/api/ClientStudent/Home/StudentHomeApi/GetHomeCurWeekTime?sf_request_type=ajax',
data=json.dumps({"param": "JTdCJTdE", "__permission": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E",
"Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}),
"Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}),
headers=headers
)
CurWeek = json.loads(r.content.decode('utf-8'))['data']['CurWeek']
return json.loads(
r.content.decode('utf-8'))['data']['CurWeek'], 200
# Normalize the raw timetable JSON into flat course records
def manageSchedule(self, data):
    """Flatten jiaowu's AdjustDays timetable JSON into course dicts.

    Args:
        data: decoded JSON response of a schedule-query API call.

    Returns:
        (lessons, 200) where each lesson dict carries sid, real_name,
        course, weeks, weeks_split, teacher, room, is_personal, day,
        period — matching the user_crouse collection schema.
    """
    time = ['AM__TimePieces', 'PM__TimePieces', 'EV__TimePieces']
    data = data['data']['AdjustDays']
    lessons = []
    for i in range(7):  # day of week, 0-based
        for j in range(3):  # morning / afternoon / evening segment
            for k in range(2):  # two slots per segment
                if(data[i][time[j]][k]['Dtos']):
                    for l in data[i][time[j]][k]['Dtos']:
                        temp_lesson = {
                            'sid': self.sid,
                            'real_name': self.real_name,
                            'is_personal': False,
                            'day': i,
                            'period': j*2+k,
                            'is_groups_course': False,
                        }
                        # week-number bitmap; index is the 1-based week
                        weeks_split = [0] * 23
                        mod = ''
                        # map jiaowu's field names onto our schema keys
                        for m in l['Content']:
                            key = m['Key']
                            if m['Key'] == 'Teacher':
                                key = 'teacher'
                            elif m['Key'] == 'Lesson':
                                key = 'course'
                            elif m['Key'] == 'Room':
                                key = 'room'
                            elif m['Key'] == 'Time':
                                key = 'weeks'
                            if temp_lesson.get(key):
                                temp_lesson[key] += ','+m['Name']
                            else:
                                temp_lesson[key] = m['Name']
                        temp_weeks = temp_lesson['weeks']
                        # NOTE(review): find('') always returns 0 — the
                        # search string looks like a non-ASCII character
                        # (likely '周') lost in transit; confirm against
                        # the original source before relying on this.
                        temp_lesson['weeks'] = temp_weeks[0:int(
                            temp_weeks.find('') + 1)]
                        if '单周' in temp_weeks:
                            mod = 'single'  # odd weeks only
                        elif '双周' in temp_weeks:
                            mod = 'double'  # even weeks only
                        else:
                            mod = 'all'
                        zhou_pos = temp_weeks.find('')
                        temp_weeks = temp_weeks[0:zhou_pos]
                        temp_weeks = temp_weeks.split(',')
                        index = 0
                        for n in temp_weeks:
                            temp_weeks[index] = n.split('-')
                            index += 1
                        index = 0
                        for n in temp_weeks:
                            if len(n) > 1:
                                # range like "3-9": expand, honoring parity
                                for o in range(int(n[0]), int(n[1]) + 1):
                                    if (o % 2 == 0 and mod == 'double') or (o % 2 == 1 and mod == 'single') or (mod == 'all'):
                                        weeks_split[o] = 1
                                    else:
                                        weeks_split[o] = 0
                            else:
                                # single week like "5"
                                weeks_split[int(n[0])] = 1
                            index += 1
                        temp_lesson['weeks_split'] = weeks_split
                        lessons.append(temp_lesson)
    return lessons, 200
# 获取个人课表
def getOwnSchedule(self):
headers = {'Content-Type': 'application/json'}
r = self.__session.post(
url='https://jwgls1.cust.edu.cn/api/ClientStudent/Home/StudentHomeApi/QueryStudentScheduleData?sf_request_type=ajax',
data=json.dumps({"param": "JTdCJTdE", "__permission": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E",
"Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}),
"Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}),
headers=headers
)
data = json.loads(r.content.decode('utf-8'))
if data['state'] != 0:
return ('教务挂了', 512)
time = ['AM__TimePieces', 'PM__TimePieces', 'EV__TimePieces']
data = data['data']['AdjustDays']
days_per_week = [0] * 23
lesson = [[0] * 6 for _ in range(7)]
lesson_set = {}
color_set = [0] * 9
color_used = 9
for i in range(7):
for j in range(3):
for k in range(2):
if(data[i][time[j]][k]['Dtos']):
lesson[i][j*2+k] = []
for l in data[i][time[j]][k]['Dtos']:
temp_lesson = {}
Time = [0] * 23
mod = ''
for m in l['Content']:
if temp_lesson.get(m['Key']):
temp_lesson[m['Key']] += ','+m['Name']
else:
temp_lesson[m['Key']] = m['Name']
if lesson_set.get(l['Content'][0]['Name']):
temp_lesson['color'] = lesson_set[l['Content'][0]['Name']]
else:
color = random.randint(0, 8)
while color_set[color]:
if color_used <= 0:
break
color = random.randint(0, 8)
temp_lesson['color'] = color
lesson_set[l['Content'][0]['Name']] = color
color_used -= 1
color_set[color] = 1
temp_Time = temp_lesson['Time']
temp_lesson['Time'] = temp_Time[0:int(
temp_Time.find('') + 1)]
if '单周' in temp_Time:
mod = 'single'
# temp_Time = temp_Time[0:len(temp_Time)-5]
elif '双周' in temp_Time:
mod = 'double'
# temp_Time = temp_Time[0:len(temp_Time)-5]
else:
mod = 'all'
# temp_Time = temp_Time[0:-1]
zhou_pos = temp_Time.find('')
temp_Time = temp_Time[0:zhou_pos]
temp_Time = temp_Time.split(',')
index = 0
for n in temp_Time:
temp_Time[index] = n.split('-')
index += 1
index = 0
for n in temp_Time:
if len(n) > 1:
for o in range(int(n[0]), int(n[1]) + 1):
if (o % 2 == 0 and mod == 'double') or (o % 2 == 1 and mod == 'single') or (mod == 'all'):
days_per_week[o] = max(
days_per_week[o], i+1)
Time[o] = 1
else:
Time[o] = 0
else:
days_per_week[int(n[0])] = max(
days_per_week[int(n[0])], i+1)
Time[int(n[0])] = 1
index += 1
temp_lesson['Time_split'] = Time
lesson[i][j*2+k].append(temp_lesson)
self.__schedule_data = {'lesson': lesson,
'days_per_week': days_per_week, 'cur_week': CurWeek}
return ('ok', 200)
return self.manageSchedule(data)
# 获取信息 -----------------------------------------------------------------------------
def getData(self):
return (
{
'student_id': self.__student_id,
'student_name': self.__student_name,
'grade': self.__grade_data,
'schedule': self.__schedule_data
},
200
# Fetch a user's timetable via the occupancy-query API
def getOtherschedule(self):
    """Query the schedule of the student identified by self.uid.

    Requires a live session (cookies already set); used by cookieInit
    for fast refreshes through the crawler account.

    Returns:
        manageSchedule's (lessons, 200) on success,
        ('教务挂了', 512) when the backend reports a non-zero state.
    """
    headers = {'Content-Type': 'application/json'}
    # NOTE(review): term "20202" is hard-coded — confirm it should not
    # be derived from the current date or configuration.
    params = {"KBLX":"2","CXLX":"0","XNXQ":"20202","CXID":self.uid,"CXZC":"0","JXBLX":""}
    # btoa returns bytes; str(...)[2:-1] strips the b'...' wrapper
    params = str(btoa(json.dumps(params)))[2:-1]
    r = self.__session.post(
        url='https://jwgls1.cust.edu.cn/api/ClientStudent/QueryService/OccupyQueryApi/QueryScheduleData?sf_request_type=ajax',
        data=json.dumps({"param": params, "__permission": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E",
                         "Operation": 0}, "__log": {"MenuID": "F71C97D5-D3E2-4FDA-9209-D7FA8626390E", "Logtype": 6, "Context": "查询"}}),
        headers=headers
    )
    data = json.loads(r.content.decode('utf-8'))
    if data['state'] != 0:
        return ('教务挂了', 512)
    return self.manageSchedule(data)
# Export current session cookies
def getCookie(self):
    """Return the session's cookies as (name, value) pairs with code 200."""
    return self.__session.cookies.items(), 200
# Import cookies into the current session
def setCookie(self, cookies):
    """Load (name, value) cookie pairs into the requests session.

    Returns ('OK', 200); accepts anything dict() can consume.
    """
    requests.utils.add_dict_to_cookiejar(
        self.__session.cookies, dict(cookies))
    return 'OK', 200
# Default initialisation: store credentials and log in
def defaultInit(self, cid, pwd, phone):
    """Initialise with card number / password / phone, then connect.

    Returns:
        connection()'s result — (userinfo, 200) on success or an
        (error message, error code) tuple on failure.
    """
    self.cid = cid
    self.__pwd = pwd
    self.__phone = phone
    self.__session = requests.Session()
    return self.connection()
# Initialise from saved cookies for a fast timetable refresh
def cookieInit(self, cookies, uid, cid, sid, real_name):
    """Skip the login flow by reusing a saved session's cookies.

    Args mirror the identifiers stored by getUserInfo/getCookie.

    Returns:
        getOtherschedule()'s result.
    """
    self.cid = cid
    self.sid = sid
    self.uid = uid
    self.real_name = real_name
    self.__session = requests.Session()
    self.setCookie(cookies)
    return self.getOtherschedule()

View File

@ -1,25 +1,42 @@
import unittest
from crawler import Crawler
import time
# time_start=time.time()
c = Crawler()
userinfo = c.defaultInit('2019002380', '@yuning20010329', '15143211127')
c.getOwnSchedule()
# time_end=time.time()
# print('time cost',time_end-time_start,'s')
userinfo = userinfo[0]
# print(userinfo)
# print(c.getOwnSchedule())
# print(c.getGrade())
cookies = c.getCookie()
cookies = cookies[0]
print(cookies)
print(str(cookies))
cookies = str(cookies)
cookies = eval(cookies)
# time_start=time.time()
print(c.cookieInit(cookies, userinfo['uid'], userinfo['cid'], userinfo['sid'], userinfo['real_name']))
# time_end=time.time()
# print('time cost',time_end-time_start,'s')
# c = Crawler('2017002372', '623910ert&', '15143211127')
# c = Crawler('2019002380', '@yuning20010329', '15143211127')
# c.connection()
# c.getOtherschedule("dd709e77-34f8-43f7-8efa-0838fd138430")
# class TestCrawler(unittest.TestCase):
# # 测试链接
# def test_connection(self):
# self.assertEqual(c.connection(), ('ok', 200))
c = Crawler('2017002372', '623910ert&', '15143211127')
# #测试获取成绩
# def test_grade(self):
# self.assertEqual(c.getGrade(), ('ok', 200))
class TestCrawler(unittest.TestCase):
# 测试链接
def test_connection(self):
self.assertEqual(c.connection(), ('ok', 200))
# #测试获取课表
# def test_schedule(self):
# self.assertEqual(c.getSchedule(), ('ok', 200))
#测试获取成绩
def test_grade(self):
self.assertEqual(c.getGrade(), ('ok', 200))
#测试获取课表
def test_schedule(self):
self.assertEqual(c.getSchedule(), ('ok', 200))
#测试返回信息
def test_getData(self):
get_res = c.getData()
self.assertEqual(get_res[1], 200)
if __name__ == '__main__':
unittest.main()
# if __name__ == '__main__':
# unittest.main()

373
lib/db.py
View File

@ -2,42 +2,317 @@ from pymongo import MongoClient
from bson import ObjectId, json_util
# 主环境 (生产环境为production开发环境为development)
setting = 'production'
env = 'ali'
# 获取数据集
ENV = 'production'
def col(arg):
if env == 'coc':
conn = MongoClient('mongodb://coc:qlSfefSor5@0.0.0.0:12236/coc')
else:
conn = MongoClient('mongodb://cherry:fR1jW2xG3bE9@mongo:27017/cherry')
if setting == 'development':
"""
获取数据集
"""
# 链接数据库
conn = MongoClient('mongodb://cherry:fR1jW2xG3bE9@39.96.28.83:27017/cherry')
# 判断环境
if ENV == 'development':
arg += '_test'
if arg == 'rank':
return conn[env].rank
elif arg == 'rank_test':
return conn[env].rank_test
else:
return False
return conn.cherry[arg]
# 向排名表里增加或覆写数据
def updateCookie(new_cookie):
    """Persist the crawler session cookie in the config collection.

    Args:
        new_cookie: cookie jar items (list of (name, value) tuples);
            stored stringified and parsed back by findCookie.

    Returns:
        ('OK', 200) on success, ('cookie数据库更新失败', 400) on failure.
    """
    # Store the stringified form; findCookie re-parses it.
    new_cookie = str(new_cookie)
    try:
        # Upsert so the very first write succeeds even when the
        # config key does not exist yet.
        col('config').update_one({'key': 'cookie'},
                                 {'$set': {'value': new_cookie}},
                                 upsert=True)
        # BUG FIX: the success path previously fell through and
        # returned None; every sibling returns a (msg, code) tuple.
        return 'OK', 200
    except Exception as e:
        print(e)
        return 'cookie数据库更新失败', 400
def findCookie():
    """Load the stored crawler session cookie.

    Returns:
        (cookie_items, 200) on success,
        ('cookie数据库查询失败', 300) on failure.
    """
    try:
        res = col('config').find_one({'key': 'cookie'}, {'_id': 0})
        value = res['value']
        # SECURITY FIX: use literal_eval instead of eval — the value
        # comes back from the database and must only be parsed as a
        # Python literal, never executed as arbitrary code.
        from ast import literal_eval
        cookie = literal_eval(value)
        return cookie, 200
    except Exception as e:
        print(e)
        return 'cookie数据库查询失败', 300
def addRank(nick, count, time):
def insertUser(userinfo):
    """Insert one new student record into the user collection.

    Returns ('OK', 200) or ('学生信息数据库插入失败', 100).
    """
    try:
        col('user').insert_one(userinfo)
    except Exception as err:
        print(err)
        return '学生信息数据库插入失败', 100
    return 'OK', 200
def findUser(cid):
    """Fetch one student's profile joined with their group memberships.

    The $lookup embeds matching `link` rows as `group_list` (each
    carrying a group_id); callers then resolve each group's details
    separately.

    Args:
        cid: campus card number.

    Returns:
        (aggregation cursor, 200) on success,
        ('学生信息数据库查询失败', 301) on failure.
    """
    try:
        userinfo = col('user').aggregate([
            {
                '$match': {
                    'cid': cid
                }
            },
            {
                # join link rows on the student number
                '$lookup': {
                    'from': 'link',
                    'localField': 'sid',
                    'foreignField': 'sid',
                    'as': 'group_list'
                }
            },
            {
                # strip Mongo ids and the redundant sid copies
                '$project': {
                    '_id': 0,
                    'group_list._id': 0,
                    'group_list.sid': 0
                }
            }
        ])
        return userinfo, 200
    except Exception as e:
        print(e)
        return '学生信息数据库查询失败', 301
def insertInvite(cid, group_id):
    """Append a group invitation onto the user's invite_list.

    Repeated invitations may produce duplicates; the frontend
    de-duplicates with a Set.

    Returns ('OK', 200) or ('用户组邀请数据库插入失败', 101).
    """
    try:
        col('user').update({'cid': cid},
                           {'$push': {'invite_list': group_id}})
    except Exception as err:
        print(err)
        return '用户组邀请数据库插入失败', 101
    return 'OK', 200
def deleteInvite(cid, group_id):
    """Remove a pending group invitation (by the user or an admin).

    Returns ('OK', 200) or ('用户组邀请数据库删除失败', 500).
    """
    try:
        # BUG FIX: insertInvite pushes plain group_id values onto
        # 'invite_list'; the original pulled {'group_id': ...} from a
        # non-existent 'invite' field, so invitations were never
        # actually removed. $pull of the scalar also clears duplicates
        # left by repeated invitations.
        col('user').update({'cid': cid},
                           {'$pull': {'invite_list': group_id}})
        return 'OK', 200
    except Exception as e:
        print(e)
        return '用户组邀请数据库删除失败', 500
def bindUserGroup(sid, group_id):
    """Create the membership link between a student and a group.

    Returns ('OK', 200) or ('用户与用户组绑定数据库插入失败', 102).
    """
    link_doc = {'sid': sid, 'group_id': group_id}
    try:
        col('link').insert_one(link_doc)
    except Exception as err:
        print(err)
        return '用户与用户组绑定数据库插入失败', 102
    return 'OK', 200
def unbindUserGroup(sid, group_id):
    """Drop the membership link between a student and a group.

    Returns ('OK', 200) or ('用户与用户组解绑数据库删除失败', 501).
    """
    try:
        col('link').remove({'group_id': group_id, 'sid': sid})
    except Exception as err:
        print(err)
        return '用户与用户组解绑数据库删除失败', 501
    return 'OK', 200
def updateAvatar(cid, img_id):
    """Point the user's avatar at a stored image id.

    Returns ('OK', 200) or ('头像数据库更新失败', 401).
    """
    try:
        col('user').update({'cid': cid}, {'$set': {'avatar': img_id}})
    except Exception as err:
        print(err)
        return '头像数据库更新失败', 401
    return 'OK', 200
def updateBg(cid, img_id):
    """Set the user's timetable background image.

    Returns ('OK', 200) or ('背景图数据库更新失败', 402).
    """
    try:
        # BUG FIX: the filter must be a dict — the original
        # `update('cid': cid, ...)` was a SyntaxError and the module
        # would not even import.
        col('user').update({'cid': cid}, {'$set': {'setting.bg': img_id}})
        return 'OK', 200
    except Exception as e:
        print(e)
        return '背景图数据库更新失败', 402
def insertGroup(group_info):
    """Create a new user-group document.

    Returns ('OK', 200) or ('用户组数据库插入失败', 103).
    """
    try:
        col('group').insert_one(group_info)
    except Exception as err:
        print(err)
        return '用户组数据库插入失败', 103
    return 'OK', 200
def findGroup(group_id):
    """Fetch a group's info together with its member users.

    Args:
        group_id: the group's unique identifier.

    Returns:
        (aggregation cursor, 200) on success,
        ('用户组信息数据库查询失败', 302) on failure.
    """
    try:
        groupinfo = col('group').aggregate([
            {
                '$match': {
                    'group_id': group_id
                }
            },
            {
                # membership links for this group
                '$lookup': {
                    'from': 'link',
                    'localField': 'group_id',
                    'foreignField': 'group_id',
                    'as': 'user_list'
                }
            },
            {
                # NOTE(review): this second $lookup joins `user` on the
                # group document's own 'cid' field and overwrites the
                # user_list produced above — it likely should join via
                # the link rows' sid instead; confirm intent.
                '$lookup': {
                    'from': 'user',
                    'localField': 'cid',
                    'foreignField': 'cid',
                    'as': 'user_list'
                }
            },
            {
                # hide Mongo ids and the embedded users' private fields
                '$project': {
                    '_id': 0,
                    'user_list._id': 0,
                    'user_list.invite_list': 0,
                    'user_list.cid':0,
                    'user_list.pwd':0,
                    'user_list.setting':0,
                }
            }
        ])
        return groupinfo, 200
    except Exception as e:
        print(e)
        return '用户组信息数据库查询失败', 302
def deleteGroup(group_id):
    """Dissolve a user group by removing its document.

    Returns ('OK', 200) or ('解散用户组数据库删除失败', 504).
    """
    try:
        col('group').remove({'group_id': group_id})
    except Exception as err:
        print(err)
        return '解散用户组数据库删除失败', 504
    return 'OK', 200
def addLog(group_id, log):
    """Append one operation record to a group's log_list.

    Returns ('OK', 200) or ('操作记录数据库插入失败', 104).
    """
    try:
        col('group').update({'group_id': group_id},
                            {'$push': {'log_list': log}})
    except Exception as err:
        print(err)
        return '操作记录数据库插入失败', 104
    return 'OK', 200
def groupInsertAdmin(cid, group_id):
    """Add a user to a group's admin_list (group side).

    Returns ('OK', 200) or ('用户组侧加入管理数据库插入失败', 106).
    """
    try:
        col('group').update({'group_id': group_id},
                            {'$push': {'admin_list': cid}})
    except Exception as err:
        print(err)
        return '用户组侧加入管理数据库插入失败', 106
    return 'OK', 200
def groupDeleteAdmin(cid, group_id):
    """Remove a user from a group's admin_list (group side).

    Returns ('OK', 200) or ('用户组侧移除管理数据库删除失败', 503).
    """
    try:
        col('group').update({'group_id': group_id},
                            {'$pull': {'admin_list': cid}})
    except Exception as err:
        print(err)
        return '用户组侧移除管理数据库删除失败', 503
    return 'OK', 200
def userInsertCrouse(crouse):
    """Insert one user-defined (custom) course document.

    Returns ('OK', 200) or ('用户自定义课程数据库插入失败', 107).
    """
    try:
        col('user_crouse').insert_one(crouse)
    except Exception as err:
        print(err)
        return '用户自定义课程数据库插入失败', 107
    return 'OK', 200
def userDeleteCrouse(crouse_id):
    """Delete one user-defined (custom) course.

    The is_personal guard exists so a tampered frontend cannot delete
    crawled (non-custom) courses.

    Returns ('OK', 200) or ('用户自定义课程数据库删除失败', 505).
    """
    try:
        # BUG FIX: the guard must require is_personal=True so that only
        # custom courses are deletable; the original checked False,
        # which allowed exactly the deletion the comment said it
        # prevents (and made real custom courses undeletable).
        col('user_crouse').remove({'crouse_id': crouse_id,
                                   'is_personal': True})
        return 'OK', 200
    except Exception as e:
        print(e)
        return '用户自定义课程数据库删除失败', 505
def userDeleteAllCrouse(sid):
    """Remove all crawled courses for one student (timetable refresh).

    Custom (is_personal) courses are deliberately kept.

    Returns ('OK', 200) or ('用户所有课程数据库删除失败', 502).
    """
    try:
        col('user_crouse').remove({'is_personal': False, 'sid': sid})
    except Exception as err:
        print(err)
        return '用户所有课程数据库删除失败', 502
    return 'OK', 200
def userInsertAllCrouse(crouses):
    """Bulk-insert a student's course list (timetable refresh).

    Returns ('OK', 200) or ('用户所有课程数据库插入失败', 108).
    """
    try:
        col('user_crouse').insert_many(crouses)
    except Exception as err:
        print(err)
        return '用户所有课程数据库插入失败', 108
    return 'OK', 200
def insertRank(nick, count, time):
    """Insert or overwrite one row of the game ranking table.

    Keyed by nick — the only identifier this function receives; upsert
    gives a new player a row and overwrites an existing player's score.

    Returns ('OK', 200) or ('排名表数据库插入失败', 109).
    """
    try:
        # BUG FIX: the original filtered on an undefined name `cid`,
        # raising NameError on every call; key on the nick parameter
        # and pass upsert as a real keyword (the old third positional
        # dict only worked by being truthy).
        col('rank').update({'nick': nick},
                           {'$set': {'count': count, 'time': time}},
                           upsert=True)
        return 'OK', 200
    except Exception as e:
        print(e)
        return '排名表数据库插入失败', 109
# 获取排名表所有信息除了id
def getRank():
def findRank():
"""
获取所有排名信息
"""
time_rank = []
count_rank = []
try:
@ -45,7 +320,55 @@ def getRank():
time_rank.append(i)
for i in col('rank').find({}, {"_id": 0}).sort([("count", 1), ("time", 1)]).limit(10):
count_rank.append(i)
return {'time_rank': time_rank, 'count_rank': count_rank}, 200
except Exception as e:
print(e)
return {'errcode': 411, 'errmsg': '排名表获取失败'}
return {'errcode': 200, 'time_rank': time_rank, 'count_rank': count_rank, 'errmsg': 'ok'}
return '排名表数据库查询失败', 303
def findUserCrouse(sid):
    """Fetch every course belonging to one student.

    Returns:
        (course_list, 200) on success,
        ('用户课程数据库查询失败', 304) on failure.
    """
    # BUG FIX: was `crouse_list - []`, a typo'd subtraction that made
    # this a NameError/SyntaxError trap.
    crouse_list = []
    try:
        for i in col('user_crouse').find({'sid': sid}, {'_id': 0}):
            crouse_list.append(i)
        return crouse_list, 200
    except Exception as e:
        print(e)
        # BUG FIX: was misspelled `reutrn` (SyntaxError)
        return '用户课程数据库查询失败', 304
def groupInsertCrouse(crouse):
    """Insert one group-level course document.

    Returns ('OK', 200) or ('用户组课程数据库插入失败', 110).
    """
    try:
        col('group_crouse').insert_one(crouse)
    except Exception as err:
        print(err)
        return '用户组课程数据库插入失败', 110
    return 'OK', 200
def groupDeleteCrouse(crouse_id):
    """Delete one group-level course by its course id.

    Returns ('OK', 200) or ('用户组课程数据库删除失败', 506).
    """
    try:
        col('group_crouse').remove({'crouse_id': crouse_id})
    except Exception as err:
        print(err)
        return '用户组课程数据库删除失败', 506
    return 'OK', 200
def findGroupCrouse(group_id):
    """Fetch every course attached to one user group.

    Returns:
        (course_list, 200) on success,
        ('用户组课程数据库查询失败', 305) on failure.
    """
    crouse_list = []
    try:
        # CONSISTENCY FIX: exclude _id like findUserCrouse does —
        # ObjectId values are not JSON-serializable and never leave
        # the API layer.
        for i in col('group_crouse').find({'group_id': group_id},
                                          {'_id': 0}):
            crouse_list.append(i)
        return crouse_list, 200
    except Exception as e:
        print(e)
        return '用户组课程数据库查询失败', 305

28
lib/utils.py Normal file
View File

@ -0,0 +1,28 @@
from urllib.request import quote, unquote
import base64
import json
from hashlib import md5
def btoa(content):
    """URL-quote *content*, then Base64-encode it.

    Mirrors the browser's btoa(encodeURIComponent(...)) pipeline used
    by the jiaowu API; returns bytes.
    """
    quoted = quote(content).encode()
    return base64.b64encode(quoted)
def atob(content):
    """Base64-decode *content*, then URL-unquote — the inverse of btoa."""
    decoded = base64.b64decode(content).decode()
    return unquote(decoded)
def signCode(code):
    """Sign *code*: stringify, strip spaces, return uppercase MD5 hex.

    NOTE(review): unsalted MD5 is not a safe way to protect passwords —
    flagged rather than changed, since stored values depend on this
    exact digest.
    """
    normalized = str(code).replace(' ', '')
    digest = md5(normalized.encode('utf-8'))
    return digest.hexdigest().upper()