cleaning up

parent 37e8007035
commit 71031e12d5

pipelines.py (15 lines changed)
@@ -1,6 +1,3 @@
-# This Python file uses the following encoding: utf-8
-
-#from __future__ import print_function
 from time import strptime
 from bs4 import BeautifulSoup as bs
 from util import UnicodeDictReader
@@ -82,13 +79,6 @@ local_data_folder = 'cache/canvas_data/'
 mylog = codecs.open(local_data_folder + 'temp_log.txt','w')
-
-
-
-
-
-
-


 gp = {}
 gp['ACCT'] = 'info'
 gp['AE'] = 'skill'
@@ -425,7 +415,6 @@ def getSemesterSchedule(short='sp21'): # I used to be current_sch
 #print schedule.columns
 return schedule
-

 def get_enrlmts_for_user(user,enrollments):
 #active enrollments
 u_en = enrollments[ lambda x: (x['user_id'] == user) & (x['workflow']=='active') ]
@@ -600,9 +589,6 @@ def row_has_data(r): # helper
 return True
 return False

-#dbg = open('cache/temp_scheddebug_' + 'sp20' + '.txt','w')
-
-
 def row_text(r): # helper
 #global dbg

@@ -1079,6 +1065,7 @@ def recent_schedules():
 # Take the generically named rosters uploads files and move them to a semester folder and give them a date.
 def move_to_folder(sem,year,folder):
 semester = year+sem
+semester_path = 'cache/rosters/%s' % semester
 if not os.path.isdir('cache/rosters/'+semester):
 os.makedirs('cache/rosters/'+semester)
 now = datetime.datetime.now().strftime('%Y-%m-%dT%H-%M')
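Note: the move_to_folder hunk above adds a semester_path variable next to code that still builds the same path by concatenation. A minimal, hypothetical sketch of the pattern that hunk implements — the 'cache/rosters/<semester>' layout and the timestamp format are taken from the diff; the helper name and return value are invented for illustration:

import os
import datetime

def ensure_semester_folder(sem, year, base='cache/rosters'):
    """Hypothetical helper mirroring the move_to_folder hunk above:
    build cache/rosters/<year><sem>, create it if missing, and return
    a timestamp suitable for renaming the uploaded roster files."""
    semester = year + sem                        # e.g. '2021' + 'sp' -> '2021sp'
    semester_path = '%s/%s' % (base, semester)   # same layout as the diff
    if not os.path.isdir(semester_path):
        os.makedirs(semester_path)
    stamp = datetime.datetime.now().strftime('%Y-%m-%dT%H-%M')
    return semester_path, stamp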
users.py (37 lines changed)
@@ -1,5 +1,5 @@

-import json, codecs, requests, re, pdb, csv, textdistance
+import json, codecs, requests, re, pdb, csv, textdistance, collections
 import sys, csv, string, funcy, math, shutil, imghdr, os
 import pytz, time
 import pandas as pd
@@ -14,7 +14,7 @@ from localcache import teachers_courses_semester
 from util import dept_from_name, most_common_item
 from os.path import exists, getmtime

-#from localcache import users_file, com_channel_dim
+from canvas_secrets import url

 from dateutil import parser
 from datetime import datetime as dt
@@ -350,13 +350,15 @@ def teacher_basic_info(sched, from_ilearn, names):
 return c


+# TODO Old and broken
+
 # what percentage of their sections were online / hybrid /lecture ?
 # Consumes: output/semesters/fa19_sched.json and etc for 1 year
 # Outputs: cache/teacher_by_semester.csv,
 def teacherModalityHistory(sched=[],names=[]):
 if not len(sched):
 sched = oneYearSchedule()
-names = match_username()
+#names = match_username()

 # How many classes a teacher taught lect/online/hybrid/hours
 sec_type = sched.groupby(['teacher','sem'])['type'].apply(' '.join)
@@ -458,7 +460,7 @@ def teacherSharedCourses(a=[]):
 return c


-
+# TODO: this is broken
 # Consumes: output/semesters/fa19_sched.json and etc for 1 year
 # Outputs: cache/num_courses_per_dept.csv (not teacher_course_oer_deptcount)
 # How many courses in each department were taught in the last year?
@@ -467,14 +469,14 @@ def departmentCountCourses(a=[]):

 tt = a.drop(['code','partofday','sem','site','type'],axis=1) #,'dept','codeletter'

-records_by_sname = defaultdict(my_empty_dict, match_usernames())
+"""records_by_sname = defaultdict(my_empty_dict, match_usernames())
 tt.drop_duplicates(keep='first',inplace=True)
 tt['name'] = tt.apply(lambda x: records_by_sname[x['teacher']]['name'],axis=1)
 tt['email'] = tt.apply(lambda x: records_by_sname[x['teacher']]['email'],axis=1)
 tt = tt.drop(['teacher'],axis=1)
 tt.sort_values(by=['dept','name','codenum'],inplace=True)
 count = tt['dept'].value_counts()
-count.to_csv('cache/num_courses_per_dept.csv', header=True)
+count.to_csv('cache/num_courses_per_dept.csv', header=True)"""


 def clean_nonprint(s):
@@ -902,9 +904,11 @@ def getTeachersInfoMain():
 #getAllTeachersInTerm()


+# TODO - broken

 def enroll_staff_shell():
-staff = users_with_gavilan_email()
+pass
+"""staff = users_with_gavilan_email()
 for i,s in staff.iterrows():
 print(s['canvasid'],s['name'])
 u = url + '/api/v1/courses/8528/enrollments'
@@ -916,6 +920,8 @@ def enroll_staff_shell():

 res = requests.post(u, headers = header, data=param)
 print(res.text)
+"""
+

 #"Jun 28 2018 at 7:40AM" -> "%b %d %Y at %I:%M%p"
 #"September 18, 2017, 22:19:55" -> "%B %d, %Y, %H:%M:%S"
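Side note on the two format-string comments at the end of the hunk above: they pair example Canvas date strings with strptime patterns. A quick standalone check, using only the examples given in those comments:

from datetime import datetime as dt

# Patterns and sample strings copied from the comments in the hunk above
a = dt.strptime("Jun 28 2018 at 7:40AM", "%b %d %Y at %I:%M%p")
b = dt.strptime("September 18, 2017, 22:19:55", "%B %d, %Y, %H:%M:%S")
print(a.isoformat())  # 2018-06-28T07:40:00
print(b.isoformat())  # 2017-09-18T22:19:55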
@@ -1033,8 +1039,9 @@ def get_recent_views(id=1):
 codecs.open('gui/public/activitysummary.json','w','utf-8').write( json.dumps(summary,indent=2) )


+# TODO broken?
 # Have they taught online or hybrid classes?
-
+"""
 def categorize_user(u):
 global role_table, term_courses
 their_courses = get_enrlmts_for_user(u, role_table)
@@ -1062,7 +1069,7 @@ def categorize_user(u):
 if i==0: online_only = 0
 #print "Type: " + type + " All online: " + str(online_only) + " Number courses this term: " + str(len(is_online))
 return (u[0],type, online_only, len(is_online))
-
+"""


 ##########
@@ -1336,7 +1343,7 @@ def uploadPhoto():
 time.sleep(1)
 # Make api call to set avatar image to the token of the uploaded imaged (file_id)
 params = { 'as_user_id':'{0}'.format(id)}
-avatar_options = requests.get("https://%s/api/v1/users/%s/avatars"%(domain,'{0}'.format(id)),headers=header,params=params)
+avatar_options = requests.get(url + "/api/v1/users/%s/avatars"% '{0}'.format(id),headers=header,params=params)
 #print "\nAvatar options: "
 #print avatar_options.json()
 for ao in avatar_options.json():
@@ -1345,7 +1352,7 @@ def uploadPhoto():
 #print("avatar option found...")
 #print((ao.get('display_name'),ao.get('token'), ao.get('url')))
 params['user[avatar][token]'] = ao.get('token')
-set_avatar_user = requests.put("https://%s/api/v1/users/%s"%(domain,'{0}'.format(id)),headers=header,params=params)
+set_avatar_user = requests.put(url + "/api/v1/users/%s"% '{0}'.format(id),headers=header,params=params)
 if set_avatar_user.status_code == 200:
 print(('success uploading user avatar for {0}'.format(id)))
 account_count += 1
@@ -1367,8 +1374,8 @@ def uploadPhoto():



-def test_email():
-send_z_email("Peter Howell", "Peter", "phowell@gavilan.edu", ['CSIS85','CSIS42'])
+#def test_email():
+# send_z_email("Peter Howell", "Peter", "phowell@gavilan.edu", ['CSIS85','CSIS42'])


 def create_ztc_list():
@@ -2118,8 +2125,8 @@ def find_no_goo():
 try:
 user_port.append( fetch(url+'/api/v1/eportfolios/%s' % str(p_user['id']) ) )
 if DO_DELETE_PORTFOLIOS:
-output5.write("<br />deleted: <a href='https://ilearn.gavilan.edu/eportfolios/%s'>%s\n" % (str(p_user['id']),str(p_user['id'])) )
-output5.flush()
+#output5.write("<br />deleted: <a href='https://ilearn.gavilan.edu/eportfolios/%s'>%s\n" % (str(p_user['id']),str(p_user['id'])) )
+#output5.flush()
 del_request = requests.delete(url + "/api/v1/eportfolios/%s" % str(p_user['id']) ,headers=header)
 print(del_request.text)
 except Exception as e:
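For reference, a condensed, hypothetical sketch of the avatar flow that the two uploadPhoto hunks touch: the endpoints and parameters are taken from the diff, while the function name, arguments, and the display-name filter are illustrative, and url/header are assumed to be the Canvas base URL and auth header used elsewhere in users.py.

import requests

def set_avatar(user_id, url, header, display_name):
    """Sketch of the calls shown in the uploadPhoto hunks above: list a
    user's avatar options, then PUT the chosen option's token back onto
    the user record."""
    params = {'as_user_id': '{0}'.format(user_id)}
    # List the avatar options Canvas knows about for this user
    avatar_options = requests.get(url + "/api/v1/users/%s/avatars" % user_id,
                                  headers=header, params=params)
    for ao in avatar_options.json():
        if ao.get('display_name') == display_name:   # illustrative filter
            # Set the avatar to the token of the chosen option
            params['user[avatar][token]'] = ao.get('token')
            resp = requests.put(url + "/api/v1/users/%s" % user_id,
                                headers=header, params=params)
            return resp.status_code == 200
    return False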