stats - gather grades
parent 562a8b3e26
commit d0b7cd93f1

@ -1,3 +1,4 @@
from sqlite3 import paramstyle
from time import strptime
from bs4 import BeautifulSoup as bs
from util import UnicodeDictReader

@ -227,12 +228,15 @@ def d(s,end=''):

# Main canvas querying fxn
def fetch(target,verbose=0):
def fetch(target,verbose=0,params=0):
# if there are more results, recursively call myself, adding on to the results.
results = 0
if target[0:4] != "http": target = url + target
if verbose:
print("++ Fetching: " + target)
if params:
r2 = requests.get(target, headers = header, params = params)
else:
r2 = requests.get(target, headers = header)
#if verbose:
#print "++ Got: " + r2.text

@ -261,7 +265,7 @@ def fetch(target,verbose=0):

#link = re.sub(r'per_page=10$', 'per_page=100', link) # link.replace('per_page=10','per_page=500')
#if (verbose): print("++ More link: " + link)

nest = fetch(link,verbose)
nest = fetch(link,verbose,params)
if isinstance(results,dict): results.update(nest)
else: results.extend(nest)
return results
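
Note on the change above: the new params argument lets callers pass Canvas query parameters through fetch(), and because the recursive call now forwards params as well, they are re-applied on every page of a paginated request. A minimal usage sketch, not part of this commit — it assumes the module-level url and header used above, and the course id 123 is purely illustrative:

# hypothetical caller: list one course's users with enrollment grades included,
# letting fetch() prepend url to the relative path and follow the pagination link
params = { "include[]": ["enrollments", "current_grading_period_scores"] }
users = fetch("/api/v1/courses/123/users", verbose=1, params=params)
print(len(users), "user records across all pages")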

115 stats.py

@ -1,3 +1,82 @@
# statistics

"""
## Investigate: Success rates (grades) of students in:

- online courses (over all)
- sync and async and online live
- teachers/courses that have passed POCR (are all async?)
- teachers that have done more than the minimum training in online teaching
- in person classes, if grades are available


## Data collection

- Choose how many semesters (10?)
- Script 1 - given a CRN and Semester, download all grades
    - Check if grades were used and make sense
    - Compute mean, % > 70, median, etc.

- Script 2 - given all semester schedules, generate lists of:
    - CRNs which are online, online live, hybrid, inperson, excluded
    - CRNs in which teacher and course have passed pocr (and semester is greater than their pass date)
    - CRNs in which teacher passed pocr for a different course (and semester is greater than their pass date)
    - CRNs to exclude, for example SP20, because of covid. Possibly SU20 and FA20
    - CRNs in which teacher has done more than the minimum training in online teaching


## Hypothesis Testing

-
"""

import json, csv, requests
from pipelines import fetch, url
from courses import getCoursesInTerm
from collections import defaultdict


def get_all():
terms = '178 177 176 175 174 173 172 171 168 65 64 62 63 61 60 25 26 23 22 21'.split(' ')
sems = '202330 202310 202270 202250 202230 202210 202170 202150 202130 202070 202050 202030 202010 201970 201950 201930 201910 201870 201850 201830'.split(' ')
# Save grades to a CSV file
with open(f"cache/grades_all.csv", "w", newline="") as csvfile:
writer = csv.writer(csvfile)
writer.writerow(["crn", "sem", "coursecode", "s_can_id","g","name", "current", "final"])
for (term,sem) in zip(terms,sems):
print(term,sem,"\n")
courses = getCoursesInTerm(term,get_fresh=0,show=0,active=1)
for c in courses:
print(c['name'])
c_code = c['course_code']
grades(writer, sem, c['id'], c_code)
csvfile.flush()


def grades(writer, sem, COURSE_ID, course_code):
params = { "include[]": ["enrollments", "current_grading_period_scores"] }
grades = fetch(url + f"/api/v1/courses/{COURSE_ID}/users",0, params)
#grades = json.loads(grades.text)

for student in grades:
try:
id = student["id"]
name = student["name"]
g = student["login_id"]
print("\t", name)
if student['enrollments'][0]['type'] == 'StudentEnrollment':
grade = student["enrollments"][0]["grades"]["final_score"]
current = student["enrollments"][0]["grades"]["current_score"]
writer.writerow([COURSE_ID, sem, course_code, id, g, name, current, grade])
except Exception as e:
print("Exception:", e)


get_all()
#grades()
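
Script 1 in the plan above also calls for computing mean, % > 70, and median once grades are downloaded. A minimal sketch of that summary step, reading the CSV that get_all() writes; this is not part of the commit, and grouping sections by (crn, sem) on the "final" column is an assumption:

import csv
from collections import defaultdict
from statistics import mean, median

def summarize(path="cache/grades_all.csv"):
    # group final scores by section (crn, sem), skipping rows with no final score
    by_section = defaultdict(list)
    with open(path, newline="") as f:
        for row in csv.DictReader(f):
            if row["final"] not in ("", "None"):
                by_section[(row["crn"], row["sem"])].append(float(row["final"]))
    # per-section mean, median, and share of students at or above 70
    for (crn, sem), scores in sorted(by_section.items()):
        pct70 = 100.0 * sum(s >= 70 for s in scores) / len(scores)
        print(crn, sem, round(mean(scores), 1), round(median(scores), 1), round(pct70, 1))

From there, the "## Hypothesis Testing" step could compare these per-section (or per-student) distributions between the groups Script 2 is meant to produce, e.g. online vs. in person.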

@ -17,16 +96,15 @@ def grades_rundown():

for C in courses:
activity_time_total = 0.0
course_info = {'id':str(C['id']),'name':C['name'],'grades':[], 'teacher':[] }
#print str(C['id']) + "\t " + C['name']
#print(str(C['id']) + "\t " + C['name'])
emts = course_enrollment(C['id'])
for k,E in emts.items():
#print E
if E['type'] == 'TeacherEnrollment':
course_info['teacher'].append(users_by_id[E['user_id']]['name'])
all_ids[E['sis_user_id']] = 1
""" if 'grades' in E and E['grades']['current_score']:
#print str(E['grades']['final_score']) + ", ",
#print str(E['grades']['current_score']) + ", ",
#print(str(E['grades']['final_score']) + ", ",)
#print(str(E['grades']['current_score']) + ", ",)
course_info['grades'].append(E['grades']['current_score'])
activity_time_total += E['total_activity_time']
if course_info['grades']:

@ -43,7 +121,7 @@ def grades_rundown():

course_info['count_gt70'] = 0
course_info['avg_activity_time'] = 0"""

#print course_info
#print(course_info)
all_sem_courses.append(course_info)
w.writerow(course_info)
f.flush()

@ -65,11 +143,11 @@ def class_logs():

# that we want to check for users/activity.
#target = url + '/api/v1/accounts/1/terms' # list the terms
target = url + '/api/v1/accounts/1/courses?published=true&enrollment_term_id=14'
print "Getting term classes."
print("Getting term classes.")
while target:
target = fetch(target)

print "\n\n\n"
print("\n\n\n")

term_results = results
full_results = []

@ -77,10 +155,10 @@ def class_logs():

results = []
# now see who's logged in recently:
target = url + '/api/v1/courses/' + str(x['id']) + '/recent_students'
print "Getting class id: ", str(x['id'])
print("Getting class id: ", str(x['id']))
fetch(target)
if len(results):
#print results
#print(results)
LL = [ how_long_ago(z['last_login']) for z in results ]
avg = 9999
if len(LL): avg = sum(LL) / len(LL)

@ -88,7 +166,7 @@ def class_logs():

full_results.append(d)
sorted_results = sorted(full_results, key=lambda k: k['avg'])
for x in sorted_results:
print x['id'], "\t", str(x['avg']), "\t", x['name']
print(x['id'], "\t", str(x['avg']), "\t", x['name'])

@ -102,12 +180,12 @@ def user_logs():

results = []
target = url + '/api/v1/users/' + target_user + '/page_views?per_page=200'
while target:
print target
print(target)
target = fetch(target)
# have all student's hits. Filter to only this class
#results = filter(match59,results)
times = []
print users_by_id[ int(target_user) ]
print(users_by_id[ int(target_user) ])
f.write(str(users_by_id[ int(target_user) ]) + "\n")
f.write( "link,updated_at,remote_ip,url,context_type,user_agent,action\n")
for hit in results:

@ -124,8 +202,8 @@ def recent_logins():

target = url + "/api/v1/audit/authentication/accounts/1"
results_dict = {}
resp = fetch_dict(target,p)
print resp
print results_dict
print(resp)
print(results_dict)

@ -133,7 +211,7 @@ def userHitsThisSemester(uid=2):

begin = "20170820T0000"
t = url + "/api/v1/users/" + str(uid) + "/page_views?start_time=" + str(begin)
while(t): t = fetch(t)
print json.dumps(results, indent=4, sort_keys=True)
print(json.dumps(results, indent=4, sort_keys=True))

@ -182,9 +260,9 @@ def getCurrentActivity(): # a dict

master_list_by_date = {}
for sem in [sp17,su17,su17b,fa17,sp18,cmte,dev]:
#print sem
#print(sem)
for record in sem:
print record
print(record)
date = record['date']
if date in master_list_by_date:
master_list_by_date[date]['participations'] += record['participations']

@ -216,8 +294,7 @@ def externaltool(): # a list

# "course_navigation[default]": "false" }
#t = url + '/api/v1/accounts/1/external_tools/704?course_navigation[text]=Video Chat&course_navigation[default]=false'
#r = requests.put(t, headers=header)
#print r.text
t = url + '/api/v1/accounts/1/external_tools/'
while(t): t = fetch(t)
print results
print(results)