diff --git a/analytics/apps.py b/analytics/apps.py index 1a1b7fc..244a4a0 100644 --- a/analytics/apps.py +++ b/analytics/apps.py @@ -1,4 +1,4 @@ -from __future__ import unicode_literals + from django.apps import AppConfig diff --git a/analytics/request_counter.py b/analytics/request_counter.py index 3d452af..c7da12c 100644 --- a/analytics/request_counter.py +++ b/analytics/request_counter.py @@ -8,6 +8,7 @@ """ from .models import * +from django.utils.deprecation import MiddlewareMixin EXCLUDE_PATH_PREFIXES = [ "/favicon.ico", @@ -24,7 +25,7 @@ "spider" ] -class RequestCounterMiddleware(object): +class RequestCounterMiddleware(MiddlewareMixin): """A middleware that saves a RequestCount object each time a page is requested.""" def process_response(self, request, response): @@ -42,7 +43,7 @@ def process_response(self, request, response): if len(user_agent) > 150: user_agent = user_agent[:150] tally.user_agent = user_agent - if hasattr(request, "user") and request.user and request.user.is_authenticated(): + if hasattr(request, "user") and request.user and request.user.is_authenticated: tally.is_authenticated = True try: student = request.user.student diff --git a/analytics/urls.py b/analytics/urls.py index 8d6060b..8e96921 100755 --- a/analytics/urls.py +++ b/analytics/urls.py @@ -1,13 +1,13 @@ -from django.conf.urls import url +from django.urls import re_path from . 
import views urlpatterns = [ - url(r'total_requests/(?P<time_frame>[a-z-]*)', views.total_requests, name="total_requests"), - url(r'user_agents/(?P<time_frame>[a-z-]*)', views.user_agents, name="user_agents"), - url(r'logged_in_users/(?P<time_frame>[a-z-]*)', views.logged_in_users, name="logged_in_users"), - url(r'user_semesters/(?P<time_frame>[a-z-]*)', views.user_semesters, name="user_semesters"), - url(r'request_paths/(?P<time_frame>[a-z-]*)', views.request_paths, name="request_paths"), - url(r'active_documents/(?P<time_frame>[a-z-]*)', views.active_documents, name="active_documents"), - url(r'^$', views.dashboard, name='analytics_dashboard'), + re_path(r'total_requests/(?P<time_frame>[a-z-]*)', views.total_requests, name="total_requests"), + re_path(r'user_agents/(?P<time_frame>[a-z-]*)', views.user_agents, name="user_agents"), + re_path(r'logged_in_users/(?P<time_frame>[a-z-]*)', views.logged_in_users, name="logged_in_users"), + re_path(r'user_semesters/(?P<time_frame>[a-z-]*)', views.user_semesters, name="user_semesters"), + re_path(r'request_paths/(?P<time_frame>[a-z-]*)', views.request_paths, name="request_paths"), + re_path(r'active_documents/(?P<time_frame>[a-z-]*)', views.active_documents, name="active_documents"), + re_path(r'^$', views.dashboard, name='analytics_dashboard'), ] diff --git a/analytics/views.py b/analytics/views.py index fdf53cb..267724e 100755 --- a/analytics/views.py +++ b/analytics/views.py @@ -83,7 +83,7 @@ def total_requests(request, time_frame=None): timezone.activate(DISPLAY_TIME_ZONE) early_time, delta, format = get_time_bounds(time_frame) data = RequestCount.tabulate_requests(early_time, delta, lambda _: 1) - labels, counts = itertools.izip(*((format_date(t, format), item.get(1, 0)) for t, item in data)) + labels, counts = list(zip(*((format_date(t, format), item.get(1, 0)) for t, item in data))) return HttpResponse(json.dumps({"labels": labels, "data": counts, "total": "{:,}".format(sum(counts))}), content_type="application/json") USER_AGENT_TYPES = [ @@ -127,7 +127,7 @@ def logged_in_users(request, time_frame=None): early_time, delta, format = 
get_time_bounds(time_frame) data = RequestCount.tabulate_requests(early_time, delta, lambda _: 1, distinct_users=True) total_data = RequestCount.tabulate_requests(early_time, None, lambda _: 1, distinct_users=True) - labels, counts = itertools.izip(*((format_date(t, format), item.get(1, 0)) for t, item in data)) + labels, counts = list(zip(*((format_date(t, format), item.get(1, 0)) for t, item in data))) return HttpResponse(json.dumps({"labels": labels, "data": counts, "total": "{:,}".format(total_data.get(1, 0))}), content_type="application/json") SEMESTERS = [ @@ -170,7 +170,7 @@ def user_semesters(request, time_frame=None): labels = SEMESTERS semester_buckets = [0 for _ in SEMESTERS] - for semester, count in data.items(): + for semester, count in list(data.items()): if not semester or semester < 0 or semester >= len(semester_buckets): continue semester_buckets[semester] += count @@ -184,7 +184,7 @@ def request_paths(request, time_frame=None): data = RequestCount.tabulate_requests(early_time, None, lambda request: request.path) labels = set(data.keys()) - set([None]) counts = {label: data.get(label, 0) for label in labels} - labels, counts = itertools.izip(*sorted(counts.items(), key=lambda x: x[1], reverse=True)) + labels, counts = list(zip(*sorted(list(counts.items()), key=lambda x: x[1], reverse=True))) if len(labels) > 15: labels = labels[:15] counts = counts[:15] diff --git a/catalog/apps.py b/catalog/apps.py index 1d8c33e..be1b316 100644 --- a/catalog/apps.py +++ b/catalog/apps.py @@ -1,4 +1,4 @@ -from __future__ import unicode_literals + from django.apps import AppConfig diff --git a/catalog/models.py b/catalog/models.py index b8604d2..30dc0e4 100644 --- a/catalog/models.py +++ b/catalog/models.py @@ -1,4 +1,4 @@ -from __future__ import unicode_literals + from django.core.exceptions import ObjectDoesNotExist from django.db import models @@ -33,7 +33,7 @@ def combine(cls, attrs, unique_id): are combined together to form a course that satisfies each 
attribute's requirement, and a unique id is assigned to the course only if it makes sense for each attribute to exist multiple times in a list of courses""" - new_attr = cls(" ".join(map(lambda a: a.requirement,attrs)),True) + new_attr = cls(" ".join([a.requirement for a in attrs]),True) for attr in attrs: new_attr.course = attr.modify_course(new_attr.course) new_attr.needs_unique_id = new_attr.needs_unique_id and attr.needs_unique_id @@ -203,7 +203,7 @@ def list_converter(value): "Custom Color": (CourseFields.custom_color, string_converter) } -FIELD_TO_CSV = {field_name: csv_header for csv_header, (field_name, _) in CSV_HEADERS.items()} +FIELD_TO_CSV = {field_name: csv_header for csv_header, (field_name, _) in list(CSV_HEADERS.items())} # Create your models here. class Course(models.Model): diff --git a/catalog/tests.py b/catalog/tests.py index 31e4306..f0cf1d9 100644 --- a/catalog/tests.py +++ b/catalog/tests.py @@ -88,23 +88,23 @@ def test_json_object(self): course = Course.objects.get(subject_id="21M.030") result = course.to_json_object(full=False) expected = { - CourseFields.subject_id: u"21M.030", - CourseFields.title: u"World Music", + CourseFields.subject_id: "21M.030", + CourseFields.title: "World Music", CourseFields.total_units: 12, CourseFields.offered_fall: True, CourseFields.offered_IAP: False, CourseFields.offered_spring: True, CourseFields.offered_summer: False, CourseFields.public: True, - CourseFields.level: u"U", - CourseFields.joint_subjects: [u"21M.830", u"21M.290"], - CourseFields.equivalent_subjects: [u"21M.031"], - CourseFields.quarter_information: u"1,march 1", - CourseFields.not_offered_year: u"2019-2020", - CourseFields.instructors: [u"E. Zimmer", u"C. 
Smith"], - CourseFields.communication_requirement: u"CI-H", - CourseFields.hass_attribute: u"HASS-A", - CourseFields.gir_attribute: u"REST", + CourseFields.level: "U", + CourseFields.joint_subjects: ["21M.830", "21M.290"], + CourseFields.equivalent_subjects: ["21M.031"], + CourseFields.quarter_information: "1,march 1", + CourseFields.not_offered_year: "2019-2020", + CourseFields.instructors: ["E. Zimmer", "C. Smith"], + CourseFields.communication_requirement: "CI-H", + CourseFields.hass_attribute: "HASS-A", + CourseFields.gir_attribute: "REST", } self.maxDiff = None self.assertDictEqual(expected, result) @@ -113,23 +113,23 @@ def test_json_object_full(self): course = Course.objects.get(subject_id="21M.030") result = course.to_json_object() expected = { - CourseFields.subject_id: u"21M.030", - CourseFields.title: u"World Music", + CourseFields.subject_id: "21M.030", + CourseFields.title: "World Music", CourseFields.total_units: 12, CourseFields.offered_fall: True, CourseFields.offered_IAP: False, CourseFields.offered_spring: True, CourseFields.offered_summer: False, CourseFields.public: True, - CourseFields.level: u"U", - CourseFields.joint_subjects: [u"21M.830", u"21M.290"], - CourseFields.equivalent_subjects: [u"21M.031"], - CourseFields.quarter_information: u"1,march 1", - CourseFields.not_offered_year: u"2019-2020", - CourseFields.instructors: [u"E. Zimmer", u"C. Smith"], - CourseFields.communication_requirement: u"CI-H", - CourseFields.hass_attribute: u"HASS-A", - CourseFields.gir_attribute: u"REST", + CourseFields.level: "U", + CourseFields.joint_subjects: ["21M.830", "21M.290"], + CourseFields.equivalent_subjects: ["21M.031"], + CourseFields.quarter_information: "1,march 1", + CourseFields.not_offered_year: "2019-2020", + CourseFields.instructors: ["E. Zimmer", "C. 
Smith"], + CourseFields.communication_requirement: "CI-H", + CourseFields.hass_attribute: "HASS-A", + CourseFields.gir_attribute: "REST", CourseFields.lecture_units: 5, CourseFields.lab_units: 0, CourseFields.design_units: 0, @@ -138,11 +138,11 @@ def test_json_object_full(self): CourseFields.is_half_class: False, CourseFields.pdf_option: False, CourseFields.has_final: False, - CourseFields.description: u"Test description of 21M.030", - CourseFields.prerequisites: u"21M.051/''permission of instructor''", - CourseFields.schedule: u"Lecture,4-364/MW/0/9.30-11,4-364/MW/0/11-12.30", - CourseFields.url: u"http://student.mit.edu/catalog/m21Ma.html#21M.030", - CourseFields.related_subjects: [u"21M.011", u"21M.031"], + CourseFields.description: "Test description of 21M.030", + CourseFields.prerequisites: "21M.051/''permission of instructor''", + CourseFields.schedule: "Lecture,4-364/MW/0/9.30-11,4-364/MW/0/11-12.30", + CourseFields.url: "http://student.mit.edu/catalog/m21Ma.html#21M.030", + CourseFields.related_subjects: ["21M.011", "21M.031"], CourseFields.rating: 5.0, CourseFields.enrollment_number: 45.0, CourseFields.in_class_hours: 3.0, @@ -194,8 +194,8 @@ def test_lookup_subject(self): response = views.lookup(request, subject_id="2.001") self.assertEqual(200, response.status_code) self.assertDictContainsSubset({ - CourseFields.subject_id: u"2.001", - CourseFields.title: u"Foo" + CourseFields.subject_id: "2.001", + CourseFields.title: "Foo" }, json.loads(response.content)) def test_lookup_subject_not_existing(self): diff --git a/catalog/urls.py b/catalog/urls.py index 8184341..16214b3 100644 --- a/catalog/urls.py +++ b/catalog/urls.py @@ -1,10 +1,10 @@ -from django.conf.urls import url +from django.urls import re_path from . 
import views urlpatterns = [ - url(r'lookup/(?P[A-z0-9.]+)', views.lookup, name='lookup'), - url(r'search/(?P[^?]+)', views.search, name='search'), - url(r'dept/(?P[A-z0-9.]+)', views.department, name='department'), - url(r'all', views.list_all, name='list_all') + re_path(r'lookup/(?P[A-z0-9.]+)', views.lookup, name='lookup'), + re_path(r'search/(?P[^?]+)', views.search, name='search'), + re_path(r'dept/(?P[A-z0-9.]+)', views.department, name='department'), + re_path(r'all', views.list_all, name='list_all') ] diff --git a/catalog_parse/catalog_parser.py b/catalog_parse/catalog_parser.py index ca2383c..05f0155 100644 --- a/catalog_parse/catalog_parser.py +++ b/catalog_parse/catalog_parser.py @@ -31,9 +31,9 @@ # For type checking str or unicode in Python 2 and 3 try: - basestring + str except NameError: - basestring = str + str = str COURSE_NUMBERS = [ "1", "2", "3", "4", @@ -83,7 +83,7 @@ def load_course_elements(url): course_ids = [] courses = [] for element in course_elements[0].getchildren(): - if element.tag == "a" and "name" in element.keys(): + if element.tag == "a" and "name" in list(element.keys()): subject_id = element.get("name") course_ids.append(subject_id) courses.append([]) @@ -105,7 +105,7 @@ def load_course_elements(url): def get_inner_html(node): """Gets the inner HTML of a node, including tags.""" - children = ''.join(etree.tostring(e).decode('utf-8') for e in node) + children = ''.join(etree.tostring(e, encoding="unicode") for e in node) if node.text is None: return children return node.text + children @@ -119,10 +119,10 @@ def recursively_extract_info(node): contents = get_inner_html(node) if node.tag == "img": - if "title" in node.keys() and len(node.get("title")): + if "title" in list(node.keys()) and len(node.get("title")): info_items.append(node.get("title")) elif node.tag == "a": - if "name" in node.keys(): + if "name" in list(node.keys()): return (info_items, True) text = node.text_content().strip() if len(text): @@ -410,7 +410,7 @@ def 
merge_duplicates(courses): if len(course_dict[subject_id]) > 1: total_course = {} - keys = set().union(*(other.keys() for other in course_dict[subject_id])) + keys = set().union(*(list(other.keys()) for other in course_dict[subject_id])) for key in keys: vals = [other.get(key, '') for other in course_dict[subject_id]] @@ -478,7 +478,7 @@ def courses_from_dept_code(dept_code, **options): schedules = {other_id: schedules[""] for other_id in subject_ids} for other_id in subject_ids: - copied_course = {key: val for key, val in attribs.items()} + copied_course = {key: val for key, val in list(attribs.items())} copied_course[CourseAttribute.subjectID] = other_id if other_id in schedules: copied_course[CourseAttribute.schedule] = schedules[other_id] @@ -511,7 +511,7 @@ def writing_description_for_attribute(course, attribute): return "" item = course[attribute] - if isinstance(item, basestring): + if isinstance(item, str): return '"' + item.replace('"', "'").replace('\n', '\\n') + '"' elif isinstance(item, bool): return "Y" if item == True else "N" @@ -524,7 +524,7 @@ def writing_description_for_attribute(course, attribute): elif isinstance(item, list): return '"' + ",".join(item) + '"' else: - print("Don't have a way to represent attribute {}: {} ({})".format(attribute, item, type(item))) + print(("Don't have a way to represent attribute {}: {} ({})".format(attribute, item, type(item)))) return str(item) def write_courses(courses, filepath, attributes): @@ -536,10 +536,7 @@ def write_courses(courses, filepath, attributes): csv_comps.append([writing_description_for_attribute(course, attrib) for attrib in attributes]) with open(filepath, 'w') as file: - if sys.version_info > (3, 0): - file.write("\n".join(",".join(item) for item in csv_comps)) - else: - file.write("\n".join(",".join(item) for item in csv_comps).encode('utf-8')) + file.write("\n".join(",".join(item) for item in csv_comps)) ### Main method @@ -583,7 +580,7 @@ def parse(output_dir, equivalences_path=None, 
write_related=True, if len(addl_courses) == 0: continue - print("======", total_code) + print(("======", total_code)) dept_courses += addl_courses if original_html is None: original_html = LAST_PAGE_HTML diff --git a/catalog_parse/consensus_catalog.py b/catalog_parse/consensus_catalog.py index 67d7a87..283ffdc 100644 --- a/catalog_parse/consensus_catalog.py +++ b/catalog_parse/consensus_catalog.py @@ -34,7 +34,7 @@ def make_corrections(corrections, consensus): if correction[col]: if col not in consensus.columns: consensus[col] = "" - print("Correction for {}: {} ==> {}".format(idx, col, correction[col])) + print(("Correction for {}: {} ==> {}".format(idx, col, correction[col]))) consensus.ix[idx][col] = correction[col] elif subject_id in consensus.index: @@ -43,12 +43,12 @@ def make_corrections(corrections, consensus): for col in correction: if col == "Subject Id": continue if correction[col]: - print("Correction for {}: {} ==> {}".format(subject_id, col, correction[col])) + print(("Correction for {}: {} ==> {}".format(subject_id, col, correction[col]))) consensus_row[col] = correction[col] else: # Add the subject - print("Correction: adding subject {}".format(subject_id)) + print(("Correction: adding subject {}".format(subject_id))) consensus.loc[subject_id] = {col: correction.get(col, None) for col in consensus.columns} @@ -70,7 +70,7 @@ def build_consensus(base_path, out_path, corrections=None, semester_data[semester] = all_courses # Sort in reverse chronological order - semester_data = sorted(semester_data.items(), key=lambda x: semester_sort_key(x[0]), reverse=True) + semester_data = sorted(list(semester_data.items()), key=lambda x: semester_sort_key(x[0]), reverse=True) if len(semester_data) == 0: print("No raw semester data found.") return @@ -109,7 +109,7 @@ def build_consensus(base_path, out_path, corrections=None, consensus = pd.concat([consensus, data], sort=False) consensus = consensus.drop_duplicates(subset=[CourseAttribute.subjectID], keep='first') - 
print("Added {} courses with {}.".format(len(consensus) - last_size, semester)) + print(("Added {} courses with {}.".format(len(consensus) - last_size, semester))) last_size = len(consensus) consensus.set_index(CourseAttribute.subjectID, inplace=True) @@ -169,7 +169,7 @@ def write_df(df, path): eval_path = None if os.path.exists(out_path): - print("Fatal: the directory {} already exists. Please delete it or choose a different location.".format(out_path)) + print(("Fatal: the directory {} already exists. Please delete it or choose a different location.".format(out_path))) exit(1) build_consensus(in_path, out_path, evaluations_path=eval_path) diff --git a/catalog_parse/delta_gen.py b/catalog_parse/delta_gen.py index d6de5be..6bc0825 100644 --- a/catalog_parse/delta_gen.py +++ b/catalog_parse/delta_gen.py @@ -40,7 +40,7 @@ def write_delta_file(semester_name, delta, outpath): file.write(str(version_num) + "\n") file.write("\n".join(delta)) - print("Delta file written to {}.".format(delta_file_path)) + print(("Delta file written to {}.".format(delta_file_path))) def delta_file_name(path): if ".txt" in path: @@ -96,8 +96,8 @@ def commit_delta(new_directory, old_directory, server_path, delta): delta = make_delta(new_directory, old_directory) for file in delta: print(file) - if raw_input("Ready to write files?") in ['y', 'yes', '\n']: + if eval(input("Ready to write files?")) in ['y', 'yes', '\n']: commit_delta(new_directory, old_directory, server_path, delta) - print("Old files moved to {}. New files moved to {}.".format(os.path.join(os.path.dirname(old_directory), old_name + "-old"), old_directory)) + print(("Old files moved to {}. 
New files moved to {}.".format(os.path.join(os.path.dirname(old_directory), old_name + "-old"), old_directory))) else: print("Aborting.") diff --git a/catalog_parse/utils/course_nlp.py b/catalog_parse/utils/course_nlp.py index 8f394e7..c728650 100644 --- a/catalog_parse/utils/course_nlp.py +++ b/catalog_parse/utils/course_nlp.py @@ -99,8 +99,8 @@ def term_frequencies(description): def tf_idf(tf_list, all_term_frequencies): scores = {} - for word, frequency in tf_list.items(): - idf = math.log(len(all_term_frequencies) / sum(1 for x in all_term_frequencies.values() if word in x)) + for word, frequency in list(tf_list.items()): + idf = math.log(len(all_term_frequencies) / sum(1 for x in list(all_term_frequencies.values()) if word in x)) scores[word] = frequency * idf return scores @@ -136,9 +136,9 @@ def write_course_features(courses_by_dept, tf_lists, related_matrix, outpath, ma keywords_by_subject = {} subjects_by_keyword = {} max_generated_keywords = max_keywords * 3 - for dept, courses in courses_by_dept.items(): + for dept, courses in list(courses_by_dept.items()): print(dept) - for id, course in courses.items(): + for id, course in list(courses.items()): if id not in tf_lists: continue tfidf = tf_idf(tf_lists[id], tf_lists) sorted_items = sorted(tfidf, key=tfidf.get, reverse=True) @@ -153,8 +153,8 @@ def write_course_features(courses_by_dept, tf_lists, related_matrix, outpath, ma # Find the minimum number of keywords that capture the entire course database sorted_keywords = sorted(subjects_by_keyword, key=lambda x: (len(subjects_by_keyword[x]), len(x)), reverse=True) sorted_keywords = sorted_keywords[int(len(sorted_keywords) * KEYWORD_COVERAGE_TRIM):] - print("Total: {} keywords. Top 100:".format(len(sorted_keywords))) - print(sorted_keywords[:100]) + print(("Total: {} keywords. 
Top 100:".format(len(sorted_keywords)))) + print((sorted_keywords[:100])) covered_subjects = set() partially_covered_subjects = set() @@ -171,12 +171,12 @@ def write_course_features(courses_by_dept, tf_lists, related_matrix, outpath, ma partially_covered_subjects.add(subject) if len(covered_subjects) == len(keywords_by_subject): break - print("Needed {} keywords to cover dataset: {}".format(len(necessary_keywords), necessary_keywords)) + print(("Needed {} keywords to cover dataset: {}".format(len(necessary_keywords), necessary_keywords))) with open(outpath, 'w') as file: - for dept, courses in courses_by_dept.items(): + for dept, courses in list(courses_by_dept.items()): for id in courses: if id not in keywords_by_subject: - print("No keywords for {}".format(id)) + print(("No keywords for {}".format(id))) continue keywords = keywords_by_subject[id] allowed_keywords = [kw for kw in keywords if kw in necessary_keywords] @@ -194,7 +194,7 @@ def write_course_features(courses_by_dept, tf_lists, related_matrix, outpath, ma for other_course in courses[id].get(equiv_key, []): if '.' 
not in other_course: continue depts.add(other_course[:other_course.find('.')]) - file.write((",".join([id] + list(depts) + level_list + allowed_keywords + region_indexes) + "\n").encode('utf-8')) + file.write((",".join([id] + list(depts) + level_list + allowed_keywords + region_indexes) + "\n")) def find_related_regions(related_matrix, min_count=5, threshold=0.2): """ @@ -203,7 +203,7 @@ def find_related_regions(related_matrix, min_count=5, threshold=0.2): """ filtered_matrix = {} for subject in related_matrix: - filtered_matrix[subject] = {subject_2: relation for subject_2, relation in related_matrix[subject].items() if relation >= threshold} + filtered_matrix[subject] = {subject_2: relation for subject_2, relation in list(related_matrix[subject].items()) if relation >= threshold} sets = [] discovered_subjects = set() @@ -245,8 +245,8 @@ def write_related_and_features(courses_by_dept, dest, progress_callback=None, pr progress_callback(start + 0.1 * (100.0 - start), "Computing term frequencies...") print("Computing term frequencies...") - for dept, courses in courses_by_dept.items(): - for id, course in courses.items(): + for dept, courses in list(courses_by_dept.items()): + for id, course in list(courses.items()): tf_lists[id] = term_frequencies(course.get(CourseAttribute.description, "") + "\n" + course.get(CourseAttribute.title, "")) if progress_callback is not None: @@ -256,10 +256,10 @@ def write_related_and_features(courses_by_dept, dest, progress_callback=None, pr # First determine which departments are closely related to each other dept_lists = {} - for dept, courses in courses_by_dept.items(): - for id, course in courses.items(): + for dept, courses in list(courses_by_dept.items()): + for id, course in list(courses.items()): if dept not in dept_lists: dept_lists[dept] = {} - for term, freq in tf_lists[id].items(): + for term, freq in list(tf_lists[id].items()): if term in dept_lists[dept]: dept_lists[dept][term] += freq else: @@ -284,12 +284,12 @@ def 
write_related_and_features(courses_by_dept, dest, progress_callback=None, pr max_relation = -1e20 with open(os.path.join(dest, "related.txt"), "w") as file: - for dept, courses in courses_by_dept.items(): - for id, course in courses.items(): + for dept, courses in list(courses_by_dept.items()): + for id, course in list(courses.items()): related_matrix[id] = {} ranks = [("", 0) for i in range(k)] - for other_id, tf in tf_lists.items(): + for other_id, tf in list(tf_lists.items()): if is_equivalent(id, course, other_id): related_matrix[id][other_id] = related_max continue @@ -320,11 +320,11 @@ def write_related_and_features(courses_by_dept, dest, progress_callback=None, pr if progress_callback is not None: start = progress_start if progress_start is not None else 0.0 progress_callback(start + (0.2 + progress_stepwise / 17.0) * (100.0 - start), "Writing related courses ({}%)...".format(progress_stepwise * 10)) - print("{}% complete...".format(progress_stepwise * 10)) + print(("{}% complete...".format(progress_stepwise * 10))) # Divide every element in the relation matrix by the maximum attained value for subject in related_matrix: - related_matrix[subject] = {subject_2: min(value / max_relation, 1.0) for subject_2, value in related_matrix[subject].items()} + related_matrix[subject] = {subject_2: min(value / max_relation, 1.0) for subject_2, value in list(related_matrix[subject].items())} if progress_callback is not None: start = progress_start if progress_start is not None else 0.0 diff --git a/catalog_parse/utils/parse_evaluations.py b/catalog_parse/utils/parse_evaluations.py index 4336e7f..964081d 100644 --- a/catalog_parse/utils/parse_evaluations.py +++ b/catalog_parse/utils/parse_evaluations.py @@ -63,7 +63,7 @@ def parse_evaluations(evals, courses): year = int(term_data[EvaluationConstants.term][:-2]) averaging_data.setdefault(key, []).append((value, year)) - for eval_key, course_key in KEYS_TO_AVERAGE.items(): + for eval_key, course_key in 
list(KEYS_TO_AVERAGE.items()): if eval_key not in averaging_data: continue values = [value for value, year in averaging_data[eval_key]] max_year = max(year for value, year in averaging_data[eval_key]) diff --git a/catalog_parse/utils/parse_prereqs.py b/catalog_parse/utils/parse_prereqs.py index 5795d74..76c1328 100644 --- a/catalog_parse/utils/parse_prereqs.py +++ b/catalog_parse/utils/parse_prereqs.py @@ -58,12 +58,12 @@ def process_req_list_item(item): paren_levels.append("") elif c == ")": if len(paren_levels) <= 1: - print("Invalid prerequisite syntax:", item) + print(("Invalid prerequisite syntax:", item)) continue sub_key = "#@%" + str(len(substitutions)) + "%@#" last_item = paren_levels.pop() sub_result = process_single_level_req_item(last_item) - for key, sub in substitutions.items(): + for key, sub in list(substitutions.items()): sub_result = sub_result.replace("''" + key + "''", "(" + sub + ")") substitutions[sub_key] = sub_result paren_levels[-1] += sub_key @@ -71,9 +71,9 @@ def process_req_list_item(item): paren_levels[-1] += c if len(paren_levels) == 0: - print("Unmatched parentheses:", item) + print(("Unmatched parentheses:", item)) result = process_single_level_req_item(paren_levels[-1]) - for key, sub in substitutions.items(): + for key, sub in list(substitutions.items()): result = result.replace("''" + key + "''", "(" + sub + ")") return result @@ -134,7 +134,7 @@ def process_single_level_req_item(item): filtered_item = filtered_item[match.end(0):].strip() continue - print(filtered_item, "doesn't match anything") + print((filtered_item, "doesn't match anything")) break if is_or: diff --git a/catalog_parse/utils/parse_schedule.py b/catalog_parse/utils/parse_schedule.py index 0ced90d..9950201 100644 --- a/catalog_parse/utils/parse_schedule.py +++ b/catalog_parse/utils/parse_schedule.py @@ -107,7 +107,7 @@ def parse_schedule(schedule): time_comps.append("1") time_comps.append(submatch.group(4)) else: - print("Couldn't get time of day in", time) + 
print(("Couldn't get time of day in", time)) for loc in location_comps: type_comps.append("/".join([loc] + time_comps)) @@ -118,7 +118,7 @@ def parse_schedule(schedule): schedule_comps.append(",".join(type_comps)) - joined_scheds = {id: ";".join(schedule_comps) for id, schedule_comps in schedule_comps_by_id.items()} + joined_scheds = {id: ";".join(schedule_comps) for id, schedule_comps in list(schedule_comps_by_id.items())} virtual_items = (["Virtual"] if has_virtual else []) + (["In-Person"] if has_in_person else []) joined_virtual = "/".join(virtual_items) diff --git a/common/decorators.py b/common/decorators.py index 14e0480..e4f3f5b 100644 --- a/common/decorators.py +++ b/common/decorators.py @@ -28,7 +28,7 @@ def view_or_basicauth(view, request, test_func, realm = "", *args, **kwargs): and returning the view if all goes well, otherwise responding with a 401. """ - if request.user is None or not request.user.is_authenticated() or not user_has_student(request.user) or ALWAYS_LOGIN: + if request.user is None or not request.user.is_authenticated or not user_has_student(request.user) or ALWAYS_LOGIN: key = 'HTTP_AUTHORIZATION' if key not in request.META: key = 'REDIRECT_HTTP_AUTHORIZATION' @@ -101,7 +101,7 @@ def wrapper(request, *args, **kwargs): if request.method == 'OPTIONS': return HttpResponse() return view_or_basicauth(func, request, - lambda u: u.is_authenticated(), + lambda u: u.is_authenticated, realm, *args, **kwargs) return wrapper @@ -146,7 +146,7 @@ def wrapper(request, *args, **kwargs): if not getattr(permissions, p_name): raise PermissionDenied except AttributeError: - print("Attribute not found: " + p_name) + print(("Attribute not found: " + p_name)) raise PermissionDenied return view_func(request, *args, **kwargs) return wrapper diff --git a/common/models.py b/common/models.py index c759c70..593d790 100644 --- a/common/models.py +++ b/common/models.py @@ -119,11 +119,11 @@ def universal_permission_flag(): def _format_abilities_list(self, 
abilities): if len(abilities) > 1: - abilities[-1] = u"and " + abilities[-1] + abilities[-1] = "and " + abilities[-1] if len(abilities) == 2: - text = u" ".join(abilities) + text = " ".join(abilities) else: - text = u", ".join(abilities) + text = ", ".join(abilities) return text[0].upper() + text[1:].lower() @@ -164,9 +164,9 @@ def permissions_descriptions(self): items.append("View your recommendations") return items - def __unicode__(self): - return u"{} (by {}) - {} permissions".format(self.name, self.contact_name, - self.num_permissions()) + def __str__(self): + return "{} (by {}) - {} permissions".format(self.name, self.contact_name, + self.num_permissions()) class RedirectURL(models.Model): """Defines a registered redirect URL for the login endpoint.""" diff --git a/common/oauth_client.py b/common/oauth_client.py index 6265c2c..77abe45 100644 --- a/common/oauth_client.py +++ b/common/oauth_client.py @@ -4,7 +4,7 @@ import requests import os import base64 -import urllib +import urllib.request, urllib.parse, urllib.error from .models import OAuthCache import random from django.utils import timezone @@ -47,8 +47,8 @@ def oauth_code_url(request, after_redirect=None): AUTH_CODE_URL, AUTH_RESPONSE_TYPE, get_client_info()[0], - urllib.quote(REDIRECT_URI), - urllib.quote(' '.join(AUTH_SCOPES)), + urllib.parse.quote(REDIRECT_URI), + urllib.parse.quote(' '.join(AUTH_SCOPES)), cache.state, cache.nonce) @@ -67,7 +67,7 @@ def get_user_info(request): result, status = get_user_info_with_token(request, acc_token) if result is not None: if "refresh_token" in all_json: - result[u'refresh_token'] = all_json["refresh_token"] + result['refresh_token'] = all_json["refresh_token"] return result, status, info def get_oauth_id_token(request, code, state, refresh=False): diff --git a/common/token_gen.py b/common/token_gen.py index 1c8f5a8..28deb82 100644 --- a/common/token_gen.py +++ b/common/token_gen.py @@ -7,7 +7,7 @@ from django.utils.timezone import is_aware, make_aware from 
django.utils import timezone import datetime -from oauth_client import generate_random_string, LOGIN_TIMEOUT +from .oauth_client import generate_random_string, LOGIN_TIMEOUT from .models import TemporaryCode, APIClient from django.core.exceptions import PermissionDenied diff --git a/common/urls.py b/common/urls.py index ab6ebdc..72b8246 100644 --- a/common/urls.py +++ b/common/urls.py @@ -1,43 +1,43 @@ -from django.conf.urls import url +from django.urls import re_path from . import views from django.views.generic import TemplateView -from django.contrib.auth.views import logout +from django.contrib.auth.views import LogoutView urlpatterns = [ - url('verify/', views.verify, name='verify'), - url('new_user/', views.new_user, name='new_user'), - url('signup/', views.signup, name='signup'), - url('^login/', views.login_oauth, name='login'), - url('^dev_login/', views.dev_login, name='dev_login'), - url('login_touchstone/', views.login_touchstone, name='login_touchstone'), - url('logout/', logout, {'next_page': 'index'}, name='logout'), - url('set_semester/', views.set_semester, name='set_semester'), - url('prefs/favorites/', views.favorites, name='favorites'), - url('prefs/set_favorites/', views.set_favorites, name='set_favorites'), - url('prefs/progress_overrides/', views.progress_overrides, name='progress_overrides'), - url('prefs/set_progress_overrides/', views.set_progress_overrides, name='set_progress_overrides'), - url('prefs/notes/', views.notes, name='notes'), - url('prefs/set_notes/', views.set_notes, name='set_notes'), - url('prefs/custom_courses/', views.custom_courses, name='custom_courses'), - url('prefs/set_custom_course/', views.set_custom_course, name='set_custom_course'), - url('prefs/remove_custom_course/', views.remove_custom_course, name='remove_custom_course'), - url('decline/', TemplateView.as_view(template_name='common/decline.html'), name='decline'), - url('fetch_token/', views.fetch_token, name='fetch_token'), - url('user_info/', 
views.user_info, name='user_info'), - url('^disapprove_client/', views.approval_page_failure, name='approval_page_failure'), - url('^approve_client/', views.approval_page_success, name='approval_page_success'), + re_path('verify/', views.verify, name='verify'), + re_path('new_user/', views.new_user, name='new_user'), + re_path('signup/', views.signup, name='signup'), + re_path('^login/', views.login_oauth, name='login'), + re_path('^dev_login/', views.dev_login, name='dev_login'), + re_path('login_touchstone/', views.login_touchstone, name='login_touchstone'), + re_path('logout/', LogoutView.as_view(), name='logout'), + re_path('set_semester/', views.set_semester, name='set_semester'), + re_path('prefs/favorites/', views.favorites, name='favorites'), + re_path('prefs/set_favorites/', views.set_favorites, name='set_favorites'), + re_path('prefs/progress_overrides/', views.progress_overrides, name='progress_overrides'), + re_path('prefs/set_progress_overrides/', views.set_progress_overrides, name='set_progress_overrides'), + re_path('prefs/notes/', views.notes, name='notes'), + re_path('prefs/set_notes/', views.set_notes, name='set_notes'), + re_path('prefs/custom_courses/', views.custom_courses, name='custom_courses'), + re_path('prefs/set_custom_course/', views.set_custom_course, name='set_custom_course'), + re_path('prefs/remove_custom_course/', views.remove_custom_course, name='remove_custom_course'), + re_path('decline/', TemplateView.as_view(template_name='common/decline.html'), name='decline'), + re_path('fetch_token/', views.fetch_token, name='fetch_token'), + re_path('user_info/', views.user_info, name='user_info'), + re_path('^disapprove_client/', views.approval_page_failure, name='approval_page_failure'), + re_path('^approve_client/', views.approval_page_success, name='approval_page_success'), # reference - url('reference/$', TemplateView.as_view(template_name='common/docs/overview.html'), name='overview'), - url('reference/auth', 
TemplateView.as_view(template_name='common/docs/auth.html'), name='auth'), - url('reference/catalog', TemplateView.as_view(template_name='common/docs/catalog.html'), name='catalog'), - url('reference/requirements', TemplateView.as_view(template_name='common/docs/requirements.html'), name='requirements'), - url('reference/sync', TemplateView.as_view(template_name='common/docs/sync.html'), name='sync'), - url('reference/recommender', TemplateView.as_view(template_name='common/docs/recommender.html'), name='recommender'), - url('reference/file_formats', TemplateView.as_view(template_name='common/docs/file_formats.html'), name='file_formats'), + re_path('reference/$', TemplateView.as_view(template_name='common/docs/overview.html'), name='overview'), + re_path('reference/auth', TemplateView.as_view(template_name='common/docs/auth.html'), name='auth'), + re_path('reference/catalog', TemplateView.as_view(template_name='common/docs/catalog.html'), name='catalog'), + re_path('reference/requirements', TemplateView.as_view(template_name='common/docs/requirements.html'), name='requirements'), + re_path('reference/sync', TemplateView.as_view(template_name='common/docs/sync.html'), name='sync'), + re_path('reference/recommender', TemplateView.as_view(template_name='common/docs/recommender.html'), name='recommender'), + re_path('reference/file_formats', TemplateView.as_view(template_name='common/docs/file_formats.html'), name='file_formats'), # index - url(r'^$', TemplateView.as_view(template_name='common/index.html'), name='index'), + re_path(r'^$', TemplateView.as_view(template_name='common/index.html'), name='index'), ] diff --git a/common/views.py b/common/views.py index aed2c1a..cd4d9fb 100644 --- a/common/views.py +++ b/common/views.py @@ -11,7 +11,7 @@ import base64 import json import re -from token_gen import * +from .token_gen import * from django.utils import timezone from dateutil.relativedelta import relativedelta from catalog.models import Course, CourseFields @@ 
-37,8 +37,8 @@ def login_oauth(request): return login_error_response(request, 'Please try again later.') # Save the user's profile, check if there are any other accounts - email = result.get(u'email', None) - sub = result.get(u'sub', None) + email = result.get('email', None) + sub = result.get('sub', None) if sub is None: return login_error_response(request, 'Please try again and allow FireRoad to access your OpenID information.') @@ -50,7 +50,7 @@ def login_oauth(request): if email is None: email = "user{}@fireroad.mit.edu".format(user.username) - student = Student(user=user, unique_id=sub, academic_id=email, name=result.get(u'name', 'Anonymous')) + student = Student(user=user, unique_id=sub, academic_id=email, name=result.get('name', 'Anonymous')) student.current_semester = info.get('sem', '0') student.save() else: diff --git a/courseupdater/urls.py b/courseupdater/urls.py index 6627d1d..47a555f 100644 --- a/courseupdater/urls.py +++ b/courseupdater/urls.py @@ -1,20 +1,20 @@ -from django.conf.urls import url +from django.urls import re_path from . 
import views urlpatterns = [ - url(r'^$', views.index, name='index'), - url('check/', views.check, name='check'), - url('semesters/', views.semesters, name='semesters'), + re_path(r'^$', views.index, name='index'), + re_path('check/', views.check, name='check'), + re_path('semesters/', views.semesters, name='semesters'), - url(r'update_catalog/', views.update_catalog, name='update_catalog'), - url(r'update_progress/', views.update_progress, name='update_progress'), - url(r'reset_update/', views.reset_update, name='reset_update'), + re_path(r'update_catalog/', views.update_catalog, name='update_catalog'), + re_path(r'update_progress/', views.update_progress, name='update_progress'), + re_path(r'reset_update/', views.reset_update, name='reset_update'), - url(r'corrections/delete/(?P<id>\d+)', views.delete_correction, name='delete_catalog_correction'), - url(r'corrections/edit/(?P<id>\d+)', views.edit_correction, name='edit_catalog_correction'), - url(r'corrections/new', views.new_correction, name='new_catalog_correction'), - url(r'corrections/', views.view_corrections, name='catalog_corrections'), + re_path(r'corrections/delete/(?P<id>\d+)', views.delete_correction, name='delete_catalog_correction'), + re_path(r'corrections/edit/(?P<id>\d+)', views.edit_correction, name='edit_catalog_correction'), + re_path(r'corrections/new', views.new_correction, name='new_catalog_correction'), + re_path(r'corrections/', views.view_corrections, name='catalog_corrections'), - url(r'download_data/', views.download_catalog_data, name='download_data') + re_path(r'download_data/', views.download_catalog_data, name='download_data') ] diff --git a/courseupdater/views.py b/courseupdater/views.py index 7a4366f..6f05e26 100644 --- a/courseupdater/views.py +++ b/courseupdater/views.py @@ -10,7 +10,7 @@ from django.forms.models import model_to_dict from django.conf import settings from zipfile import ZipFile -from StringIO import StringIO +from io import StringIO from requirements.diff import * import
catalog_parse as cp @@ -56,7 +56,7 @@ def compute_updated_files(version, base_dir): break semester, version, delta = read_delta(url) if version != version_num: - print("Wrong version number in {}".format(url)) + print(("Wrong version number in {}".format(url))) updated_files.update(set(delta)) updated_version = version_num version_num += 1 @@ -91,7 +91,7 @@ def url_comp(x): if req_version_num != -1: updated_files, updated_version = compute_updated_files(req_version_num, os.path.join(settings.CATALOG_BASE_DIR, deltas_directory, requirements_dir)) - urls_to_update = list(map(lambda x: requirements_dir + '/' + x + '.reql', sorted(list(updated_files)))) + urls_to_update = list([requirements_dir + '/' + x + '.reql' for x in sorted(list(updated_files))]) resp['rv'] = updated_version resp['r_delta'] = urls_to_update return resp @@ -138,7 +138,7 @@ def check(request): each one.""" def semesters(request): sems = list_semesters() - resp = list(map(lambda x: {"sem": x, "v": current_version_for_catalog(semester_dir_prefix + x)}, sems)) + resp = list([{"sem": x, "v": current_version_for_catalog(semester_dir_prefix + x)} for x in sems]) return HttpResponse(json.dumps(resp), content_type="application/json") ### Catalog parser UI @@ -232,9 +232,9 @@ def get_field_value(form_data, field): def view_corrections(request): """Creates the page that displays all current catalog corrections.""" diffs = [] - for correction in CatalogCorrection.objects.order_by("subject_id").values(): + for correction in list(CatalogCorrection.objects.order_by("subject_id").values()): subject_id = correction["subject_id"] - changed_course = Course.public_courses().filter(subject_id=subject_id).values().first() + changed_course = Course.public_courses().filter(subject_id=subject_id).values().first() diff = {} if changed_course: @@ -360,4 +360,4 @@ def download_catalog_data(request): buffer.seek(0) response = HttpResponse(buffer, content_type='application/force-download') response['Content-Disposition']
= 'attachment; filename="catalogs.zip"' - return response \ No newline at end of file + return response diff --git a/fireroad/settings.py b/fireroad/settings.py index f041cd0..f779fc3 100644 --- a/fireroad/settings.py +++ b/fireroad/settings.py @@ -16,6 +16,8 @@ # Use the Django default login page for local debugging LOGIN_URL = "/dev_login" +LOGOUT_REDIRECT_URL = "/" + # Security settings # If True, login redirects will be required to be registered as a RedirectURL @@ -52,7 +54,7 @@ 'analytics' ] -MIDDLEWARE_CLASSES = [ +MIDDLEWARE = [ # Cors middleware should only be on local development (not settings_dev or settings_prod) 'middleware.cors.CorsMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', @@ -175,3 +177,5 @@ } }, } + +DEFAULT_AUTO_FIELD = "django.db.models.AutoField" diff --git a/fireroad/settings_dev.py b/fireroad/settings_dev.py index d101be3..7329794 100644 --- a/fireroad/settings_dev.py +++ b/fireroad/settings_dev.py @@ -33,7 +33,7 @@ } } -MIDDLEWARE_CLASSES = [ +MIDDLEWARE = [ 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', diff --git a/fireroad/settings_prod.py b/fireroad/settings_prod.py index 01643f1..ea7ff3b 100644 --- a/fireroad/settings_prod.py +++ b/fireroad/settings_prod.py @@ -33,7 +33,7 @@ } } -MIDDLEWARE_CLASSES = [ +MIDDLEWARE = [ 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', diff --git a/fireroad/urls.py b/fireroad/urls.py index 78fe45a..1a8a769 100644 --- a/fireroad/urls.py +++ b/fireroad/urls.py @@ -13,7 +13,7 @@ 1. Import the include() function: from django.urls import include, path 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls')) """ -from django.conf.urls import url, include +from django.urls import include, re_path from django.contrib.admin.views.decorators import staff_member_required from django.views.generic.base import RedirectView from django.contrib import admin @@ -24,27 +24,27 @@ # admin.site.login = staff_member_required(admin.site.login, login_url=settings.LOGIN_URL) urlpatterns = [ - url(r'courses/', include('catalog.urls')), - url(r'courseupdater/', include('courseupdater.urls')), - url(r'recommend/', include('recommend.urls')), - url(r'admin/', admin.site.urls), - url(r'sync/', include('sync.urls')), - url(r'analytics/', include('analytics.urls')), - url(r'requirements/', include('requirements.urls')), - url(r'', include('common.urls')), + re_path(r'courses/', include('catalog.urls')), + re_path(r'courseupdater/', include('courseupdater.urls')), + re_path(r'recommend/', include('recommend.urls')), + re_path(r'admin/', admin.site.urls), + re_path(r'sync/', include('sync.urls')), + re_path(r'analytics/', include('analytics.urls')), + re_path(r'requirements/', include('requirements.urls')), + re_path(r'', include('common.urls')), ] # Redirect to the appropriate login page if one is specified in the settings module if settings.LOGIN_URL: if settings.LOGIN_URL.strip("/") != 'dev_login': - urlpatterns.insert(0, url(r'^admin/login/$', RedirectView.as_view(url=settings.LOGIN_URL, + urlpatterns.insert(0, re_path(r'^admin/login/$', RedirectView.as_view(url=settings.LOGIN_URL, permanent=True, query_string=True))) - urlpatterns.insert(0, url(r'^dev_login/$', RedirectView.as_view(url=settings.LOGIN_URL, + urlpatterns.insert(0, re_path(r'^dev_login/$', RedirectView.as_view(url=settings.LOGIN_URL, permanent=True, query_string=True))) if settings.LOGIN_URL.strip("/") != 'login': - urlpatterns.insert(0, url(r'^login/$', RedirectView.as_view(url=settings.LOGIN_URL, + urlpatterns.insert(0, re_path(r'^login/$', 
RedirectView.as_view(url=settings.LOGIN_URL, permanent=True, query_string=True))) diff --git a/middleware/cors.py b/middleware/cors.py index 32c42a4..1745b9e 100644 --- a/middleware/cors.py +++ b/middleware/cors.py @@ -1,9 +1,11 @@ +from django.utils.deprecation import MiddlewareMixin + """ Small middleware to allow cross-origin resource sharing. Should only be enabled in a local development environment. """ -class CorsMiddleware(object): +class CorsMiddleware(MiddlewareMixin): def process_response(self, req, resp): resp["Access-Control-Allow-Origin"] = "*" resp["Access-Control-Allow-Headers"] = "*" diff --git a/readme.md b/readme.md index 0671580..9e5af7a 100644 --- a/readme.md +++ b/readme.md @@ -7,7 +7,7 @@ FireRoad is an iOS/Android application that allows MIT students to plan their co Follow these instructions to set up and run your own instance of the FireRoad server. You may want to create a new virtual environment using `conda`, for example: ``` -conda create -n fireroad python=2.7 +conda create -n fireroad python=3.10 conda activate fireroad ``` diff --git a/recommend/urls.py b/recommend/urls.py index f51da63..335c358 100644 --- a/recommend/urls.py +++ b/recommend/urls.py @@ -1,10 +1,10 @@ -from django.conf.urls import url +from django.urls import re_path from . 
import views from django.views.generic import TemplateView urlpatterns = [ - url('rate/', views.rate, name='rate'), - url('get/', views.get, name='get') + re_path('rate/', views.rate, name='rate'), + re_path('get/', views.get, name='get') ] diff --git a/recommender.py b/recommender.py index 53e37e0..b3bc3d6 100644 --- a/recommender.py +++ b/recommender.py @@ -51,11 +51,11 @@ # Don't generate related-subject recommendations for these subjects excluded_subjects = ["18.01", "18.02", "8.01", "8.02"] -ROAD_SELECTED_SUBJECTS_KEY = u"selectedSubjects" -ROAD_SUBJECT_ID_KEY = u"id" -ROAD_SUBJECT_ID_ALT_KEY = u"subject_id" -ROAD_SEMESTER_KEY = u"semester" -ROAD_COURSES_KEY = u"coursesOfStudy" +ROAD_SELECTED_SUBJECTS_KEY = "selectedSubjects" +ROAD_SUBJECT_ID_KEY = "id" +ROAD_SUBJECT_ID_ALT_KEY = "subject_id" +ROAD_SEMESTER_KEY = "semester" +ROAD_COURSES_KEY = "coursesOfStudy" keyword_indexes = {} @@ -92,8 +92,8 @@ def generate_subject_features(features_path): subject_arrays = {} dim = len(keyword_indexes) - print("Dimension of vectors: 1 by {}".format(dim)) - for subject_id, keywords in subjects.items(): + print(("Dimension of vectors: 1 by {}".format(dim))) + for subject_id, keywords in list(subjects.items()): mat = np.zeros((dim,)) for k in keywords: mat[k] = 1 @@ -295,7 +295,7 @@ def replace(self, object, new_object): """Replaces an equivalent object with another object.""" idx = next((i for i in range(len(self.list)) if self.list[i][0] == object), None) if idx is None: - print("Didn't find {} in rank list.".format(object)) + print(("Didn't find {} in rank list.".format(object))) self.list[idx] = (new_object, self.list[idx][1]) def __contains__(self, object): @@ -388,13 +388,13 @@ def basic_rating_predictor(profiles, subject_ids, subject_id_dict, course_data=N """ profiles = [p for p in profiles if len(p.filtered_rated_subjects()) > 0] - print("{} users with available rated subjects".format(len(profiles))) + print(("{} users with available rated 
subjects".format(len(profiles)))) similarities = user_similarities(profiles) # Build course semester distributions course_distributions = {} # Keys are subject IDs, values are dictionaries {semester: count} for prof in profiles: - for subj, semesters in prof.roads[0].items(): + for subj, semesters in list(prof.roads[0].items()): if subj not in course_distributions: course_distributions[subj] = {} for sem in semesters: @@ -421,7 +421,7 @@ def basic_rating_predictor(profiles, subject_ids, subject_id_dict, course_data=N top_ratings.add(subject, rating) - subject_items = {subj: float("{:.2f}".format(rating)) for subj, rating in top_ratings.items()} + subject_items = {subj: float("{:.2f}".format(rating)) for subj, rating in list(top_ratings.items())} if len(subject_items) < REC_MIN_COUNT: continue yield Recommendation(user=User.objects.get(username=profile.username), rec_type=DEFAULT_RECOMMENDATION_TYPE, subjects=json.dumps(subject_items)) @@ -446,11 +446,11 @@ def by_major_predictor(profiles, subject_ids, subject_id_dict, course_data=None) applicable_users = [p for p in profiles if course in p.courses_of_study] if len(applicable_users) < BY_MAJOR_USER_CUTOFF: continue - print("Generating recommendations for {}...".format(course)) + print(("Generating recommendations for {}...".format(course))) course_distributions = {} # Keys are subject IDs, values are dictionaries {semester: count} for prof in applicable_users: - for subj, semesters in prof.roads[0].items(): + for subj, semesters in list(prof.roads[0].items()): if subj not in course_distributions: course_distributions[subj] = {} for sem in semesters: @@ -470,10 +470,10 @@ def by_major_predictor(profiles, subject_ids, subject_id_dict, course_data=None) continue if update_by_equivalent_subjects(subj, recs, prof, course_data): continue - relevance = sum((1.0 - abs(sem - prof.semester) * SEMESTER_DISTANCE_COEFFICIENT) * freq for sem, freq in course_distributions[subj].items()) * avg_ratings.get(subj, -99999) + relevance 
= sum((1.0 - abs(sem - prof.semester) * SEMESTER_DISTANCE_COEFFICIENT) * freq for sem, freq in list(course_distributions[subj].items())) * avg_ratings.get(subj, -99999) recs.add(subj, relevance) - subject_items = {subj: float("{:.2f}".format(rating)) for subj, rating in recs.items()} + subject_items = {subj: float("{:.2f}".format(rating)) for subj, rating in list(recs.items())} if len(subject_items) < REC_MIN_COUNT: continue yield Recommendation(user=User.objects.get(username=prof.username), rec_type="course:" + course, subjects=json.dumps(subject_items)) @@ -496,7 +496,7 @@ def related_subjects_predictor(profiles, subject_ids, subject_id_dict, course_da course_distributions = {} # Keys are subject IDs, values are dictionaries {semester: count} for prof in applicable_users: - for subj, semesters in prof.roads[0].items(): + for subj, semesters in list(prof.roads[0].items()): if subj == subject_id: continue if subj not in course_distributions: course_distributions[subj] = {} @@ -505,15 +505,15 @@ def related_subjects_predictor(profiles, subject_ids, subject_id_dict, course_da course_distributions[subj][sem] = 0 course_distributions[subj][sem] += 1 - course_totals = {subj: float(sum(freqs.values())) / float(len(applicable_users)) for subj, freqs in course_distributions.items() if subj not in covered_subjects} + course_totals = {subj: float(sum(freqs.values())) / float(len(applicable_users)) for subj, freqs in list(course_distributions.items()) if subj not in covered_subjects} if len(course_totals) < BY_MAJOR_REC_COUNT: continue avg_ratings = {subj: sum(prof.regression_predictions[subject_id_dict[subj]] for prof in applicable_users) / len(applicable_users) for subj in course_distributions if subj in subject_id_dict} - covered_subjects |= set(subj for subj, prop in course_totals.items() if prop >= RELATED_SUBJECTS_FREQ_CUTOFF) + covered_subjects |= set(subj for subj, prop in list(course_totals.items()) if prop >= RELATED_SUBJECTS_FREQ_CUTOFF) - print("Generating 
recommendations for {} ({} related courses)...".format(subject_id, len(course_totals))) + print(("Generating recommendations for {} ({} related courses)...".format(subject_id, len(course_totals)))) # For each applicable user, generate a rank list by degree of # commonness and proximity with the user's current semester @@ -526,16 +526,16 @@ def related_subjects_predictor(profiles, subject_ids, subject_id_dict, course_da continue if update_by_equivalent_subjects(subj, recs, prof, course_data): continue - relevance = sum((1.0 - abs(sem - prof.semester) * SEMESTER_DISTANCE_COEFFICIENT) * freq for sem, freq in course_distributions[subj].items()) * avg_ratings.get(subj, -99999) + relevance = sum((1.0 - abs(sem - prof.semester) * SEMESTER_DISTANCE_COEFFICIENT) * freq for sem, freq in list(course_distributions[subj].items())) * avg_ratings.get(subj, -99999) recs.add(subj, relevance) - subject_items = {subj: float("{:.2f}".format(rating)) for subj, rating in recs.items()} + subject_items = {subj: float("{:.2f}".format(rating)) for subj, rating in list(recs.items())} if len(subject_items) < REC_MIN_COUNT: continue rec = Recommendation(user=User.objects.get(username=prof.username), rec_type="subject:" + subject_id, subjects=json.dumps(subject_items)) best_recommendation_per_user[prof.username].add(rec, random_perturbation(float(sum(subject_items.values())) / float(len(subject_items)))) - for prof, list in best_recommendation_per_user.items(): + for prof, list in list(best_recommendation_per_user.items()): if len(list.objects()) > 0: for rec in list.objects(): yield rec @@ -583,13 +583,13 @@ def store_recommendation(rec): all_user_ids = set(rating_data.keys()) & set(road_data.keys()) if verbose: for user_id in all_user_ids: - print(user_id + ":") + print((user_id + ":")) if user_id in rating_data: - print(rating_data[user_id]) + print((rating_data[user_id])) if user_id in road_data: - print(road_data[user_id]) + print((road_data[user_id])) if user_id in majors_data: - 
print(majors_data[user_id]) + print((majors_data[user_id])) # Close the connection because computation will take a while if django.VERSION[1] >= 10 or django.VERSION[0] >= 2: @@ -599,7 +599,7 @@ def store_recommendation(rec): # Build user profiles subject_ids = sorted(subject_arrays.keys()) - subject_id_dict = dict(zip(subject_ids, range(len(subject_ids)))) + subject_id_dict = dict(list(zip(subject_ids, list(range(len(subject_ids)))))) X_test = np.vstack([subject_arrays[id] for id in subject_ids]) subject_dists = subject_distances(X_test) profiles = [UserRecommenderProfile.build(user_id, diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..e2a9fd1 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,8 @@ +django +pandas +nltk +lxml +scipy +scikit-learn +requests +pyjwt diff --git a/requirements/apps.py b/requirements/apps.py index ea1b61f..c5f75f2 100644 --- a/requirements/apps.py +++ b/requirements/apps.py @@ -1,4 +1,4 @@ -from __future__ import unicode_literals + from django.apps import AppConfig diff --git a/requirements/diff.py b/requirements/diff.py index 373dda2..e501eee 100644 --- a/requirements/diff.py +++ b/requirements/diff.py @@ -15,8 +15,8 @@ def best_diff_sequence(old, new, allow_subs=True, max_delta=None): parent_pointers[:,-1] = -1 parent_pointers[-1,-1] = 0 - for i in reversed(range(len(old) + 1)): - for j in reversed(range(len(new) + 1)): + for i in reversed(list(range(len(old) + 1))): + for j in reversed(list(range(len(new) + 1))): if i == len(old) and j == len(new): continue if max_delta is not None: if i - j >= max_delta: diff --git a/requirements/editor.py b/requirements/editor.py index d5dcc6f..5480999 100644 --- a/requirements/editor.py +++ b/requirements/editor.py @@ -12,11 +12,11 @@ import requests from courseupdater.views import * import re -from progress import RequirementsProgress +from .progress import RequirementsProgress from catalog.models import Course, Attribute, HASSAttribute, GIRAttribute, 
CommunicationAttribute import logging -from reqlist import * -from views import REQUIREMENTS_EXT +from .reqlist import * +from .views import REQUIREMENTS_EXT from django.http import Http404 from .diff import build_diff @@ -87,7 +87,7 @@ def save_change_request(form, type, list_id="", committed=False): def is_staff(request): """Returns whether or not the request's user is an authenticated staff member.""" - return request.user is not None and request.user.is_staff and request.user.is_authenticated() + return request.user is not None and request.user.is_staff and request.user.is_authenticated def populate_initial_text(request, params, edit_req): params['initial_text'] = edit_req.contents @@ -104,7 +104,7 @@ def populate_initial_text(request, params, edit_req): def create(request): if request.method == 'POST': form = EditForm(request.POST) - print(form.errors) + print((form.errors)) if form.is_valid(): should_commit = is_staff(request) save_change_request(form, REQUEST_TYPE_CREATE, list_id=form.cleaned_data['new_list_id'], committed=should_commit) @@ -161,7 +161,7 @@ def preview(request): if request.method != 'POST': return HttpResponseBadRequest("Must use POST") - req_contents = request.body.decode('utf-8') + req_contents = request.body.decode('utf-8') req_list = RequirementsList.objects.create() try: req_list.parse(req_contents, full=True) @@ -186,7 +186,7 @@ def show_in_row(requirement): def make_row(requirement): """Returns HTML for displaying the given requirement in a row.""" - html = u"
" + html = "
" if requirement.requirements.exists(): reqs = requirement.requirements.all() @@ -228,15 +228,15 @@ def presentation_items(requirement, level, always_show_title=False): title_text = requirement.title if len(desc) > 0 and requirement.connection_type != CONNECTION_TYPE_ALL and not requirement.is_plain_string: title_text += " (" + desc + ")" - items.append(u"<{} class=\"req-title\">{}".format(tag, title_text, tag)) + items.append("<{} class=\"req-title\">{}".format(tag, title_text, tag)) elif len(desc) > 0 and (requirement.connection_type != CONNECTION_TYPE_ALL or always_show_title) and not requirement.is_plain_string: - items.append(u"

{}:

".format(desc[0].upper() + desc[1:])) + items.append("

{}:

".format(desc[0].upper() + desc[1:])) if requirement.description is not None and len(requirement.description) > 0: - items.append(u"

{}

".format(requirement.description.replace("\n\n", "

"))) + items.append("

{}

".format(requirement.description.replace("\n\n", "

"))) if level == 0 and requirement.title is None and len(desc) > 0 and not (requirement.connection_type != CONNECTION_TYPE_ALL or always_show_title): - items.append(u"

{}:

".format(desc[0].upper() + desc[1:])) + items.append("

{}:

".format(desc[0].upper() + desc[1:])) if show_in_row(requirement): # Show all the child requirements in a single row @@ -259,10 +259,10 @@ def build_presentation_items(list): ret = presentation_items(list, 0) else: if list.title is not None and len(list.title) > 0: - ret.append(u"

{}

".format(list.title)) + ret.append("

{}

".format(list.title)) if list.description is not None and len(list.description) > 0: - ret.append(u"

{}

".format(list.description.replace("\n\n", "

"))) + ret.append("

{}

".format(list.description.replace("\n\n", "

"))) for top_req in list.requirements.all(): rows = presentation_items(top_req, 0) diff --git a/requirements/models.py b/requirements/models.py index 1db09b1..453942a 100644 --- a/requirements/models.py +++ b/requirements/models.py @@ -1,4 +1,4 @@ -from __future__ import unicode_literals + from django.db import models from django import forms @@ -23,8 +23,8 @@ class RequirementsList(RequirementsStatement): #description = models.TextField(null=True) - def __unicode__(self): - return u"{} - {}".format(self.short_title, self.title) + def __str__(self): + return "{} - {}".format(self.short_title, self.title) def to_json_object(self, full=True, child_fn=None): """Encodes this requirements list into a dictionary that can be sent @@ -77,14 +77,14 @@ def parse(self, contents_str, full=True): if "=" in comp: arg_comps = comp.split("=") if len(arg_comps) != 2: - print("{}: Unexpected number of = symbols in first line argument".format(self.list_id)) + print(("{}: Unexpected number of = symbols in first line argument".format(self.list_id))) continue if arg_comps[0].strip() == "threshold": self.threshold_type = THRESHOLD_TYPE_GTE try: self.threshold_cutoff = int(arg_comps[1]) except: - print("{}: Invalid threshold argument {}".format(self.list_id, arg_comps[1])) + print(("{}: Invalid threshold argument {}".format(self.list_id, arg_comps[1]))) continue self.threshold_criterion = CRITERION_SUBJECTS elif arg_comps[0].strip() == "url": @@ -103,10 +103,10 @@ def parse(self, contents_str, full=True): self.save() if len(lines) == 0: - print("{}: Reached end of file early!".format(self.list_id)) + print(("{}: Reached end of file early!".format(self.list_id))) return if len(lines[0]) != 0: - print("{}: Third line isn't empty (contains \"{}\")".format(self.list_id, lines[0])) + print(("{}: Third line isn't empty (contains \"{}\")".format(self.list_id, lines[0]))) return lines.pop(0) @@ -114,15 +114,15 @@ def parse(self, contents_str, full=True): # Parse top-level list top_level_sections 
= [] while len(lines) > 0 and len(lines[0]) > 0: - if lines.count <= 2: - print("{}: Not enough lines for top-level sections - need variable names and descriptions on two separate lines.".format(self.list_id)) + if len(lines) <= 2: + print(("{}: Not enough lines for top-level sections - need variable names and descriptions on two separate lines.".format(self.list_id))) return var_name = undecorated_component(lines.pop(0)) description = undecorated_component(lines.pop(0).replace("\\n", "\n")) if SyntaxConstants.declaration_character in var_name or SyntaxConstants.declaration_character in description: - print("{}: Encountered ':=' symbol in top-level section. Maybe you forgot the required empty line after the last section's description line?".format(self.list_id)) + print(("{}: Encountered ':=' symbol in top-level section. Maybe you forgot the required empty line after the last section's description line?".format(self.list_id))) top_level_sections.append((var_name, description)) if len(lines) == 0: @@ -136,11 +136,11 @@ def parse(self, contents_str, full=True): if len(current_line) == 0: continue if SyntaxConstants.declaration_character not in current_line: - print("{}: Unexpected line: {}".format(self.list_id, current_line)) + print(("{}: Unexpected line: {}".format(self.list_id, current_line))) continue comps = current_line.split(SyntaxConstants.declaration_character) if len(comps) != 2: - print("{}: Can't have more than one occurrence of \"{}\" on a line".format(self.list_id, SyntaxConstants.declaration_character)) + print(("{}: Can't have more than one occurrence of \"{}\" on a line".format(self.list_id, SyntaxConstants.declaration_character))) continue declaration = comps[0] @@ -158,7 +158,7 @@ def parse(self, contents_str, full=True): for name, description in top_level_sections: if name not in variables: - print("{}: Undefined variable: {}".format(self.list_id, name)) + print(("{}: Undefined variable: {}".format(self.list_id, name))) return req = 
variables[name] @@ -182,8 +182,8 @@ class Deployment(models.Model): summary = models.CharField(max_length=2000) date_executed = models.DateTimeField(null=True) - def __unicode__(self): - return u"{}Deployment by {} at {} ({} edits): {}".format("(Pending) " if self.date_executed is None else "", self.author, self.timestamp, self.edit_requests.count(), self.summary) + def __str__(self): + return "{}Deployment by {} at {} ({} edits): {}".format("(Pending) " if self.date_executed is None else "", self.author, self.timestamp, self.edit_requests.count(), self.summary) # Edit requests @@ -206,5 +206,5 @@ class EditRequest(models.Model): committed = models.BooleanField(default=False) deployment = models.ForeignKey(Deployment, null=True, on_delete=models.SET_NULL, related_name='edit_requests') - def __unicode__(self): - return u"{}{}{} request for '{}' by {}: {}".format("(Resolved) " if self.resolved else "", "(Committed) " if self.committed else "", self.type, self.list_id, self.email_address, self.reason) + def __str__(self): + return "{}{}{} request for '{}' by {}: {}".format("(Resolved) " if self.resolved else "", "(Committed) " if self.committed else "", self.type, self.list_id, self.email_address, self.reason) diff --git a/requirements/progress.py b/requirements/progress.py index 7fd2086..d6f3f56 100644 --- a/requirements/progress.py +++ b/requirements/progress.py @@ -1,6 +1,7 @@ -from reqlist import * +from .reqlist import * import random from catalog.models import Course +from functools import reduce def ceiling_thresh(progress, maximum): """Creates a progress object @@ -31,7 +32,7 @@ def sum_progresses(progresses, criterion_type, maxFunc): mapfunc = lambda p: p.subject_fulfillment elif criterion_type == CRITERION_UNITS: mapfunc = lambda p: p.unit_fulfillment - sum_progress = reduce(lambda p1, p2: p1.combine(p2, maxFunc), map(mapfunc, progresses)) + sum_progress = reduce(lambda p1, p2: p1.combine(p2, maxFunc), list(map(mapfunc, progresses))) return sum_progress @@ 
-46,7 +47,7 @@ def force_unfill_progresses(satisfied_by_category, current_distinct_threshold, c unit_cutoff = current_threshold.cutoff_for_criterion(CRITERION_UNITS) #list of subjects by category sorted by units - max_unit_subjects = map(lambda sat_cat: sorted(sat_cat, key = lambda s: s.total_units), satisfied_by_category) + max_unit_subjects = [sorted(sat_cat, key = lambda s: s.total_units) for sat_cat in satisfied_by_category] #split subjects into two sections: fixed and free #fixed subjects: must have one subject from each category @@ -202,7 +203,7 @@ def override_requirement(self, manual_progress): subject_progress = ceiling_thresh(subjects, self.threshold.cutoff_for_criterion(CRITERION_SUBJECTS)) unit_progress = ceiling_thresh(units, self.threshold.cutoff_for_criterion(CRITERION_UNITS)) #fill with dummy courses - random_ids = random.sample(range(1000, max(10000, subject_progress.progress + 1000)), subject_progress.progress) + random_ids = random.sample(list(range(1000, max(10000, subject_progress.progress + 1000))), subject_progress.progress) for rand_id in random_ids: dummy_course = Course(id = self.list_path + "_" + str(rand_id), subject_id = "gen_course_" + self.list_path + "_" + str(rand_id), title = "Generated Course " + self.list_path + " " + str(rand_id)) @@ -437,7 +438,7 @@ def compute(self, courses, progress_overrides, progress_assertions): satisfied_courses = set() num_courses_satisfied = 0 - for i, child in zip(range(num_progresses_to_count), open_children): + for i, child in zip(list(range(num_progresses_to_count)), open_children): satisfied_courses.update(satisfied_by_category[i]) if child.statement.connection_type == CONNECTION_TYPE_ALL: num_courses_satisfied += (child.is_fulfilled and len(child.satisfied_courses) > 0) @@ -502,7 +503,7 @@ def to_json_object(self, full = True, child_fn = None): stmt_json[JSONProgressConstants.progress] = self.progress stmt_json[JSONProgressConstants.progress_max] = self.progress_max 
stmt_json[JSONProgressConstants.percent_fulfilled] = self.percent_fulfilled - stmt_json[JSONProgressConstants.satisfied_courses] = map(lambda c: c.subject_id, self.satisfied_courses) + stmt_json[JSONProgressConstants.satisfied_courses] = [c.subject_id for c in self.satisfied_courses] if self.is_bypassed: stmt_json[JSONProgressConstants.is_bypassed] = self.is_bypassed diff --git a/requirements/reqlist.py b/requirements/reqlist.py index 6ee83ff..f5941d4 100644 --- a/requirements/reqlist.py +++ b/requirements/reqlist.py @@ -33,8 +33,8 @@ def unwrapped_component(component): and unwrapped out of any parenthesis pairs.""" unwrapping = component.strip(" \t\n\r") while unwrapping[0] == "(" and unwrapping[-1] == ")": - # Make sure these parentheses are not closed within the string - indent_level = 0 + # Make sure these parentheses are not closed within the string + indent_level = 0 stop_unwrapping = False for i in range(len(unwrapping)): if unwrapping[i] == "(": @@ -46,7 +46,7 @@ def unwrapped_component(component): break if stop_unwrapping: break - unwrapping = unwrapping[1:-1] + unwrapping = unwrapping[1:-1] return unwrapping @@ -352,7 +352,7 @@ def parse_modifier_component(self, modifier): try: cutoff = int(number_string) except ValueError: - print("Couldn't get number out of modifier string {}".format(modifier)) + print(("Couldn't get number out of modifier string {}".format(modifier))) return (threshold_type, cutoff, criterion) @@ -362,7 +362,7 @@ def parse_modifier(self, modifier): if "|" in modifier: comps = modifier.split("|") if len(comps) != 2: - print("Unsupported number of components in modifier string: {}".format(modifier)) + print(("Unsupported number of components in modifier string: {}".format(modifier))) return if len(comps[0]) > 0: diff --git a/requirements/urls.py b/requirements/urls.py index f06be48..d408585 100644 --- a/requirements/urls.py +++ b/requirements/urls.py @@ -1,4 +1,4 @@ -from django.conf.urls import url +from django.urls import re_path from 
. import views from . import editor @@ -6,19 +6,19 @@ from django.views.generic import TemplateView urlpatterns = [ - url(r'^edit/(?P.{1,50})', editor.edit, name='requirements_edit'), - url(r'^success/', editor.success, name='submit_success'), - url(r'^create/', editor.create, name='create'), - url(r'^preview/', editor.preview, name='preview'), - url(r'^review/(?P\d+)', editor.review, name='review'), - url(r'^review/', editor.review_all, name='review_all'), - url(r'^resolve/(?P\d+)', editor.resolve, name='resolve'), - url(r'^ignore_edit/(?P\d+)', editor.ignore_edit, name='ignore_edit'), - url(r'^uncommit/(?P\d+)', editor.uncommit, name='uncommit'), - url(r'^commit/(?P\d+)', editor.commit, name='commit'), - url(r'^list_reqs/', views.list_reqs, name='list_reqs'), - url(r'^get_json/(?P.{1,50})/', views.get_json, name='get_json'), - url(r'^progress/(?P.{1,50})/(?P.+)', views.progress, name='progress'), - url(r'^progress/(?P.{1,50})/', views.road_progress, name='road_progress'), - url(r'^$', editor.index, name='requirements_index'), + re_path(r'^edit/(?P.{1,50})', editor.edit, name='requirements_edit'), + re_path(r'^success/', editor.success, name='submit_success'), + re_path(r'^create/', editor.create, name='create'), + re_path(r'^preview/', editor.preview, name='preview'), + re_path(r'^review/(?P\d+)', editor.review, name='review'), + re_path(r'^review/', editor.review_all, name='review_all'), + re_path(r'^resolve/(?P\d+)', editor.resolve, name='resolve'), + re_path(r'^ignore_edit/(?P\d+)', editor.ignore_edit, name='ignore_edit'), + re_path(r'^uncommit/(?P\d+)', editor.uncommit, name='uncommit'), + re_path(r'^commit/(?P\d+)', editor.commit, name='commit'), + re_path(r'^list_reqs/', views.list_reqs, name='list_reqs'), + re_path(r'^get_json/(?P.{1,50})/', views.get_json, name='get_json'), + re_path(r'^progress/(?P.{1,50})/(?P.+)', views.progress, name='progress'), + re_path(r'^progress/(?P.{1,50})/', views.road_progress, name='road_progress'), + re_path(r'^$', 
editor.index, name='requirements_index'), ] diff --git a/requirements/views.py b/requirements/views.py index dd2e00a..5388fdf 100644 --- a/requirements/views.py +++ b/requirements/views.py @@ -11,7 +11,7 @@ from courseupdater.views import * from sync.models import Road import re -from progress import RequirementsProgress +from .progress import RequirementsProgress from catalog.models import Course, Attribute, HASSAttribute, GIRAttribute, CommunicationAttribute import logging @@ -52,7 +52,7 @@ def compute_progress(request, list_id, course_list, progress_overrides, progress course_objs.append(Course.make_generic(subject_id,unique_generic_id)) unique_generic_id += 1 except ValueError: - print("Warning: course {} does not exist in the catalog".format(subject_id)) + print("Warning: course {} does not exist in the catalog".format(subject_id)) # Create a progress object for the requirements list diff --git a/setup.sh b/setup.sh index 8d248d3..9855a98 100755 --- a/setup.sh +++ b/setup.sh @@ -8,7 +8,7 @@ YELLOW='\033[1;33m' NC='\033[0m' # No Color # Installing dependencies -pip install django==1.11.15 pandas nltk==3.4 lxml scipy scikit-learn requests pyjwt==1.6.4 +pip install -r requirements.txt echo echo diff --git a/sync/models.py b/sync/models.py index 001c98f..a6db31a 100644 --- a/sync/models.py +++ b/sync/models.py @@ -23,7 +23,7 @@ class Road(models.Model): last_agent = models.CharField(max_length=50, default="") def __str__(self): - return "{}: {}, last modified {}".format(self.user.username, self.name.encode('utf-8'), self.modified_date) + return "{}: {}, last modified {}".format(self.user.username, self.name, self.modified_date) @staticmethod def compress(road_text): @@ -54,9 +54,9 @@ class RoadBackup(models.Model): contents = models.TextField() def __str__(self): - return "Backup of {}, saved {} by {}".format(self.document.name.encode("utf-8") if self.document else "", + return "Backup of {}, saved {} by {}".format(self.document.name if self.document else "", 
self.timestamp, - self.last_agent.encode("utf-8") if self.last_agent else "") + self.last_agent if self.last_agent else "") class Schedule(models.Model): user = models.ForeignKey(User, null=True, on_delete=models.CASCADE) @@ -66,7 +66,7 @@ class Schedule(models.Model): last_agent = models.CharField(max_length=50, default="") def __str__(self): - return "{}: {}, last modified {}".format(self.user.username, self.name.encode('utf-8'), self.modified_date) + return "{}: {}, last modified {}".format(self.user.username, self.name, self.modified_date) @staticmethod def compress(schedule_text): @@ -92,7 +92,7 @@ class ScheduleBackup(models.Model): contents = models.TextField() def __str__(self): - return "Backup of {}, saved {} by {}".format(self.document.name.encode("utf-8") if self.document else "", + return "Backup of {}, saved {} by {}".format(self.document.name if self.document else "", self.timestamp, - self.last_agent.encode("utf-8") if self.last_agent else "") + self.last_agent if self.last_agent else "") diff --git a/sync/urls.py b/sync/urls.py index f24e683..a81f69b 100644 --- a/sync/urls.py +++ b/sync/urls.py @@ -1,12 +1,12 @@ -from django.conf.urls import url +from django.urls import re_path from . 
import views urlpatterns = [ - url('sync_road/', views.sync_road, name='sync_road'), - url('delete_road/', views.delete_road, name='delete_road'), - url('roads/', views.roads, name='roads'), - url('sync_schedule/', views.sync_schedule, name='sync_schedule'), - url('delete_schedule/', views.delete_schedule, name='delete_schedule'), - url('schedules/', views.schedules, name='schedules') + re_path('sync_road/', views.sync_road, name='sync_road'), + re_path('delete_road/', views.delete_road, name='delete_road'), + re_path('roads/', views.roads, name='roads'), + re_path('sync_schedule/', views.sync_schedule, name='sync_schedule'), + re_path('delete_schedule/', views.delete_schedule, name='delete_schedule'), + re_path('schedules/', views.schedules, name='schedules') ] diff --git a/update_catalog.py b/update_catalog.py index b06ab9f..73e4cf6 100644 --- a/update_catalog.py +++ b/update_catalog.py @@ -52,7 +52,7 @@ def update_progress(progress, message): def get_corrections(): """Gets the corrections from the CatalogCorrection table and formats them appropriately.""" - raw_corrections = CatalogCorrection.objects.all().values() + raw_corrections = list(CatalogCorrection.objects.all().values()) corrections = [] def format(value): if isinstance(value, bool): @@ -61,7 +61,7 @@ def format(value): for corr in raw_corrections: new_corr = {} - for k, v in corr.items(): + for k, v in list(corr.items()): if k in FIELD_TO_CSV and k != "offered_this_year" and format(v): new_corr[FIELD_TO_CSV[k]] = format(v) corrections.append(new_corr) @@ -81,8 +81,8 @@ def write_diff(old_path, new_path, diff_path): old_file = open(old_path, 'r') new_file = open(new_path, 'r') - old_contents = old_file.read().decode('utf-8') - new_contents = new_file.read().decode('utf-8') + old_contents = old_file.read() + new_contents = new_file.read() old_lines = old_contents.split('\n') new_lines = new_contents.split('\n') @@ -97,15 +97,15 @@ def write_diff(old_path, new_path, diff_path): wrote_to_file = False for 
i, id in enumerate(ids): if i % 100 == 0: - print(i, "of", len(ids)) + print((i, "of", len(ids))) old_course = old_courses.get(id, "") new_course = new_courses.get(id, "") if old_course != new_course: if abs(len(new_course) - len(old_course)) >= 40: - diff = delete_insert_diff_line(old_course.encode('utf-8'), new_course.encode('utf-8')) + diff = delete_insert_diff_line(old_course, new_course) else: - diff = build_diff_line(old_course, new_course, max_delta=40).encode('utf-8') + diff = build_diff_line(old_course, new_course, max_delta=40) diff_file.write(diff) wrote_to_file = True diff --git a/update_db.py b/update_db.py index cfbe50b..5856dbd 100644 --- a/update_db.py +++ b/update_db.py @@ -48,7 +48,7 @@ def deploy_catalog_updates(): update.is_completed = True update.save() - print("Successfully deployed {}".format(update)) + print(("Successfully deployed {}".format(update))) def update_catalog_with_file(path, semester): """Updates the catalog database using the given CSV file path.""" @@ -60,18 +60,18 @@ def update_catalog_with_file(path, semester): headers = comps continue if headers is None: - print("Can't read CSV file {} - no headers".format(path)) - info = dict(zip(headers, comps)) + print(("Can't read CSV file {} - no headers".format(path))) + info = dict(list(zip(headers, comps))) try: course = Course.public_courses().get(subject_id=info[CourseAttribute.subjectID]) except ObjectDoesNotExist: course = Course.objects.create(public=True, subject_id=info[CourseAttribute.subjectID]) finally: course.catalog_semester = semester - for key, val in info.items(): + for key, val in list(info.items()): if key not in CSV_HEADERS: continue prop, converter = CSV_HEADERS[key] - setattr(course, prop, converter(val.decode('utf-8'))) + setattr(course, prop, converter(val)) course.save() def parse_related_file(path): @@ -128,7 +128,7 @@ def write_delta_file(delta, outpath): file.write(str(version_num) + "\n") file.write("\n".join(delta)) - print("Delta file written to 
{}.".format(delta_file_path)) + print(("Delta file written to {}.".format(delta_file_path))) def perform_deployments(): """Performs any pending deployments of updated requirements files.""" @@ -140,11 +140,11 @@ def perform_deployments(): try: for edit_req in deployment.edit_requests.all().order_by('pk'): if len(edit_req.contents) == 0: - print("Edit request {} has no contents, skipping".format(edit_req)) + print(("Edit request {} has no contents, skipping".format(edit_req))) continue with open(os.path.join(settings.CATALOG_BASE_DIR, requirements_dir, edit_req.list_id + ".reql"), 'w') as file: - file.write(edit_req.contents.encode('utf-8')) + file.write(edit_req.contents) edit_req.committed = False edit_req.save() @@ -153,7 +153,7 @@ def perform_deployments(): deployment.date_executed = timezone.now() deployment.save() except: - print(traceback.format_exc()) + print((traceback.format_exc())) # Write delta file if len(delta) > 0: @@ -170,13 +170,13 @@ def update_requirements(): print(path_name) try: new_req = RequirementsList.objects.create(list_id=os.path.basename(path_name)) - with open(os.path.join(settings.CATALOG_BASE_DIR, path_name), 'rb') as file: - new_req.parse(file.read().decode('utf-8')) + with open(os.path.join(settings.CATALOG_BASE_DIR, path_name), 'r') as file: + new_req.parse(file.read()) new_req.save() except Exception as e: print("Encountered exception: {}".format(e)) - print("The database was successfully updated with {} requirements files.".format(len(req_urls[REQUIREMENTS_INFO_KEY]))) + print(("The database was successfully updated with {} requirements files.".format(len(req_urls[REQUIREMENTS_INFO_KEY])))) ### EDIT REQUESTS @@ -190,7 +190,7 @@ def check_for_edits(): if edit_requests.count() > 0: message += "You have {} unresolved edit requests:\n".format(edit_requests.count()) for req in edit_requests: - message += unicode(req).encode("utf-8") + "\n" + message += str(req) + "\n" message += "\n" return message @@ -204,7 +204,7 @@ def 
log_analytics_summary(output_path, num_hours=26): # Count up total summary statistics over the past num_hours hours out_file = open(output_path, "a") - for offset in reversed(range(1, 25)): + for offset in reversed(list(range(1, 25))): early_time = timezone.now() - timezone.timedelta(hours=offset) early_time = early_time.replace(minute=0) late_time = timezone.now() - timezone.timedelta(hours=offset - 1) @@ -262,7 +262,7 @@ def save_backups_by_doc_type(doc_type, backup_type): if backup_type.objects.all().count() > 0: yesterday = timezone.now() - timezone.timedelta(days=1) docs_to_check = doc_type.objects.filter(modified_date__gte=yesterday) - print("Checking documents from {} to {}".format(yesterday, timezone.now())) + print(("Checking documents from {} to {}".format(yesterday, timezone.now()))) else: print("Checking all documents") docs_to_check = doc_type.objects.all() @@ -285,8 +285,8 @@ def save_backups_by_doc_type(doc_type, backup_type): contents=document.contents) new_backup.save() num_diff_backups += 1 - print("{} backups created for new documents, {} for old documents".format( - num_new_backups, num_diff_backups)) + print(("{} backups created for new documents, {} for old documents".format( + num_new_backups, num_diff_backups))) def save_backups(): """Saves backups for any roads that don't have a backup yet or are significantly different @@ -307,14 +307,14 @@ def clean_db(): if obj.date < expired_threshold: num_objs += 1 obj.delete() - print("{} OAuth caches deleted".format(num_objs)) + print(("{} OAuth caches deleted".format(num_objs))) num_objs = 0 for obj in TemporaryCode.objects.all(): if obj.date < expired_threshold: num_objs += 1 obj.delete() - print("{} temporary codes deleted".format(num_objs)) + print(("{} temporary codes deleted".format(num_objs))) def email_results(message, recipients): @@ -382,5 +382,5 @@ def email_results(message, recipients): message += traceback.format_exc() if len(message) > 0 and len(sys.argv) > 2: - 
email_results(message.decode("utf-8"), sys.argv[2:]) + email_results(message, sys.argv[2:]) print(message)