edx / edx-platform · Commits

Commit 2aac2b9d, authored Nov 20, 2016 by Nimisha Asthagiri
Update Grade Report to distinguish between Not Attempted and 0

Parent: e87388e2
Showing 2 changed files with 41 additions and 26 deletions:
  lms/djangoapps/instructor_task/tasks_helper.py
  lms/djangoapps/instructor_task/tests/test_tasks_helper.py
lms/djangoapps/instructor_task/tasks_helper.py  (view file @ 2aac2b9d)
This diff is collapsed. Click to expand it.
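Since the tasks_helper.py diff is collapsed here, only the test expectations below show what the report now emits: u'Not Attempted' when a learner never tried a problem (instead of 0), u'Not Accessible' when the problem is not in the learner's course tree (instead of u'N/A'), and a numeric score otherwise. The following is only a minimal sketch of that three-way decision written against those test expectations; the function name, the Score shape, and the formatting are assumptions, not the actual tasks_helper.py code.

from collections import namedtuple

# Hypothetical score shape; the real implementation reads per-problem
# scores from the course grade, not from this namedtuple.
Score = namedtuple('Score', ['earned', 'possible', 'attempted'])

NOT_ATTEMPTED = u'Not Attempted'
NOT_ACCESSIBLE = u'Not Accessible'


def problem_report_cells(problem_location, problem_scores):
    """Return the [(Earned), (Possible)] cell pair for one problem column."""
    score = problem_scores.get(problem_location)
    if score is None:
        # The problem is not in this learner's course tree (e.g. a different
        # split-test group or cohort), so neither cell applies.
        return [NOT_ACCESSIBLE, NOT_ACCESSIBLE]
    if not score.attempted:
        # Distinguish "never attempted" from "attempted and earned 0.0".
        return [NOT_ATTEMPTED, u'{}'.format(float(score.possible))]
    return [u'{}'.format(float(score.earned)), u'{}'.format(float(score.possible))]


# Matches the expected rows asserted in the tests below:
scores = {'block_1': Score(earned=0, possible=2, attempted=False)}
print(problem_report_cells('block_1', scores))  # [u'Not Attempted', u'2.0']
print(problem_report_cells('block_2', scores))  # [u'Not Accessible', u'Not Accessible']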
lms/djangoapps/instructor_task/tests/test_tasks_helper.py  (view file @ 2aac2b9d)
@@ -15,7 +15,7 @@ import urllib
 import ddt
 from freezegun import freeze_time
-from mock import Mock, patch
+from mock import Mock, patch, MagicMock
 from nose.plugins.attrib import attr
 import tempfile
 import unicodecsv
@@ -115,15 +115,14 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
         self.assertDictContainsSubset({'attempted': num_students, 'succeeded': num_students, 'failed': 0}, result)
 
     @patch('lms.djangoapps.instructor_task.tasks_helper._get_current_task')
-    @patch('lms.djangoapps.instructor_task.tasks_helper.iterate_grades_for')
-    def test_grading_failure(self, mock_iterate_grades_for, _mock_current_task):
+    @patch('lms.djangoapps.grades.new.course_grade.CourseGradeFactory.iter')
+    def test_grading_failure(self, mock_grades_iter, _mock_current_task):
         """
         Test that any grading errors are properly reported in the
         progress dict and uploaded to the report store.
         """
-        # mock an error response from `iterate_grades_for`
-        mock_iterate_grades_for.return_value = [
-            (self.create_student('username', 'student@example.com'), {}, 'Cannot grade student')
+        mock_grades_iter.return_value = [
+            (self.create_student('username', 'student@example.com'), None, 'Cannot grade student')
         ]
         result = upload_grades_csv(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset({'attempted': 1, 'succeeded': 0, 'failed': 1}, result)
@@ -293,17 +292,20 @@ class TestInstructorGradeReport(InstructorGradeReportTestCase):
         )
 
     @patch('lms.djangoapps.instructor_task.tasks_helper._get_current_task')
-    @patch('lms.djangoapps.instructor_task.tasks_helper.iterate_grades_for')
-    def test_unicode_in_csv_header(self, mock_iterate_grades_for, _mock_current_task):
+    @patch('lms.djangoapps.grades.new.course_grade.CourseGradeFactory.iter')
+    def test_unicode_in_csv_header(self, mock_grades_iter, _mock_current_task):
         """
         Tests that CSV grade report works if unicode in headers.
         """
-        # mock a response from `iterate_grades_for`
-        mock_iterate_grades_for.return_value = [
+        mock_course_grade = MagicMock()
+        mock_course_grade.summary = {'section_breakdown': [{'label': u'\u8282\u540e\u9898 01'}]}
+        mock_course_grade.letter_grade = None
+        mock_course_grade.percent = 0
+        mock_grades_iter.return_value = [
             (
                 self.create_student('username', 'student@example.com'),
-                {'section_breakdown': [{'label': u'\u8282\u540e\u9898 01'}], 'percent': 0, 'grade': None},
-                'Cannot grade student'
+                mock_course_grade,
+                '',
             )
         ]
         result = upload_grades_csv(None, None, self.course.id, None, 'graded')
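Note the mocking pattern these instructor grade report tests switch to: the patch target moves from instructor_task.tasks_helper.iterate_grades_for to lms.djangoapps.grades.new.course_grade.CourseGradeFactory.iter, which yields (user, course_grade, error_message) tuples where the course grade is an object exposing summary, percent, and letter_grade instead of a plain grade dict. A small helper like the one below captures that shape; the helper itself is hypothetical, and the tests in this commit simply inline the same MagicMock setup.

from mock import MagicMock


def make_mock_course_grade(section_labels, percent=0, letter_grade=None):
    """Build a stand-in course grade with the attributes these tests read."""
    course_grade = MagicMock()
    course_grade.summary = {
        'section_breakdown': [{'label': label} for label in section_labels],
    }
    course_grade.percent = percent
    course_grade.letter_grade = letter_grade
    return course_grade


# Usage with CourseGradeFactory.iter patched in as mock_grades_iter (as above):
# mock_grades_iter.return_value = [(student, make_mock_course_grade([u'HW 01']), '')]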
@@ -631,7 +633,8 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
                     unicode(self.student_1.id),
                     self.student_1.email,
                     self.student_1.username,
-                    '0.01', '1.0', '2.0']
+                    '0.01', '1.0', '2.0',
+                ]
             )),
             dict(zip(
                 header_row,
@@ -639,23 +642,22 @@ class TestProblemGradeReport(TestReportMixin, InstructorTaskModuleTestCase):
                     unicode(self.student_2.id),
                     self.student_2.email,
                     self.student_2.username,
-                    '0.0', '0.0', '2'
+                    '0.0', 'Not Attempted', '2.0',
                 ]
             ))
         ])
 
     @patch('lms.djangoapps.instructor_task.tasks_helper._get_current_task')
-    @patch('lms.djangoapps.instructor_task.tasks_helper.iterate_grades_for')
+    @patch('lms.djangoapps.grades.new.course_grade.CourseGradeFactory.iter')
     @ddt.data(u'Cannot grade student', '')
-    def test_grading_failure(self, error_message, mock_iterate_grades_for, _mock_current_task):
+    def test_grading_failure(self, error_message, mock_grades_iter, _mock_current_task):
         """
         Test that any grading errors are properly reported in the progress
         dict and uploaded to the report store.
         """
-        # mock an error response from `iterate_grades_for`
         student = self.create_student(u'username', u'student@example.com')
-        mock_iterate_grades_for.return_value = [
-            (student, {}, error_message)
+        mock_grades_iter.return_value = [
+            (student, None, error_message)
         ]
         result = upload_problem_grade_report(None, None, self.course.id, None, 'graded')
         self.assertDictContainsSubset({'attempted': 1, 'succeeded': 0, 'failed': 1}, result)
@@ -694,7 +696,8 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
         In order to verify that the behavior of the grade report is correct, we submit answers for problems
         that the student won't have access to. A/B tests won't restrict access to the problems, but it should
-        not show up in that student's course tree when generating the grade report, hence the N/A's in the grade report.
+        not show up in that student's course tree when generating the grade report, hence the Not Accessible's
+        in the grade report.
         """
 
         # student A will get 100%, student B will get 50% because
         # OPTION_1 is the correct option, and OPTION_2 is the
@@ -723,7 +726,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
                     unicode(self.student_a.id),
                     self.student_a.email,
                     self.student_a.username,
-                    u'1.0', u'2.0', u'2.0', u'N/A', u'N/A'
+                    u'1.0', u'2.0', u'2.0', u'Not Accessible', u'Not Accessible'
                 ]
             )),
             dict(zip(
@@ -731,7 +734,7 @@ class TestProblemReportSplitTestContent(TestReportMixin, TestConditionalContent,
                 [
                     unicode(self.student_b.id),
                     self.student_b.email,
-                    self.student_b.username, u'0.5', u'N/A', u'N/A', u'1.0', u'2.0'
+                    self.student_b.username, u'0.5', u'Not Accessible', u'Not Accessible', u'1.0', u'2.0'
                 ]
             ))
         ])
@@ -864,10 +867,22 @@ class TestProblemReportCohortedContent(TestReportMixin, ContentGroupTestCase, In
             header_row += [problem + ' (Earned)', problem + ' (Possible)']
         user_grades = [
-            {'user': self.staff_user, 'grade': [u'0.0', u'N/A', u'N/A', u'N/A', u'N/A']},
-            {'user': self.alpha_user, 'grade': [u'1.0', u'2.0', u'2.0', u'N/A', u'N/A']},
-            {'user': self.beta_user, 'grade': [u'0.5', u'N/A', u'N/A', u'1.0', u'2.0']},
-            {'user': self.non_cohorted_user, 'grade': [u'0.0', u'N/A', u'N/A', u'N/A', u'N/A']},
+            {
+                'user': self.staff_user,
+                'grade': [u'0.0', u'Not Accessible', u'Not Accessible', u'Not Accessible', u'Not Accessible'],
+            },
+            {
+                'user': self.alpha_user,
+                'grade': [u'1.0', u'2.0', u'2.0', u'Not Accessible', u'Not Accessible'],
+            },
+            {
+                'user': self.beta_user,
+                'grade': [u'0.5', u'Not Accessible', u'Not Accessible', u'1.0', u'2.0'],
+            },
+            {
+                'user': self.non_cohorted_user,
+                'grade': [u'0.0', u'Not Accessible', u'Not Accessible', u'Not Accessible', u'Not Accessible'],
+            },
         ]
 
         # Verify generated grades and expected grades match
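Throughout these expected-row assertions, dict(zip(header_row, values)) pairs each CSV header cell with its value so the rows can be verified as dictionaries rather than by column position. A tiny illustration with made-up header and values:

header_row = [u'Student ID', u'Email', u'Username', u'Final Grade',
              u'Problem 1 (Earned)', u'Problem 1 (Possible)']
values = [u'4', u'student@example.com', u'username', u'0.0',
          u'Not Attempted', u'2.0']

expected_row = dict(zip(header_row, values))
# expected_row[u'Problem 1 (Earned)'] == u'Not Attempted'
# expected_row[u'Problem 1 (Possible)'] == u'2.0'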