edx / edx-ora2

Commit fa48be6c authored Mar 27, 2014 by Stephen Sanchez
Merge pull request #215 from edx/sanchez/assessments_start_dates
Adding start date validation to assessment modules
Parents: 875c14f6 9dc9dba8
Showing 8 changed files with 115 additions and 13 deletions
apps/openassessment/templates/openassessmentblock/peer/oa_peer_assessment.html  +8 -1
apps/openassessment/templates/openassessmentblock/self/oa_self_assessment.html  +8 -1
apps/openassessment/xblock/peer_assessment_mixin.py  +7 -3
apps/openassessment/xblock/self_assessment_mixin.py  +9 -4
apps/openassessment/xblock/test/data/assessment_not_started.xml  +46 -0
apps/openassessment/xblock/test/data/over_grade_scenario.xml  +2 -2
apps/openassessment/xblock/test/test_peer.py  +20 -2
apps/openassessment/xblock/test/test_self.py  +15 -0
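Both mixins now branch on the (problem_open, date) pair returned by self.is_open(step=...) to decide between the "unavailable", "closed", and normal templates. Below is a rough, self-contained sketch of that selection pattern; the is_open stub is an assumption for illustration only, and just the calling convention and the template paths come from the hunks that follow.

# Sketch only: the template-selection pattern this commit introduces.
# is_open() here is a stand-in stub; the real method lives on the XBlock
# and is not part of this diff.
from datetime import datetime

def is_open(start, due, now=None):
    """Return (problem_open, reason), where reason is 'start' or 'due' when closed."""
    now = now or datetime.utcnow()
    if start and now < start:
        return False, "start"
    if due and now >= due:
        return False, "due"
    return True, None

def pick_peer_template(start, due):
    problem_open, date = is_open(start, due)
    if date == "due" and not problem_open:
        return "openassessmentblock/peer/oa_peer_closed.html"
    if date == "start" and not problem_open:
        return "openassessmentblock/peer/oa_peer_unavailable.html"
    # Assumed default for an open step.
    return "openassessmentblock/peer/oa_peer_assessment.html"

print(pick_peer_template(datetime(4014, 3, 2), None))   # unavailable: start not reached
print(pick_peer_template(None, datetime(2014, 3, 10)))  # closed: due date has passed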
apps/openassessment/templates/openassessmentblock/peer/oa_peer_assessment.html
View file @ fa48be6c

@@ -11,7 +11,14 @@
     <span class="step__counter"></span>
     <span class="wrapper--copy">
       <span class="step__label">Assess Peers</span>
-      {% if peer_due %}
+      {% if peer_start %}
+      <span class="step__deadline">available
+        <span class="date">
+          {{ peer_start }}
+          (in {{ peer_start|timeuntil }})
+        </span>
+      </span>
+      {% elif peer_due %}
       <span class="step__deadline">due
         <span class="date">
           {{ peer_due }}
...
apps/openassessment/templates/openassessmentblock/self/oa_self_assessment.html
View file @ fa48be6c

@@ -12,7 +12,14 @@
     <span class="step__counter"></span>
     <span class="wrapper--copy">
       <span class="step__label">Assess Yourself</span>
-      {% if self_due %}
+      {% if self_start %}
+      <span class="step__deadline">available
+        <span class="date">
+          {{ self_start }}
+          (in {{ self_start|timeuntil }})
+        </span>
+      </span>
+      {% elif self_due %}
       <span class="step__deadline">due
         <span class="date">
           {{ self_due }}
...
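Both template hunks above render an "available <date> (in ...)" line with Django's built-in timeuntil filter. As a simplified, stand-alone illustration of that fragment (not the literal ORA2 markup; the bare-bones settings setup is an assumption and depends on the Django version):

# Illustration only: rendering a simplified "available ... (in N)" fragment
# with Django's built-in timeuntil filter, outside of the XBlock.
import datetime

import django
from django.conf import settings
from django.template import Context, Template

settings.configure(TEMPLATES=[{"BACKEND": "django.template.backends.django.DjangoTemplates"}])
django.setup()

fragment = Template(
    "{% if peer_start %}available {{ peer_start }} (in {{ peer_start|timeuntil }})"
    "{% elif peer_due %}due {{ peer_due }}{% endif %}"
)

peer_start = datetime.datetime.now() + datetime.timedelta(days=30)
print(fragment.render(Context({"peer_start": peer_start})))
# prints something like: available <formatted date> (in 4 weeks, 2 days)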
apps/openassessment/xblock/peer_assessment_mixin.py
View file @ fa48be6c

@@ -112,7 +112,7 @@ class PeerAssessmentMixin(object):
         path = 'openassessmentblock/peer/oa_peer_unavailable.html'
         finished = False
-        problem_open, date = self.is_open(step="peer")
+        problem_open, date = self.is_open(step="peer-assessment")
         context_dict = {
             "rubric_criteria": self.rubric_criteria,
             "estimated_time": "20 minutes"  # TODO: Need to configure this.
...
@@ -153,12 +153,16 @@ class PeerAssessmentMixin(object):
             context_dict["submit_button_text"] = (
                 "Submit your assessment & move to response #{}"
             ).format(count + 2)
         if assessment.get('due'):
             context_dict["peer_due"] = self.format_datetime_string(assessment["due"])
-        if date == "due" and not problem_open:
+        if date == 'due' and not problem_open:
             path = 'openassessmentblock/peer/oa_peer_closed.html'
+        elif date == 'start' and not problem_open:
+            if assessment.get('start'):
+                context_dict["peer_start"] = self.format_datetime_string(assessment["start"])
+            path = 'openassessmentblock/peer/oa_peer_unavailable.html'
         elif workflow.get("status") == "peer":
             peer_sub = self.get_peer_submission(student_item, assessment, over_grading)
             if peer_sub:
...
apps/openassessment/xblock/self_assessment_mixin.py
View file @ fa48be6c

@@ -26,11 +26,16 @@ class SelfAssessmentMixin(object):
         context = {}
         assessment_module = self.get_assessment_module('self-assessment')
-        if assessment_module and assessment_module.get('due'):
-            context["self_due"] = self.format_datetime_string(assessment_module["due"])
         path = 'openassessmentblock/self/oa_self_unavailable.html'
-        problem_open, date = self.is_open(step="self")
+        problem_open, date = self.is_open(step="self-assessment")
+        due_date = assessment_module.get('due')
+        if date == 'start' and not problem_open:
+            context["self_start"] = self.format_datetime_string(assessment_module["start"])
+        elif due_date:
+            context["self_due"] = self.format_datetime_string(assessment_module["due"])
         workflow = self.get_workflow_info()
         if not workflow:
             return self.render_assessment(path, context)
...
@@ -55,7 +60,7 @@ class SelfAssessmentMixin(object):
             }
         elif assessment is not None:
             path = 'openassessmentblock/self/oa_self_complete.html'
-        elif date == "due" and not problem_open:
+        elif date == 'due' and not problem_open:
             path = 'openassessmentblock/self/oa_self_closed.html'
         return self.render_assessment(path, context)
...
apps/openassessment/xblock/test/data/assessment_not_started.xml
0 → 100644
View file @ fa48be6c

<openassessment submission_due="2014-03-05">
    <title>Open Assessment Test</title>
    <prompt>
        Given the state of the world today, what do you think should be done to
        combat poverty? Please answer in a short essay of 200-300 words.
    </prompt>
    <rubric>
        <prompt>Read for conciseness, clarity of thought, and form.</prompt>
        <criterion>
            <name>𝓒𝓸𝓷𝓬𝓲𝓼𝓮</name>
            <prompt>How concise is it?</prompt>
            <option points="3">
                <name>ﻉซƈﻉɭɭﻉกՇ</name>
                <explanation>Extremely concise</explanation>
            </option>
            <option points="2">
                <name>Ġööḋ</name>
                <explanation>Concise</explanation>
            </option>
            <option points="1">
                <name>ק๏๏г</name>
                <explanation>Wordy</explanation>
            </option>
        </criterion>
        <criterion>
            <name>Form</name>
            <prompt>How well-formed is it?</prompt>
            <option points="3">
                <name>Good</name>
                <explanation>Good</explanation>
            </option>
            <option points="2">
                <name>Fair</name>
                <explanation>Fair</explanation>
            </option>
            <option points="1">
                <name>Poor</name>
                <explanation>Poor</explanation>
            </option>
        </criterion>
    </rubric>
    <assessments>
        <assessment name="peer-assessment" must_grade="1" must_be_graded_by="1" start="4014-03-02" />
        <assessment name="self-assessment" start="4014-03-8" />
    </assessments>
</openassessment>
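The start dates in this new fixture sit in the year 4014, so both steps stay unavailable for the test. A quick, throwaway way to eyeball that, assuming the path is resolved relative to the repository root:

# Throwaway check of the new fixture's start dates; the path is an assumption
# (relative to the edx-ora2 repository root).
import xml.etree.ElementTree as ET

tree = ET.parse("apps/openassessment/xblock/test/data/assessment_not_started.xml")
for step in tree.findall("./assessments/assessment"):
    print(step.get("name"), "start =", step.get("start"))
# peer-assessment start = 4014-03-02
# self-assessment start = 4014-03-8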
apps/openassessment/xblock/test/data/over_grade_scenario.xml
View file @ fa48be6c

@@ -40,7 +40,7 @@
        </criterion>
    </rubric>
    <assessments>
-        <assessment name="peer-assessment" must_grade="1" must_be_graded_by="1" start="2014-03-02" due="2014-03-10" />
-        <assessment name="self-assessment" start="2014-03-8" due="2014-03-10" />
+        <assessment name="peer-assessment" must_grade="1" must_be_graded_by="1" start="2014-03-02" />
+        <assessment name="self-assessment" start="2014-03-8" />
    </assessments>
</openassessment>
apps/openassessment/xblock/test/test_peer.py
View file @ fa48be6c

@@ -143,6 +143,24 @@ class TestPeerAssessment(XBlockHandlerTestCase):
         resp = self.request(xblock, 'peer_assess', json.dumps(assessment), response_format='json')
         self.assertEqual(resp['success'], False)

+    @scenario('data/assessment_not_started.xml', user_id='Bob')
+    def test_start_dates(self, xblock):
+        student_item = xblock.get_student_item_dict()
+
+        submission = xblock.create_submission(student_item, u"Bob's answer")
+        workflow_info = xblock.get_workflow_info()
+        self.assertEqual(workflow_info["status"], u'peer')
+
+        # Validate Submission Rendering.
+        request = namedtuple('Request', 'params')
+        request.params = {}
+        peer_response = xblock.render_peer_assessment(request)
+        self.assertIsNotNone(peer_response)
+        self.assertNotIn(submission["answer"]["text"].encode('utf-8'), peer_response.body)
+
+        # Validate Peer Rendering.
+        self.assertIn("available".encode('utf-8'), peer_response.body)
+
     @scenario('data/over_grade_scenario.xml', user_id='Bob')
     def test_turbo_grading(self, xblock):
         student_item = xblock.get_student_item_dict()
...
@@ -190,7 +208,7 @@ class TestPeerAssessment(XBlockHandlerTestCase):
         self.assertIsNotNone(peer_response)
         self.assertNotIn(submission["answer"]["text"].encode('utf-8'), peer_response.body)
-        #Validate Peer Rendering.
+        # Validate Peer Rendering.
         self.assertIn("Sally".encode('utf-8'), peer_response.body)
         peer_api.create_assessment(
             sally_sub['uuid'],
...
@@ -206,7 +224,7 @@ class TestPeerAssessment(XBlockHandlerTestCase):
         self.assertIsNotNone(peer_response)
         self.assertNotIn(submission["answer"]["text"].encode('utf-8'), peer_response.body)
-        #Validate Peer Rendering.
+        # Validate Peer Rendering.
         self.assertIn("Hal".encode('utf-8'), peer_response.body)
         peer_api.create_assessment(
             hal_sub['uuid'],
...
apps/openassessment/xblock/test/test_self.py
View file @ fa48be6c

@@ -180,3 +180,18 @@ class TestSelfAssessment(XBlockHandlerTestCase):
         resp = self.request(xblock, 'self_assess', json.dumps(assessment), response_format='json')
         self.assertFalse(resp['success'])
+
+    @scenario('data/assessment_not_started.xml', user_id='Bob')
+    def test_start_dates(self, xblock):
+        student_item = xblock.get_student_item_dict()
+
+        submission = xblock.create_submission(student_item, u"Bob's answer")
+        workflow_info = xblock.get_workflow_info()
+        self.assertEqual(workflow_info["status"], u'peer')
+
+        self_response = xblock.render_self_assessment({})
+        self.assertIsNotNone(self_response)
+        self.assertNotIn(submission["answer"]["text"].encode('utf-8'), self_response.body)
+
+        #Validate Self Rendering.
+        self.assertIn("available".encode('utf-8'), self_response.body)