Commit 62042188 by J. Cliff Dyer

Revert "Fix Gating to use grades API, instead of its own calculations"

This reverts commit 31812141.
parent 75772d20
"""
Tests for ProctoredExamTransformer.
"""
from mock import patch
from mock import patch, Mock
from nose.plugins.attrib import attr
import ddt
@@ -53,8 +53,7 @@ class MilestonesTransformerTestCase(CourseStructureTestCase, MilestonesTestCaseM
'course', 'A', 'B', 'C', 'ProctoredExam', 'D', 'E', 'PracticeExam', 'F', 'G', 'H', 'I', 'TimedExam', 'J', 'K'
)
# The special exams (proctored, practice, timed) are not visible to
# students via the Courses API.
# The special exams (proctored, practice, timed) should never be visible to students
ALL_BLOCKS_EXCEPT_SPECIAL = ('course', 'A', 'B', 'C', 'H', 'I')
def get_course_hierarchy(self):
@@ -135,27 +134,27 @@ class MilestonesTransformerTestCase(CourseStructureTestCase, MilestonesTestCaseM
(
'H',
'A',
('course', 'A', 'B', 'C'),
'B',
('course', 'A', 'B', 'C',)
),
(
'H',
'ProctoredExam',
'D',
('course', 'A', 'B', 'C'),
),
)
@ddt.unpack
def test_gated(self, gated_block_ref, gating_block_ref, expected_blocks_before_completion):
def test_gated(self, gated_block_ref, gating_block_ref, gating_block_child, expected_blocks_before_completion):
"""
First, checks that a student cannot see the gated block when it is gated
by the gating block and no attempt has been made to complete the gating
block. Then, checks that the student can see the gated block after the
gating block has been completed.
First, checks that a student cannot see the gated block when it is gated by the gating block and no
attempt has been made to complete the gating block.
Then, checks that the student can see the gated block after the gating block has been completed.
expected_blocks_before_completion is the set of blocks we expect to be
visible to the student before the student has completed the gating block.
expected_blocks_before_completion is the set of blocks we expect to be visible to the student
before the student has completed the gating block.
The test data includes one special exam and one non-special block as the
gating blocks.
The test data includes one special exam and one non-special block as the gating blocks.
"""
self.course.enable_subsection_gating = True
self.setup_gated_section(self.blocks[gated_block_ref], self.blocks[gating_block_ref])
@@ -166,16 +165,16 @@ class MilestonesTransformerTestCase(CourseStructureTestCase, MilestonesTestCaseM
# clear the request cache to simulate a new request
self.clear_caches()
# this call triggers reevaluation of prerequisites fulfilled by the
# gating block.
lms_gating_api.evaluate_prerequisite(
self.course,
self.user,
self.blocks[gating_block_ref].location,
100.0,
)
# mock the API that the LMS gating API calls to get the score for each block so that it always returns 1 (i.e. 100%)
with patch('gating.api.get_module_score', Mock(return_value=1)):
with self.assertNumQueries(3):
# this call triggers reevaluation of prerequisites fulfilled by the parent of the
# block passed in, so we pass in a child of the gating block
lms_gating_api.evaluate_prerequisite(
self.course,
UsageKey.from_string(unicode(self.blocks[gating_block_child].location)),
self.user.id)
with self.assertNumQueries(2):
self.get_blocks_and_check_against_expected(self.user, self.ALL_BLOCKS_EXCEPT_SPECIAL)
def test_staff_access(self):
@@ -6,14 +6,35 @@ import json
from collections import defaultdict
from django.contrib.auth.models import User
from xmodule.modulestore.django import modulestore
from openedx.core.lib.gating import api as gating_api
from lms.djangoapps.grades.module_grades import get_module_score
from util import milestones_helpers
log = logging.getLogger(__name__)
def _get_xblock_parent(xblock, category=None):
"""
Returns the parent of the given XBlock. If an optional category is supplied,
traverses the ancestors of the XBlock and returns the first with the
given category.
Arguments:
xblock (XBlock): Get the parent of this XBlock
category (str): Find an ancestor with this category (e.g. sequential)
"""
parent = xblock.get_parent()
if parent and category:
if parent.category == category:
return parent
else:
return _get_xblock_parent(parent, category)
return parent
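# Illustrative usage sketch (not part of the original change; `problem_usage_key`
# is a hypothetical UsageKey used only for this example):
#
#     problem = modulestore().get_item(problem_usage_key)
#     subsection = _get_xblock_parent(problem, 'sequential')  # nearest sequential ancestor
#     unit = _get_xblock_parent(problem)                      # immediate parent, no category filter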
@gating_api.gating_enabled(default=False)
def evaluate_prerequisite(course, user, subsection_usage_key, new_score):
def evaluate_prerequisite(course, prereq_content_key, user_id):
"""
Finds the parent subsection of the content in the course and evaluates
any milestone relationships attached to that subsection. If the calculated
@@ -21,40 +42,44 @@ def evaluate_prerequisite(course, user, subsection_usage_key, new_score):
dependent subsections, the related milestone will be fulfilled for the user.
Arguments:
user (User): User for which evaluation should occur
user_id (int): ID of User for which evaluation should occur
course (CourseModule): The course
subsection_usage_key (UsageKey): Usage key of the updated subsection
new_score (float): New score of the given subsection, in percentage.
prereq_content_key (UsageKey): The prerequisite content usage key
Returns:
None
"""
prereq_milestone = gating_api.get_gating_milestone(
course.id,
subsection_usage_key,
'fulfills'
)
if prereq_milestone:
gated_content_milestones = defaultdict(list)
for milestone in gating_api.find_gating_milestones(course.id, None, 'requires'):
gated_content_milestones[milestone['id']].append(milestone)
gated_content = gated_content_milestones.get(prereq_milestone['id'])
if gated_content:
for milestone in gated_content:
# Default minimum score to 100
min_score = 100.0
requirements = milestone.get('requirements')
if requirements:
try:
min_score = float(requirements.get('min_score'))
except (ValueError, TypeError):
log.warning(
'Failed to find minimum score for gating milestone %s, defaulting to 100',
json.dumps(milestone)
)
if new_score >= min_score:
milestones_helpers.add_user_milestone({'id': user.id}, prereq_milestone)
else:
milestones_helpers.remove_user_milestone({'id': user.id}, prereq_milestone)
xblock = modulestore().get_item(prereq_content_key)
sequential = _get_xblock_parent(xblock, 'sequential')
if sequential:
prereq_milestone = gating_api.get_gating_milestone(
course.id,
sequential.location.for_branch(None),
'fulfills'
)
if prereq_milestone:
gated_content_milestones = defaultdict(list)
for milestone in gating_api.find_gating_milestones(course.id, None, 'requires'):
gated_content_milestones[milestone['id']].append(milestone)
gated_content = gated_content_milestones.get(prereq_milestone['id'])
if gated_content:
user = User.objects.get(id=user_id)
score = get_module_score(user, course, sequential) * 100
for milestone in gated_content:
# Default minimum score to 100
min_score = 100
requirements = milestone.get('requirements')
if requirements:
try:
min_score = int(requirements.get('min_score'))
except (ValueError, TypeError):
log.warning(
'Failed to find minimum score for gating milestone %s, defaulting to 100',
json.dumps(milestone)
)
if score >= min_score:
milestones_helpers.add_user_milestone({'id': user_id}, prereq_milestone)
else:
milestones_helpers.remove_user_milestone({'id': user_id}, prereq_milestone)
@@ -2,12 +2,14 @@
Signal handlers for the gating djangoapp
"""
from django.dispatch import receiver
from lms.djangoapps.grades.signals.signals import SUBSECTION_SCORE_UPDATED
from opaque_keys.edx.keys import CourseKey, UsageKey
from xmodule.modulestore.django import modulestore
from lms.djangoapps.grades.signals.signals import SCORE_CHANGED
from gating import api as gating_api
@receiver(SUBSECTION_SCORE_UPDATED)
def handle_subsection_score_updated(**kwargs):
@receiver(SCORE_CHANGED)
def handle_score_changed(**kwargs):
"""
Receives the SCORE_CHANGED signal sent by LMS when a student's score has changed
for a given component and triggers the evaluation of any milestone relationships
@@ -19,13 +21,10 @@ def handle_subsection_score_updated(**kwargs):
Returns:
None
"""
course = kwargs['course']
course = modulestore().get_course(CourseKey.from_string(kwargs.get('course_id')))
if course.enable_subsection_gating:
subsection_grade = kwargs['subsection_grade']
new_score = subsection_grade.graded_total.earned / subsection_grade.graded_total.possible * 100.0
gating_api.evaluate_prerequisite(
course,
kwargs['user'],
subsection_grade.location,
new_score,
UsageKey.from_string(kwargs.get('usage_id')),
kwargs.get('user').id,
)
"""
Unit tests for gating.signals module
"""
from mock import patch, MagicMock
from mock import patch
from opaque_keys.edx.keys import UsageKey
from student.tests.factories import UserFactory
@@ -9,7 +9,7 @@ from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.django import modulestore
from gating.signals import handle_subsection_score_updated
from gating.signals import handle_score_changed
class TestHandleScoreChanged(ModuleStoreTestCase):
@@ -24,22 +24,28 @@ class TestHandleScoreChanged(ModuleStoreTestCase):
@patch('gating.signals.gating_api.evaluate_prerequisite')
def test_gating_enabled(self, mock_evaluate):
""" Test evaluate_prerequisite is called when course.enable_subsection_gating is True """
self.course.enable_subsection_gating = True
modulestore().update_item(self.course, 0)
handle_subsection_score_updated(
handle_score_changed(
sender=None,
course=self.course,
points_possible=1,
points_earned=1,
user=self.user,
subsection_grade=MagicMock(),
course_id=unicode(self.course.id),
usage_id=unicode(self.test_usage_key)
)
mock_evaluate.assert_called()
mock_evaluate.assert_called_with(self.course, self.test_usage_key, self.user.id) # pylint: disable=no-member
@patch('gating.signals.gating_api.evaluate_prerequisite')
def test_gating_disabled(self, mock_evaluate):
handle_subsection_score_updated(
""" Test evaluate_prerequisite is not called when course.enable_subsection_gating is False """
handle_score_changed(
sender=None,
course=self.course,
points_possible=1,
points_earned=1,
user=self.user,
subsection_grade=MagicMock(),
course_id=unicode(self.course.id),
usage_id=unicode(self.test_usage_key)
)
mock_evaluate.assert_not_called()
"""
Functionality for module-level grades.
"""
# TODO The score computation in this file is not accurate
# since it is summing percentages instead of computing a
# final percentage of the individual sums.
# Regardless, this file and its code should be removed soon
# as part of TNL-5062.
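# A rough sketch of that inaccuracy, using hypothetical point values: for one
# 1-point problem answered wrong and one 10-point problem answered right,
#
#     average of per-problem percentages: (0.0 / 1 + 10.0 / 10) / 2  == 0.5
#     true earned / possible ratio:       (0.0 + 10.0) / (1 + 10)    == ~0.91
#
# The two only agree when every scored problem carries the same weight.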
from django.test.client import RequestFactory
from courseware.model_data import FieldDataCache, ScoresClient
from courseware.module_render import get_module_for_descriptor
from opaque_keys.edx.locator import BlockUsageLocator
from util.module_utils import yield_dynamic_descriptor_descendants
def _get_mock_request(student):
"""
Make a fake request because grading code expects to be able to look at
the request. We have to attach the correct user to the request before
grading that student.
"""
request = RequestFactory().get('/')
request.user = student
return request
def _calculate_score_for_modules(user_id, course, modules):
"""
Calculates the cumulative score (percent) of the given modules
"""
# Remove branch and version from each module's locator; otherwise the student
# module lookup would not return scores, because the usage keys would not match.
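# materialize the lazily yielded descendants into a list, since they are iterated more than once below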
modules = [m for m in modules]
locations = [
BlockUsageLocator(
course_key=course.id,
block_type=module.location.block_type,
block_id=module.location.block_id
)
if isinstance(module.location, BlockUsageLocator) and module.location.version
else module.location
for module in modules
]
scores_client = ScoresClient(course.id, user_id)
scores_client.fetch_scores(locations)
# Iterate over all of the exam modules to get the user's score percentage for each of them
module_percentages = []
ignore_categories = ['course', 'chapter', 'sequential', 'vertical', 'randomize', 'library_content']
for index, module in enumerate(modules):
if module.category not in ignore_categories and (module.graded or module.has_score):
module_score = scores_client.get(locations[index])
if module_score:
correct = module_score.correct or 0
total = module_score.total or 1
module_percentages.append(correct / total)
return sum(module_percentages) / float(len(module_percentages)) if module_percentages else 0
def get_module_score(user, course, module):
"""
Collects all children of the given module and calculates the cumulative
score for this set of modules for the given user.
Arguments:
user (User): The user
course (CourseModule): The course
module (XBlock): The module
Returns:
float: The cumulative score
"""
def inner_get_module(descriptor):
"""
Delegate to get_module_for_descriptor
"""
field_data_cache = FieldDataCache([descriptor], course.id, user)
return get_module_for_descriptor(
user,
_get_mock_request(user),
descriptor,
field_data_cache,
course.id,
course=course
)
modules = yield_dynamic_descriptor_descendants(
module,
user.id,
inner_get_module
)
return _calculate_score_for_modules(user.id, course, modules)
@@ -251,6 +251,11 @@ class SubsectionGradeFactory(object):
"""
Updates the SubsectionGrade object for the student and subsection.
"""
# Save ourselves the extra queries if the course does not persist
# subsection grades.
if not PersistentGradesEnabledFlag.feature_enabled(self.course.id):
return
self._log_event(log.warning, u"update, subsection: {}".format(subsection.location))
block_structure = self._get_block_structure(block_structure)
@@ -259,10 +264,8 @@ class SubsectionGradeFactory(object):
self.student, block_structure, self._submissions_scores, self._csm_scores
)
if PersistentGradesEnabledFlag.feature_enabled(self.course.id):
grade_model = subsection_grade.update_or_create_model(self.student)
self._update_saved_subsection_grade(subsection.location, grade_model)
grade_model = subsection_grade.update_or_create_model(self.student)
self._update_saved_subsection_grade(subsection.location, grade_model)
return subsection_grade
@lazy
@@ -13,7 +13,7 @@ from openedx.core.djangoapps.content.block_structure.api import get_course_in_ca
from student.models import user_by_anonymous_id
from submissions.models import score_set, score_reset
from .signals import SCORE_CHANGED, SUBSECTION_SCORE_UPDATED
from .signals import SCORE_CHANGED
from ..config.models import PersistentGradesEnabledFlag
from ..transformer import GradesTransformer
from ..new.subsection_grade import SubsectionGradeFactory
@@ -95,6 +95,8 @@ def recalculate_subsection_grade_handler(sender, **kwargs):  # pylint: disable=u
"""
student = kwargs['user']
course_key = CourseLocator.from_string(kwargs['course_id'])
if not PersistentGradesEnabledFlag.feature_enabled(course_key):
return
scored_block_usage_key = UsageKey.from_string(kwargs['usage_id']).replace(course_key=course_key)
collected_block_structure = get_course_in_cache(course_key)
@@ -113,12 +115,6 @@ def recalculate_subsection_grade_handler(sender, **kwargs):  # pylint: disable=u
subsection_usage_key,
collected_block_structure=collected_block_structure,
)
subsection_grade = subsection_grade_factory.update(
subsection_grade_factory.update(
transformed_subsection_structure[subsection_usage_key], transformed_subsection_structure
)
SUBSECTION_SCORE_UPDATED.send(
sender=None,
course=course,
user=student,
subsection_grade=subsection_grade,
)
@@ -14,20 +14,8 @@ SCORE_CHANGED = Signal(
providing_args=[
'points_possible', # Maximum score available for the exercise
'points_earned', # Score obtained by the user
'user', # User object
'user_id', # Integer User ID
'course_id', # Unicode string representing the course
'usage_id' # Unicode string indicating the courseware instance
]
)
# Signal that indicates that a user's score for a subsection has been updated.
# This is a downstream signal of SCORE_CHANGED sent for each affected containing
# subsection.
SUBSECTION_SCORE_UPDATED = Signal(
providing_args=[
'course', # Course object
'user', # User object
'subsection_grade', # SubsectionGrade object
]
)
@@ -10,7 +10,12 @@ from nose.plugins.attrib import attr
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from capa.tests.response_xml_factory import MultipleChoiceResponseXMLFactory
from courseware.tests.helpers import get_request_for_user
from courseware.module_render import get_module
from courseware.model_data import FieldDataCache, set_score
from courseware.tests.helpers import (
LoginEnrollmentTestCase,
get_request_for_user
)
from lms.djangoapps.course_blocks.api import get_course_blocks
from student.tests.factories import UserFactory
from student.models import CourseEnrollment
@@ -20,9 +25,9 @@ from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from .utils import answer_problem
from .. import course_grades
from ..course_grades import summary as grades_summary
from ..module_grades import get_module_score
from ..new.course_grade import CourseGradeFactory
from ..new.subsection_grade import SubsectionGradeFactory
@@ -337,3 +342,222 @@ class TestScoreForModule(SharedModuleStoreTestCase):
earned, possible = self.course_grade.score_for_module(self.m.location)
self.assertEqual(earned, 0)
self.assertEqual(possible, 0)
class TestGetModuleScore(LoginEnrollmentTestCase, SharedModuleStoreTestCase):
"""
Test get_module_score
"""
@classmethod
def setUpClass(cls):
super(TestGetModuleScore, cls).setUpClass()
cls.course = CourseFactory.create()
cls.chapter = ItemFactory.create(
parent=cls.course,
category="chapter",
display_name="Test Chapter"
)
cls.seq1 = ItemFactory.create(
parent=cls.chapter,
category='sequential',
display_name="Test Sequential 1",
graded=True
)
cls.seq2 = ItemFactory.create(
parent=cls.chapter,
category='sequential',
display_name="Test Sequential 2",
graded=True
)
cls.seq3 = ItemFactory.create(
parent=cls.chapter,
category='sequential',
display_name="Test Sequential 3",
graded=True
)
cls.vert1 = ItemFactory.create(
parent=cls.seq1,
category='vertical',
display_name='Test Vertical 1'
)
cls.vert2 = ItemFactory.create(
parent=cls.seq2,
category='vertical',
display_name='Test Vertical 2'
)
cls.vert3 = ItemFactory.create(
parent=cls.seq3,
category='vertical',
display_name='Test Vertical 3'
)
cls.randomize = ItemFactory.create(
parent=cls.vert2,
category='randomize',
display_name='Test Randomize'
)
cls.library_content = ItemFactory.create(
parent=cls.vert3,
category='library_content',
display_name='Test Library Content'
)
problem_xml = MultipleChoiceResponseXMLFactory().build_xml(
question_text='The correct answer is Choice 3',
choices=[False, False, True, False],
choice_names=['choice_0', 'choice_1', 'choice_2', 'choice_3']
)
cls.problem1 = ItemFactory.create(
parent=cls.vert1,
category="problem",
display_name="Test Problem 1",
data=problem_xml
)
cls.problem2 = ItemFactory.create(
parent=cls.vert1,
category="problem",
display_name="Test Problem 2",
data=problem_xml
)
cls.problem3 = ItemFactory.create(
parent=cls.randomize,
category="problem",
display_name="Test Problem 3",
data=problem_xml
)
cls.problem4 = ItemFactory.create(
parent=cls.randomize,
category="problem",
display_name="Test Problem 4",
data=problem_xml
)
cls.problem5 = ItemFactory.create(
parent=cls.library_content,
category="problem",
display_name="Test Problem 5",
data=problem_xml
)
cls.problem6 = ItemFactory.create(
parent=cls.library_content,
category="problem",
display_name="Test Problem 6",
data=problem_xml
)
def setUp(self):
"""
Set up test course
"""
super(TestGetModuleScore, self).setUp()
self.request = get_request_for_user(UserFactory())
self.client.login(username=self.request.user.username, password="test")
CourseEnrollment.enroll(self.request.user, self.course.id)
self.course_structure = get_course_blocks(self.request.user, self.course.location)
# warm up the score cache to allow accurate query counts, even if tests are run in random order
get_module_score(self.request.user, self.course, self.seq1)
def test_subsection_scores(self):
"""
Test get_module_score and compare it with the SubsectionGradeFactory score
"""
# One query is for getting the list of disabled XBlocks (which is
# then stored in the request).
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
new_score = SubsectionGradeFactory(self.request.user, self.course, self.course_structure).create(self.seq1)
self.assertEqual(score, 0)
self.assertEqual(new_score.all_total.earned, 0)
answer_problem(self.course, self.request, self.problem1)
answer_problem(self.course, self.request, self.problem2)
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
new_score = SubsectionGradeFactory(self.request.user, self.course, self.course_structure).create(self.seq1)
self.assertEqual(score, 1.0)
self.assertEqual(new_score.all_total.earned, 2.0)
# These differ because get_module_score normalizes the subsection score
# to 1, which can cause incorrect aggregation behavior that will be
# fixed by TNL-5062.
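# For instance, with the two 1-point problems above both answered correctly:
#     get_module_score averages per-problem percentages: (1/1 + 1/1) / 2 == 1.0
#     the grades API reports raw points earned:           1 + 1         == 2.0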
answer_problem(self.course, self.request, self.problem1)
answer_problem(self.course, self.request, self.problem2, 0)
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
new_score = SubsectionGradeFactory(self.request.user, self.course, self.course_structure).create(self.seq1)
self.assertEqual(score, .5)
self.assertEqual(new_score.all_total.earned, 1.0)
def test_get_module_score_with_empty_score(self):
"""
Test get_module_score when some scores are empty
"""
set_score(self.request.user.id, self.problem1.location, None, None) # pylint: disable=no-member
set_score(self.request.user.id, self.problem2.location, None, None) # pylint: disable=no-member
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
self.assertEqual(score, 0)
answer_problem(self.course, self.request, self.problem1)
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
self.assertEqual(score, 0.5)
answer_problem(self.course, self.request, self.problem2)
with self.assertNumQueries(1):
score = get_module_score(self.request.user, self.course, self.seq1)
self.assertEqual(score, 1.0)
def test_get_module_score_with_randomize(self):
"""
Test get_module_score with a randomize block
"""
answer_problem(self.course, self.request, self.problem3)
answer_problem(self.course, self.request, self.problem4)
score = get_module_score(self.request.user, self.course, self.seq2)
self.assertEqual(score, 1.0)
def test_get_module_score_with_library_content(self):
"""
Test get_module_score with library content
"""
answer_problem(self.course, self.request, self.problem5)
answer_problem(self.course, self.request, self.problem6)
score = get_module_score(self.request.user, self.course, self.seq3)
self.assertEqual(score, 1.0)
def answer_problem(course, request, problem, score=1, max_value=1):
"""
Records an answer (correct by default) for the given problem.
Arguments:
course (Course): Course object, the course the required problem is in
request (Request): request Object
problem (xblock): xblock object, the problem to be answered
"""
user = request.user
grade_dict = {'value': score, 'max_value': max_value, 'user_id': user.id}
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
course.id,
user,
course,
depth=2
)
# pylint: disable=protected-access
module = get_module(
user,
request,
problem.scope_ids.usage_id,
field_data_cache,
)._xmodule
module.system.publish(problem, 'grade', grade_dict)
@@ -244,7 +244,7 @@ class ScoreChangedUpdatesSubsectionGradeTest(ModuleStoreTestCase):
with self.store.default_store(default_store):
self.set_up_course(enable_subsection_grades=False)
self.assertFalse(PersistentGradesEnabledFlag.feature_enabled(self.course.id))
with check_mongo_calls(2) and self.assertNumQueries(3):
with check_mongo_calls(2) and self.assertNumQueries(0):
recalculate_subsection_grade_handler(None, **self.score_changed_kwargs)
@skip("Pending completion of TNL-5089")
@@ -3,8 +3,6 @@ Utilities for grades related tests
"""
from contextlib import contextmanager
from mock import patch
from courseware.module_render import get_module
from courseware.model_data import FieldDataCache
from xmodule.graders import ProblemScore
@@ -26,32 +24,3 @@ def mock_get_score(earned=0, possible=1):
with patch('lms.djangoapps.grades.new.subsection_grade.get_score') as mock_score:
mock_score.return_value = ProblemScore(earned, possible, earned, possible, 1, True, None, None)
yield mock_score
def answer_problem(course, request, problem, score=1, max_value=1):
"""
Records an answer for the given problem.
Arguments:
course (Course): Course object, the course the required problem is in
request (Request): request Object
problem (xblock): xblock object, the problem to be answered
score (float): The new score for the problem
max_value (float): The new maximum score for the problem
"""
user = request.user
grade_dict = {'value': score, 'max_value': max_value, 'user_id': user.id}
field_data_cache = FieldDataCache.cache_for_descriptor_descendents(
course.id,
user,
course,
depth=2
)
module = get_module(
user,
request,
problem.location,
field_data_cache,
)
module.runtime.publish(problem, 'grade', grade_dict)