Commit 7c3d902f by Braden MacDonald Committed by GitHub

Merge pull request #11286 from open-craft/omar/remove-s3

Add OpenStack Swift support
parents 5d196afc 0dddc2e3
......@@ -317,6 +317,20 @@ else:
DATABASES = AUTH_TOKENS['DATABASES']
# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*
# environment variables
for name, database in DATABASES.items():
if name != 'read_replica':
database.update({
'ENGINE': os.environ.get('DB_MIGRATION_ENGINE', database['ENGINE']),
'USER': os.environ.get('DB_MIGRATION_USER', database['USER']),
'PASSWORD': os.environ.get('DB_MIGRATION_PASS', database['PASSWORD']),
'NAME': os.environ.get('DB_MIGRATION_NAME', database['NAME']),
'HOST': os.environ.get('DB_MIGRATION_HOST', database['HOST']),
'PORT': os.environ.get('DB_MIGRATION_PORT', database['PORT']),
})
MODULESTORE = convert_module_store_setting_if_needed(AUTH_TOKENS.get('MODULESTORE', MODULESTORE))
MODULESTORE_FIELD_OVERRIDE_PROVIDERS = ENV_TOKENS.get(
......
"""
A Django settings file for use on AWS while running
database migrations, since we don't want to normally run the
LMS with enough privileges to modify the database schema.
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
# Import everything from .aws so that our settings are based on those.
from .aws import *
import os
from django.core.exceptions import ImproperlyConfigured
def get_db_overrides(db_name):
    """
    Build the settings overrides for running migrations against `db_name`.

    Each connection setting falls back to the value already configured in
    ``DATABASES`` when the corresponding ``DB_MIGRATION_*`` environment
    variable is absent. The password has no fallback: migrations must be
    run with explicit credentials, so ``DB_MIGRATION_PASS`` is required.

    Raises:
        ImproperlyConfigured: if no migration password was supplied.
    """
    defaults = DATABASES[db_name]
    db_overrides = {'PASSWORD': os.environ.get('DB_MIGRATION_PASS', None)}
    # Everything except the password may fall back to the existing config.
    for setting in ('ENGINE', 'USER', 'NAME', 'HOST', 'PORT'):
        env_var = 'DB_MIGRATION_{}'.format(setting)
        db_overrides[setting] = os.environ.get(env_var, defaults[setting])
    if db_overrides['PASSWORD'] is None:
        raise ImproperlyConfigured("No database password was provided for running "
                                   "migrations. This is fatal.")
    return db_overrides
# Apply the migration credentials to every configured database except the
# read replica, which is never the target of schema migrations.
for db in DATABASES:
    # You never migrate a read_replica
    if db != 'read_replica':
        DATABASES[db].update(get_db_overrides(db))
......@@ -69,7 +69,7 @@
"AUTH_USE_OPENID_PROVIDER": true,
"CERTIFICATES_HTML_VIEW": true,
"ENABLE_DISCUSSION_SERVICE": true,
"ENABLE_S3_GRADE_DOWNLOADS": true,
"ENABLE_GRADE_DOWNLOADS": true,
"ENTRANCE_EXAMS": true,
"MILESTONES_APP": true,
"PREVIEW_LMS_BASE": "preview.localhost:8003",
......
"""
Settings for OpenStack deployments.
"""
from .aws import * # pylint: disable=wildcard-import, unused-wildcard-import
......@@ -6,6 +6,7 @@ from contextlib import contextmanager
from django.dispatch import Signal
from markupsafe import escape
from mock import Mock, patch
import moto
@contextmanager
......@@ -107,3 +108,18 @@ def skip_signal(signal, **kwargs):
signal.disconnect(**kwargs)
yield
signal.connect(**kwargs)
class MockS3Mixin(object):
    """
    TestCase mixin that stubs S3 using the moto library. Note that this will
    activate httpretty, which will monkey patch socket.
    """
    def setUp(self):
        super(MockS3Mixin, self).setUp()
        # Start moto's in-memory S3 stub for the duration of each test;
        # any boto S3 calls made by the code under test hit the fake.
        self._mock_s3 = moto.mock_s3()
        self._mock_s3.start()

    def tearDown(self):
        # Stop the stub first so httpretty's socket monkey-patching is
        # undone before the rest of the teardown chain runs.
        self._mock_s3.stop()
        super(MockS3Mixin, self).tearDown()
......@@ -2975,7 +2975,7 @@ class TestInstructorAPILevelsDataDump(SharedModuleStoreTestCase, LoginEnrollment
def test_list_report_downloads(self):
url = reverse('list_report_downloads', kwargs={'course_id': self.course.id.to_deprecated_string()})
with patch('instructor_task.models.LocalFSReportStore.links_for') as mock_links_for:
with patch('instructor_task.models.DjangoStorageReportStore.links_for') as mock_links_for:
mock_links_for.return_value = [
('mock_file_name_1', 'https://1.mock.url'),
('mock_file_name_2', 'https://2.mock.url'),
......
......@@ -649,10 +649,6 @@ def upload_exec_summary_to_store(data_dict, report_name, course_id, generated_at
timestamp_str=generated_at.strftime("%Y-%m-%d-%H%M")
),
output_buffer,
config={
'content_type': 'text/html',
'content_encoding': None,
}
)
tracker.emit(REPORT_REQUESTED_EVENT_NAME, {"report_type": report_name})
......
......@@ -11,7 +11,6 @@ from uuid import uuid4
from celery.states import SUCCESS, FAILURE
from django.core.urlresolvers import reverse
from django.conf import settings
from django.test.testcases import TestCase
from django.contrib.auth.models import User
from lms.djangoapps.lms_xblock.runtime import quote_slashes
......@@ -292,9 +291,14 @@ class TestReportMixin(object):
Cleans up after tests that place files in the reports directory.
"""
def tearDown(self):
reports_download_path = settings.GRADES_DOWNLOAD['ROOT_PATH']
if os.path.exists(reports_download_path):
shutil.rmtree(reports_download_path)
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
try:
reports_download_path = report_store.storage.path('')
except NotImplementedError:
pass # storage backend does not use the local filesystem
else:
if os.path.exists(reports_download_path):
shutil.rmtree(reports_download_path)
def verify_rows_in_csv(self, expected_rows, file_index=0, verify_order=True, ignore_other_columns=False):
"""
......@@ -317,7 +321,8 @@ class TestReportMixin(object):
"""
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
report_csv_filename = report_store.links_for(self.course.id)[file_index][0]
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
report_path = report_store.path_to(self.course.id, report_csv_filename)
with report_store.storage.open(report_path) as csv_file:
# Expand the dict reader generator so we don't lose its content
csv_rows = [row for row in unicodecsv.DictReader(csv_file)]
......@@ -337,6 +342,7 @@ class TestReportMixin(object):
"""
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
report_csv_filename = report_store.links_for(self.course.id)[0][0]
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
report_path = report_store.path_to(self.course.id, report_csv_filename)
with report_store.storage.open(report_path) as csv_file:
rows = unicodecsv.reader(csv_file, encoding='utf-8')
return rows.next()
......@@ -3,55 +3,32 @@ Tests for instructor_task/models.py.
"""
from cStringIO import StringIO
import mock
import time
from datetime import datetime
from unittest import TestCase
from instructor_task.models import LocalFSReportStore, S3ReportStore
import boto
from django.conf import settings
from django.test import SimpleTestCase, override_settings
from mock import patch
from common.test.utils import MockS3Mixin
from instructor_task.models import ReportStore
from instructor_task.tests.test_base import TestReportMixin
from opaque_keys.edx.locator import CourseLocator
class MockKey(object):
"""
Mocking a boto S3 Key object.
"""
def __init__(self, bucket):
self.last_modified = datetime.now()
self.bucket = bucket
def set_contents_from_string(self, contents, headers): # pylint: disable=unused-argument
""" Expected method on a Key object. """
self.bucket.store_key(self)
def generate_url(self, expires_in): # pylint: disable=unused-argument
""" Expected method on a Key object. """
return "http://fake-edx-s3.edx.org/"
class MockBucket(object):
""" Mocking a boto S3 Bucket object. """
def __init__(self, _name):
self.keys = []
def store_key(self, key):
""" Not a Bucket method, created just to store the keys in the Bucket for testing purposes. """
self.keys.append(key)
LOCAL_SETTINGS = {
'STORAGE_KWARGS': {
'location': settings.GRADES_DOWNLOAD['ROOT_PATH'],
},
}
def list(self, prefix): # pylint: disable=unused-argument
""" Expected method on a Bucket object. """
return self.keys
class MockS3Connection(object):
""" Mocking a boto S3 Connection """
def __init__(self, access_key, secret_key):
pass
def get_bucket(self, bucket_name):
""" Expected method on an S3Connection object. """
return MockBucket(bucket_name)
S3_SETTINGS = {
'STORAGE_CLASS': 'storages.backends.s3boto.S3BotoStorage',
'STORAGE_KWARGS': {
'bucket': settings.GRADES_DOWNLOAD['BUCKET'],
'location': settings.GRADES_DOWNLOAD['ROOT_PATH'],
},
}
class ReportStoreTestMixin(object):
......@@ -59,6 +36,7 @@ class ReportStoreTestMixin(object):
Mixin for report store tests.
"""
def setUp(self):
super(ReportStoreTestMixin, self).setUp()
self.course_id = CourseLocator(org="testx", course="coursex", run="runx")
def create_report_store(self):
......@@ -73,6 +51,8 @@ class ReportStoreTestMixin(object):
in reverse chronological order.
"""
report_store = self.create_report_store()
self.assertEqual(report_store.links_for(self.course_id), [])
report_store.store(self.course_id, 'old_file', StringIO())
time.sleep(1) # Ensure we have a unique timestamp.
report_store.store(self.course_id, 'middle_file', StringIO())
......@@ -85,23 +65,57 @@ class ReportStoreTestMixin(object):
)
class LocalFSReportStoreTestCase(ReportStoreTestMixin, TestReportMixin, TestCase):
class LocalFSReportStoreTestCase(ReportStoreTestMixin, TestReportMixin, SimpleTestCase):
"""
Test the LocalFSReportStore model.
Test the old LocalFSReportStore configuration.
"""
def create_report_store(self):
""" Create and return a LocalFSReportStore. """
return LocalFSReportStore.from_config(config_name='GRADES_DOWNLOAD')
"""
Create and return a DjangoStorageReportStore using the old
LocalFSReportStore configuration.
"""
return ReportStore.from_config(config_name='GRADES_DOWNLOAD')
@mock.patch('instructor_task.models.S3Connection', new=MockS3Connection)
@mock.patch('instructor_task.models.Key', new=MockKey)
@mock.patch('instructor_task.models.settings.AWS_SECRET_ACCESS_KEY', create=True, new="access_key")
@mock.patch('instructor_task.models.settings.AWS_ACCESS_KEY_ID', create=True, new="access_id")
class S3ReportStoreTestCase(ReportStoreTestMixin, TestReportMixin, TestCase):
@patch.dict(settings.GRADES_DOWNLOAD, {'STORAGE_TYPE': 's3'})
class S3ReportStoreTestCase(MockS3Mixin, ReportStoreTestMixin, TestReportMixin, SimpleTestCase):
"""
Test the S3ReportStore model.
Test the old S3ReportStore configuration.
"""
def create_report_store(self):
""" Create and return a S3ReportStore. """
return S3ReportStore.from_config(config_name='GRADES_DOWNLOAD')
"""
Create and return a DjangoStorageReportStore using the old
S3ReportStore configuration.
"""
connection = boto.connect_s3()
connection.create_bucket(settings.GRADES_DOWNLOAD['BUCKET'])
return ReportStore.from_config(config_name='GRADES_DOWNLOAD')
@override_settings(GRADES_DOWNLOAD=LOCAL_SETTINGS)
class DjangoStorageReportStoreLocalTestCase(ReportStoreTestMixin, TestReportMixin, SimpleTestCase):
"""
Test the DjangoStorageReportStore implementation using the local
filesystem.
"""
def create_report_store(self):
"""
Create and return a DjangoStorageReportStore configured to use the
local filesystem for storage.
"""
return ReportStore.from_config(config_name='GRADES_DOWNLOAD')
@override_settings(GRADES_DOWNLOAD=S3_SETTINGS)
class DjangoStorageReportStoreS3TestCase(MockS3Mixin, ReportStoreTestMixin, TestReportMixin, SimpleTestCase):
"""
Test the DjangoStorageReportStore implementation using S3 stubs.
"""
def create_report_store(self):
"""
Create and return a DjangoStorageReportStore configured to use S3 for
storage.
"""
connection = boto.connect_s3()
connection.create_bucket(settings.GRADES_DOWNLOAD['STORAGE_KWARGS']['bucket'])
return ReportStore.from_config(config_name='GRADES_DOWNLOAD')
......@@ -79,7 +79,8 @@ class InstructorGradeReportTestCase(TestReportMixin, InstructorTaskCourseTestCas
self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
report_csv_filename = report_store.links_for(course_id)[0][0]
with open(report_store.path_to(course_id, report_csv_filename)) as csv_file:
report_path = report_store.path_to(course_id, report_csv_filename)
with report_store.storage.open(report_path) as csv_file:
for row in unicodecsv.DictReader(csv_file):
if row.get('username') == username:
self.assertEqual(row[column_header], expected_cell_content)
......@@ -564,7 +565,8 @@ class TestInstructorDetailedEnrollmentReport(TestReportMixin, InstructorTaskCour
"""
report_store = ReportStore.from_config(config_name='FINANCIAL_REPORTS')
report_csv_filename = report_store.links_for(self.course.id)[0][0]
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
report_path = report_store.path_to(self.course.id, report_csv_filename)
with report_store.storage.open(report_path) as csv_file:
# Expand the dict reader generator so we don't lose its content
for row in unicodecsv.DictReader(csv_file):
if row.get('Username') == username:
......@@ -994,7 +996,8 @@ class TestExecutiveSummaryReport(TestReportMixin, InstructorTaskCourseTestCase):
Verify grade report data.
"""
report_html_filename = report_store.links_for(self.course.id)[0][0]
with open(report_store.path_to(self.course.id, report_html_filename)) as html_file:
report_path = report_store.path_to(self.course.id, report_html_filename)
with report_store.storage.open(report_path) as html_file:
html_file_data = html_file.read()
for data in expected_data:
self.assertTrue(data in html_file_data)
......@@ -1087,7 +1090,8 @@ class TestCourseSurveyReport(TestReportMixin, InstructorTaskCourseTestCase):
Verify course survey data.
"""
report_csv_filename = report_store.links_for(self.course.id)[0][0]
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
report_path = report_store.path_to(self.course.id, report_csv_filename)
with report_store.storage.open(report_path) as csv_file:
csv_file_data = csv_file.read()
for data in expected_data:
self.assertIn(data, csv_file_data)
......@@ -1169,7 +1173,8 @@ class TestTeamStudentReport(TestReportMixin, InstructorTaskCourseTestCase):
self.assertDictContainsSubset({'attempted': 2, 'succeeded': 2, 'failed': 0}, result)
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
report_csv_filename = report_store.links_for(self.course.id)[0][0]
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
report_path = report_store.path_to(self.course.id, report_csv_filename)
with report_store.storage.open(report_path) as csv_file:
for row in unicodecsv.DictReader(csv_file):
if row.get('username') == username:
self.assertEqual(row['team'], expected_team)
......@@ -1539,7 +1544,8 @@ class TestGradeReportEnrollmentAndCertificateInfo(TestReportMixin, InstructorTas
upload_grades_csv(None, None, self.course.id, None, 'graded')
report_store = ReportStore.from_config(config_name='GRADES_DOWNLOAD')
report_csv_filename = report_store.links_for(self.course.id)[0][0]
with open(report_store.path_to(self.course.id, report_csv_filename)) as csv_file:
report_path = report_store.path_to(self.course.id, report_csv_filename)
with report_store.storage.open(report_path) as csv_file:
for row in unicodecsv.DictReader(csv_file):
if row.get('username') == username:
csv_row_data = [row[column] for column in self.columns_to_check]
......@@ -2213,7 +2219,7 @@ class TestInstructorOra2Report(SharedModuleStoreTestCase):
with patch('instructor_task.tasks_helper.OraAggregateData.collect_ora2_data') as mock_collect_data:
mock_collect_data.return_value = (test_header, test_rows)
with patch('instructor_task.models.LocalFSReportStore.store_rows') as mock_store_rows:
with patch('instructor_task.models.DjangoStorageReportStore.store_rows') as mock_store_rows:
return_val = upload_ora2_data(None, None, self.course.id, None, 'generated')
# pylint: disable=maybe-no-member
......
......@@ -3,31 +3,35 @@ Tests for django admin commands in the verify_student module
Lots of imports from verify_student's model tests, since they cover similar ground
"""
import boto
from nose.tools import assert_equals
from mock import patch
from django.test import TestCase
from django.conf import settings
from common.test.utils import MockS3Mixin
from student.tests.factories import UserFactory
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification
from django.core.management import call_command
from lms.djangoapps.verify_student.tests.test_models import (
MockKey, MockS3Connection, mock_software_secure_post,
mock_software_secure_post_error, FAKE_SETTINGS,
mock_software_secure_post, mock_software_secure_post_error, FAKE_SETTINGS,
)
# Lots of patching to stub in our own settings, S3 substitutes, and HTTP posting
# Lots of patching to stub in our own settings, and HTTP posting
@patch.dict(settings.VERIFY_STUDENT, FAKE_SETTINGS)
@patch('lms.djangoapps.verify_student.models.S3Connection', new=MockS3Connection)
@patch('lms.djangoapps.verify_student.models.Key', new=MockKey)
@patch('lms.djangoapps.verify_student.models.requests.post', new=mock_software_secure_post)
class TestVerifyStudentCommand(TestCase):
class TestVerifyStudentCommand(MockS3Mixin, TestCase):
"""
Tests for django admin commands in the verify_student module
"""
def setUp(self):
super(TestVerifyStudentCommand, self).setUp()
connection = boto.connect_s3()
connection.create_bucket(FAKE_SETTINGS['SOFTWARE_SECURE']['S3_BUCKET'])
def create_and_submit(self, username):
"""
Helper method that lets us create new SoftwareSecurePhotoVerifications
......
......@@ -11,6 +11,7 @@ photo verification process as generic as possible.
import functools
import json
import logging
import os.path
from datetime import datetime, timedelta
from email.utils import formatdate
......@@ -25,12 +26,13 @@ from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django.core.files.base import ContentFile
from django.dispatch import receiver
from django.db import models, transaction
from django.utils.functional import cached_property
from django.utils.translation import ugettext as _, ugettext_lazy
from boto.s3.connection import S3Connection
from boto.s3.key import Key
from openedx.core.storage import get_storage
from simple_history.models import HistoricalRecords
from config_models.models import ConfigurationModel
from course_modes.models import CourseMode
......@@ -616,9 +618,10 @@ class SoftwareSecurePhotoVerification(PhotoVerification):
@status_before_must_be("created")
def upload_face_image(self, img_data):
"""
Upload an image of the user's face to S3. `img_data` should be a raw
Upload an image of the user's face. `img_data` should be a raw
bytestream of a PNG image. This method will take the data, encrypt it
using our FACE_IMAGE_AES_KEY, encode it with base64 and save it to S3.
using our FACE_IMAGE_AES_KEY, encode it with base64 and save it to the
storage backend.
Yes, encoding it to base64 adds compute and disk usage without much real
benefit, but that's what the other end of this API is expecting to get.
......@@ -633,17 +636,18 @@ class SoftwareSecurePhotoVerification(PhotoVerification):
aes_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["FACE_IMAGE_AES_KEY"]
aes_key = aes_key_str.decode("hex")
s3_key = self._generate_s3_key("face")
s3_key.set_contents_from_string(encrypt_and_encode(img_data, aes_key))
path = self._get_path("face")
buff = ContentFile(encrypt_and_encode(img_data, aes_key))
self._storage.save(path, buff)
@status_before_must_be("created")
def upload_photo_id_image(self, img_data):
"""
Upload the user's photo ID image to S3. `img_data` should be a raw
Upload the user's photo ID image. `img_data` should be a raw
bytestream of a PNG image. This method will take the data, encrypt it
using a randomly generated AES key, encode it with base64 and save it to
S3. The random key is also encrypted using Software Secure's public RSA
key and stored in our `photo_id_key` field.
using a randomly generated AES key, encode it with base64 and save it
to the storage backend. The random key is also encrypted using Software
Secure's public RSA key and stored in our `photo_id_key` field.
Yes, encoding it to base64 adds compute and disk usage without much real
benefit, but that's what the other end of this API is expecting to get.
......@@ -662,9 +666,10 @@ class SoftwareSecurePhotoVerification(PhotoVerification):
rsa_key_str = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["RSA_PUBLIC_KEY"]
rsa_encrypted_aes_key = rsa_encrypt(aes_key, rsa_key_str)
# Upload this to S3
s3_key = self._generate_s3_key("photo_id")
s3_key.set_contents_from_string(encrypt_and_encode(img_data, aes_key))
# Save this to the storage backend
path = self._get_path("photo_id")
buff = ContentFile(encrypt_and_encode(img_data, aes_key))
self._storage.save(path, buff)
# Update our record fields
self.photo_id_key = rsa_encrypted_aes_key.encode('base64')
......@@ -752,31 +757,42 @@ class SoftwareSecurePhotoVerification(PhotoVerification):
string: The expiring URL for the image.
"""
s3_key = self._generate_s3_key(name, override_receipt_id=override_receipt_id)
return s3_key.generate_url(self.IMAGE_LINK_DURATION)
path = self._get_path(name, override_receipt_id=override_receipt_id)
return self._storage.url(path)
def _generate_s3_key(self, prefix, override_receipt_id=None):
@cached_property
def _storage(self):
"""
Generates a key for an s3 bucket location
Example: face/4dd1add9-6719-42f7-bea0-115c008c4fca
Return the configured django storage backend.
"""
conn = S3Connection(
settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["AWS_ACCESS_KEY"],
settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["AWS_SECRET_KEY"]
)
bucket = conn.get_bucket(settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["S3_BUCKET"])
config = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]
# Override the receipt ID if one is provided.
# This allow us to construct S3 keys to images submitted in previous attempts
# (used for reverification, where we send a new face photo with the same photo ID
# from a previous attempt).
receipt_id = self.receipt_id if override_receipt_id is None else override_receipt_id
# Default to the S3 backend for backward compatibility
storage_class = config.get("STORAGE_CLASS", "storages.backends.s3boto.S3BotoStorage")
storage_kwargs = config.get("STORAGE_KWARGS", {})
# Map old settings to the parameters expected by the storage backend
if "AWS_ACCESS_KEY" in config:
storage_kwargs["access_key"] = config["AWS_ACCESS_KEY"]
if "AWS_SECRET_KEY" in config:
storage_kwargs["secret_key"] = config["AWS_SECRET_KEY"]
if "S3_BUCKET" in config:
storage_kwargs["bucket"] = config["S3_BUCKET"]
storage_kwargs["querystring_expire"] = self.IMAGE_LINK_DURATION
key = Key(bucket)
key.key = "{}/{}".format(prefix, receipt_id)
return get_storage(storage_class, **storage_kwargs)
return key
def _get_path(self, prefix, override_receipt_id=None):
"""
Returns the path to a resource with this instance's `receipt_id`.
If `override_receipt_id` is given, the path to that resource will be
retrieved instead. This allows us to retrieve images submitted in
previous attempts (used for reverification, where we send a new face
photo with the same photo ID from a previous attempt).
"""
receipt_id = self.receipt_id if override_receipt_id is None else override_receipt_id
return os.path.join(prefix, receipt_id)
def _encrypted_user_photo_key_str(self):
"""
......
......@@ -2,6 +2,7 @@
from datetime import timedelta, datetime
import json
import boto
import ddt
from django.conf import settings
from django.db import IntegrityError
......@@ -13,6 +14,7 @@ from nose.tools import assert_is_none, assert_equals, assert_raises, assert_true
import pytz
import requests.exceptions
from common.test.utils import MockS3Mixin
from student.tests.factories import UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
......@@ -50,41 +52,6 @@ iwIDAQAB
}
class MockKey(object):
"""
Mocking a boto S3 Key object. It's a really dumb mock because once we
write data to S3, we never read it again. We simply generate a link to it
and pass that to Software Secure. Because of that, we don't even implement
the ability to pull back previously written content in this mock.
Testing that the encryption/decryption roundtrip on the data works is in
test_ssencrypt.py
"""
def __init__(self, bucket):
self.bucket = bucket
def set_contents_from_string(self, contents):
self.contents = contents
def generate_url(self, duration):
return "http://fake-edx-s3.edx.org/"
class MockBucket(object):
"""Mocking a boto S3 Bucket object."""
def __init__(self, name):
self.name = name
class MockS3Connection(object):
"""Mocking a boto S3 Connection"""
def __init__(self, access_key, secret_key):
pass
def get_bucket(self, bucket_name):
return MockBucket(bucket_name)
def mock_software_secure_post(url, headers=None, data=None, **kwargs):
"""
Mocks our interface when we post to Software Secure. Does basic assertions
......@@ -129,13 +96,16 @@ def mock_software_secure_post_unavailable(url, headers=None, data=None, **kwargs
raise requests.exceptions.ConnectionError
# Lots of patching to stub in our own settings, S3 substitutes, and HTTP posting
# Lots of patching to stub in our own settings, and HTTP posting
@patch.dict(settings.VERIFY_STUDENT, FAKE_SETTINGS)
@patch('lms.djangoapps.verify_student.models.S3Connection', new=MockS3Connection)
@patch('lms.djangoapps.verify_student.models.Key', new=MockKey)
@patch('lms.djangoapps.verify_student.models.requests.post', new=mock_software_secure_post)
@ddt.ddt
class TestPhotoVerification(ModuleStoreTestCase):
class TestPhotoVerification(MockS3Mixin, ModuleStoreTestCase):
def setUp(self):
super(TestPhotoVerification, self).setUp()
connection = boto.connect_s3()
connection.create_bucket(FAKE_SETTINGS['SOFTWARE_SECURE']['S3_BUCKET'])
def test_state_transitions(self):
"""
......
......@@ -18,6 +18,7 @@ Common traits:
import datetime
import json
import warnings
import dateutil
......@@ -191,6 +192,14 @@ ENV_FEATURES = ENV_TOKENS.get('FEATURES', {})
for feature, value in ENV_FEATURES.items():
FEATURES[feature] = value
# Backward compatibility for deprecated feature names
if 'ENABLE_S3_GRADE_DOWNLOADS' in FEATURES:
warnings.warn(
"'ENABLE_S3_GRADE_DOWNLOADS' is deprecated. Please use 'ENABLE_GRADE_DOWNLOADS' instead",
DeprecationWarning,
)
FEATURES['ENABLE_GRADE_DOWNLOADS'] = FEATURES['ENABLE_S3_GRADE_DOWNLOADS']
CMS_BASE = ENV_TOKENS.get('CMS_BASE', 'studio.edx.org')
ALLOWED_HOSTS = [
......@@ -492,6 +501,20 @@ FILE_UPLOAD_STORAGE_PREFIX = ENV_TOKENS.get('FILE_UPLOAD_STORAGE_PREFIX', FILE_U
# function in util/query.py, which is useful for very large database reads
DATABASES = AUTH_TOKENS['DATABASES']
# The normal database user does not have enough permissions to run migrations.
# Migrations are run with separate credentials, given as DB_MIGRATION_*
# environment variables
for name, database in DATABASES.items():
if name != 'read_replica':
database.update({
'ENGINE': os.environ.get('DB_MIGRATION_ENGINE', database['ENGINE']),
'USER': os.environ.get('DB_MIGRATION_USER', database['USER']),
'PASSWORD': os.environ.get('DB_MIGRATION_PASS', database['PASSWORD']),
'NAME': os.environ.get('DB_MIGRATION_NAME', database['NAME']),
'HOST': os.environ.get('DB_MIGRATION_HOST', database['HOST']),
'PORT': os.environ.get('DB_MIGRATION_PORT', database['PORT']),
})
XQUEUE_INTERFACE = AUTH_TOKENS['XQUEUE_INTERFACE']
# Get the MODULESTORE from auth.json, but if it doesn't exist,
......
"""
A Django settings file for use on AWS while running
database migrations, since we don't want to normally run the
LMS with enough privileges to modify the database schema.
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
# Import everything from .aws so that our settings are based on those.
from .aws import *
import os
from django.core.exceptions import ImproperlyConfigured
def get_db_overrides(db_name):
    """
    Return the settings overrides used when migrating database `db_name`.

    Non-password settings fall back to the values already present in
    ``DATABASES`` when the matching ``DB_MIGRATION_*`` environment variable
    is not set. The password deliberately has no fallback — migrations run
    with separate, explicit credentials via ``DB_MIGRATION_PASS``.

    Raises:
        ImproperlyConfigured: if ``DB_MIGRATION_PASS`` is missing.
    """
    existing = DATABASES[db_name]
    db_overrides = {'PASSWORD': os.environ.get('DB_MIGRATION_PASS', None)}
    # All settings other than the password may reuse the current config.
    for key in ('ENGINE', 'USER', 'NAME', 'HOST', 'PORT'):
        db_overrides[key] = os.environ.get('DB_MIGRATION_' + key, existing[key])
    if db_overrides['PASSWORD'] is None:
        raise ImproperlyConfigured("No database password was provided for running "
                                   "migrations. This is fatal.")
    return db_overrides
# Apply the migration credentials to every configured database except the
# read replica, which is never the target of schema migrations.
for db in DATABASES:
    # You never migrate a read_replica
    if db != 'read_replica':
        DATABASES[db].update(get_db_overrides(db))
......@@ -77,7 +77,7 @@
"ENABLE_PAYMENT_FAKE": true,
"ENABLE_VERIFIED_CERTIFICATES": true,
"ENABLE_DISCUSSION_SERVICE": true,
"ENABLE_S3_GRADE_DOWNLOADS": true,
"ENABLE_GRADE_DOWNLOADS": true,
"ENABLE_THIRD_PARTY_AUTH": true,
"ENABLE_COMBINED_LOGIN_REGISTRATION": true,
"PREVIEW_LMS_BASE": "preview.localhost:8003",
......
......@@ -197,9 +197,9 @@ FEATURES = {
# when enrollment exceeds this number
'MAX_ENROLLMENT_INSTR_BUTTONS': 200,
# Grade calculation started from the new instructor dashboard will write
# grades CSV files to S3 and give links for downloads.
'ENABLE_S3_GRADE_DOWNLOADS': False,
# Grade calculation started from the instructor dashboard will write grades
# CSV files to the configured storage backend and give links for downloads.
'ENABLE_GRADE_DOWNLOADS': False,
# whether to use password policy enforcement or not
'ENFORCE_PASSWORD_POLICY': True,
......
......@@ -24,7 +24,7 @@ FEATURES['ENABLE_MANUAL_GIT_RELOAD'] = True
FEATURES['ENABLE_SERVICE_STATUS'] = True
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['AUTOMATIC_VERIFY_STUDENT_IDENTITY_FOR_TESTING'] = True
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
FEATURES['ENABLE_GRADE_DOWNLOADS'] = True
FEATURES['ENABLE_PAYMENT_FAKE'] = True
......
"""
Settings for OpenStack deployments.
"""
from .aws import * # pylint: disable=wildcard-import, unused-wildcard-import
SWIFT_AUTH_URL = AUTH_TOKENS.get('SWIFT_AUTH_URL')
SWIFT_AUTH_VERSION = AUTH_TOKENS.get('SWIFT_AUTH_VERSION', 1)
SWIFT_USERNAME = AUTH_TOKENS.get('SWIFT_USERNAME')
SWIFT_KEY = AUTH_TOKENS.get('SWIFT_KEY')
SWIFT_TENANT_NAME = AUTH_TOKENS.get('SWIFT_TENANT_NAME')
SWIFT_TENANT_ID = AUTH_TOKENS.get('SWIFT_TENANT_ID')
SWIFT_CONTAINER_NAME = FILE_UPLOAD_STORAGE_BUCKET_NAME
SWIFT_NAME_PREFIX = FILE_UPLOAD_STORAGE_PREFIX
SWIFT_USE_TEMP_URLS = AUTH_TOKENS.get('SWIFT_USE_TEMP_URLS', False)
SWIFT_TEMP_URL_KEY = AUTH_TOKENS.get('SWIFT_TEMP_URL_KEY')
SWIFT_TEMP_URL_DURATION = AUTH_TOKENS.get('SWIFT_TEMP_URL_DURATION', 1800) # seconds
if AUTH_TOKENS.get('SWIFT_REGION_NAME'):
SWIFT_EXTRA_OPTIONS = {'region_name': AUTH_TOKENS['SWIFT_REGION_NAME']}
if AUTH_TOKENS.get('DEFAULT_FILE_STORAGE'):
DEFAULT_FILE_STORAGE = AUTH_TOKENS.get('DEFAULT_FILE_STORAGE')
elif SWIFT_AUTH_URL and SWIFT_USERNAME and SWIFT_KEY:
DEFAULT_FILE_STORAGE = 'swift.storage.SwiftStorage'
else:
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
......@@ -67,7 +67,7 @@ FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['ENABLE_VERIFIED_CERTIFICATES'] = True
# Enable this feature for course staff grade downloads, to enable acceptance tests
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
FEATURES['ENABLE_GRADE_DOWNLOADS'] = True
FEATURES['ALLOW_COURSE_STAFF_GRADE_DOWNLOADS'] = True
GRADES_DOWNLOAD['ROOT_PATH'] += "-{}".format(os.getpid())
......
......@@ -19,7 +19,7 @@ from openedx.core.djangolib.markup import HTML, Text
<p><input type="button" name="list-anon-ids" value="${_("Get Student Anonymized IDs CSV")}" data-csv="true" class="csv" data-endpoint="${ section_data['get_anon_ids_url'] }" class="${'is-disabled' if disable_buttons else ''}" aria-disabled="${'true' if disable_buttons else 'false'}" ></p>
</div>
%if settings.FEATURES.get('ENABLE_S3_GRADE_DOWNLOADS'):
%if settings.FEATURES.get('ENABLE_GRADE_DOWNLOADS'):
<div class="reports-download-container action-type-container">
<hr>
<h3 class="hd hd-3">${_("Reports")}</h3>
......
......@@ -3,6 +3,9 @@ Django storage backends for Open edX.
"""
from django_pipeline_forgiving.storages import PipelineForgivingStorage
from django.contrib.staticfiles.storage import StaticFilesStorage
from django.core.files.storage import get_storage_class
from django.utils.lru_cache import lru_cache
from pipeline.storage import NonPackagingMixin
from require.storage import OptimizedFilesMixin
from openedx.core.djangoapps.theming.storage import (
......@@ -39,3 +42,16 @@ class DevelopmentStorage(
so that we can skip packaging and optimization.
"""
pass
@lru_cache()
def get_storage(storage_class=None, **kwargs):
    """
    Return a storage instance for the given dotted class path and kwargs.

    When `storage_class` is None, Django's default storage class is used.
    Results are memoized: calling this again with identical arguments
    yields the very same instance, which matters when constructing the
    backend is expensive (e.g. it makes HTTP requests on instantiation).
    """
    cls = get_storage_class(storage_class)
    return cls(**kwargs)
......@@ -89,14 +89,14 @@ python-social-auth==0.2.12
pytz==2015.2
pysrt==0.4.7
PyYAML==3.10
requests==2.7.0
requests==2.9.1
requests-oauthlib==0.4.1
scipy==0.14.0
Shapely==1.2.16
singledispatch==3.4.0.2
sorl-thumbnail==12.3
sortedcontainers==0.9.2
stevedore==0.14.1
stevedore==1.10.0
sure==1.2.3
sympy==0.7.1
xmltodict==0.4.1
......
#
# Dependencies for OpenStack deployments.
#
# OpenStack swift backend for django storage API
django-storage-swift==1.2.10
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment