Commit c9d7aa82 by Régis Behmo

Allow file upload to backends other than S3

We would like to support open assessment file uploads for open-edx
installs without access to S3, e.g. to the local filesystem. To that end,
we propose to abstract away some of the logic behind the file upload
backends so that other backends may be implemented.

In this proposed implementation, the uploaded files are stored in the
following folder structure:

        root/bucket/prefix/
                filename.jpg/
                        content
                        metadata.json

Note that a couple of new settings need to be defined in order to use this
feature; a new urls.py file must also be included.
parent 86c1e53a
......@@ -4,180 +4,17 @@ associated with submissions. This can be used to upload new files and provide
URLs to the new location.
"""
import boto
import logging
from django.conf import settings
logger = logging.getLogger("openassessment.fileupload.api")
class FileUploadError(Exception):
    """Base class for every file-upload failure.

    Raised whenever a file could not be uploaded, for any reason.
    """
    pass


class FileUploadInternalError(FileUploadError):
    """A failure inside the File Upload API itself.

    Signals that the upload failed because of a server-side problem which
    is beyond the requester's control.
    """
    pass


class FileUploadRequestError(FileUploadError):
    """A failure caused by an invalid upload request.

    Signals that the uploaded file violated some restriction: type, size,
    upload limits, and so on.
    """
    pass
# Name of the Django setting holding the AWS bucket used for uploaded
# content.
BUCKET_SETTING = "FILE_UPLOAD_STORAGE_BUCKET_NAME"

# Name of the Django setting holding the prefix prepended to every
# uploaded file key.
FILE_STORAGE_SETTING = "FILE_UPLOAD_STORAGE_PREFIX"

# Default prefix used when FILE_UPLOAD_STORAGE_PREFIX is not configured.
FILE_STORAGE = "submissions_attachments"
from . import backends
def get_upload_url(key, content_type):
    """Return a one-time URL that a client can use to upload a file.

    Args:
        key (str): Unique identifier from which the upload location is
            derived; the same key later retrieves the uploaded file.
        content_type (str): MIME type of the file to be uploaded.

    Returns:
        str: A pre-signed, one-time upload URL.

    Raises:
        FileUploadInternalError: An internal error occurred while building
            the URL.
        FileUploadRequestError: The request parameters were invalid.
    """
    bucket_name, key_name = _retrieve_parameters(key)
    try:
        connection = _connect_to_s3()
        return connection.generate_url(
            3600,
            'PUT',
            bucket_name,
            key_name,
            headers={'Content-Length': '5242880', 'Content-Type': content_type}
        )
    except Exception as ex:
        logger.exception(
            u"An internal exception occurred while generating an upload URL."
        )
        raise FileUploadInternalError(ex)
def get_download_url(key):
    """Return a URL from which the file stored under ``key`` can be fetched.

    Args:
        key (str): Unique identifier of the stored file.

    Returns:
        str: A pre-signed download URL, or an empty string when no file
        exists for the key.

    Raises:
        FileUploadInternalError: An internal error occurred while building
            the URL.
    """
    bucket_name, key_name = _retrieve_parameters(key)
    try:
        bucket = _connect_to_s3().get_bucket(bucket_name)
        s3_key = bucket.get_key(key_name)
        if not s3_key:
            return ""
        return s3_key.generate_url(expires_in=1000)
    except Exception as ex:
        logger.exception(
            u"An internal exception occurred while generating a download URL."
        )
        raise FileUploadInternalError(ex)
def _connect_to_s3():
    """Open a boto S3 connection used to generate file URLs.

    AWS credentials are taken from Django settings when present; otherwise
    they stay ``None`` so boto falls back to environment variables or its
    own configuration files.
    """
    return boto.connect_s3(
        aws_access_key_id=getattr(settings, 'AWS_ACCESS_KEY_ID', None),
        aws_secret_access_key=getattr(settings, 'AWS_SECRET_ACCESS_KEY', None)
    )
def _retrieve_parameters(key):
"""
Simple utility function to validate settings and arguments before compiling
bucket names and key names.
Args:
key (str): Custom key passed in with the request.
Returns:
A tuple of the bucket name and the complete key.
Raises:
FileUploadRequestError
FileUploadInternalError
return backends.get_backend().get_upload_url(key, content_type)
def get_download_url(key):
"""
if not key:
raise FileUploadRequestError("Key required for URL request")
bucket_name = getattr(settings, BUCKET_SETTING, None)
if not bucket_name:
raise FileUploadInternalError("No bucket name configured for FileUpload Service.")
return bucket_name, _get_key_name(key)
def _get_key_name(key):
"""Construct a key name with the given string and configured prefix.
Constructs a unique key with the specified path and the service-specific
configured prefix.
Args:
key (str): Key to identify data for both upload and download.
Returns:
A key name (str) to use constructing URLs.
Returns the url at which the file that corresponds to the key can be downloaded.
"""
# The specified file prefix for the storage must be publicly viewable
# or all uploaded images will not be seen.
prefix = getattr(settings, FILE_STORAGE_SETTING, FILE_STORAGE)
return u"{prefix}/{key}".format(
prefix=prefix,
key=key
)
\ No newline at end of file
return backends.get_backend().get_download_url(key)
from . import s3
from . import filesystem
from django.conf import settings
def get_backend():
    """Instantiate the configured file-upload backend.

    The ORA2_FILEUPLOAD_BACKEND setting selects the implementation; it
    defaults to "s3", preserving the historical behaviour.

    Raises:
        ValueError: The setting names an unknown backend.
    """
    backend_name = getattr(settings, "ORA2_FILEUPLOAD_BACKEND", "s3")
    available_backends = {
        "s3": s3.Backend,
        "filesystem": filesystem.Backend,
    }
    try:
        backend_class = available_backends[backend_name]
    except KeyError:
        raise ValueError("Invalid ORA2_FILEUPLOAD_BACKEND setting value: %s" % backend_name)
    return backend_class()
import abc
from django.conf import settings
from ..exceptions import FileUploadInternalError
from ..exceptions import FileUploadRequestError
class Settings(object):
    """Accessors for the file-upload configuration.

    The following Django settings are read:
    FILE_UPLOAD_STORAGE_BUCKET_NAME (str, required): name of the bucket
        (AWS bucket or local directory) that receives uploaded content.
    FILE_UPLOAD_STORAGE_PREFIX (str, optional): prefix prepended to every
        stored file name. The prefix must be publicly viewable, otherwise
        uploaded files cannot be seen. Defaults to
        DEFAULT_FILE_UPLOAD_STORAGE_PREFIX.
    """

    DEFAULT_FILE_UPLOAD_STORAGE_PREFIX = "submissions_attachments"

    @classmethod
    def get_bucket_name(cls):
        """Return the configured bucket name.

        Raises:
            FileUploadInternalError: No bucket name is configured.
        """
        name = getattr(settings, "FILE_UPLOAD_STORAGE_BUCKET_NAME", None)
        if name:
            return name
        raise FileUploadInternalError("No bucket name configured for FileUpload Service.")

    @classmethod
    def get_prefix(cls):
        """Return the storage prefix, falling back to the class default."""
        return getattr(
            settings,
            "FILE_UPLOAD_STORAGE_PREFIX",
            cls.DEFAULT_FILE_UPLOAD_STORAGE_PREFIX,
        )
class BaseBackend(object):
    """Common interface and shared helpers for file-upload backends."""

    __metaclass__ = abc.ABCMeta

    # Seconds before a generated upload URL expires.
    UPLOAD_URL_TIMEOUT = 3600
    # Seconds before a generated download URL expires.
    DOWNLOAD_URL_TIMEOUT = 1000

    @abc.abstractmethod
    def get_upload_url(self, key, content_type):
        """Return a one-time URL that can be used to upload a file.

        Args:
            key (str): Unique identifier from which the upload location is
                derived; the same key must later retrieve the file.
            content_type (str): MIME type of the file to be uploaded.

        Returns:
            str: A one-time upload URL.

        Raises:
            FileUploadInternalError: An internal error occurred while
                building the URL.
            FileUploadRequestError: The request parameters were invalid.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def get_download_url(self, key):
        """Return a URL from which the file stored under ``key`` can be read.

        Args:
            key (str): Unique identifier of the stored file.

        Returns:
            str: A download URL, or an empty string when no file exists
            for the key.
        """
        raise NotImplementedError

    def _retrieve_parameters(self, key):
        """Validate ``key`` and resolve the storage parameters for it.

        Args:
            key (str): Custom key passed in with the request.

        Returns:
            tuple: (bucket name, complete key name).

        Raises:
            FileUploadRequestError: ``key`` is empty.
            FileUploadInternalError: No bucket name is configured.
        """
        if not key:
            raise FileUploadRequestError("Key required for URL request")
        return Settings.get_bucket_name(), self._get_key_name(key)

    def _get_key_name(self, key):
        """Return ``key`` prefixed with the configured storage prefix."""
        return "{prefix}/{key}".format(
            prefix=Settings.get_prefix(),
            key=key,
        )
from .base import BaseBackend
from .. import exceptions
from django.conf import settings
import django.core.cache
from django.core.urlresolvers import reverse
class Backend(BaseBackend):
    """File-upload backend that stores student files on the local filesystem.

    To use it, include the urls from openassessment.fileupload in your
    urls.py file, e.g.:

        url(r'^openassessment/storage', include(openassessment.fileupload.urls)),

    and define ORA2_FILEUPLOAD_CACHE_NAME: the name of the django.core.cache
    instance that maintains the list of active storage URLs, e.g.:

        ORA2_FILEUPLOAD_CACHE_NAME = "ora2-storage"
        CACHES = {
            ...
            'ora2-storage': {
                'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
                ...
            },
            ...
        }
    """

    def get_upload_url(self, key, content_type):
        # Authorize the URL for a limited time, then hand it to the caller.
        make_upload_url_available(self._get_key_name(key), self.UPLOAD_URL_TIMEOUT)
        return self._get_url(key)

    def get_download_url(self, key):
        make_download_url_available(self._get_key_name(key), self.DOWNLOAD_URL_TIMEOUT)
        return self._get_url(key)

    def _get_url(self, key):
        # Resolve the named storage view that serves this key.
        return reverse(
            "openassessment-filesystem-storage",
            kwargs={'key': self._get_key_name(key)},
        )
def get_cache():
    """Return the django cache that tracks authorized upload/download URLs.

    Raises:
        FileUploadInternalError: The cache name setting is not defined.
        InvalidCacheBackendError: The named cache backend has not been
            configured.
    """
    name = getattr(settings, "ORA2_FILEUPLOAD_CACHE_NAME", None)
    if name is not None:
        return django.core.cache.get_cache(name)
    raise exceptions.FileUploadInternalError("Undefined cache backend for file upload")
def make_upload_url_available(url_key_name, timeout):
    """Mark the upload URL identified by ``url_key_name`` as authorized.

    Arguments:
        url_key_name (str): key that uniquely identifies the upload url
        timeout (int): seconds until the authorization expires
    """
    cache_key = get_upload_cache_key(url_key_name)
    return get_cache().set(cache_key, 1, timeout)
def make_download_url_available(url_key_name, timeout):
    """Mark the download URL identified by ``url_key_name`` as authorized.

    Arguments:
        url_key_name (str): key that uniquely identifies the url
        timeout (int): seconds until the authorization expires
    """
    cache_key = get_download_cache_key(url_key_name)
    return get_cache().set(cache_key, 1, timeout)
def is_upload_url_available(url_key_name):
    """Return True when the upload URL is currently authorized."""
    cache_key = get_upload_cache_key(url_key_name)
    return get_cache().get(cache_key) is not None
def is_download_url_available(url_key_name):
    """Return True when the download URL is currently authorized."""
    cache_key = get_download_cache_key(url_key_name)
    return get_cache().get(cache_key) is not None
def get_upload_cache_key(url_key_name):
    """Namespace ``url_key_name`` under the upload authorizations."""
    return "upload/{}".format(url_key_name)
def get_download_cache_key(url_key_name):
    """Namespace ``url_key_name`` under the download authorizations."""
    return "download/{}".format(url_key_name)
import boto
import logging
from django.conf import settings
logger = logging.getLogger("openassessment.fileupload.api")
from .base import BaseBackend
from ..exceptions import FileUploadInternalError
class Backend(BaseBackend):
    """File-upload backend that stores student files in an AWS S3 bucket."""

    def get_upload_url(self, key, content_type):
        """Return a pre-signed one-time S3 upload URL for ``key``."""
        bucket_name, key_name = self._retrieve_parameters(key)
        try:
            connection = _connect_to_s3()
            return connection.generate_url(
                expires_in=self.UPLOAD_URL_TIMEOUT,
                method='PUT',
                bucket=bucket_name,
                key=key_name,
                headers={'Content-Length': '5242880', 'Content-Type': content_type}
            )
        except Exception as ex:
            logger.exception(
                u"An internal exception occurred while generating an upload URL."
            )
            raise FileUploadInternalError(ex)

    def get_download_url(self, key):
        """Return a pre-signed S3 download URL, or "" when no file exists."""
        bucket_name, key_name = self._retrieve_parameters(key)
        try:
            bucket = _connect_to_s3().get_bucket(bucket_name)
            s3_key = bucket.get_key(key_name)
            if not s3_key:
                return ""
            return s3_key.generate_url(expires_in=self.DOWNLOAD_URL_TIMEOUT)
        except Exception as ex:
            logger.exception(
                u"An internal exception occurred while generating a download URL."
            )
            raise FileUploadInternalError(ex)
def _connect_to_s3():
    """Open a boto S3 connection used to generate file URLs.

    AWS credentials are taken from Django settings when present; otherwise
    they stay ``None`` so boto falls back to environment variables or its
    own configuration files.
    """
    return boto.connect_s3(
        aws_access_key_id=getattr(settings, 'AWS_ACCESS_KEY_ID', None),
        aws_secret_access_key=getattr(settings, 'AWS_SECRET_ACCESS_KEY', None)
    )
class FileUploadError(Exception):
    """Root of the file-upload exception hierarchy.

    The generic error raised when a file could not be uploaded.
    """
    pass


class FileUploadInternalError(FileUploadError):
    """An error internal to the File Upload API.

    The upload failed because of an internal problem in the File Upload
    API, beyond the intervention of the requester.
    """
    pass


class FileUploadRequestError(FileUploadError):
    """The request carried invalid parameters for an upload.

    Raised when the uploaded file is somehow invalid, based on type
    restrictions, size restrictions, upload limits, etc.
    """
    pass
# -*- coding: utf-8 -*-
import boto
from boto.s3.key import Key
import ddt
import json
import os
import shutil
import tempfile
from django.conf import settings
from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from moto import mock_s3
from mock import patch
from nose.tools import raises
from openassessment.fileupload import api
from openassessment.fileupload import exceptions
from openassessment.fileupload import views_filesystem as views
from openassessment.fileupload.backends.base import Settings as FileUploadSettings
from openassessment.fileupload.backends.filesystem import get_cache as get_filesystem_cache
@ddt.ddt
class TestFileUploadService(TestCase):
......@@ -38,11 +54,11 @@ class TestFileUploadService(TestCase):
downloadUrl = api.get_download_url("foo")
self.assertIn("https://mybucket.s3.amazonaws.com/submissions_attachments/foo", downloadUrl)
@raises(api.FileUploadInternalError)
@raises(exceptions.FileUploadInternalError)
def test_get_upload_url_no_bucket(self):
api.get_upload_url("foo", "bar")
@raises(api.FileUploadRequestError)
@raises(exceptions.FileUploadRequestError)
def test_get_upload_url_no_key(self):
api.get_upload_url("", "bar")
......@@ -53,7 +69,7 @@ class TestFileUploadService(TestCase):
FILE_UPLOAD_STORAGE_BUCKET_NAME="mybucket"
)
@patch.object(boto, 'connect_s3')
@raises(api.FileUploadInternalError)
@raises(exceptions.FileUploadInternalError)
def test_get_upload_url_error(self, mock_s3):
mock_s3.side_effect = Exception("Oh noes")
api.get_upload_url("foo", "bar")
......@@ -65,8 +81,199 @@ class TestFileUploadService(TestCase):
FILE_UPLOAD_STORAGE_BUCKET_NAME="mybucket"
)
@patch.object(boto, 'connect_s3')
@raises(api.FileUploadInternalError, mock_s3)
@raises(exceptions.FileUploadInternalError, mock_s3)
def test_get_download_url_error(self, mock_s3):
mock_s3.side_effect = Exception("Oh noes")
api.get_download_url("foo")
@override_settings(
    ORA2_FILEUPLOAD_BACKEND="filesystem",
    ORA2_FILEUPLOAD_ROOT='/tmp',
    ORA2_FILEUPLOAD_CACHE_NAME='default',
    FILE_UPLOAD_STORAGE_BUCKET_NAME="testbucket",
)
class TestFileUploadServiceWithFilesystemBackend(TestCase):
    """
    Test open assessment file upload to local file storage.
    """

    def setUp(self):
        # Fresh backend, fixture content, key and cache for every test.
        self.backend = api.backends.get_backend()
        self.content = tempfile.TemporaryFile()
        self.content.write("foobar content")
        self.content.seek(0)
        self.key = None
        self.key_name = None
        self.set_key("myfile.jpg")
        self.content_type = "image/jpeg"
        get_filesystem_cache().clear()
        self.delete_data(self.key_name)

    def tearDown(self):
        # Remove any file data written during the test.
        self.delete_data(self.key_name)

    def set_key(self, key):
        """Set the test key, keeping the prefixed storage name in sync."""
        self.key = key
        self.key_name = os.path.join(FileUploadSettings.get_prefix(), self.key)

    def delete_data(self, key_name):
        """Delete stored data for key_name, ignoring configuration errors."""
        try:
            path = views.get_data_path(key_name)
            if os.path.exists(path):
                shutil.rmtree(path)
        except exceptions.FileUploadInternalError:
            pass

    def test_get_backend(self):
        """The filesystem backend is selected via ORA2_FILEUPLOAD_BACKEND."""
        self.assertTrue(isinstance(self.backend, api.backends.filesystem.Backend))

    def test_get_file_path(self):
        """Content paths live under root/bucket and differ per key."""
        path1 = views.get_file_path("mykey1")
        path2 = views.get_file_path("mykey2")
        self.assertEqual(
            os.path.join(
                settings.ORA2_FILEUPLOAD_ROOT,
                settings.FILE_UPLOAD_STORAGE_BUCKET_NAME,
                "mykey1"
            ),
            os.path.dirname(path1)
        )
        self.assertNotEqual(path1, path2)

    def test_hack_get_file_path(self):
        """Directory-traversal attempts in the key stay inside the bucket."""
        expected_path = os.path.join(
            settings.ORA2_FILEUPLOAD_ROOT,
            settings.FILE_UPLOAD_STORAGE_BUCKET_NAME,
            "key",
            "content"
        )
        self.assertEqual(
            expected_path,
            os.path.abspath(views.get_file_path("../key"))
        )
        self.assertEqual(
            expected_path,
            os.path.abspath(views.get_file_path("../key/"))
        )
        self.assertEqual(
            expected_path,
            os.path.abspath(views.get_file_path(" ../key/ "))
        )

    def test_safe_save(self):
        """Saving outside of the configured bucket directory is rejected."""
        self.assertRaises(
            exceptions.FileUploadRequestError,
            views.safe_save,
            "/tmp/nonauthorisedbucket/file.txt",
            "content"
        )

    def test_delete_file_data_on_metadata_saving_error(self):
        """A metadata write failure removes the already-written content file."""
        key = "key"
        file_path = views.get_file_path(key)
        non_existing_path = "/non/existing/path"
        with patch('openassessment.fileupload.views_filesystem.get_metadata_path') as mock_get_metadata_path:
            mock_get_metadata_path.return_value = non_existing_path
            self.assertRaises(
                exceptions.FileUploadRequestError,
                views.save_to_file,
                "key", "content", "metadata"
            )
        self.assertFalse(os.path.exists(file_path))
        self.assertFalse(os.path.exists(non_existing_path))

    @override_settings(ORA2_FILEUPLOAD_ROOT='')
    def test_undefined_file_upload_root(self):
        """An empty upload root setting is an internal error."""
        self.assertRaises(exceptions.FileUploadInternalError, views.get_file_path, self.key)

    @override_settings(ORA2_FILEUPLOAD_ROOT='/tmp/nonexistingdirectory')
    def test_file_upload_root_does_not_exist(self):
        """A missing upload root directory is an internal error."""
        if os.path.exists(settings.ORA2_FILEUPLOAD_ROOT):
            shutil.rmtree(settings.ORA2_FILEUPLOAD_ROOT)
        self.assertRaises(exceptions.FileUploadInternalError, views.save_to_file, self.key, "content")

    def test_post_is_405(self):
        """Only PUT and GET are allowed on the storage view."""
        upload_url = self.backend.get_upload_url(self.key, "bar")
        response = self.client.post(upload_url, data={"attachment": self.content})
        self.assertEqual(405, response.status_code)

    def test_metadata(self):
        """An upload writes a metadata.json file with the expected fields."""
        self.content_type = "image/bmp"
        upload_url = self.backend.get_upload_url(self.key, self.content_type)
        self.client.put(upload_url, data=self.content.read(), content_type=self.content_type)
        metadata_path = views.get_metadata_path(self.key_name)
        metadata = json.load(open(metadata_path))
        self.assertIsNotNone(metadata_path)
        self.assertTrue(os.path.exists(metadata_path), "No metadata found at %s" % metadata_path)
        self.assertIn("Content-Type", metadata)
        self.assertIn("Date", metadata)
        self.assertIn("Content-MD5", metadata)
        self.assertIn("Content-Length", metadata)

    def test_upload_download(self):
        """A full upload/download round trip preserves content and headers."""
        upload_url = self.backend.get_upload_url(self.key, self.content_type)
        download_url = self.backend.get_download_url(self.key)
        file_path = views.get_file_path(self.key_name)
        upload_response = self.client.put(upload_url, data=self.content.read(), content_type=self.content_type)
        download_response = self.client.get(download_url)
        self.content.seek(0)
        self.assertIn("/" + self.key, upload_url)
        self.assertEqual(200, upload_response.status_code)
        self.assertEqual("", upload_response.content)
        self.assertEqual(200, download_response.status_code)
        self.assertEqual(
            "attachment; filename=" + self.key,
            download_response.get('Content-Disposition')
        )
        self.assertEqual(self.content_type, download_response.get('Content-Type'))
        self.assertIn("foobar content", download_response.content)
        self.assertTrue(os.path.exists(file_path), "File %s does not exist" % file_path)
        with open(file_path) as f:
            self.assertEqual(self.content.read(), f.read())

    def test_download_content_with_no_content_type(self):
        """Downloads without stored content type default to octet-stream."""
        views.save_to_file(self.key_name, "uploaded content", metadata=None)
        download_url = self.backend.get_download_url(self.key)
        download_response = self.client.get(download_url)
        self.assertEqual(200, download_response.status_code)
        self.assertEqual('application/octet-stream', download_response["Content-Type"])

    def test_upload_with_unauthorized_key(self):
        """PUT on a key that was never authorized returns 404."""
        upload_url = reverse("openassessment-filesystem-storage", kwargs={'key': self.key_name})
        cache_before_request = get_filesystem_cache().get(self.key_name)
        upload_response = self.client.put(upload_url, data=self.content.read(), content_type=self.content_type)
        cache_after_request = get_filesystem_cache().get(self.key_name)
        self.assertIsNone(cache_before_request)
        self.assertEqual(404, upload_response.status_code)
        self.assertIsNone(cache_after_request)

    def test_download_url_with_unauthorized_key(self):
        """GET on a key that was never authorized returns 404."""
        download_url = reverse("openassessment-filesystem-storage", kwargs={'key': self.key_name})
        views.save_to_file(self.key_name, "uploaded content")
        download_response = self.client.get(download_url)
        self.assertEqual(404, download_response.status_code)

    def test_upload_download_with_accented_key(self):
        """Non-ASCII keys can be uploaded and downloaded."""
        self.set_key("noël.jpg")
        upload_url = self.backend.get_upload_url(self.key, self.content_type)
        download_url = self.backend.get_download_url(self.key)
        upload_response = self.client.put(upload_url, data=self.content.read(), content_type=self.content_type)
        download_response = self.client.get(download_url)
        self.assertEqual(200, upload_response.status_code)
        self.assertEqual(200, download_response.status_code)
from django.conf.urls import patterns, url
# Route every storage key to the local-filesystem upload/download view; the
# key captures the complete storage path for the file.
urlpatterns = patterns('openassessment.fileupload.views_filesystem',
    url(r'^(?P<key>.+)/$', 'filesystem_storage', name='openassessment-filesystem-storage'),
)
import hashlib
import json
import os
from django.conf import settings
from django.shortcuts import HttpResponse, Http404
from django.utils import timezone
from django.views.decorators.http import require_http_methods
from . import exceptions
from .backends.filesystem import is_upload_url_available, is_download_url_available
from .backends.base import Settings
@require_http_methods(["PUT", "GET"])
def filesystem_storage(request, key):
    """Serve the local-filesystem backend: PUT stores a file, GET returns it.

    Both verbs require ``key`` to have been authorized beforehand through
    the upload/download cache; unauthorized keys get a 404.
    """
    if isinstance(key, unicode):
        key = key.encode("utf-8")
    if request.method == "PUT":
        if not is_upload_url_available(key):
            raise Http404()
        content, metadata = get_content_metadata(request)
        save_to_file(key, content, metadata)
        return HttpResponse()
    # Only GET remains: require_http_methods rejects every other verb.
    if not is_download_url_available(key):
        raise Http404()
    return download_file(key)
def download_file(key):
    """Return an HttpResponse that serves the file stored for ``key``.

    The content type is read from the stored metadata; when the metadata
    file is absent (previously an unhandled IOError) the response falls
    back to ``application/octet-stream``.

    Raises:
        Http404: No content file exists for this key.
    """
    file_path = get_file_path(key)
    metadata_path = get_metadata_path(key)
    if not os.path.exists(file_path):
        raise Http404()
    content_type = 'application/octet-stream'
    if os.path.exists(metadata_path):
        with open(metadata_path) as f:
            metadata = json.load(f)
        content_type = metadata.get("Content-Type", content_type)
    # Open in binary mode so non-text uploads are served unmodified on
    # every platform.
    with open(file_path, 'rb') as f:
        response = HttpResponse(f.read(), content_type=content_type)
    # The directory holding the content file is named after the upload key.
    file_name = os.path.basename(os.path.dirname(file_path))
    response['Content-Disposition'] = 'attachment; filename=' + file_name
    return response
def get_content_metadata(request):
    """Extract the body and upload metadata from an HttpRequest.

    Returns:
        tuple: the request body (str) and the request metadata (dict).
    """
    body = request.body
    metadata = {
        "Content-Type": request.META["CONTENT_TYPE"],
        "Date": str(timezone.now()),
        "Content-MD5": hashlib.md5(body).hexdigest(),
        "Content-Length": request.META["CONTENT_LENGTH"],
    }
    return body, metadata
def save_to_file(key, content, metadata=None):
    """Persist ``content`` and its ``metadata`` under the directory for ``key``.

    Arguments:
        key (str): unique file identifier
        content (str): uploaded file content
        metadata (dict): json-dumpable data
    """
    file_path = get_file_path(key)
    metadata_path = get_metadata_path(key)
    if metadata is None:
        metadata = {}
    safe_save(file_path, content)
    try:
        safe_save(metadata_path, json.dumps(metadata))
    except:
        # Never leave a half-written upload behind: drop both files and
        # let the original error propagate.
        safe_remove(file_path)
        safe_remove(metadata_path)
        raise
def safe_save(path, content):
    """
    Save ``content`` to ``path``, creating intermediate directories if needed.

    Raises:
        FileUploadRequestError: ``path`` resolves outside the bucket
            directory.
        FileUploadInternalError: The upload root directory does not exist.
    """
    bucket_path = get_bucket_path()
    dir_path = os.path.abspath(os.path.dirname(path))
    # A plain startswith() check would also accept sibling directories that
    # merely share the bucket path as a string prefix (e.g. "/root/bucket-x"
    # next to "/root/bucket"), so require an exact match or a real sub-path.
    if dir_path != bucket_path and not dir_path.startswith(bucket_path + os.sep):
        raise exceptions.FileUploadRequestError("Uploaded file name not allowed: '%s'" % path)
    root_directory = get_root_directory_path()
    if not os.path.exists(root_directory):
        raise exceptions.FileUploadInternalError("File upload root directory does not exist: %s" % root_directory)
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    with open(path, 'w') as f:
        f.write(content)
def safe_remove(path):
    """Delete ``path`` if it exists; a missing file is silently ignored.

    An exception still propagates when the file exists but is not
    writable.
    """
    if not os.path.exists(path):
        return
    os.remove(path)
def get_file_path(key):
    """
    Returns the path to the content file.

    The file named "content" inside the data directory for ``key`` holds
    the uploaded bytes.
    """
    return os.path.join(get_data_path(key), "content")
def get_metadata_path(key):
    """
    Returns the path to the metadata file.

    The file named "metadata.json" inside the data directory for ``key``
    holds the json-encoded upload metadata.
    """
    return os.path.join(get_data_path(key), "metadata.json")
def get_data_path(key):
    """Return the directory that stores the content and metadata for ``key``.

    ``..`` sequences are removed from the key, and leading/trailing slashes
    and spaces stripped, to keep the result inside the bucket directory.
    """
    sanitized = key.replace("..", "").strip("/ ")
    return os.path.join(get_bucket_path(), sanitized)
def get_bucket_path():
    """Return the absolute path of the bucket directory."""
    return os.path.abspath(
        os.path.join(get_root_directory_path(), Settings.get_bucket_name())
    )
def get_root_directory_path():
    """Return the root directory in which bucket directories are stored.

    Raises:
        FileUploadInternalError: ORA2_FILEUPLOAD_ROOT is unset or empty.
    """
    root_dir = getattr(settings, "ORA2_FILEUPLOAD_ROOT", None)
    if root_dir:
        return root_dir
    raise exceptions.FileUploadInternalError("Undefined file upload root directory setting")
......@@ -24,6 +24,7 @@ from openassessment.assessment.api import self as self_api
from openassessment.assessment.api import ai as ai_api
from openassessment.fileupload import api as file_api
from openassessment.workflow import api as workflow_api
from openassessment.fileupload import exceptions as file_exceptions
logger = logging.getLogger(__name__)
......@@ -256,7 +257,7 @@ class StaffInfoMixin(object):
try:
submission['image_url'] = file_api.get_download_url(file_key)
except file_api.FileUploadError:
except file_exceptions.FileUploadError:
# Log the error, but do not prevent the rest of the student info
# from being displayed.
msg = (
......
......@@ -4,7 +4,7 @@ from xblock.core import XBlock
from submissions import api
from openassessment.fileupload import api as file_upload_api
from openassessment.fileupload.api import FileUploadError
from openassessment.fileupload.exceptions import FileUploadError
from openassessment.workflow import api as workflow_api
from openassessment.workflow.errors import AssessmentWorkflowError
from .resolve_dates import DISTANT_FUTURE
......
......@@ -12,7 +12,7 @@ from openassessment.assessment.api import self as self_api
from openassessment.assessment.api import ai as ai_api
from openassessment.workflow import api as workflow_api
from openassessment.assessment.errors.ai import AIError, AIGradingInternalError
from openassessment.fileupload.api import FileUploadInternalError
from openassessment.fileupload.exceptions import FileUploadInternalError
from submissions import api as sub_api
from openassessment.xblock.test.base import scenario, XBlockHandlerTestCase
from xblock.core import XBlock
......
......@@ -4,6 +4,7 @@ from django.views.i18n import javascript_catalog
from django.contrib import admin
import openassessment.assessment.urls
import openassessment.fileupload.urls
import workbench.urls
admin.autodiscover()
......@@ -26,6 +27,9 @@ urlpatterns = patterns(
# JavaScript i18n
(r'^jsi18n/$', 'django.views.i18n.javascript_catalog', JS_INFO_DICT),
# File upload to local filesystem
url(r'^openassessment/storage', include(openassessment.fileupload.urls)),
)
# We need to do explicit setup of the Django debug toolbar because autodiscovery
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment