Commit 34c01cd8 by Calen Pennington

Merge pull request #317 from MITx/feature/cale/cms-aws-cfg

Feature/cale/cms aws cfg
parents f53c6b32 7b0f5483
###
### Script for importing courseware from XML format
###
from django.core.management.base import BaseCommand, CommandError
...
@@ -7,34 +7,51 @@ from git import Repo, PushInfo
from xmodule.modulestore.xml_importer import import_from_xml
from xmodule.modulestore.django import modulestore
from collections import namedtuple
from .exceptions import GithubSyncError, InvalidRepo

log = logging.getLogger(__name__)

RepoSettings = namedtuple('RepoSettings', 'path branch origin')


def sync_all_with_github():
    """
    Sync all defined repositories from github
    """
    for repo_name in settings.REPOS:
        sync_with_github(load_repo_settings(repo_name))


def sync_with_github(repo_settings):
    """
    Sync the specified repository from github

    repo_settings: A RepoSettings defining which repo to sync
    """
    revision, course = import_from_github(repo_settings)
    export_to_github(course, "Changes from cms import of revision %s" % revision, "CMS <cms@edx.org>")


def setup_repo(repo_settings):
    """
    Reset the local github repo specified by repo_settings

    repo_settings (RepoSettings): The settings for the repo to reset
    """
    course_dir = repo_settings.path
    repo_path = settings.GITHUB_REPO_ROOT / course_dir

    if not os.path.isdir(repo_path):
        Repo.clone_from(repo_settings.origin, repo_path)

    git_repo = Repo(repo_path)
    origin = git_repo.remotes.origin
    origin.fetch()

    # Do a hard reset to the remote branch so that we have a clean import
    git_repo.git.checkout(repo_settings.branch)

    return git_repo
@@ -43,19 +60,19 @@ def load_repo_settings(course_dir):
    """
    Returns the repo_settings for the course stored in course_dir
    """
    if course_dir not in settings.REPOS:
        raise InvalidRepo(course_dir)

    return RepoSettings(course_dir, **settings.REPOS[course_dir])
def import_from_github(repo_settings):
    """
    Imports data into the modulestore based on the XML stored on github
    """
    course_dir = repo_settings.path
    git_repo = setup_repo(repo_settings)
    git_repo.head.reset('origin/%s' % repo_settings.branch, index=True, working_tree=True)

    module_store = import_from_xml(modulestore(),
                                   settings.GITHUB_REPO_ROOT, course_dirs=[course_dir])
...
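Taken together, the new helpers give this module a single high-level entry point. A minimal sketch of the flow (the course key and settings values are illustrative, not part of the commit):

    # Assumes settings.REPOS maps a course_dir to its 'branch' and 'origin',
    # as in the repos.json / dev settings shown later in this diff.
    from github_sync import load_repo_settings, sync_with_github

    repo = load_repo_settings('edx4edx')
    # -> RepoSettings(path='edx4edx', branch='master',
    #                 origin='git@github.com:MITx/edx4edx.git')

    sync_with_github(repo)  # import_from_github, then export_to_github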
class GithubSyncError(Exception):
    pass


class InvalidRepo(Exception):
    pass
###
### Script for syncing CMS with defined github repos
###
from django.core.management.base import NoArgsCommand
from github_sync import sync_all_with_github


class Command(NoArgsCommand):
    help = \
        '''Sync the CMS with the defined github repos'''

    def handle_noargs(self, **options):
        sync_all_with_github()
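The file name that determines the command's name is collapsed out of this view, so the invocation below is a hypothetical sketch:

    # Hypothetical: a Django management command is named after its module file
    # under the app's management/commands/ directory, which this diff does not show.
    #   python manage.py <command_name> --settings=aws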
from django.test import TestCase
from path import path
import shutil
from github_sync import (
    import_from_github, export_to_github, load_repo_settings,
    sync_all_with_github, sync_with_github
)
from git import Repo
from django.conf import settings
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import Location
from override_settings import override_settings
from github_sync.exceptions import GithubSyncError
from mock import patch, Mock

REPO_DIR = settings.GITHUB_REPO_ROOT / 'local_repo'
WORKING_DIR = path(settings.TEST_ROOT)
@@ -16,8 +19,7 @@ REMOTE_DIR = WORKING_DIR / 'remote_repo'
@override_settings(REPOS={
    'local_repo': {
        'origin': REMOTE_DIR,
        'branch': 'master',
    }
@@ -40,7 +42,7 @@ class GithubSyncTestCase(TestCase):
        remote.git.commit(m='Initial commit')
        remote.git.config("receive.denyCurrentBranch", "ignore")

        self.import_revision, self.import_course = import_from_github(load_repo_settings('local_repo'))

    def tearDown(self):
        self.cleanup()
@@ -61,6 +63,19 @@ class GithubSyncTestCase(TestCase):
            [child.location for child in self.import_course.get_children()])
        self.assertEquals(1, len(self.import_course.get_children()))
    @patch('github_sync.sync_with_github')
    def test_sync_all_with_github(self, sync_with_github):
        sync_all_with_github()
        sync_with_github.assert_called_with(load_repo_settings('local_repo'))

    def test_sync_with_github(self):
        with patch('github_sync.import_from_github', Mock(return_value=(Mock(), Mock()))) as import_from_github:
            with patch('github_sync.export_to_github') as export_to_github:
                settings = load_repo_settings('local_repo')
                sync_with_github(settings)
                import_from_github.assert_called_with(settings)
                # A bare `export_to_github.assert_called` attribute access asserts
                # nothing; check the mock's called flag instead.
                self.assertTrue(export_to_github.called)
    @override_settings(MITX_FEATURES={'GITHUB_PUSH': False})
    def test_export_no_push(self):
        """
...
import json

from django.test.client import Client
from django.test import TestCase
from mock import patch
from override_settings import override_settings
from github_sync import load_repo_settings


@override_settings(REPOS={'repo': {'branch': 'branch', 'origin': 'origin'}})
class PostReceiveTestCase(TestCase):
    def setUp(self):
        self.client = Client()
    @patch('github_sync.views.sync_with_github')
    def test_non_branch(self, sync_with_github):
        self.client.post('/github_service_hook', {'payload': json.dumps({
            'ref': 'refs/tags/foo'})
        })
        self.assertFalse(sync_with_github.called)

    @patch('github_sync.views.sync_with_github')
    def test_non_watched_repo(self, sync_with_github):
        self.client.post('/github_service_hook', {'payload': json.dumps({
            'ref': 'refs/heads/branch',
            'repository': {'name': 'bad_repo'}})
        })
        self.assertFalse(sync_with_github.called)

    @patch('github_sync.views.sync_with_github')
    def test_non_tracked_branch(self, sync_with_github):
        self.client.post('/github_service_hook', {'payload': json.dumps({
            'ref': 'refs/heads/non_branch',
            'repository': {'name': 'repo'}})
        })
        self.assertFalse(sync_with_github.called)

    @patch('github_sync.views.sync_with_github')
    def test_tracked_branch(self, sync_with_github):
        self.client.post('/github_service_hook', {'payload': json.dumps({
            'ref': 'refs/heads/branch',
            'repository': {'name': 'repo'}})
        })
        sync_with_github.assert_called_with(load_repo_settings('repo'))
@@ -5,7 +5,7 @@ from django.http import HttpResponse
from django.conf import settings
from django_future.csrf import csrf_exempt

from . import sync_with_github, load_repo_settings

log = logging.getLogger()
@@ -40,13 +40,12 @@ def github_post_receive(request):
        log.info('No repository matching %s found' % repo_name)
        return HttpResponse('No Repo Found')

    repo = load_repo_settings(repo_name)
    if repo.branch != branch_name:
        log.info('Ignoring changes to non-tracked branch %s in repo %s' % (branch_name, repo_name))
        return HttpResponse('Ignoring non-tracked branch')

    sync_with_github(repo)

    return HttpResponse('Push received')
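For reference, the view keys off two fields of GitHub's post-receive payload — the same shape the tests above exercise. A minimal sketch (values illustrative):

    # GitHub POSTs form data with a single 'payload' field containing JSON:
    payload = json.dumps({
        'ref': 'refs/heads/master',          # only refs/heads/* pushes are synced
        'repository': {'name': 'edx4edx'},   # must be a key in settings.REPOS
    })
    # equivalent to: client.post('/github_service_hook', {'payload': payload})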
"""
This is the default template for our main set of AWS servers.
"""
import json
from .logsettings import get_logger_config
from .common import *
############################### ALWAYS THE SAME ################################
DEBUG = False
TEMPLATE_DEBUG = False
EMAIL_BACKEND = 'django_ses.SESBackend'
SESSION_ENGINE = 'django.contrib.sessions.backends.cache'
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
########################### NON-SECURE ENV CONFIG ##############################
# Things like server locations, ports, etc.
with open(ENV_ROOT / "cms.env.json") as env_file:
ENV_TOKENS = json.load(env_file)
SITE_NAME = ENV_TOKENS['SITE_NAME']
LOG_DIR = ENV_TOKENS['LOG_DIR']
CACHES = ENV_TOKENS['CACHES']
for feature, value in ENV_TOKENS.get('MITX_FEATURES', {}).items():
MITX_FEATURES[feature] = value
LOGGING = get_logger_config(LOG_DIR,
logging_env=ENV_TOKENS['LOGGING_ENV'],
syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
debug=False)
with open(ENV_ROOT / "repos.json") as repos_file:
REPOS = json.load(repos_file)
############################## SECURE AUTH ITEMS ###############################
# Secret things: passwords, access keys, etc.
with open(ENV_ROOT / "cms.auth.json") as auth_file:
AUTH_TOKENS = json.load(auth_file)
AWS_ACCESS_KEY_ID = AUTH_TOKENS["AWS_ACCESS_KEY_ID"]
AWS_SECRET_ACCESS_KEY = AUTH_TOKENS["AWS_SECRET_ACCESS_KEY"]
DATABASES = AUTH_TOKENS['DATABASES']
MODULESTORE = AUTH_TOKENS['MODULESTORE']
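A sketch of the three JSON files this settings module reads; the keys come from the reads above, and every value is a placeholder:

    cms.env.json:
        {"SITE_NAME": "studio.example.com",
         "LOG_DIR": "/var/log/cms",
         "LOGGING_ENV": "cms-prod",
         "SYSLOG_SERVER": "syslog.example.com",
         "CACHES": {"default": {"BACKEND": "django.core.cache.backends.locmem.LocMemCache"}},
         "MITX_FEATURES": {"GITHUB_PUSH": true}}

    repos.json (one entry per tracked course repo, same shape as the dev REPOS dict later in this diff):
        {"edx4edx": {"branch": "master",
                     "origin": "git@github.com:MITx/edx4edx.git"}}

    cms.auth.json:
        {"AWS_ACCESS_KEY_ID": "...",
         "AWS_SECRET_ACCESS_KEY": "...",
         "DATABASES": {"default": {"ENGINE": "django.db.backends.mysql"}},
         "MODULESTORE": {"default": {"ENGINE": "xmodule.modulestore.mongo.MongoModuleStore"}}}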
@@ -243,7 +243,7 @@ with open(module_styles_path, 'w') as module_styles:
PIPELINE_CSS = {
    'base-style': {
        'source_filenames': ['sass/base-style.scss'],
        'output_filename': 'css/cms-base-style.css',
    },
}
@@ -260,15 +260,15 @@ PIPELINE_JS = {
            for pth
            in glob2.glob(PROJECT_ROOT / 'static/coffee/src/**/*.coffee')
        ],
        'output_filename': 'js/cms-application.js',
    },
    'module-js': {
        'source_filenames': module_js_sources,
        'output_filename': 'js/cms-modules.js',
    },
    'spec': {
        'source_filenames': [pth.replace(PROJECT_ROOT / 'static/', '') for pth in glob2.glob(PROJECT_ROOT / 'static/coffee/spec/**/*.coffee')],
        'output_filename': 'js/cms-spec.js'
    }
}
@@ -309,6 +309,7 @@ INSTALLED_APPS = (
    # For CMS
    'contentstore',
    'github_sync',
    'student',  # misleading name due to sharing with lms

    # For asset pipelining
...
@@ -32,38 +32,23 @@ DATABASES = {
REPOS = {
    'edx4edx': {
        'branch': 'master',
        'origin': 'git@github.com:MITx/edx4edx.git',
    },
    'content-mit-6002x': {
        'branch': 'master',
        'origin': 'git@github.com:MITx/6002x-fall-2012.git',
    },
    '6.00x': {
        'branch': 'master',
        'origin': 'git@github.com:MITx/6.00x.git',
    },
    '7.00x': {
        'branch': 'master',
        'origin': 'git@github.com:MITx/7.00x.git',
    },
    '3.091x': {
        'branch': 'master',
        'origin': 'git@github.com:MITx/3.091x.git',
    },
}
...
import os
import os.path
import platform
import sys


def get_logger_config(log_dir,
                      logging_env="no_env",
                      tracking_filename=None,
                      syslog_addr=None,
                      debug=False):
    """Return the appropriate logging config dictionary. You should assign the
    result of this to the LOGGING var in your settings. The reason it's done
    this way instead of registering directly is because I didn't want to worry
    about resetting the logging state if this is called multiple times when
    settings are extended."""

    # If we're given an explicit place to put tracking logs, we do that (say for
    # debugging). However, logging is not safe for multiple processes hitting
    # the same file. So if it's left blank, we dynamically create the filename
    # based on the PID of this worker process.
    if tracking_filename:
        tracking_file_loc = os.path.join(log_dir, tracking_filename)
    else:
        pid = os.getpid()  # So we can log which process is creating the log
        tracking_file_loc = os.path.join(log_dir, "tracking_{0}.log".format(pid))

    hostname = platform.node().split(".")[0]
    syslog_format = ("[%(name)s][env:{logging_env}] %(levelname)s [{hostname} " +
                     " %(process)d] [%(filename)s:%(lineno)d] - %(message)s").format(
                         logging_env=logging_env, hostname=hostname)

    handlers = ['console'] if debug else ['console', 'syslogger', 'newrelic']

    return {
        'version': 1,
        'formatters': {
            'standard': {
                'format': '%(asctime)s %(levelname)s %(process)d [%(name)s] %(filename)s:%(lineno)d - %(message)s',
            },
            'syslog_format': {'format': syslog_format},
            'raw': {'format': '%(message)s'},
        },
        'handlers': {
            'console': {
                'level': 'DEBUG' if debug else 'INFO',
                'class': 'logging.StreamHandler',
                'formatter': 'standard',
                'stream': sys.stdout,
            },
            'syslogger': {
                'level': 'INFO',
                'class': 'logging.handlers.SysLogHandler',
                'address': syslog_addr,
                'formatter': 'syslog_format',
            },
            'tracking': {
                'level': 'DEBUG',
                'class': 'logging.handlers.WatchedFileHandler',
                'filename': tracking_file_loc,
                'formatter': 'raw',
            },
            'newrelic': {
                'level': 'ERROR',
                'class': 'newrelic_logging.NewRelicHandler',
                'formatter': 'raw',
            }
        },
        'loggers': {
            'django': {
                'handlers': handlers,
                'propagate': True,
                'level': 'INFO'
            },
            'tracking': {
                'handlers': ['tracking'],
                'level': 'DEBUG',
                'propagate': False,
            },
            '': {
                'handlers': handlers,
                'level': 'DEBUG',
                'propagate': False
            },
            'mitx': {
                'handlers': handlers,
                'level': 'DEBUG',
                'propagate': False
            },
            'keyedcache': {
                'handlers': handlers,
                'level': 'DEBUG',
                'propagate': False
            },
        }
    }
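Usage mirrors the aws.py settings earlier in this diff; a minimal sketch with placeholder paths and env name:

    from .logsettings import get_logger_config

    LOGGING = get_logger_config('/var/log/cms',
                                logging_env='cms-prod',
                                syslog_addr=('syslog.example.com', 514),
                                debug=False)
    # debug=True keeps only the console handler; otherwise records also go to
    # the syslogger and newrelic handlers configured above.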
@@ -5,6 +5,7 @@ from x_module import XModuleDescriptor
from lxml import etree
from functools import wraps
import logging
import traceback

log = logging.getLogger(__name__)
@@ -21,29 +22,31 @@ def process_includes(fn):
        next_include = xml_object.find('include')
        while next_include is not None:
            file = next_include.get('file')
            parent = next_include.getparent()

            if file is None:
                # No file attribute: drop the include before continuing, so the
                # loop cannot find the same element again and spin forever.
                parent.remove(next_include)
                next_include = xml_object.find('include')
                continue

            try:
                ifp = system.resources_fs.open(file)
                # read in and convert to XML
                incxml = etree.XML(ifp.read())

                # insert new XML into tree in place of include
                parent.insert(parent.index(next_include), incxml)
            except Exception:
                msg = "Error in problem xml include: %s" % (etree.tostring(next_include, pretty_print=True))
                log.exception(msg)
                parent = next_include.getparent()
                errorxml = etree.Element('error')
                messagexml = etree.SubElement(errorxml, 'message')
                messagexml.text = msg
                stackxml = etree.SubElement(errorxml, 'stacktrace')
                stackxml.text = traceback.format_exc()
                # insert error XML in place of include
                parent.insert(parent.index(next_include), errorxml)

            parent.remove(next_include)
            next_include = xml_object.find('include')
...
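For context, process_includes operates on markup like the following (file name illustrative); with this change, a failed include is replaced by an error element instead of aborting the whole import:

    <problem>
        <include file="subproblems/part1.xml"/>
    </problem>

    <!-- after a failed include, the tree contains instead: -->
    <error>
        <message>Error in problem xml include: ...</message>
        <stacktrace>Traceback (most recent call last): ...</stacktrace>
    </error>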
class InvalidDefinitionError(Exception):
    pass


class NotFoundError(Exception):
    pass
@@ -52,6 +52,7 @@ function update_schematics() {
        schematics[i].setAttribute("loaded", "true");
    }
}

window.update_schematics = update_schematics;

// add ourselves to the tasks that get performed when window is loaded
function add_schematic_handler(other_onload) {
...
@@ -14,5 +14,10 @@ class InsufficientSpecificationError(Exception):
class InvalidLocationError(Exception):
    pass


class NoPathToItem(Exception):
    pass


class DuplicateItemError(Exception):
    pass
import pymongo
from bson.son import SON
from fs.osfs import OSFS
from itertools import repeat
from path import path
from importlib import import_module
from xmodule.errorhandlers import strict_error_handler
@@ -14,14 +14,13 @@ from mitxmako.shortcuts import render_to_string
from . import ModuleStore, Location
from .exceptions import (ItemNotFoundError, InsufficientSpecificationError,
                         NoPathToItem, DuplicateItemError)

# TODO (cpennington): This code currently operates under the assumption that
# there is only one revision for each item. Once we start versioning inside the CMS,
# that assumption will have to change


class CachingDescriptorSystem(MakoDescriptorSystem):
    """
    A system that has a cache of module json that it will use to load modules
@@ -98,7 +97,7 @@ class MongoModuleStore(ModuleStore):
            module_path, _, class_name = default_class.rpartition('.')
            class_ = getattr(import_module(module_path), class_name)
            self.default_class = class_
        self.fs_root = path(fs_root)

    def _clean_item_data(self, item):
        """
@@ -142,8 +141,9 @@ class MongoModuleStore(ModuleStore):
        """
        Load an XModuleDescriptor from item, using the children stored in data_cache
        """
        data_dir = item.get('metadata', {}).get('data_dir', item['location']['course'])
        resource_fs = OSFS(self.fs_root / data_dir)

        system = CachingDescriptorSystem(
            self,
            data_cache,
@@ -215,15 +215,22 @@ class MongoModuleStore(ModuleStore):
        return self._load_items(list(items), depth)

    def create_item(self, location):
        """
        Create an empty item at the specified location.

        If that location already exists, raises a DuplicateItemError

        location: Something that can be passed to Location
        """
        try:
            self.collection.insert({
                '_id': Location(location).dict(),
            })
        except pymongo.errors.DuplicateKeyError:
            raise DuplicateItemError(location)
    def update_item(self, location, data):
        """
@@ -286,8 +293,6 @@ class MongoModuleStore(ModuleStore):
            {'_id': True})
        return [i['_id'] for i in items]

    def path_to_location(self, location, course_name=None):
        '''
        Try to find a course_id/chapter/section[/position] path to this location.
@@ -361,7 +366,6 @@ class MongoModuleStore(ModuleStore):
        if path is None:
            raise(NoPathToItem(location))

        n = len(path)
        course_id = CourseDescriptor.location_to_id(path[0])
        chapter = path[1].name if n > 1 else None
...
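A sketch of the new create_item contract (store and location values illustrative):

    from xmodule.modulestore import Location
    from xmodule.modulestore.exceptions import DuplicateItemError

    loc = Location('i4x', 'edx', 'edx4edx', 'problem', 'sample')
    store.create_item(loc)        # first insert succeeds
    try:
        store.create_item(loc)    # same _id -> pymongo DuplicateKeyError
    except DuplicateItemError:
        pass                      # surfaced as a well-defined modulestore error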
import logging

from .xml import XMLModuleStore
from .exceptions import DuplicateItemError

log = logging.getLogger(__name__)
@@ -27,7 +28,7 @@ def import_from_xml(store, data_dir, course_dirs=None, eager=True,
        # This should in the future create new revisions of the items on import
        try:
            store.create_item(module.location)
        except DuplicateItemError:
            log.exception('Item already exists at %s' % module.location.url())

        if 'data' in module.definition:
...
@@ -6,6 +6,7 @@ import logging
log = logging.getLogger(__name__)


class RawDescriptor(MakoModuleDescriptor, XmlDescriptor):
    """
    Module that provides a raw editing view of its data and children
@@ -33,7 +34,7 @@ class RawDescriptor(MakoModuleDescriptor, XmlDescriptor):
            line, offset = err.position
            msg = ("Unable to create xml for problem {loc}. "
                   "Context: '{context}'".format(
                       context=lines[line - 1][offset - 40:offset + 40],
                       loc=self.location))
            log.exception(msg)
            self.system.error_handler(msg)
...
from collections import MutableMapping
from xmodule.x_module import XModuleDescriptor
from xmodule.modulestore import Location
from lxml import etree
import copy
import logging
import traceback
from collections import namedtuple
from fs.errors import ResourceNotFoundError
import os
@@ -13,6 +15,7 @@ log = logging.getLogger(__name__)
# but the actual improvement wasn't measured (and it was implemented late at night).
# We should check if it hurts, and whether there's a better way of doing lazy loading


class LazyLoadingDict(MutableMapping):
    """
    A dictionary object that lazily loads its contents from a provided
@@ -173,6 +176,9 @@ class XmlDescriptor(XModuleDescriptor):
        url identifiers
        """
        xml_object = etree.fromstring(xml_data)

        # VS[compat] -- just have the url_name lookup once translation is done
        slug = xml_object.get('url_name', xml_object.get('slug'))
        location = Location('i4x', org, course, xml_object.tag, slug)

        def metadata_loader():
            metadata = {}
@@ -210,25 +216,24 @@ class XmlDescriptor(XModuleDescriptor):
                with system.resources_fs.open(filepath) as file:
                    definition_xml = cls.file_to_xml(file)
            except (ResourceNotFoundError, etree.XMLSyntaxError):
                msg = 'Unable to load file contents at path %s for item %s' % (filepath, location.url())
                log.exception(msg)
                system.error_handler(msg)
                # if error_handler didn't reraise, work around problem.
                error_elem = etree.Element('error')
                message_elem = etree.SubElement(error_elem, 'error_message')
                message_elem.text = msg
                stack_elem = etree.SubElement(error_elem, 'stack_trace')
                stack_elem.text = traceback.format_exc()
                return {'data': etree.tostring(error_elem)}

            cls.clean_metadata_from_xml(definition_xml)
            return cls.definition_from_xml(definition_xml, system)

        return cls(
            system,
            LazyLoadingDict(definition_loader),
            location=location,
            metadata=LazyLoadingDict(metadata_loader),
        )
...
@@ -32,12 +32,12 @@ LOG_DIR = ENV_TOKENS['LOG_DIR']
CACHES = ENV_TOKENS['CACHES']

for feature, value in ENV_TOKENS.get('MITX_FEATURES', {}).items():
    MITX_FEATURES[feature] = value

WIKI_ENABLED = ENV_TOKENS.get('WIKI_ENABLED', WIKI_ENABLED)

LOGGING = get_logger_config(LOG_DIR,
                            logging_env=ENV_TOKENS['LOGGING_ENV'],
                            syslog_addr=(ENV_TOKENS['SYSLOG_SERVER'], 514),
                            debug=False)
...
@@ -301,15 +301,15 @@ STATICFILES_STORAGE = 'pipeline.storage.PipelineCachedStorage'
PIPELINE_CSS = {
    'application': {
        'source_filenames': ['sass/application.scss'],
        'output_filename': 'css/lms-application.css',
    },
    'course': {
        'source_filenames': ['sass/course.scss', 'js/vendor/CodeMirror/codemirror.css', 'css/vendor/jquery.treeview.css'],
        'output_filename': 'css/lms-course.css',
    },
    'ie-fixes': {
        'source_filenames': ['sass/ie.scss'],
        'output_filename': 'css/lms-ie.css',
    },
}
@@ -410,23 +410,23 @@ PIPELINE_JS = {
            'js/toggle_login_modal.js',
            'js/sticky_filter.js',
        ],
        'output_filename': 'js/lms-application.js'
    },
    'courseware': {
        'source_filenames': [pth.replace(PROJECT_ROOT / 'static/', '') for pth in courseware_only_js],
        'output_filename': 'js/lms-courseware.js'
    },
    'main_vendor': {
        'source_filenames': main_vendor_js,
        'output_filename': 'js/lms-main_vendor.js',
    },
    'module-js': {
        'source_filenames': module_js_sources,
        'output_filename': 'js/lms-modules.js',
    },
    'spec': {
        'source_filenames': [pth.replace(PROJECT_ROOT / 'static/', '') for pth in glob2.glob(PROJECT_ROOT / 'static/coffee/spec/**/*.coffee')],
        'output_filename': 'js/lms-spec.js'
    }
}
...
"""
Settings for the LMS that runs alongside the CMS on AWS
"""
from .aws import *
with open(ENV_ROOT / "cms.auth.json") as auth_file:
CMS_AUTH_TOKENS = json.load(auth_file)
MODULESTORE = CMS_AUTH_TOKENS['MODULESTORE']
@@ -27,7 +27,7 @@ NORMALIZED_DEPLOY_NAME = DEPLOY_NAME.downcase().gsub(/[_\/]/, '-')
INSTALL_DIR_PATH = File.join(DEPLOY_DIR, NORMALIZED_DEPLOY_NAME)
PIP_REPO_REQUIREMENTS = "#{INSTALL_DIR_PATH}/repo-requirements.txt"

# Set up the clean and clobber tasks
CLOBBER.include(BUILD_DIR, REPORT_DIR, 'cover*', '.coverage', 'test_root/*_repo', 'test_root/staticfiles')
CLEAN.include("#{BUILD_DIR}/*.deb", "#{BUILD_DIR}/util")

def select_executable(*cmds)
@@ -54,6 +54,10 @@ task :predjango do
  sh('git submodule update --init')
end

task :clean_test_files do
  sh("git clean -fdx test_root")
end

[:lms, :cms, :common].each do |system|
  report_dir = File.join(REPORT_DIR, system.to_s)
  directory report_dir
@@ -93,7 +97,7 @@ end
  # Per System tasks
  desc "Run all django tests on our djangoapps for the #{system}"
  task "test_#{system}" => ["clean_test_files", "#{system}:collectstatic:test", "fasttest_#{system}"]

  # Have a way to run the tests without running collectstatic -- useful when debugging without
  # messing with static files.
...