Commit 001cfb84 by Brian Wilson

Update insights for datajam release:

* Remove @view and @cron decorators
* Remove use of celery and periodic tasks
* Clean up requirements and apt-packages.
* Remove handle_view.
* Remove get_replica_* helper wrappers.
parent f8be0482
Prior to installing, install djeventstream and loghandlersplus: Prior to installing, install djeventstream:
https://github.com/edx/djeventstream https://github.com/edx/djeventstream
https://github.com/edx/loghandlersplus
To install a development setup: To install a development setup:
sudo apt-get install python-pip python-matplotlib python-scipy emacs mongodb apache2-utils python-mysqldb subversion ipython nginx git redis-server cat apt-packages.txt | xargs sudo apt-get -yq install
git clone https://github.com/edx/insights git clone https://github.com/edx/insights
cd insights cd insights
pip install -r requirements.txt pip install -r requirements.txt
...@@ -16,7 +15,7 @@ To install a development setup: ...@@ -16,7 +15,7 @@ To install a development setup:
To install a setup to build from: To install a setup to build from:
sudo apt-get install python-pip python-matplotlib python-scipy emacs mongodb apache2-utils python-mysqldb subversion ipython nginx git redis-server cat apt-packages.txt | xargs sudo apt-get -yq install
git clone https://github.com/edx/insights git clone https://github.com/edx/insights
cd insights cd insights
pip install -r requirements.txt pip install -r requirements.txt
......
python-matplotlib
python-scipy
emacs
mongodb
apache2-utils
python-mysqldb
subversion
ipython
nginx
git git
python-pip
python-virtualenv
redis-server redis-server
libmysqlclient-dev mongodb
yui-compressor nginx
\ No newline at end of file
...@@ -12,12 +12,11 @@ import imp ...@@ -12,12 +12,11 @@ import imp
from pkg_resources import resource_filename from pkg_resources import resource_filename
DJ_REQUIRED_APPS = ( 'djeventstream.httphandler', DJ_REQUIRED_APPS = (
'djcelery', 'djeventstream.httphandler',
'south', 'south',
'edinsights.core', 'edinsights.core',
'edinsights.modulefs', 'edinsights.modulefs',
# 'modules',
) )
# Types of parameters that queries and views can take. # Types of parameters that queries and views can take.
...@@ -31,14 +30,10 @@ DJFS = { 'type' : 'osfs', ...@@ -31,14 +30,10 @@ DJFS = { 'type' : 'osfs',
TIME_BETWEEN_DATA_REGENERATION = datetime.timedelta(minutes=1) TIME_BETWEEN_DATA_REGENERATION = datetime.timedelta(minutes=1)
INSTALLED_ANALYTICS_MODULES = ['modules.dump_to_db'] #'modules.testmodule',) INSTALLED_ANALYTICS_MODULES = ['modules.dump_to_db']
print INSTALLED_ANALYTICS_MODULES print INSTALLED_ANALYTICS_MODULES
#Initialize celery
import djcelery
djcelery.setup_loader()
SNS_SUBSCRIPTIONS = [] SNS_SUBSCRIPTIONS = []
import django.contrib.auth.decorators import django.contrib.auth.decorators
...@@ -198,19 +193,3 @@ LOGGING = { ...@@ -198,19 +193,3 @@ LOGGING = {
}, },
} }
} }
#Celery settings
BROKER_URL = 'redis://localhost:6379/0'
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600}
CELERY_RESULT_BACKEND = 'redis://localhost:6379/0'
CELERY_TASK_RESULT_EXPIRES = 60 * 60 #1 hour
MODULE_DIR = "modules"
CELERY_IMPORTS = ()
for analytics_module in INSTALLED_ANALYTICS_MODULES:
module_name = "{0}.{1}.{2}".format(MODULE_DIR,analytics_module,"tasks")
try:
imp.find_module(module_name)
CELERY_IMPORTS += (module_name,)
except:
pass
django==1.4.3 django==1.4.8
pymongo==2.4.1 pymongo==2.4.1
pymysql pymysql==0.6.1
flup flup==1.0.3.dev-20110405
fs fs==0.4.0
# mysql-client
mako==0.7.3 mako==0.7.3
requests==0.14.2 requests==1.2.3
multiprocessing multiprocessing==2.6.2.1
gunicorn gunicorn==0.17.4
path.py path.py==3.0.1
decorator decorator==3.4.0
MySQL-python==1.2.4c1 MySQL-python==1.2.4c1
simplejson simplejson==3.3.1
South==0.7.6 South==0.7.6
django-celery==3.0.11
celery-with-redis==3.0
-e git://github.com/edx/django-pipeline.git@c5a4848d3d8fa90a7da4a4007f5653be40cccdd9#egg=django_pipeline-dev -e git://github.com/edx/django-pipeline.git@c5a4848d3d8fa90a7da4a4007f5653be40cccdd9#egg=django_pipeline-dev
-e git://github.com/edx/django-staticfiles.git@6d2504e5c84a3003b4573e0ba0f11adf7583d372#egg=django_staticfiles-dev -e git://github.com/edx/django-staticfiles.git@6d2504e5c84a3003b4573e0ba0f11adf7583d372#egg=django_staticfiles-dev
...@@ -6,11 +6,7 @@ setup( ...@@ -6,11 +6,7 @@ setup(
version = '0.1', version = '0.1',
description='edX Insights Analytics Framework', description='edX Insights Analytics Framework',
package_dir = {'edinsights':'src/edinsights'}, package_dir = {'edinsights':'src/edinsights'},
packages = ['edinsights', 'edinsights.core', 'edinsights.modulefs', 'edinsights.modules', 'edinsights.modules.testmodule'], packages = ['edinsights', 'edinsights.core', 'edinsights.modulefs', 'edinsights.modules'],
author="Piotr Mitros, Vik Paruchuri", author="Piotr Mitros, Vik Paruchuri",
data_files = [
("edinsights/modules/testmodule/static/", ["src/edinsights/modules/testmodule/static/hello.html"]),
("edinsights/modules/testmodule/templates/", ["src/edinsights/modules/testmodule/templates/hello.html"])
],
license = "AGPLv3, see LICENSE.txt" license = "AGPLv3, see LICENSE.txt"
) )
''' Decorators for analytics modules. ''' Decorators for analytics modules.
@view defines a user-visible view
@query defines a machine-readable SOA @query defines a machine-readable SOA
@event_handler takes the user tracking event stream @event_handler takes the user tracking event stream
@cron allows for periodic and delayed events
''' '''
...@@ -17,7 +15,6 @@ from decorator import decorator ...@@ -17,7 +15,6 @@ from decorator import decorator
from django.core.cache import cache from django.core.cache import cache
from django.conf import settings from django.conf import settings
from celery.task import periodic_task
from util import optional_parameter_call from util import optional_parameter_call
import registry import registry
...@@ -25,53 +22,14 @@ from registry import event_handlers, request_handlers ...@@ -25,53 +22,14 @@ from registry import event_handlers, request_handlers
log=logging.getLogger(__name__) log=logging.getLogger(__name__)
def event_handler(batch=True, per_user=False, per_resource=False, def event_handler():
single_process=False, source_queue=None): ''' Decorator to register an event handler.'''
''' Decorator to register an event handler. batch = True
batch=True ==> Normal mode of operation. Cannot break system (unimplemented)
batch=False ==> Event handled immediately operation. Slow handlers can break system.
per_user = True ==> Can be sharded on a per-user basis (default: False)
per_resource = True ==> Can be sharded on a per-resource basis (default: False)
single_process = True ==> Cannot be distributed across process/machines. Queued must be true.
source_queue ==> Not implemented. For a pre-filter (e.g. video)
'''
if single_process or source_queue or not batch:
raise NotImplementedError("Framework isn't done. Sorry. batch=True, source_queue=None, single_proces=False")
def event_handler_factory(func): def event_handler_factory(func):
event_handlers.append({'function' : func, 'batch' : batch}) event_handlers.append({'function' : func, 'batch' : batch})
return func return func
return event_handler_factory return event_handler_factory
def view(category=None, name=None, description=None, args=None):
    ''' Decorator for user-visible views in analytics modules. The
    decorated function returns HTML shown to the user.

    category: optional type specification (global, per-user, etc.);
        when omitted it is extrapolated from the argspec (recommended).
    name: optional user-facing name; defaults to the function name
        (recommended in most cases).
    description: optional; defaults to the function docstring.
    args: optional explicit argspec; generally better omitted.

    TODO: human_name: a name free of Python identifier restrictions --
    e.g. "Daily uploads" rather than "daily_uploads" -- for display in
    human-usable dashboards.
    '''
    def _register(f):
        # Hand the function and its metadata to the shared registry
        # under the 'view' handler class, then return f unchanged so
        # the decorated function stays directly callable.
        registry.register_handler('view', category, name, description, f, args)
        return f
    return _register
def query(category = None, name = None, description = None, args = None): def query(category = None, name = None, description = None, args = None):
''' This decorator is appended to a query in an analytics ''' This decorator is appended to a query in an analytics
...@@ -266,48 +224,6 @@ def memoize_query(cache_time = 60*4, timeout = 60*15, ignores = ()): ...@@ -266,48 +224,6 @@ def memoize_query(cache_time = 60*4, timeout = 60*15, ignores = ()):
return decfun return decfun
return factory return factory
def cron(run_every, force_memoize=False, params={}):
    ''' Run command periodically

    force_memoize: if the function being decorated is also decorated by
    @memoize_query, setting this to True will redo the computation
    regardless of whether the results of the computation already exist in cache

    The task scheduler process (typically celery beat) needs to be started
    manually by the client module with:
    python manage.py celery worker -B --loglevel=INFO
    Celery beat will automatically add tasks from files named 'tasks.py'
    '''
    # NOTE(review): params={} is a mutable default argument; this is safe
    # only while no caller mutates it. Consider params=None with a fallback.
    def factory(f):
        # Registered with celery beat under the wrapped function's own name.
        @periodic_task(run_every=run_every, name=f.__name__)
        def run(func=None, *args, **kw):
            """ Executes the function decorated by @cron
            This function can be called from two distinct places. It can be
            called by the task scheduler (due to @periodic_task),
            in which case func will be None.
            It can also be called as a result of calling the function we
            are currently decorating with @cron. In this case func will be
            the same as f.
            """
            # Was it called from the task scheduler?
            called_as_periodic = True if func is None else False
            if called_as_periodic:
                if force_memoize:
                    # Presumably wraps f so @memoize_query recomputes
                    # instead of serving the cached value -- use_forcememoize
                    # is defined elsewhere; confirm its contract there.
                    func = use_forcememoize(f)
                else:
                    func = f
            else:
                func = f
            # optional_parameter_call supplies optional framework kwargs
            # (see core.util) in addition to the explicit params dict.
            result = optional_parameter_call(func, params)
            return result
        # decorator() (from the 'decorator' package) makes the wrapper
        # preserve f's signature for introspection-based dispatch.
        return decorator(run, f)
    return factory
def event_property(name=None, description=None): def event_property(name=None, description=None):
''' This is used to add properties to events. ''' This is used to add properties to events.
...@@ -319,4 +235,3 @@ def event_property(name=None, description=None): ...@@ -319,4 +235,3 @@ def event_property(name=None, description=None):
registry.register_event_property(f, name, description) registry.register_event_property(f, name, description)
return f return f
return register return register
''' This, together with decorators.py, is the entire API intended to
be used by plug-in modules.
All of this should go through queries, not directly through the
DBs. I wrote this, and later realized it would break abstractions
in not great ways. We may need to re-add it for performance eventually.
'''
from util import get_cache, get_filesystem, get_database
def get_replica_database(module):
    ''' Get a read-replica database of a different module. At
    present, not a read-replica, but this will change in the
    future.

    This is a bad idea, and should be removed in the future'''
    print('deprecated')
    # Bug fix: the wrapped result was computed but never returned,
    # so every caller received None. Mirrors get_replica_cache below,
    # which does return its result.
    return get_database(module)
def get_replica_filesystem(module):
    ''' Get a read-replica filesystem of a different module. At
    present, not a read-replica, but this will change in the
    future.

    This is a bad idea, and should be removed in the future'''
    print('deprecated')
    # Bug fix: the wrapped result was computed but never returned,
    # so every caller received None. Mirrors get_replica_cache below,
    # which does return its result.
    return get_filesystem(module)
def get_replica_cache(module):
    ''' Get a read-replica cache of a different module. At
    present, not a read-replica, but this will change in the
    future.

    This is a bad idea, and should be removed in the future'''
    # Deprecation notice, then delegate straight to the plain helper.
    print('deprecated')
    return get_cache(module)
...@@ -10,15 +10,15 @@ from edinsights.core.util import default_optional_kwargs ...@@ -10,15 +10,15 @@ from edinsights.core.util import default_optional_kwargs
funcskips = default_optional_kwargs.keys()+['params'] # params are additional GET/POST parameters funcskips = default_optional_kwargs.keys()+['params'] # params are additional GET/POST parameters
def register_handler(cls, category, name, description, f, args): def register_handler(cls, category, name, description, f, args):
''' Helper function for @view and @query decorators. ''' Helper function for @query decorators.
''' '''
log.debug("Register {0} {1} {2} {3}".format(cls, category, name, f)) log.debug("Register {0} {1} {2} {3}".format(cls, category, name, f))
# Figure out where this goes. See if there are parameters, and if not, # Figure out where this goes. See if there are parameters, and if not,
# create them by inspecting the function. # create them by inspecting the function.
if args == None: if args == None:
args = inspect.getargspec(f).args args = inspect.getargspec(f).args
if cls not in ['view', 'query']: if cls not in ['query']:
raise ValueError("We can only register views and queries") raise ValueError("We can only register queries")
if not name: if not name:
name = str(f.func_name) name = str(f.func_name)
if not description: if not description:
......
...@@ -2,7 +2,6 @@ from django.conf.urls.defaults import patterns, url ...@@ -2,7 +2,6 @@ from django.conf.urls.defaults import patterns, url
urlpatterns = patterns('', urlpatterns = patterns('',
# Examples: # Examples:
url(r'^view/([A-Za-z_+]+)$', 'edinsights.core.views.handle_view'),
url(r'^query/([A-Za-z_+]+)$', 'edinsights.core.views.handle_query'), url(r'^query/([A-Za-z_+]+)$', 'edinsights.core.views.handle_query'),
url(r'^schema$', 'edinsights.core.views.schema'), url(r'^schema$', 'edinsights.core.views.schema'),
url(r'^event_properties$', 'edinsights.core.views.event_properties'), url(r'^event_properties$', 'edinsights.core.views.event_properties'),
......
...@@ -46,29 +46,10 @@ def schema(request): ...@@ -46,29 +46,10 @@ def schema(request):
return HttpResponse("\n".join(sorted(["<dt><p><b>{class}/{name}</b> <i>{category}</i></dt><dd>{doc}</dd>".format(**rh) for rh in endpoints]))) return HttpResponse("\n".join(sorted(["<dt><p><b>{class}/{name}</b> <i>{category}</i></dt><dd>{doc}</dd>".format(**rh) for rh in endpoints])))
return HttpResponse(json.dumps(endpoints)) return HttpResponse(json.dumps(endpoints))
# Lazily-created singleton proxy over all registered @view handlers.
view_object=None
@auth.auth
def handle_view(request, name, **kwargs):
    ''' Handles generic view.
    Category is where this should be place (per student, per problem, etc.)
    Name is specific
    '''
    global view_object
    # First request pays the construction cost; later requests reuse it.
    if view_object is None:
        from util import get_view
        view_object = get_view(None)
    # Leading underscore marks a private handler -- refuse to expose it.
    if name[0] == '_':
        raise SuspiciousOperation(name+' called')
    # POST parameters first, then GET, so GET values win on key clashes.
    kwargs.update(request.POST.items())
    kwargs.update(request.GET.items())
    # Dispatch by attribute name on the proxy; the handler returns HTML.
    results = view_object.__getattr__(name)(**kwargs)
    return HttpResponse(results)
query_object = None query_object = None
@auth.auth @auth.auth
def handle_query(request, name, **kwargs): def handle_query(request, name, **kwargs):
''' Handles generic view. ''' Handles generic query.
Category is where this should be place (per student, per problem, etc.)
Name is specific Name is specific
''' '''
global query_object global query_object
......
...@@ -4,13 +4,7 @@ ...@@ -4,13 +4,7 @@
modules_to_import = [] modules_to_import = []
from edinsights.core.decorators import query, event_handler, view, event_property from edinsights.core.decorators import query, event_handler, event_property
@view()
def djt_hello_template():
    ''' Example of how to use mako templates in a view '''
    # Imported inside the function -- presumably to defer template-engine
    # setup until the view is actually rendered; confirm before hoisting.
    from edinsights.core.render import render
    return render("hello.html", {})
@query() @query()
def djt_event_count(mongodb): def djt_event_count(mongodb):
...@@ -158,13 +152,6 @@ def djt_fake_user_count(): ...@@ -158,13 +152,6 @@ def djt_fake_user_count():
''' Used as test case for query objects ''' ''' Used as test case for query objects '''
return 2 return 2
@view()
def djt_fake_user_count(query):
    ''' Test of an abstraction used to call queries, abstracting away
    the network, as well as optional parameters like fs, db, etc.
    '''
    # 'query' is supplied by the framework and proxies to the @query
    # handler of the same name (which returns 2 -- see above).
    return "<html>Users: {uc}</html>".format(uc = query.djt_fake_user_count())
@query(name=['djt_three_name', 'edx_djt_three_name', 'edx.djt_three_name']) @query(name=['djt_three_name', 'edx_djt_three_name', 'edx.djt_three_name'])
def djt_three_name(): def djt_three_name():
return "I have three names" return "I have three names"
......
Overview
========
The purpose of this module is to unit test periodic tasks
created with edinsights.core.decorators.cron
The module does not provide any additional functionality.
Despite the name of the module, your periodic tasks
do NOT have to be inside this module. They can be
located in any tasks.py file in any django app
directory.
Running Tests
=============
Because testing periodic tasks is slow (~20s) they
are excluded from testing by default.
To test the module, add it to INSTALLED_APPS in settings.py
To run the tests:
python manage.py test periodic
# This module provides tests for periodic tasks using core.decorators.cron
from edinsights.core.decorators import view, use_fromcache, MemoizeNotInCacheError
from edinsights.periodic.tasks import big_computation
from edinsights.periodic.tasks import big_computation_withfm
#
@view()
def big_computation_visualizer():
    # use_fromcache presumably returns a wrapper serving only the memoized
    # result of big_computation (no recompute) -- confirm in decorators.py.
    return "<html>%s</html>" % use_fromcache(big_computation)()
@view()
def big_computation_visualizer_withfm():
    try:
        # returns instantly, does not perform computation if results are not
        # in cache
        result = use_fromcache(big_computation_withfm)()
    except MemoizeNotInCacheError:
        # Cache miss: show a placeholder rather than blocking the request.
        result = "The big computation has not been performed yet"
        # alternatively you can display a "please wait" message
        # and run big_computation_withfm() without force_retrieve
    return "<html>%s</html>" % result
import tempfile
import time
from edinsights.core.decorators import memoize_query, cron
from django.utils.timezone import timedelta
def timestamp_to_tempfile(filename):
    """Append the current UNIX timestamp, one per line, to *filename*
    inside the system temp directory.

    Tasks call this once per invocation so tests can count calls and
    inspect when the last one happened.
    """
    path = tempfile.gettempdir() + '/' + filename
    with open(path, 'a') as fh:
        fh.write(str(time.time()) + '\n')
# Test tasks are defined in tasks.py files. Other files could also be
# included using CELERY_IMPORTS. Avoid using @cron with nested functions and
# methods(the support of @periodic_task for these is experimental)
# The @cron decorator should precede all other decorators
@cron(run_every=timedelta(seconds=1))
def test_cron_task():
    """ Simple task that gets executed by the scheduler (celery beat).
    tested by: tests.SimpleTest.test_cron
    """
    # Each scheduled run appends one timestamp the test can count.
    timestamp_to_tempfile('test_cron_task_counter')
@cron(run_every=timedelta(seconds=1), force_memoize=False) # cron decorators should go on top
@memoize_query(60)
def test_cron_memoize_task(fs):
    """
    Simple task that gets executed by the scheduler (celery beat).
    Combines periodic tasks and memoization, with force_memoize=False.
    This means that the periodic task will return cached results if possible.
    This scenario is probably not what you want.
    tested by: tests.SimpleTest.test_cron_and_memoize
    """
    # Body only runs on a cache miss, so the counter file records exactly
    # one timestamp per cache lifetime.
    timestamp_to_tempfile('test_cron_memoize_task')
    return 42
@cron(run_every=timedelta(seconds=1), force_memoize=False) # cron decorators should go on top
@memoize_query(cache_time=60)
def big_computation():
    """
    Simple task that gets executed by the scheduler (celery beat) and also by @view
    Combines periodic tasks and memoization, with force_memoize=False.
    This means that the periodic task will return cached results if possible.
    This scenario is probably not what you want.
    tested by: tests.SimpleTest.test_cron_and_memoize_and_view
    """
    # Body only runs on a cache miss; the sentinel return value is what
    # views assert against.
    timestamp_to_tempfile('big_computation_counter')
    return "FAKERESULT"
@cron(run_every=timedelta(seconds=1), force_memoize=True) # cron decorators should go on top
@memoize_query(cache_time=60)
def big_computation_withfm():
    """
    Simple task that gets executed by the scheduler (celery beat) and also by @view
    Combines periodic tasks and memoization, with force_memoize=True.
    This means that the task will redo the computation regardless of
    whether the result was already in the cache when it is called from the
    task scheduler. If the task is called from code, it will return the cached
    result. This scenario is probably what you want.
    tested by: tests.SimpleTest.test_cron_and_memoize_and_view_with_forcememoize
    """
    # With force_memoize=True every scheduled run appends a timestamp,
    # while code-path calls still hit the cache.
    timestamp_to_tempfile('big_computation_withfm_counter')
    return "FAKERESULTFM"
# TODO put every task in its own file, and use CELERY_IMPORTS to run
# individual tasks instead of all tasks at the same time for each test
import tempfile
import time
from django.test import TestCase
from django.test.client import Client
from django.core.cache import cache
from edinsights.core.decorators import use_clearcache
def count_timestamps(tempfilename):
    """Read the temp-dir file *tempfilename* and return (ncalls, last_call):
    the number of timestamp lines and the float value of the last one
    (None when the file is empty).
    """
    path = tempfile.gettempdir() + '/' + tempfilename
    with open(path, 'r') as fh:
        lines = fh.readlines()
    if not lines:
        return 0, None
    return len(lines), float(lines[-1].rstrip())
def truncate_tempfile(tempfilename):
    """
    Truncates the file used to share state between the test process
    and the scheduler process (celery beat).
    """
    # Opening in 'w' mode truncates (or creates) the file; nothing to write.
    open(tempfile.gettempdir() + '/' + tempfilename, 'w').close()
def run_celery_beat(seconds=3, verbose=False):
    """ Runs the task scheduler celery beat for the specified number of seconds as a child process

    seconds: how long to let the scheduler run before terminating it.
    verbose: when False, the child's stdout/stderr are discarded.
    """
    import os
    with open(os.devnull, 'w') as devnull:
        from subprocess import Popen
        # '-B' embeds celery beat in the worker so periodic tasks fire.
        command = ['python', 'manage.py', 'celery', 'worker', '-B', '--loglevel=INFO',]
        if verbose:
            suppress_output_args = {}
        else:
            suppress_output_args = {'stdout':devnull, 'stderr':devnull}
        celery_beat_process = Popen(command, **suppress_output_args)
        # give time to celery beat to execute test_cron_task
        from time import sleep
        print "running periodic tasks for %s seconds... " % seconds
        sleep(seconds)
        # SIGTERM the child; no wait() -- NOTE(review): a zombie process may
        # linger until the test process exits.
        celery_beat_process.terminate()
class SimpleTest(TestCase):
    """Integration tests for @cron periodic tasks.

    Each test truncates a shared temp-dir counter file, runs celery beat
    as a child process for a few seconds, then counts the timestamps the
    tasks wrote to verify scheduling and caching behaviour.
    """
    def __init__(self, arg):
        TestCase.__init__(self, arg)
    def test_cron(self):
        """ Test that periodic tasks are scheduled and run
        tests: tasks.test_cron_task
        """
        truncate_tempfile('test_cron_task_counter')
        run_celery_beat(seconds=3,verbose=False)
        # verify number of calls and time of last call
        ncalls, last_call = count_timestamps('test_cron_task_counter')
        # run_every=1s over ~3s should fire at least twice.
        self.assertGreaterEqual(ncalls, 2)
        self.assertAlmostEqual(last_call, time.time(), delta=100)
    def test_cron_and_memoize(self):
        """ Test that periodic tasks are scheduled and run, and the results
        are cached.
        tests: tasks.test_cron_memoize_task
        """
        truncate_tempfile('test_cron_memoize_task')
        # clear the cache from any previous executions of this test
        from tasks import test_cron_memoize_task
        use_clearcache(test_cron_memoize_task)()
        run_celery_beat(seconds=3,verbose=False)
        ncalls, last_call = count_timestamps('test_cron_memoize_task')
        self.assertEqual(ncalls,1) # after the first call all subsequent calls should be cached
        self.assertAlmostEqual(last_call, time.time(), delta=100)
    def test_cron_and_memoize_and_view(self):
        """ Test that periodic tasks are scheduled, run, cached, and the
        cached results are available to @view
        tests: tasks.big_computation
        """
        truncate_tempfile('big_computation_counter')
        # delete cache from previous executions of this unit test
        from tasks import big_computation
        use_clearcache(big_computation)()
        run_celery_beat(seconds=3, verbose=False)
        ncalls_before, lastcall_before = count_timestamps('big_computation_counter')
        self.assertEqual(ncalls_before,1) # after the first call all subsequent calls should be cached
        c = Client()
        status_code = c.get('/view/big_computation_visualizer').status_code
        content = c.get('/view/big_computation_visualizer').content
        self.assertEqual(status_code, 200)
        self.assertEqual(content, "<html>FAKERESULT</html>")
        # ensure big_computation was not called and the cached result was used
        # by the execution of c.get('/view...')
        ncalls_after, lastcall_after = count_timestamps('big_computation_counter')
        self.assertEqual(ncalls_before, ncalls_after)
        self.assertEqual(lastcall_before, lastcall_after)
    def test_cron_and_memoize_and_view_with_forcememoize(self):
        """ Test that periodic tasks are scheduled, run, and cached, and the
        cached results are available to @view. If the task is executed from
        the scheduler (as a periodic task) the computation should be redone and
        the new result should be stored in cache. If the task is executed from code
        (e.g. from a @view or @query handler) the result from cache should be returned.
        Tests task: tasks.big_computation_withfm
        """
        truncate_tempfile('big_computation_withfm_counter')
        from tasks import big_computation_withfm
        use_clearcache(big_computation_withfm)()
        run_celery_beat(seconds=3, verbose=False)
        ncalls_before, lastcall_before = count_timestamps('big_computation_withfm_counter')
        # force_memoize=True: every scheduled tick recomputes, so >= 2 calls.
        self.assertGreaterEqual(ncalls_before,2)
        self.assertAlmostEqual(lastcall_before, time.time(),delta=100)
        c = Client()
        status_code = c.get('/view/big_computation_visualizer_withfm').status_code
        content = c.get('/view/big_computation_visualizer_withfm').content
        self.assertEqual(status_code, 200)
        self.assertEqual(content, "<html>FAKERESULTFM</html>")
        # The code-path (view) call must have served the cache, not recomputed.
        ncalls_after, lastcall_after = count_timestamps('big_computation_withfm_counter')
        self.assertEqual(ncalls_before, ncalls_after)
        self.assertEqual(lastcall_before, lastcall_after)
...@@ -11,9 +11,6 @@ urlpatterns = patterns('', ...@@ -11,9 +11,6 @@ urlpatterns = patterns('',
url(r'^$', 'edinsights.core.views.index'), url(r'^$', 'edinsights.core.views.index'),
url('^', include('edinsights.core.urls')), url('^', include('edinsights.core.urls')),
url(r'^httpevent$', 'djeventstream.httphandler.views.http_view'), url(r'^httpevent$', 'djeventstream.httphandler.views.http_view'),
# url(r'^view/([A-Za-z_+]+)/([A-Za-z_+]+)$', 'core.views.handle_view'),
# url(r'^view/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_0-9]+)$', 'core.views.handle_view'),
# url(r'^view/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_0-9]+)/([A-Za-z_0-9]+)$', 'core.views.handle_view'),
# url(r'^query/([A-Za-z_+]+)/([A-Za-z_+]+)$', 'core.views.handle_query'), # url(r'^query/([A-Za-z_+]+)/([A-Za-z_+]+)$', 'core.views.handle_query'),
# url(r'^query/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_0-9+]+)$', 'core.views.handle_query'), # url(r'^query/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_0-9+]+)$', 'core.views.handle_query'),
# url(r'^query/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_0-9+]+)/([A-Za-z_0-9+]+)$', 'core.views.handle_query'), # url(r'^query/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_0-9+]+)/([A-Za-z_0-9+]+)$', 'core.views.handle_query'),
...@@ -23,7 +20,6 @@ urlpatterns = patterns('', ...@@ -23,7 +20,6 @@ urlpatterns = patterns('',
# url(r'^probe/([A-Za-z_+]+)/([A-Za-z_+]+)$', 'core.views.handle_probe'), # url(r'^probe/([A-Za-z_+]+)/([A-Za-z_+]+)$', 'core.views.handle_probe'),
# url(r'^probe/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_+]+)$', 'core.views.handle_probe'), # url(r'^probe/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_+]+)$', 'core.views.handle_probe'),
# url(r'^probe/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_+]+)$', 'core.views.handle_probe'), # url(r'^probe/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_+]+)/([A-Za-z_+]+)$', 'core.views.handle_probe'),
url('^tasks/', include('djcelery.urls')),
) )
if settings.DEBUG and settings.DJFS['type'] == 'osfs': if settings.DEBUG and settings.DJFS['type'] == 'osfs':
......
...@@ -139,12 +139,10 @@ TEMPLATE_DIRS = ( ...@@ -139,12 +139,10 @@ TEMPLATE_DIRS = (
DJ_REQUIRED_APPS = ( DJ_REQUIRED_APPS = (
'djeventstream.httphandler', 'djeventstream.httphandler',
'djcelery',
'south', 'south',
'edinsights.core', 'edinsights.core',
'edinsights.modulefs', 'edinsights.modulefs',
'edinsights.modules', 'edinsights.modules',
'edinsights.periodic',
) )
INSTALLED_APPS = ( INSTALLED_APPS = (
...@@ -198,10 +196,10 @@ LOGGING = { ...@@ -198,10 +196,10 @@ LOGGING = {
# 'error', r"DateTimeField received a naive datetime", # 'error', r"DateTimeField received a naive datetime",
# RuntimeWarning, r'django\.db\.models\.fields') # RuntimeWarning, r'django\.db\.models\.fields')
# initialize celery # Don't initialize celery
import djcelery # import djcelery
djcelery.setup_loader() # djcelery.setup_loader()
# import the settings for celery from the edinsights module and for cache # Don't import the settings for celery from the edinsights module and for cache
from edinsights.celerysettings_dev import * # from edinsights.celerysettings_dev import *
from edinsights.djangocachesettings_dev import * # from edinsights.djangocachesettings_dev import *
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment