Commit 1d13b1a9 by Ashley Penney

Make various changes to handle the s3/sftp part of the pearson process.

parent f2b43ec4
import os
from optparse import make_option

import boto
import paramiko
from boto.s3.key import Key
from dogapi import dog_http_api, dog_stats_api

from django.conf import settings
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError

dog_http_api.api_key = settings.DATADOG_API


class Command(BaseCommand):
    option_list = BaseCommand.option_list
    args = '<mode>'
    help = """
    Mode should be 'import' or 'export', depending on whether you are fetching
    files from Pearson or sending files to them.
    """

    def handle(self, *args):
        if len(args) < 1:
            raise CommandError('Usage is pearson {0}'.format(self.args))

        for mode in args:
            if mode == 'export':
                sftp(settings.PEARSON_LOCAL_IMPORT, settings.PEARSON_SFTP_IMPORT, mode)
                s3(settings.PEARSON_LOCAL, settings.PEARSON_BUCKET, mode)
            elif mode == 'import':
                sftp(settings.PEARSON_SFTP_EXPORT, settings.PEARSON_LOCAL_EXPORT, mode)
                s3(settings.PEARSON_LOCAL_EXPORT, settings.PEARSON_BUCKET, mode)
            else:
                print("ERROR: Mode must be export or import.")


def sftp(files_from, files_to, mode):
    """Copy files over SFTP: push when files_from is a local directory, pull otherwise."""
    with dog_stats_api.timer('pearson.{0}'.format(mode), tags='sftp'):
        try:
            # PEARSON_SFTP_HOSTNAME is an assumed setting name for the Pearson SFTP host.
            t = paramiko.Transport((settings.PEARSON_SFTP_HOSTNAME, 22))
            t.connect(username=settings.PEARSON_SFTP_USERNAME,
                      password=settings.PEARSON_SFTP_PASSWORD)
            sftp = paramiko.SFTPClient.from_transport(t)
            if os.path.isdir(files_from):
                # Local source directory: upload each file to the remote path.
                for filename in os.listdir(files_from):
                    sftp.put(files_from + '/' + filename,
                             files_to + '/' + filename)
            else:
                # Remote source directory: download each file to the local path.
                for filename in sftp.listdir(files_from):
                    sftp.get(files_from + '/' + filename,
                             files_to + '/' + filename)
        except:
            dog_http_api.event('pearson {0}'.format(mode),
                               'sftp uploading failed', alert_type='error')
            raise


def s3(files_from, bucket, mode):
    """Archive every file in files_from to the given S3 bucket."""
    with dog_stats_api.timer('pearson.{0}'.format(mode), tags='s3'):
        try:
            for filename in os.listdir(files_from):
                upload_file_to_s3(bucket, files_from + '/' + filename)
        except:
            dog_http_api.event('pearson {0}'.format(mode), 's3 archiving failed')
            raise


def upload_file_to_s3(bucket, filename):
    """
    Upload file to S3
    """
    s3 = boto.connect_s3()
    b = s3.get_bucket(bucket)
    k = Key(b)
    k.key = "{filename}".format(filename=filename)
    k.set_contents_from_filename(filename)
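For context, the new command pulls everything it needs from Django settings and is driven by a single positional mode argument. A minimal sketch of a matching settings block and invocation follows; the values are placeholders, and PEARSON_SFTP_HOSTNAME in particular is an assumed name rather than one taken from this commit.

# Sketch of the settings the pearson command reads; all values are placeholders,
# and PEARSON_SFTP_HOSTNAME is an assumed name (the commit reads the SFTP username
# and password from settings but does not show where the hostname comes from).
DATADOG_API = 'datadog-api-key'
PEARSON_SFTP_HOSTNAME = 'sftp.example.com'
PEARSON_SFTP_USERNAME = 'edx-transfer'
PEARSON_SFTP_PASSWORD = 'not-a-real-password'
PEARSON_SFTP_IMPORT = '/import'
PEARSON_SFTP_EXPORT = '/export'
PEARSON_LOCAL = '/tmp/pearson'
PEARSON_LOCAL_IMPORT = '/tmp/pearson/import'
PEARSON_LOCAL_EXPORT = '/tmp/pearson/export'
PEARSON_BUCKET = 'example-pearson-archive'

# Invoked like any other management command, e.g.:
#   ./manage.py pearson export
#   ./manage.py pearson import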
@@ -37,39 +37,25 @@ class Command(BaseCommand):
         ("LastUpdate", "user_updated_at"),  # in UTC, so same as what we store
     ])

     option_list = BaseCommand.option_list + (
         make_option(
             '--dump_all',
             action='store_true',
             dest='dump_all',
         ),
     )

-    args = '<output_file_or_dir>'
-    help = """
-    Export user demographic information from TestCenterUser model into a tab delimited
-    text file with a format that Pearson expects.
-    """
-    def handle(self, *args, **kwargs):
-        if len(args) < 1:
-            print Command.help
-            return
+    def handle(self, **kwargs):
         # update time should use UTC in order to be comparable to the user_updated_at
         # field
         uploaded_at = datetime.utcnow()

         # if specified destination is an existing directory, then
         # create a filename for it automatically. If it doesn't exist,
-        # or exists as a file, then we will just write to it.
+        # then we will create the directory.
         # Name will use timestamp -- this is UTC, so it will look funny,
         # but it should at least be consistent with the other timestamps
         # used in the system.
-        dest = args[0]
-        if isdir(dest):
-            destfile = os.path.join(dest, uploaded_at.strftime("cdd-%Y%m%d-%H%M%S.dat"))
-        else:
-            destfile = dest
+        if not os.path.isdir(settings.PEARSON_LOCAL_EXPORT):
+            os.makedirs(settings.PEARSON_LOCAL_EXPORT)
+        destfile = os.path.join(settings.PEARSON_LOCAL_EXPORT,
+                                uploaded_at.strftime("cdd-%Y%m%d-%H%M%S.dat"))

         # strings must be in latin-1 format. CSV parser will
         # otherwise convert unicode objects to ascii.
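The latin-1 comment at the end of this hunk is worth unpacking: under Python 2 (which this codebase targets, given the print statements above), csv.writer expects byte strings, and unicode objects get coerced through ascii, which breaks on accented names. The sketch below is illustrative only, with placeholder filename and data, and is not the exporter's actual writer code.

# -*- coding: utf-8 -*-
# Illustrative Python 2 sketch of the latin-1 encoding step described above.
import csv


def encode_row(row):
    # Encode unicode values to latin-1 so csv.writer never falls back to ascii.
    return [v.encode('latin-1') if isinstance(v, unicode) else v for v in row]


with open('cdd-example.dat', 'wb') as outfile:  # placeholder output file
    writer = csv.writer(outfile, delimiter='\t')
    writer.writerow(encode_row([u'José', u'García', u'2013-05-01']))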
@@ -23,11 +23,6 @@ class Command(BaseCommand):
         ("LastUpdate", "user_updated_at"),  # in UTC, so same as what we store
     ])

-    args = '<output_file_or_dir>'
-    help = """
-    Export user registration information from TestCenterRegistration model into a tab delimited
-    text file with a format that Pearson expects.
-    """

     option_list = BaseCommand.option_list + (
         make_option(
@@ -43,26 +38,25 @@ class Command(BaseCommand):
     )

-    def handle(self, *args, **kwargs):
-        if len(args) < 1:
-            print Command.help
-            return
+    def handle(self, **kwargs):
         # update time should use UTC in order to be comparable to the user_updated_at
         # field
         uploaded_at = datetime.utcnow()

         # if specified destination is an existing directory, then
         # create a filename for it automatically. If it doesn't exist,
-        # or exists as a file, then we will just write to it.
+        # then we will create the directory.
         # Name will use timestamp -- this is UTC, so it will look funny,
         # but it should at least be consistent with the other timestamps
         # used in the system.
-        dest = args[0]
-        if isdir(dest):
-            destfile = join(dest, uploaded_at.strftime("ead-%Y%m%d-%H%M%S.dat"))
-        else:
-            destfile = dest
+        if not os.path.isdir(settings.PEARSON_LOCAL_EXPORT):
+            os.makedirs(settings.PEARSON_LOCAL_EXPORT)
+        destfile = os.path.join(settings.PEARSON_LOCAL_EXPORT,
+                                uploaded_at.strftime("ead-%Y%m%d-%H%M%S.dat"))

         dump_all = kwargs['dump_all']
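Taken together, the two export commands now always write their cdd-*.dat and ead-*.dat files under settings.PEARSON_LOCAL_EXPORT, and the new pearson command handles the SFTP transfer and S3 archiving. A hedged sketch of how the steps might be chained from a periodic job follows; the exporter command names are assumptions, since only the pearson command's usage string appears in this commit.

# Sketch of chaining the steps; 'pearson_export_cdd' and 'pearson_export_ead'
# are assumed command names, only 'pearson' is defined in this commit.
from django.core.management import call_command

call_command('pearson_export_cdd')  # assumed: writes cdd-*.dat to PEARSON_LOCAL_EXPORT
call_command('pearson_export_ead')  # assumed: writes ead-*.dat to PEARSON_LOCAL_EXPORT
call_command('pearson', 'export')   # SFTP transfer plus S3 archive of the local files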