Commit 82cf064f by Gregory Martin (committed by GitHub)

Merge pull request #20 from edx/yro/sandbox_update

Yro/sandbox update: move S3/SES access from explicit keys in instance_config to boto's default credential chain, replace bare excepts with specific boto exceptions, and add sandbox fixtures plus a reorganized instance_config.
parents cd3544c1 e9268dc8
[
{
"model": "VEDA_OS01.Destination",
"pk": 1,
"fields": {
"destination_name": "Amazon AWS",
"destination_active": true,
"destination_nick": "S31"
}
},
{
"model": "VEDA_OS01.Encode",
"pk": 1,
"fields": {
"encode_destination": 1,
"encode_name": "Desktop - High",
"profile_active": true,
"encode_suffix": "DTH",
"encode_filetype": "mp4",
"encode_bitdepth": "27",
"encode_resolution": "720",
"product_spec": "desktop_mp4",
"xuetang_proc": true
}
},
{
"model": "VEDA_OS01.Encode",
"pk": 2,
"fields": {
"encode_destination": 1,
"encode_name": "HLS",
"profile_active": true,
"encode_suffix": "HLS",
"encode_filetype": "HLS",
"encode_bitdepth": "0",
"encode_resolution": "0",
"product_spec": "hls",
"xuetang_proc": false
}
},
{
"model": "VEDA_OS01.Encode",
"pk": 3,
"fields": {
"encode_destination": 1,
"encode_name": "Mobile - Low",
"profile_active": true,
"encode_suffix": "MB2",
"encode_filetype": "mp4",
"encode_bitdepth": "27",
"encode_resolution": "360",
"product_spec": "mobile_low",
"xuetang_proc": false
}
},
{
"model": "VEDA_OS01.Course",
"pk": 1,
"fields": {
"course_name": "Veda Sandbox Test Course",
"course_hold": true,
"institution": "XXX",
"edx_classid": "XXXXX",
"semesterid": "2017",
"yt_proc": false,
"tp_proc": false,
"c24_proc": false,
"s3_proc": true,
"xue": true,
"local_storedir": "course-v1:VEDA+VEDA201+2015_T1",
"xuetang_proc": false,
"studio_hex": "xxxx"
}
}
]
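The fixture above seeds the sandbox database with one delivery destination, three encode profiles (desktop MP4, HLS, low-res mobile), and a test course. A minimal sketch of loading it into a Django sandbox — the fixture filename and settings module are illustrative, not taken from this PR:

```python
# Sketch: load the sandbox fixture with Django's loaddata command.
# Assumes DJANGO_SETTINGS_MODULE is set and the JSON above is saved
# on a fixtures path, e.g. VEDA_OS01/fixtures/sandbox.json.
import django
from django.core.management import call_command

django.setup()
call_command('loaddata', 'sandbox.json')  # same as: python manage.py loaddata sandbox.json
```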
......@@ -72,7 +72,7 @@ class VideoSerializer(serializers.ModelSerializer):
'video_trans_start',
'video_trans_end',
'video_trans_status',
-'video_glacierid'
+'video_glacierid',
+'course_ids'
)
......
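Reviewer note on the serializer hunk above: the trailing comma added after 'video_glacierid' is what keeps 'course_ids' a separate tuple element. Without it, Python silently concatenates adjacent string literals — a classic fields-tuple bug:

```python
# Implicit string concatenation: a missing comma fuses two field names.
fields_wrong = (
    'video_trans_status',
    'video_glacierid'   # <-- no comma
    'course_ids'
)
print(fields_wrong)   # ('video_trans_status', 'video_glacieridcourse_ids')

fields_right = (
    'video_trans_status',
    'video_glacierid',
    'course_ids',
)
print(fields_right)   # ('video_trans_status', 'video_glacierid', 'course_ids')
```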
......@@ -5,6 +5,7 @@ import yaml
import boto
import boto.s3
from boto.s3.key import Key
+from boto.exception import S3ResponseError
from os.path import expanduser
import requests
import datetime
......@@ -232,7 +233,7 @@ class VedaDelivery:
if not isinstance(self.video_proto.duration, int) and ':' not in self.video_proto.duration:
print 'Duration Failure'
-return None
+return
self.video_proto.duration = Output._seconds_from_string(
duration=self.video_proto.duration
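Output._seconds_from_string is not shown in this diff; for context, a plausible sketch of such a helper — an assumption inferred from the 'HH:MM:SS' duration check above, not the actual VEDA implementation:

```python
def _seconds_from_string(duration):
    # Illustrative only: convert an 'HH:MM:SS[.ms]' string to seconds.
    hours, minutes, seconds = duration.split(':')
    return int(hours) * 3600 + int(minutes) * 60 + float(seconds)

# _seconds_from_string('01:02:03.5') -> 3723.5
```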
......@@ -263,12 +264,11 @@ class VedaDelivery:
"""
if self.auth_dict['veda_deliverable_bucket'] == \
self.auth_dict['edx_s3_endpoint_bucket']:
-return None
-conn = boto.connect_s3(
-self.auth_dict['veda_access_key_id'],
-self.auth_dict['veda_secret_access_key']
-)
+return
+try:
+conn = boto.connect_s3()
+except S3ResponseError:
+return
del_bucket = conn.get_bucket(
self.auth_dict['veda_deliverable_bucket']
)
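This is the pattern repeated throughout the PR: drop the explicit key arguments and let boto 2 resolve credentials through its standard chain (environment variables, boto config files, then IAM instance-profile credentials). A minimal sketch of the new calling convention; the bucket name is illustrative:

```python
import boto
from boto.exception import NoAuthHandlerFound, S3ResponseError

def get_bucket_or_none(bucket_name):
    try:
        # No key arguments: boto walks AWS_ACCESS_KEY_ID/AWS_SECRET_ACCESS_KEY,
        # ~/.boto or /etc/boto.cfg, then the EC2 instance metadata service.
        conn = boto.connect_s3()
    except NoAuthHandlerFound:
        return None  # nothing anywhere in the credential chain
    try:
        return conn.get_bucket(bucket_name)
    except S3ResponseError:
        return None  # bucket missing or access denied

bucket = get_bucket_or_none('veda-deliverables')  # illustrative name
```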
......@@ -382,27 +382,28 @@ class VedaDelivery:
"""
if self.video_query.inst_class.s3_proc is False and \
self.video_query.inst_class.mobile_override is False:
-return None
+return False
if self.video_proto.filesize < self.auth_dict['multi_upload_barrier']:
"""
Upload single part
"""
if self._BOTO_SINGLEPART() is False:
-return None
+return False
else:
"""
Upload multipart
"""
if self._BOTO_MULTIPART() is False:
-return None
+return False
self.endpoint_url = '/'.join((
'https://s3.amazonaws.com',
self.auth_dict['edx_s3_endpoint_bucket'],
self.encoded_file
))
+return True
def _BOTO_SINGLEPART(self):
"""
......@@ -410,21 +411,16 @@ class VedaDelivery:
node_config MULTI_UPLOAD_BARRIER
"""
try:
-conn = boto.connect_s3(
-self.auth_dict['edx_access_key_id'],
-self.auth_dict['edx_secret_access_key']
-)
-delv_bucket = conn.get_bucket(
-self.auth_dict['edx_s3_endpoint_bucket']
-)
-except:
+conn = boto.connect_s3()
+except S3ResponseError:
ErrorObject.print_error(
message='Deliverable Fail: s3 Connection Error\n \
Check node_config DELIVERY_ENDPOINT'
)
return False
+delv_bucket = conn.get_bucket(
+self.auth_dict['edx_s3_endpoint_bucket']
+)
upload_key = Key(delv_bucket)
upload_key.key = os.path.basename(os.path.join(
self.node_work_directory,
......@@ -455,9 +451,7 @@ class VedaDelivery:
if not os.path.exists(
os.path.join(path_to_multipart, filename.split('.')[0])
):
-os.mkdir(
-os.path.join(path_to_multipart, filename.split('.')[0])
-)
+os.mkdir(os.path.join(path_to_multipart, filename.split('.')[0]))
os.chdir(os.path.join(path_to_multipart, filename.split('.')[0]))
"""
......@@ -472,18 +466,14 @@ class VedaDelivery:
Connect to s3
"""
try:
-c = boto.connect_s3(
-settings['aws_access_key'],
-settings['aws_secret_key']
-)
-b = c.lookup(settings['aws_deliver_bucket'])
-except:
+c = boto.connect_s3()
+except S3ResponseError:
ErrorObject.print_error(
message='Deliverable Fail: s3 Connection Error\n \
Check node_config DELIVERY_ENDPOINT'
)
return False
+b = c.lookup(self.auth_dict['edx_s3_endpoint_bucket'])
if b is None:
ErrorObject.print_error(
message='Deliverable Fail: s3 Connection Error\n \
......
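_BOTO_MULTIPART is mostly elided by the diff; for orientation, a sketch of the size-gated single/multipart upload that DELIVER chooses between, using boto 2's multipart API (the bucket name and chunk size are illustrative, not VEDA's actual values):

```python
import math
import os

import boto
from boto.s3.key import Key

def deliver(bucket_name, filepath, multi_upload_barrier=2000000000,
            chunk_size=500 * 1024 * 1024):
    """Illustrative sketch, not the VEDA implementation."""
    conn = boto.connect_s3()
    bucket = conn.get_bucket(bucket_name)
    filesize = os.stat(filepath).st_size
    if filesize < multi_upload_barrier:
        # Single PUT below the barrier.
        key = Key(bucket)
        key.key = os.path.basename(filepath)
        key.set_contents_from_filename(filepath)
        return
    # Multipart above the barrier: initiate, upload numbered parts, complete.
    mp = bucket.initiate_multipart_upload(os.path.basename(filepath))
    parts = int(math.ceil(filesize / float(chunk_size)))
    with open(filepath, 'rb') as fp:
        for part_num in range(1, parts + 1):
            remaining = filesize - (part_num - 1) * chunk_size
            mp.upload_part_from_file(fp, part_num,
                                     size=min(chunk_size, remaining))
    mp.complete_upload()
```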
import os.path
import boto
-import yaml
-from boto.s3.connection import S3Connection
import boto.s3
+from boto.exception import S3ResponseError, S3DataError
import newrelic.agent
+import yaml
try:
boto.config.add_section('Boto')
......@@ -33,7 +34,7 @@ from veda_file_ingest import VideoProto, VedaIngest
from veda_val import VALAPICall
-class FileDiscovery():
+class FileDiscovery(object):
def __init__(self, **kwargs):
self.video_info = {}
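The FileDiscovery() → FileDiscovery(object) change matters on Python 2 (which this codebase targets, given its print statements): only classes that inherit from object are new-style, with descriptors, super(), and a sane MRO. A two-class illustration:

```python
class Old():            # Python 2: old-style class
    pass

class New(object):      # new-style on Python 2; a no-op difference on Python 3
    pass

print(type(Old))        # <type 'classobj'> on Python 2
print(type(New))        # <type 'type'>
```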
......@@ -62,25 +63,19 @@ class FileDiscovery():
@newrelic.agent.background_task()
def about_video_ingest(self):
"""
Crawl VEDA Upload bucket
"""
if self.node_work_directory is None:
ErrorObject().print_error(
message='No Workdir'
)
return None
"""
Crawl ingest bucket looking for files
"""
conn = S3Connection(
self.auth_dict['veda_access_key_id'],
self.auth_dict['veda_secret_access_key']
)
return
conn = boto.connect_s3()
"""
Occasional s3 Error
"""
try:
self.bucket = conn.get_bucket(self.auth_dict['veda_s3_upload_bucket'])
-except:
+except S3ResponseError:
return None
for key in self.bucket.list('upload/', '/'):
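bucket.list(prefix, delimiter) is boto 2's non-recursive listing: with delimiter '/', it yields only keys directly under upload/, rolling deeper "directories" up into common prefixes rather than listing them key by key. A quick sketch (bucket name illustrative):

```python
import boto

conn = boto.connect_s3()
bucket = conn.get_bucket('veda-upload')   # illustrative name

# Keys one level below 'upload/'.
for key in bucket.list('upload/', '/'):
    print(key.name)                       # e.g. upload/9a8b7c6d.mp4
```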
......@@ -97,10 +92,8 @@ class FileDiscovery():
video_serial=meta.name.split('/')[1]
)
if len(upload_query) == 0:
-'''
-Non serialized upload - reject
-'''
-return None
+# Non serialized upload - reject
+return
if upload_query[0].upload_filename is not None:
file_extension = upload_query[0].upload_filename.split('.')[-1]
......@@ -119,9 +112,8 @@ class FileDiscovery():
)
course_query = Course.objects.get(institution='EDX', edx_classid='ABVID')
"""
Trigger Ingest Process
"""
# Trigger Ingest Process
V = VideoProto(
abvid_serial=abvid_serial,
client_title=upload_query[0].upload_filename.replace('.' + file_extension, ''),
......@@ -146,28 +138,23 @@ class FileDiscovery():
@newrelic.agent.background_task()
def studio_s3_ingest(self):
"""
Ingest files from studio upload endpoint
"""
if self.node_work_directory is None:
ErrorObject().print_error(
message='No Workdir'
)
-return None
+return
-"""
-Ingest files from studio upload endpoint
-"""
-conn = S3Connection(
-self.auth_dict['edx_access_key_id'],
-self.auth_dict['edx_secret_access_key']
-)
"""Occasional s3 Error"""
+conn = boto.connect_s3()
try:
self.bucket = conn.get_bucket(self.auth_dict['edx_s3_ingest_bucket'])
-except:
+except S3ResponseError:
print 'S3: Ingest Conn Failure'
-return None
+return
-for key in self.bucket.list('prod-edx/unprocessed/', '/'):
+for key in self.bucket.list(self.auth_dict['edx_s3_ingest_prefix'], '/'):
meta = self.bucket.get_key(key.name)
self.studio_s3_validate(
meta=meta,
......@@ -175,7 +162,6 @@ class FileDiscovery():
)
def studio_s3_validate(self, meta, key):
if meta.get_metadata('course_video_upload_token') is None:
return None
......@@ -209,7 +195,7 @@ class FileDiscovery():
new_key = 'prod-edx/rejected/' + key.name[::-1].split('/')[0][::-1]
key.copy(self.bucket, new_key)
key.delete()
-return None
+return
file_extension = client_title[::-1].split('.')[0][::-1]
......@@ -225,7 +211,7 @@ class FileDiscovery():
)
)
file_ingested = True
-except:
+except S3DataError:
print 'File Copy Fail: Studio S3 Ingest'
file_ingested = False
else:
......@@ -237,7 +223,7 @@ class FileDiscovery():
)
)
file_ingested = True
-except:
+except S3DataError:
print 'File Copy Fail: Studio S3 Ingest'
file_ingested = False
file_extension = ''
......@@ -245,12 +231,9 @@ class FileDiscovery():
if file_ingested is not True:
# 's3 Bucket ingest Fail'
new_key = 'prod-edx/rejected/' + key.name[::-1].split('/')[0][::-1]
-try:
-key.copy(self.bucket, new_key)
-except:
-key.copy(self.bucket, new_key)
+key.copy(self.bucket, new_key)
key.delete()
-return None
+return
"""
Trigger Ingest Process
......@@ -270,17 +253,15 @@ class FileDiscovery():
I.insert()
if I.complete is False:
-return None
+return
"""
Delete Original After Copy
"""
-new_key = 'prod-edx/processed/' + key.name[::-1].split('/')[0][::-1]
-try:
+if self.auth_dict['edx_s3_ingest_prefix'] is not None:
+new_key = 'prod-edx/processed/' + key.name[::-1].split('/')[0][::-1]
key.copy(self.bucket, new_key)
-except:
-key.copy(self.bucket, new_key)
+# key.copy(self.bucket, new_key)
key.delete()
......
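S3 has no rename, so the processed/rejected flow above is copy-then-delete. A sketch of that move idiom; the reversal trick key.name[::-1].split('/')[0][::-1] in the diff is just "basename", spelled more clearly here with rsplit (bucket and prefixes illustrative):

```python
import boto

def move_key(bucket, key, dest_prefix):
    # Copy to the new key, then delete the original: S3's only "move".
    new_key = dest_prefix + key.name.rsplit('/', 1)[-1]
    key.copy(bucket, new_key)
    key.delete()

conn = boto.connect_s3()
bucket = conn.get_bucket('veda-ingest')               # illustrative
for key in bucket.list('prod-edx/unprocessed/', '/'):
    move_key(bucket, key, 'prod-edx/processed/')
```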
-import boto
import os
+import shutil
import sys
+import boto
+import yaml
import boto.s3
from boto.s3.key import Key
-import yaml
-import shutil
-from os.path import expanduser
+from boto.exception import S3ResponseError
import newrelic.agent
+from os.path import expanduser
-from veda_utils import ErrorObject
try:
boto.config.add_section('Boto')
......@@ -21,18 +25,14 @@ newrelic.agent.initialize(
'veda_newrelic.ini'
)
)
"""
Upload file to hotstore
"""
from veda_utils import ErrorObject, Output
homedir = expanduser("~")
class Hotstore():
class Hotstore(object):
"""
Upload file to hotstore (short term storage, s3 objects)
"""
def __init__(self, video_proto, upload_filepath, **kwargs):
self.video_proto = video_proto
self.upload_filepath = upload_filepath
......@@ -87,28 +87,22 @@ class Hotstore():
"""
if self.endpoint is False:
try:
-conn = boto.connect_s3(
-self.auth_dict['veda_access_key_id'],
-self.auth_dict['veda_secret_access_key']
-)
+conn = boto.connect_s3()
delv_bucket = conn.get_bucket(
self.auth_dict['veda_s3_hotstore_bucket']
)
-except:
+except S3ResponseError:
ErrorObject().print_error(
message='Hotstore: Bucket Connectivity'
)
return False
else:
try:
-conn = boto.connect_s3(
-self.auth_dict['edx_access_key_id'],
-self.auth_dict['edx_secret_access_key']
-)
+conn = boto.connect_s3()
delv_bucket = conn.get_bucket(
self.auth_dict['edx_s3_endpoint_bucket']
)
-except:
+except S3ResponseError:
ErrorObject().print_error(
message='Endpoint: Bucket Connectivity'
)
......@@ -155,24 +149,18 @@ class Hotstore():
"""
if self.endpoint is False:
try:
-c = boto.connect_s3(
-self.auth_dict['veda_access_key_id'],
-self.auth_dict['veda_secret_access_key']
-)
+c = boto.connect_s3()
b = c.lookup(self.auth_dict['veda_s3_hotstore_bucket'])
-except:
+except S3ResponseError:
ErrorObject().print_error(
message='Hotstore: Bucket Connectivity'
)
return False
else:
try:
-c = boto.connect_s3(
-self.auth_dict['edx_access_key_id'],
-self.auth_dict['edx_secret_access_key']
-)
+c = boto.connect_s3()
b = c.lookup(self.auth_dict['edx_s3_endpoint_bucket'])
-except:
+except S3ResponseError:
ErrorObject().print_error(
message='Endpoint: Bucket Connectivity'
)
......
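The hotstore code uses both conn.get_bucket(), which raises S3ResponseError when the bucket is missing or forbidden, and conn.lookup(), which returns None instead (as the `if b is None` check in _BOTO_MULTIPART expects). Side by side, with an illustrative bucket name:

```python
import boto
from boto.exception import S3ResponseError

conn = boto.connect_s3()

# get_bucket: exception on failure.
try:
    bucket = conn.get_bucket('veda-hotstore')   # illustrative name
except S3ResponseError:
    bucket = None

# lookup: same probe, but returns None on failure.
bucket = conn.lookup('veda-hotstore')
if bucket is None:
    print('bucket unreachable')
```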
......@@ -57,8 +57,10 @@ class EmailAlert():
email_body = 'There has been a fault:'
email_body += self.message
-conn = boto.ses.connect_to_region('us-east-1')
+try:
+conn = boto.ses.connect_to_region('us-east-1')
+except boto.exception.NoAuthHandlerFound:
+return
conn.send_email(
self.auth_dict['veda_noreply_email'],
......@@ -231,8 +233,10 @@ class Report():
'edX Studio Course URL : ' + v1[0].edx_studio_url + '\n\n'
'Please do not reply to this email.\n\n <<EOM'
)
-conn = boto.ses.connect_to_region('us-east-1')
+try:
+conn = boto.ses.connect_to_region('us-east-1')
+except boto.exception.NoAuthHandlerFound:
+return
conn.send_email(
self.auth_dict['veda_noreply_email'],
......
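connect_to_region raises boto.exception.NoAuthHandlerFound when the credential chain comes up empty (typical on a sandbox with no AWS credentials), which is why both SES call sites are now wrapped. A guarded-send sketch with illustrative addresses:

```python
import boto.ses
from boto.exception import NoAuthHandlerFound

def send_alert(subject, body):
    try:
        conn = boto.ses.connect_to_region('us-east-1')
    except NoAuthHandlerFound:
        return  # no credentials: silently skip email, as the PR does
    conn.send_email(
        'veda-noreply@example.com',   # illustrative source address
        subject,
        body,
        ['ops@example.com'],          # illustrative recipients
    )
```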
......@@ -2,44 +2,44 @@
# ---
# Database information
# ---
-# SANDBOX
-DATABASES:
-    default:
-        ENGINE: django.db.backends.sqlite3
-        NAME: sandbox.db
-## PRODUCTION
+# SANDBOX
#DATABASES:
#    default:
-#        ENGINE: 'django.db.backends.mysql'
-#        NAME:
-#        USER:
-#        PASSWORD:
-#        HOST:
-#        PORT: '3306'
+#        ENGINE: django.db.backends.sqlite3
+#        NAME: sandbox.db
-django_secret_key:
+## PRODUCTION
+DATABASES:
+    default:
+        ENGINE: 'django.db.backends.mysql'
+        NAME:
+        USER:
+        PASSWORD:
+        HOST:
+        PORT: '3306'
+django_secret_key: ""
# ---
-# Authentication keys, VEDA AWS Account
+# AWS Buckets, Prefixes
# ---
+# Studio/Platform
+edx_s3_ingest_prefix:
+edx_s3_ingest_bucket:
+edx_s3_endpoint_bucket:
+# CF
+edx_cloudfront_prefix:
+# Images
+aws_video_images_bucket:
+aws_video_images_prefix: "video-images/"
+# VEDA Internal
veda_s3_upload_bucket:
veda_s3_hotstore_bucket:
veda_deliverable_bucket:
-veda_access_key_id:
-veda_secret_access_key:
+# Settings
multi_upload_barrier: 2000000000
-# ---
-# edX AWS Account
-# ---
-edx_aws_user:
-edx_access_key_id:
-edx_secret_access_key:
-edx_s3_ingest_bucket:
-edx_s3_endpoint_bucket:
-edx_cloudfront_prefix:
# ---
# email vars
......@@ -48,32 +48,37 @@ veda_noreply_email:
admin_email:
# ---
-# VAL user creds
+# VEDA API
# ---
-## VEDA API Auth
veda_api_url:
veda_auth_url:
veda_client_id:
veda_secret_key:
veda_token_url:
+# ---
+# VAL
+# ---
val_api_url:
+val_token_url:
+val_video_images_url:
+# Credentials
val_client_id:
+val_password:
val_secret_key:
-val_token_url:
-val_password:
val_username:
-# ---
-# AWS Storage config for video images
-# ---
-aws_video_images_access_key:
-aws_video_images_secret_key:
-aws_video_images_bucket:
-aws_video_images_prefix:
# ---
-## Celery Info
+# Celery Info
# ---
-celery_app_name:
+celery_app_name: veda_production
+# can do multiple queues like so: foo,bar,baz
-main_celery_queue: encode_worker
-largefile_queue_barrier: 1000000000
+celery_receiver_queue: encode_worker
+largefile_celery_queue: large_encode_worker
+celery_stat_queue: transcode_stat
+largefile_queue_barrier: 1000000000
celery_threads: 1
rabbitmq_broker:
......@@ -94,6 +99,11 @@ threeplay_ftphost:
xuetang_api_url:
xuetang_api_shared_secret:
+## Encoding Config
+ffmpeg_compiled: "ffmpeg"
+ffprobe_compiled: "ffprobe"
+target_aspect_ratio: 1.7777778
+# ----------
##---
# This is a list of encodes and their respective course
......
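The reorganized instance_config now carries no AWS keys at all; modules load it with yaml and read buckets, queue names, and encode settings from the resulting dict. A loading sketch (the path is illustrative; the real location comes from VEDA's node configuration):

```python
import yaml
from os.path import expanduser

with open(expanduser('~/instance_config.yaml')) as f:   # illustrative path
    auth_dict = yaml.safe_load(f)

# Keys referenced throughout the diff:
auth_dict['edx_s3_ingest_bucket']
auth_dict['multi_upload_barrier']    # 2000000000
auth_dict['celery_receiver_queue']   # encode_worker
auth_dict['target_aspect_ratio']     # 1.7777778
```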