Unverified Commit b71d6d31 by M. Rehan Committed by GitHub

Merge pull request #81 from edx/yro/yt_callback_bugfix

Yro/yt callback bugfix
parents d6c03c48 728ffc85
...@@ -7,6 +7,7 @@ import django ...@@ -7,6 +7,7 @@ import django
from control_env import * from control_env import *
from dependencies.shotgun_api3 import Shotgun from dependencies.shotgun_api3 import Shotgun
from dependencies.shotgun_api3.lib.xmlrpclib import ProtocolError
from VEDA.utils import get_config from VEDA.utils import get_config
""" """
...@@ -101,26 +102,31 @@ class VedaEncode(object): ...@@ -101,26 +102,31 @@ class VedaEncode(object):
continue continue
def check_review_approved(self): def check_review_approved(self):
if self.sg_script_key is None:
return True
""" """
** Mediateam only ** ** Mediateam only **
Check in with SG to see if this video Check in with SG to see if this video
is authorized to go to final publishing is authorized to go to final publishing
""" """
# TODO: Move to independent API Method
if self.sg_script_key is None:
return True
video_object = Video.objects.filter( video_object = Video.objects.filter(
edx_id=self.veda_id edx_id=self.veda_id
).latest() ).latest()
if video_object.inst_class.sg_projID is None: if video_object.inst_class.sg_projID is None:
return False return False
try:
sg = Shotgun( sg = Shotgun(
self.sg_server_path, self.sg_server_path,
self.sg_script_name, self.sg_script_name,
self.sg_script_key self.sg_script_key
) )
except ProtocolError:
# Periodic API Error
return False
fields = ['project', 'entity', 'sg_status_list'] fields = ['project', 'entity', 'sg_status_list']
filters = [ filters = [
...@@ -130,6 +136,8 @@ class VedaEncode(object): ...@@ -130,6 +136,8 @@ class VedaEncode(object):
"id": video_object.inst_class.sg_projID "id": video_object.inst_class.sg_projID
}], }],
] ]
# TODO: Improve API query
tasks = sg.find("Task", filters, fields) tasks = sg.find("Task", filters, fields)
for t in tasks: for t in tasks:
if t['entity']['name'] == self.veda_id.split('-')[-1]: if t['entity']['name'] == self.veda_id.split('-')[-1]:
...@@ -137,11 +145,3 @@ class VedaEncode(object): ...@@ -137,11 +145,3 @@ class VedaEncode(object):
return True return True
return False return False
def main():
pass
if __name__ == '__main__':
sys.exit(main())
...@@ -238,6 +238,11 @@ class VALAPICall(): ...@@ -238,6 +238,11 @@ class VALAPICall():
Determine VAL profile data, from return/encode submix Determine VAL profile data, from return/encode submix
""" """
# Defend against old/deprecated encodes
try:
self.auth_dict['val_profile_dict'][self.encode_profile]
except KeyError:
return
if self.endpoint_url is not None: if self.endpoint_url is not None:
for p in self.auth_dict['val_profile_dict'][self.encode_profile]: for p in self.auth_dict['val_profile_dict'][self.encode_profile]:
...@@ -264,6 +269,10 @@ class VALAPICall(): ...@@ -264,6 +269,10 @@ class VALAPICall():
if final.encode_profile.product_spec == 'review': if final.encode_profile.product_spec == 'review':
pass pass
else: else:
try:
self.auth_dict['val_profile_dict'][final.encode_profile.product_spec]
except KeyError:
return
for p in self.auth_dict['val_profile_dict'][final.encode_profile.product_spec]: for p in self.auth_dict['val_profile_dict'][final.encode_profile.product_spec]:
test_list.append(dict( test_list.append(dict(
url=str(final.encode_url), url=str(final.encode_url),
...@@ -277,19 +286,19 @@ class VALAPICall(): ...@@ -277,19 +286,19 @@ class VALAPICall():
self.encode_data.append(t) self.encode_data.append(t)
if len(val_api_return) == 0: if len(val_api_return) == 0:
return None return
""" """
All URL Records Deleted (for some reason) All URL Records Deleted (for some reason)
""" """
if len(self.encode_data) == 0: if len(self.encode_data) == 0:
return None return
for i in val_api_return['encoded_videos']: for i in val_api_return['encoded_videos']:
if i['profile'] not in [g['profile'] for g in self.encode_data]: if i['profile'] not in [g['profile'] for g in self.encode_data]:
self.encode_data.append(i) self.encode_data.append(i)
return None return
def send_404(self): def send_404(self):
""" """
......
...@@ -63,4 +63,4 @@ val_profile_dict: ...@@ -63,4 +63,4 @@ val_profile_dict:
heal_start: 1 heal_start: 1
heal_end: 144 heal_end: 144
global_timeout: 40 global_timeout: 60
...@@ -11,7 +11,7 @@ import sys ...@@ -11,7 +11,7 @@ import sys
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
from datetime import timedelta from datetime import timedelta
from os.path import expanduser from os.path import expanduser
from paramiko.ssh_exception import AuthenticationException from paramiko.ssh_exception import AuthenticationException, SSHException
import django import django
import pysftp import pysftp
...@@ -91,6 +91,8 @@ def xml_downloader(course): ...@@ -91,6 +91,8 @@ def xml_downloader(course):
crawl_sftp(d=d, s1=s1) crawl_sftp(d=d, s1=s1)
except AuthenticationException: except AuthenticationException:
LOGGER.info("{inst}{clss} : Authentication Failed".format(inst=course.institution, clss=course.edx_classid)) LOGGER.info("{inst}{clss} : Authentication Failed".format(inst=course.institution, clss=course.edx_classid))
except SSHException:
LOGGER.info("{inst}{clss} : Authentication Failed".format(inst=course.institution, clss=course.edx_classid))
def crawl_sftp(d, s1): def crawl_sftp(d, s1):
...@@ -103,43 +105,47 @@ def crawl_sftp(d, s1): ...@@ -103,43 +105,47 @@ def crawl_sftp(d, s1):
""" """
dirtime = datetime.datetime.fromtimestamp(d.st_mtime) dirtime = datetime.datetime.fromtimestamp(d.st_mtime)
if dirtime < datetime.datetime.now() - timedelta(days=YOUTUBE_LOOKBACK_DAYS): if dirtime < datetime.datetime.now() - timedelta(days=YOUTUBE_LOOKBACK_DAYS):
return None return
if d.filename == "files_to_be_removed.txt": if d.filename == "files_to_be_removed.txt":
return None return
if d.filename == 'FAILED': if d.filename == 'FAILED':
return None return
try: try:
s1.cwd(d.filename) s1.cwd(d.filename)
except: except:
return None return
try:
for f in s1.listdir_attr(): for f in s1.listdir_attr():
filetime = datetime.datetime.fromtimestamp(f.st_mtime) filetime = datetime.datetime.fromtimestamp(f.st_mtime)
if not filetime > datetime.datetime.now() - timedelta(days=YOUTUBE_LOOKBACK_DAYS): if not filetime > datetime.datetime.now() - timedelta(days=YOUTUBE_LOOKBACK_DAYS):
continue continue
if fnmatch.fnmatch(f.filename, '*.xml') or fnmatch.fnmatch(f.filename, '*.csv'): if fnmatch.fnmatch(f.filename, '*.xml') or fnmatch.fnmatch(f.filename, '*.csv'):
# Determine If there are extant downloaded status files for this same ID, # Determine If there are extant downloaded status files for this same ID,
# If yes, increment filename # If yes, increment filename
x = 0 x = 0
while True: while True:
""" """
Just in case something runs out Just in case something runs out
""" """
if x > 20: if x > 20:
break break
file_to_find = f.filename.split('.')[0] + \ file_to_find = f.filename.split('.')[0] + \
str(x) + \ str(x) + \
'.' + \ '.' + \
f.filename.split('.')[1] f.filename.split('.')[1]
if os.path.exists(os.path.join(workdir, file_to_find)): if os.path.exists(os.path.join(workdir, file_to_find)):
x += 1 x += 1
else: else:
break break
print "%s : %s" % (f.filename, file_to_find) print "%s : %s" % (f.filename, file_to_find)
s1.get( s1.get(
f.filename, f.filename,
os.path.join(workdir, file_to_find) os.path.join(workdir, file_to_find)
) )
except IOError:
return
except SSHException:
return
s1.cwd('..') s1.cwd('..')
...@@ -218,6 +224,7 @@ def urlpatch(upload_data): ...@@ -218,6 +224,7 @@ def urlpatch(upload_data):
test_id = Video.objects.filter(edx_id=upload_data['edx_id']).latest() test_id = Video.objects.filter(edx_id=upload_data['edx_id']).latest()
except: except:
upload_data['status'] = 'Failure' upload_data['status'] = 'Failure'
return
if upload_data['status'] == 'Success': if upload_data['status'] == 'Success':
url_query = URL.objects.filter( url_query = URL.objects.filter(
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment