edx / edx-platform / Commits / c4d92b8b

Commit c4d92b8b authored Dec 20, 2017 by Jesse Zoldak
refactor paver db

parent 85b25d3c
Showing 2 changed files with 80 additions and 83 deletions:

    pavelib/database.py                     +77 -77
    pavelib/paver_tests/test_database.py     +3  -6
pavelib/database.py  (view file @ c4d92b8b)
...
...
@@ -3,40 +3,40 @@ tasks for controlling the databases used in tests
 """
 from __future__ import print_function
 import os
-import hashlib
 from paver.easy import sh, needs
 import boto
+from pavelib.prereqs import compute_fingerprint
 from pavelib.utils.passthrough_opts import PassthroughTask
 from pavelib.utils.timer import timed
 from pavelib.utils.envs import Env

+# Bokchoy db schema and data fixtures
+BOKCHOY_DB_FILES = [
+    'bok_choy_data_default.json',
+    'bok_choy_data_student_module_history.json',
+    'bok_choy_migrations_data_default.sql',
+    'bok_choy_migrations_data_student_module_history.sql',
+    'bok_choy_schema_default.sql',
+    'bok_choy_schema_student_module_history.sql'
+]

-@needs('pavelib.prereqs.install_prereqs')
-@PassthroughTask
-@timed
-def update_bokchoy_db_cache():
-    """
-    Update and cache the MYSQL database for bokchoy testing. This command
-    will remove any previously cached database files and apply migrations
-    on a fresh db. Additionally, the collective sha1 checksum for all of
-    these files will be written to file, for future comparisons/checking
-    for updates.
-    You can commit the resulting files in common/test/db_cache into
-    git to speed up test runs
-    """
-    bokchoy_db_files = [
-        'bok_choy_data_default.json',
-        'bok_choy_data_student_module_history.json',
-        'bok_choy_migrations_data_default.sql',
-        'bok_choy_migrations_data_student_module_history.sql',
-        'bok_choy_schema_default.sql',
-        'bok_choy_schema_student_module_history.sql'
-    ]
+# Output files from scripts/calculate-bokchoy-migrations.sh
+MIGRATION_OUTPUT_FILES = [
+    'bok_choy_default_migrations.yaml',
+    'bok_choy_student_module_history_migrations.yaml'
+]
+
+ALL_DB_FILES = BOKCHOY_DB_FILES + MIGRATION_OUTPUT_FILES
+CACHE_BUCKET_NAME = 'edx-tools-database-caches'
+FINGERPRINT_FILEPATH = '{}/common/test/db_cache/bokchoy_migrations.sha1'.format(Env.REPO_ROOT)
+
+
+def remove_cached_db_files():
+    """Remove the cached db files if they exist."""
     print('Removing cached db files for bokchoy tests')
-    for db_file in bokchoy_db_files:
+    for db_file in BOKCHOY_DB_FILES:
         try:
             db_file_path = os.path.join(
                 '{}/common/test/db_cache'.format(Env.REPO_ROOT), db_file
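One piece worth calling out in this hunk: the new import of compute_fingerprint from pavelib.prereqs replaces the inline hashlib logic that is deleted further down in this diff. That helper is not part of this commit, so the following is only a rough sketch of what it presumably does, modeled on the inline code it replaces; the name compute_fingerprint_sketch and the skip-missing-files behavior are assumptions for illustration.

import hashlib
import os


def compute_fingerprint_sketch(file_paths):
    """
    Rough stand-in for pavelib.prereqs.compute_fingerprint (an assumption;
    that helper is not shown in this diff). It mirrors the inline hashlib
    code removed below: feed the contents of each file into one SHA-1 and
    return the hex digest.
    """
    hasher = hashlib.sha1()
    for file_path in file_paths:
        if os.path.isfile(file_path):
            with open(file_path, 'rb') as file_handle:
                hasher.update(file_handle.read())
    return hasher.hexdigest()

Under that assumption, the fingerprint changes whenever any of the cached fixtures or the migration output files change, which is exactly what the cache-invalidation logic below relies on.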
...
...
@@ -44,31 +44,16 @@ def update_bokchoy_db_cache():
             os.remove(db_file_path)
             print('\tRemoved {}'.format(db_file_path))
         except OSError:
             print('\tCould not remove {}. Continuing.'.format(db_file_path))
             continue
-    sh('{}/scripts/reset-test-db.sh'.format(Env.REPO_ROOT))
-    # Write the fingerprint of the database files to disk for use in future
-    # comparisons
-    fingerprint = fingerprint_bokchoy_db_files()
-    with open('common/test/db_cache/bokchoy_migrations.sha1', 'w') as fingerprint_file:
-        fingerprint_file.write(fingerprint)


-def compare_bokchoy_db_fingerprints():
+def calculate_bokchoy_migrations():
     """
-    Determine if the current state of the bokchoy databases and related files
-    have changed since the last time they were updated in the repository by
-    comparing their fingerprint to the fingerprint saved in the repo
+    Run the calculate-bokchoy-migrations script, which will generate two
+    yml files. These tell whether or not we need to run migrations.
     """
-    try:
-        fingerprint_filepath = '{}/common/test/db_cache/bokchoy_migrations.sha1'.format(Env.REPO_ROOT)
-        with open(fingerprint_filepath, 'r') as fingerprint_file:
-            cached_fingerprint = fingerprint_file.read().strip()
-    except IOError:
-        return False
-    current_fingerprint = fingerprint_bokchoy_db_files()
-    return current_fingerprint == cached_fingerprint
+    sh('{}/scripts/calculate-bokchoy-migrations.sh'.format(Env.REPO_ROOT))


 def fingerprint_bokchoy_db_files():
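Both calculate_bokchoy_migrations() here and the reset step in update_bokchoy_db_cache() further down shell out through paver's sh() helper, which runs the command in a shell and fails the build on a non-zero exit unless ignore_error=True. As a hypothetical variant that is not part of this commit, the script's output could also be captured and echoed:

# Sketch only: same shell-out as calculate_bokchoy_migrations(), but with
# capture=True so paver's sh() returns the script's stdout as a string.
from paver.easy import sh

from pavelib.utils.envs import Env


def calculate_bokchoy_migrations_verbose():
    """Run the migrations-calculation script and echo its output."""
    output = sh(
        '{}/scripts/calculate-bokchoy-migrations.sh'.format(Env.REPO_ROOT),
        capture=True,
    )
    print(output)
    return output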
...
...
@@ -78,52 +63,67 @@ def fingerprint_bokchoy_db_files():
     including schema, migrations to be run and data. It can be used to determine
     if the databases need to be updated.
     """
-    # Run the calculate-bokchoy-migrations script, which will generate two
-    # yml files. These tell whether or not we need to run migrations
-    sh('{}/scripts/calculate-bokchoy-migrations.sh'.format(Env.REPO_ROOT))
-    db_files = [
-        # Bokchoy db schema and data fixtures
-        'bok_choy_data_default.json',
-        'bok_choy_data_student_module_history.json',
-        'bok_choy_migrations_data_default.sql',
-        'bok_choy_migrations_data_student_module_history.sql',
-        'bok_choy_schema_default.sql',
-        'bok_choy_schema_student_module_history.sql',
-        # Output files from scripts/calculate-bokchoy-migrations.sh
-        'bok_choy_default_migrations.yaml',
-        'bok_choy_student_module_history_migrations.yaml'
-    ]
-    hasher = hashlib.sha1()
+    calculate_bokchoy_migrations()
     file_paths = [
-        os.path.join('common/test/db_cache', db_file) for db_file in db_files
+        os.path.join('common/test/db_cache', db_file) for db_file in ALL_DB_FILES
     ]
-    for file_path in file_paths:
-        with open(file_path, 'rb') as file_handle:
-            hasher.update(file_handle.read())
-    fingerprint = hasher.hexdigest()
+    fingerprint = compute_fingerprint(file_paths)
     print("Computed fingerprint for bokchoy db files: {}".format(fingerprint))
     return fingerprint


-def verify_fingerprint_in_bucket(fingerprint):
+@needs('pavelib.prereqs.install_prereqs')
+@PassthroughTask
+@timed
+def update_bokchoy_db_cache():
+    """
+    Update and cache the MYSQL database for bokchoy testing:
+    * Remove any previously cached database files
+    * Apply migrations on a fresh db
+    * Write the collective sha1 checksum for all of these files to disk
+    """
+    remove_cached_db_files()
+
+    # Apply migrations to the test database and create the cache files
+    sh('{}/scripts/reset-test-db.sh'.format(Env.REPO_ROOT))
+
+    # Write the fingerprint of the database files to disk for use in future
+    # comparisons
+    fingerprint = fingerprint_bokchoy_db_files()
+    with open(FINGERPRINT_FILEPATH, 'w') as fingerprint_file:
+        fingerprint_file.write(fingerprint)
+
+
+def is_fingerprint_in_bucket(fingerprint, bucket_name=CACHE_BUCKET_NAME):
     """
-    Ensure that a zip file matching the given fingerprint is present within an
-    s3 bucket
+    Test if a zip file matching the given fingerprint is present within an s3 bucket
     """
     conn = boto.connect_s3()
-    bucket_name = os.environ.get('DB_CACHE_S3_BUCKET', 'edx-tools-database-caches')
     bucket = conn.get_bucket(bucket_name)
     zip_present = "{}.zip".format(fingerprint) in [k.name for k in bucket.get_all_keys()]
+    msg = "a match in the {} bucket.".format(bucket_name)
     if zip_present:
-        print("Found a match in the {} bucket".format(bucket_name))
+        print("Found {}".format(msg))
     else:
-        print("Couldn't find a match in the {} bucket".format(bucket_name))
+        print("Couldn't find {}".format(msg))
     return zip_present


+def compare_bokchoy_db_fingerprints():
+    """
+    Determine if the current state of the bokchoy databases and related files
+    have changed since the last time they were updated in the repository by
+    comparing their fingerprint to the fingerprint saved in the repo.
+
+    Returns:
+        True if the fingerprint can be read off disk and matches, False otherwise.
+    """
+    try:
+        with open(FINGERPRINT_FILEPATH, 'r') as fingerprint_file:
+            cached_fingerprint = fingerprint_file.read().strip()
+    except IOError:
+        return False
+    current_fingerprint = fingerprint_bokchoy_db_files()
+    return current_fingerprint == cached_fingerprint
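Taken together, the refactor splits cache maintenance into small composable helpers. A hypothetical caller that decides whether the cached bokchoy databases are stale, and whether a pre-built cache archive already exists in S3, could combine them as sketched below; the function report_bokchoy_db_cache_status is illustrative and not part of this commit, and any real call sites are outside this diff.

# Hypothetical orchestration sketch: the helper names come from pavelib/database.py
# above, but this caller itself is illustrative and not part of the commit.
from pavelib import database


def report_bokchoy_db_cache_status():
    """Report whether the cached db files are current and whether S3 has a match."""
    if database.compare_bokchoy_db_fingerprints():
        print('Cached bokchoy db files match the saved fingerprint; nothing to do.')
        return

    # Recomputes the fingerprint that compare_bokchoy_db_fingerprints() already
    # calculated internally; acceptable for a sketch.
    fingerprint = database.fingerprint_bokchoy_db_files()
    if database.is_fingerprint_in_bucket(fingerprint):
        print('S3 already has {}.zip; the cached archive could be reused.'.format(fingerprint))
    else:
        print('No matching archive; regenerate it with the update_bokchoy_db_cache task.')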
pavelib/paver_tests/test_database.py  (view file @ c4d92b8b)
"""
Tests for the Paver commands for updating test databases
"""
import
os
from
unittest
import
TestCase
import
boto
from
mock
import
patch
from
common.test.utils
import
MockS3Mixin
from
pavelib.database
import
verify
_fingerprint_in_bucket
from
pavelib.database
import
is
_fingerprint_in_bucket
class
TestPaverDatabaseTasks
(
MockS3Mixin
,
TestCase
):
...
...
@@ -19,14 +18,12 @@ class TestPaverDatabaseTasks(MockS3Mixin, TestCase):
         self.conn.create_bucket('moto_test_bucket')
         self.bucket = self.conn.get_bucket('moto_test_bucket')

-    @patch.dict(os.environ, {'DB_CACHE_S3_BUCKET': 'moto_test_bucket'})
     def test_fingerprint_in_bucket(self):
         key = boto.s3.key.Key(bucket=self.bucket, name='testfile.zip')
         key.set_contents_from_string('this is a test')
-        self.assertTrue(verify_fingerprint_in_bucket('testfile'))
+        self.assertTrue(is_fingerprint_in_bucket('testfile', 'moto_test_bucket'))

-    @patch.dict(os.environ, {'DB_CACHE_S3_BUCKET': 'moto_test_bucket'})
     def test_fingerprint_not_in_bucket(self):
         key = boto.s3.key.Key(bucket=self.bucket, name='testfile.zip')
         key.set_contents_from_string('this is a test')
-        self.assertFalse(verify_fingerprint_in_bucket('otherfile'))
+        self.assertFalse(is_fingerprint_in_bucket('otherfile', 'moto_test_bucket'))
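These tests lean on MockS3Mixin from common/test/utils.py, which is not shown in this diff but evidently provides a mocked S3 connection as self.conn. The sketch below is a self-contained version of the same pattern, assuming a moto release of that era that still patches the boto 2 API; the class name FingerprintBucketSketch and the bucket name sketch_bucket are made up for illustration.

# Standalone sketch (not in this commit), assuming moto's mock_s3 covers boto 2
# the way MockS3Mixin presumably does.
from unittest import TestCase

import boto
from boto.s3.key import Key
from moto import mock_s3

from pavelib.database import is_fingerprint_in_bucket


class FingerprintBucketSketch(TestCase):
    """Exercise is_fingerprint_in_bucket against a moto-mocked bucket."""

    @mock_s3
    def test_zip_lookup(self):
        conn = boto.connect_s3()
        bucket = conn.create_bucket('sketch_bucket')
        key = Key(bucket=bucket, name='abc123.zip')
        key.set_contents_from_string('fake cache archive')

        self.assertTrue(is_fingerprint_in_bucket('abc123', 'sketch_bucket'))
        self.assertFalse(is_fingerprint_in_bucket('missing', 'sketch_bucket'))

The point the refactor makes testable is that the bucket name is now an explicit argument, so the tests no longer need to patch os.environ['DB_CACHE_S3_BUCKET'].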