Commit 1f6978cc by Brian Beggs, committed by GitHub

Merge branch 'master' into bbeggs/revert-context-migration

parents 265ceeab f471f96d
@@ -26,10 +26,8 @@ doc/
 config/benchmark.yml
 benchmark_log
-# bundler binstubs
-bin/
 log/
 #redcar
 .redcar/
 /nbproject
+.idea/
cs_comments_service
rvm 1.9.3@cs_comments_service --create
+sudo: false
 language: ruby
 rvm:
   - "1.9.3"
-services:
-  - elasticsearch
+cache: bundler
 before_install:
-  - wget https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-0.90.11.deb && sudo dpkg --force-confnew -i elasticsearch-0.90.11.deb && sudo service elasticsearch restart
-  # Install mongo 2.6.4 according to http://docs.mongodb.org/manual/tutorial/install-mongodb-on-ubuntu/
-  # TODO: This won't be necessary when travis switches to 2.6 by default - see https://github.com/travis-ci/travis-ci/issues/2246
-  - sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
-  - echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | sudo tee /etc/apt/sources.list.d/mongodb.list
-  - sudo apt-get update -q
-  - sudo apt-get install -y mongodb-org=2.6.4 mongodb-org-server=2.6.4 mongodb-org-shell=2.6.4 mongodb-org-mongos=2.6.4 mongodb-org-tools=2.6.4
-  - mongo --version
+  - gem update bundler # Ensure we use the latest version of bundler. Travis' default version is outdated.
+  # Run Elasticsearch as a daemon
+  - curl -O https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-0.90.13.zip
+  - unzip elasticsearch-0.90.13.zip
+  - elasticsearch-0.90.13/bin/elasticsearch
+  # Run MongoDB as a daemon
+  - curl -O https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-2.6.11.tgz
+  - tar -zxf mongodb-linux-x86_64-2.6.11.tgz
+  - export PATH=mongodb-linux-x86_64-2.6.11/bin:$PATH
+  - mkdir -p ./mongo/db
+  - mkdir -p ./mongo/log
+  - mongod --fork --dbpath ./mongo/db --logpath ./mongo/log/mongodb.log
 script: bundle exec rspec
@@ -22,3 +22,5 @@ Alan Boudreault <alan@alanb.ca>
 Matjaz Gregoric <mtyaka@gmail.com>
 Ben McMorran <ben.mcmorran@gmail.com>
 Bill DeRusha <bill@edx.org>
+Brian Beggs <macdiesel@gmail.com>
+Clinton Blackburn <cblackburn@edx.org>
@@ -14,27 +14,22 @@ gem 'sinatra'
 gem 'yajl-ruby'
-gem 'ampex'
-gem 'mongo'
-gem 'moped', "1.5.1"
-gem 'mongoid', "3.0.15"
+gem 'mongoid', '~> 5.0.0'
+gem 'bson', '~>3.1'
 gem 'bson_ext'
+gem 'protected_attributes'
 gem 'delayed_job'
-gem 'delayed_job_mongoid', :git => 'https://github.com/dementrock/delayed_job_mongoid.git'
-gem "enumerize", "~>0.8.0"
-gem 'mongoid-tree', :git => 'https://github.com/dementrock/mongoid-tree.git'
-gem 'voteable_mongo', :git => 'https://github.com/dementrock/voteable_mongo.git'
-gem 'mongoid_magic_counter_cache', :git => 'https://github.com/dementrock/mongoid-magic-counter-cache.git'
-gem 'kaminari', :require => 'kaminari/sinatra', :git => 'https://github.com/dementrock/kaminari.git'
-gem 'faker'
-gem 'will_paginate_mongoid'
+gem 'delayed_job_mongoid'
+gem "enumerize"
+gem 'mongoid-tree', :git => 'https://github.com/macdiesel/mongoid-tree'
+gem 'rs_voteable_mongo', :git => 'https://github.com/navneet35371/voteable_mongo.git'
+gem 'mongoid_magic_counter_cache'
+gem 'will_paginate_mongoid', "~>2.0"
 gem 'rdiscount'
-gem 'nokogiri'
+gem 'nokogiri', "~>1.6.8"
 gem 'tire', "0.6.2"
 gem 'tire-contrib'
@@ -44,17 +39,19 @@ gem 'dalli'
 gem 'rest-client'
 group :test do
-  gem 'rspec'
-  gem 'rack-test', :require => "rack/test"
+  gem 'codecov', :require => false
+  gem 'database_cleaner', '~> 1.5.1'
+  gem 'factory_girl', '~> 4.0'
+  gem 'faker', '~> 1.6'
   gem 'guard'
   gem 'guard-unicorn'
-  gem 'simplecov', :require => false
-  gem 'database_cleaner'
+  gem 'rack-test', :require => 'rack/test'
+  gem 'rspec', '~> 2.11.0'
 end
 gem 'newrelic_rpm'
-gem 'newrelic_moped'
 gem 'unicorn'
-gem "rack-timeout", "0.1.0beta3"
+gem "rack-timeout"
 gem "i18n"
 gem "rack-contrib", :git => 'https://github.com/rack/rack-contrib.git', :ref => '6ff3ca2b2d988911ca52a2712f6a7da5e064aa27'
 GIT
-  remote: https://github.com/dementrock/delayed_job_mongoid.git
-  revision: 48b1420d59bc01e0b1aba1c2ad66bda4a5e04b9a
+  remote: https://github.com/macdiesel/mongoid-tree
+  revision: b381dd56f1b3b061df8f4b4181d5440dea1602d1
   specs:
-    delayed_job_mongoid (1.0.8)
-      delayed_job (~> 3.0.0)
-      mongoid (>= 3.0.0.rc)
+    mongoid-tree (2.0.0)
+      mongoid (>= 4.0, <= 5.0)

 GIT
-  remote: https://github.com/dementrock/kaminari.git
-  revision: 82a38e07db1ca1598c8daf073a8f6be22ae714d6
+  remote: https://github.com/navneet35371/voteable_mongo.git
+  revision: 55fcfe76705ab5da1c9e5670594331b33954c545
   specs:
-    kaminari (0.13.0)
-      actionpack (>= 3.0.0)
-      activesupport (>= 3.0.0)
-
-GIT
-  remote: https://github.com/dementrock/mongoid-magic-counter-cache.git
-  revision: 28bc5e617cab19187b323e7d97d49fe73a7de68a
-  specs:
-    mongoid_magic_counter_cache (0.1.1)
-      mongoid (>= 3.0)
-      rake
-
-GIT
-  remote: https://github.com/dementrock/mongoid-tree.git
-  revision: 5aa7a4ee16cd90dbbcac3ad702446d2119e971df
-  specs:
-    mongoid-tree (1.0.0)
-      mongoid (>= 3.0, <= 4.0)
-
-GIT
-  remote: https://github.com/dementrock/voteable_mongo.git
-  revision: 538e86856daa1c180ba80b7c6f2805e531ba420c
-  specs:
-    voteable_mongo (0.9.3)
+    rs_voteable_mongo (1.0.2)
+      mongoid (>= 3.0, <= 5.0)

 GIT
   remote: https://github.com/rack/rack-contrib.git
@@ -46,88 +23,100 @@ GIT
 GEM
   remote: https://rubygems.org/
   specs:
-    actionpack (3.2.8)
-      activemodel (= 3.2.8)
-      activesupport (= 3.2.8)
-      builder (~> 3.0.0)
-      erubis (~> 2.7.0)
-      journey (~> 1.0.4)
-      rack (~> 1.4.0)
-      rack-cache (~> 1.2)
-      rack-test (~> 0.6.1)
-      sprockets (~> 2.1.3)
-    activemodel (3.2.8)
-      activesupport (= 3.2.8)
-      builder (~> 3.0.0)
-    activesupport (3.2.8)
-      i18n (~> 0.6)
-      multi_json (~> 1.0)
-    ampex (2.0.0)
-      blankslate
-    ansi (1.4.3)
-    blankslate (2.1.2.4)
-    bson (1.6.4)
-    bson_ext (1.6.4)
-      bson (~> 1.6.4)
-    builder (3.0.4)
+    activemodel (4.2.4)
+      activesupport (= 4.2.4)
+      builder (~> 3.1)
+    activesupport (4.2.4)
+      i18n (~> 0.7)
+      json (~> 1.7, >= 1.7.7)
+      minitest (~> 5.1)
+      thread_safe (~> 0.3, >= 0.3.4)
+      tzinfo (~> 1.1)
+    ansi (1.5.0)
+    bson (3.2.4)
+    bson_ext (1.5.1)
+    builder (3.2.2)
+    codecov (0.1.2)
+      json
+      simplecov
+      url
     coderay (1.0.7)
     dalli (2.1.0)
-    database_cleaner (1.2.0)
-    delayed_job (3.0.3)
-      activesupport (~> 3.0)
+    database_cleaner (1.5.1)
+    delayed_job (4.1.1)
+      activesupport (>= 3.0, < 5.0)
+    delayed_job_mongoid (2.2.0)
+      delayed_job (>= 3.0, < 5)
+      mongoid (>= 3.0, < 6)
+      mongoid-compatibility
     diff-lcs (1.1.3)
-    enumerize (0.8.0)
+    docile (1.1.5)
+    domain_name (0.5.24)
+      unf (>= 0.0.5, < 1.0.0)
+    enumerize (0.11.0)
       activesupport (>= 3.2)
-    erubis (2.7.0)
-    faker (1.0.1)
-      i18n (~> 0.4)
+    factory_girl (4.5.0)
+      activesupport (>= 3.0.0)
+    faker (1.6.1)
+      i18n (~> 0.5)
     guard (1.3.2)
       listen (>= 0.4.2)
       thor (>= 0.14.6)
     guard-unicorn (0.0.7)
       guard (>= 1.1)
     hashr (0.0.22)
-    hike (1.2.1)
-    i18n (0.6.9)
-    journey (1.0.4)
-    kgio (2.7.4)
+    http-cookie (1.0.2)
+      domain_name (~> 0.5)
+    i18n (0.7.0)
+    json (1.8.3)
+    kgio (2.10.0)
     listen (0.5.0)
     method_source (0.8)
-    mime-types (2.2)
-    mongo (1.6.4)
-      bson (~> 1.6.4)
-    mongoid (3.0.15)
-      activemodel (~> 3.1)
-      moped (~> 1.1)
-      origin (~> 1.0)
-      tzinfo (~> 0.3.22)
-    moped (1.5.1)
-    multi_json (1.10.0)
-    newrelic_moped (1.0.0)
-      moped
-      newrelic_rpm (>= 3.7)
-    newrelic_rpm (3.11.2.286)
-    nokogiri (1.5.5)
-    origin (1.1.0)
+    mime-types (2.6.1)
+    mini_portile2 (2.1.0)
+    minitest (5.8.1)
+    mongo (2.1.1)
+      bson (~> 3.0)
+    mongoid (5.0.0)
+      activemodel (~> 4.0)
+      mongo (~> 2.1)
+      origin (~> 2.1)
+      tzinfo (>= 0.3.37)
+    mongoid-compatibility (0.3.1)
+      activesupport
+      mongoid (>= 2.0)
+    mongoid_magic_counter_cache (1.1.1)
+      mongoid
+      rake
+    multi_json (1.11.2)
+    netrc (0.10.3)
+    newrelic_rpm (3.15.0.314)
+    nokogiri (1.6.8)
+      mini_portile2 (~> 2.1.0)
+      pkg-config (~> 1.1.7)
+    origin (2.1.1)
+    pkg-config (1.1.7)
+    protected_attributes (1.1.3)
+      activemodel (>= 4.0.1, < 5.0)
     pry (0.9.10)
       coderay (~> 1.0.5)
       method_source (~> 0.8)
       slop (~> 3.3.1)
     pry-nav (0.2.2)
       pry (~> 0.9.10)
-    rack (1.4.1)
-    rack-cache (1.2)
-      rack (>= 0.4)
+    rack (1.6.4)
     rack-protection (1.2.0)
       rack
-    rack-test (0.6.1)
+    rack-test (0.6.3)
       rack (>= 1.0)
-    rack-timeout (0.1.0beta3)
-    raindrops (0.10.0)
-    rake (10.3.1)
+    rack-timeout (0.3.2)
+    raindrops (0.15.0)
+    rake (10.4.2)
     rdiscount (1.6.8)
-    rest-client (1.6.7)
-      mime-types (>= 1.16)
+    rest-client (1.8.0)
+      http-cookie (>= 1.0.2, < 2.0)
+      mime-types (>= 1.16, < 3.0)
+      netrc (~> 0.7)
     rspec (2.11.0)
       rspec-core (~> 2.11.0)
       rspec-expectations (~> 2.11.0)
@@ -136,20 +125,18 @@ GEM
     rspec-expectations (2.11.2)
       diff-lcs (~> 1.1.3)
     rspec-mocks (2.11.2)
-    simplecov (0.7.1)
-      multi_json (~> 1.0)
-      simplecov-html (~> 0.7.1)
-    simplecov-html (0.7.1)
+    simplecov (0.11.1)
+      docile (~> 1.1.0)
+      json (~> 1.8)
+      simplecov-html (~> 0.10.0)
+    simplecov-html (0.10.0)
     sinatra (1.3.3)
       rack (~> 1.3, >= 1.3.6)
       rack-protection (~> 1.2)
       tilt (~> 1.3, >= 1.3.3)
     slop (3.3.2)
-    sprockets (2.1.3)
-      hike (~> 1.2)
-      rack (~> 1.0)
-      tilt (~> 1.1, != 1.3.0)
     thor (0.16.0)
+    thread_safe (0.3.5)
     tilt (1.3.3)
     tire (0.6.2)
       activemodel (>= 3.0)
@@ -161,56 +148,59 @@ GEM
       rest-client (~> 1.6)
     tire-contrib (0.1.1)
       tire
-    tzinfo (0.3.38)
-    unicorn (4.3.1)
+    tzinfo (1.2.2)
+      thread_safe (~> 0.1)
+    unf (0.1.4)
+      unf_ext
+    unf_ext (0.0.7.1)
+    unicorn (4.9.0)
       kgio (~> 2.6)
       rack
       raindrops (~> 0.7)
-    will_paginate (3.0.4)
-    will_paginate_mongoid (1.1.0)
-      mongoid (>= 2.4)
+    url (0.3.2)
+    will_paginate (3.0.7)
+    will_paginate_mongoid (2.0.1)
+      mongoid
       will_paginate (~> 3.0)
-    yajl-ruby (1.1.0)
+    yajl-ruby (1.2.1)

 PLATFORMS
   ruby
 DEPENDENCIES
-  ampex
+  bson (~> 3.1)
   bson_ext
   bundler
+  codecov
   dalli
-  database_cleaner
+  database_cleaner (~> 1.5.1)
   delayed_job
-  delayed_job_mongoid!
-  enumerize (~> 0.8.0)
-  faker
+  delayed_job_mongoid
+  enumerize
+  factory_girl (~> 4.0)
+  faker (~> 1.6)
   guard
   guard-unicorn
   i18n
-  kaminari!
-  mongo
-  mongoid (= 3.0.15)
+  mongoid (~> 5.0.0)
   mongoid-tree!
-  mongoid_magic_counter_cache!
-  moped (= 1.5.1)
-  newrelic_moped
+  mongoid_magic_counter_cache
   newrelic_rpm
-  nokogiri
+  nokogiri (~> 1.6.8)
+  protected_attributes
   pry
   pry-nav
   rack-contrib!
   rack-test
-  rack-timeout (= 0.1.0beta3)
+  rack-timeout
   rake
   rdiscount
   rest-client
-  rspec
-  simplecov
+  rs_voteable_mongo!
+  rspec (~> 2.11.0)
   sinatra
   tire (= 0.6.2)
   tire-contrib
   unicorn
-  voteable_mongo!
-  will_paginate_mongoid
+  will_paginate_mongoid (~> 2.0)
   yajl-ruby
@@ -2,37 +2,46 @@ Part of `edX code`__.
 __ http://code.edx.org/

-comment_as_a_service
-====================
+edX Comments Service/Forums |Travis|_ |Codecov|_
+==================================================
+
+.. |Travis| image:: https://travis-ci.org/edx/cs_comments_service.svg?branch=master
+.. _Travis: https://travis-ci.org/edx/cs_comments_service
+.. |Codecov| image:: http://codecov.io/github/edx/cs_comments_service/coverage.svg?branch=master
+.. _Codecov: http://codecov.io/github/edx/cs_comments_service?branch=master

 An independent comment system which supports voting and nested comments. It
 also supports features including instructor endorsement for education-aimed
 discussion platforms.

-Running The Server
-----
-
-Elasticsearch and MongoDB servers need to be available, and correctly referenced
-in config/application.yml and config/mongoid.yml, respectively.
-
-Before the server is first run, ensure gems are installed by doing ``bundle install``.
-
-To run the server, do ``ruby app.rb [-p PORT]`` where PORT defaults to 4567.
+Running the Server
+------------------

 If you are running cs_comments_service as part of edx-platform__ development under
 devstack, it is strongly recommended to read `those setup documents`__ first. Note that
 devstack will take care of just about all of the installation, configuration, and
-service management on your behalf.
+service management on your behalf. If running outside of devstack, continue reading below.

 __ https://github.com/edx/edx-platform
 __ https://github.com/edx/configuration/wiki/edX-Developer-Stack

+This service relies on Elasticsearch and MongoDB. By default the service will use the Elasticsearch server available at
+`http://localhost:9200` and the MongoDB server available at `localhost:27017`. This is suitable for local development;
+however, if you wish to change these values, refer to `config/application.yml` and `config/mongoid.yml` for the
+environment variables that can be set to override the defaults.
+
+Before the server is first run, ensure gems are installed by doing ``bundle install``.
+
+To run the server, do ``ruby app.rb [-p PORT]`` where PORT defaults to 4567.
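For example, with the defaults just described, a first local run might look like this (an illustrative sketch; any free port will do)::

    bundle install       # install gems first
    ruby app.rb          # serve on the default port, 4567
    ruby app.rb -p 5678  # or override the port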
 Running Tests
-----
+-------------

 To run tests, do ``bundle exec rspec``. Append ``--help`` or see rspec documentation
 for additional options to this command.

-Internationalization and Localization
-----
+Internationalization (i18n) and Localization (l10n)
+---------------------------------------------------

 To run the comments service in a language other than English, set the
 ``SERVICE_LANGUAGE`` environment variable to the `language code` for the
@@ -4,7 +4,18 @@ require 'bundler'
 Bundler.setup
 Bundler.require

-application_yaml = ERB.new(File.read("config/application.yml")).result()
+application_yaml = ERB.new(File.read('config/application.yml')).result()
+
+begin
+  require 'rspec/core/rake_task'
+  RSpec::Core::RakeTask.new(:spec)
+  task :default => :spec
+rescue LoadError
+  # no rspec available
+end

 Tire.configure do
   url YAML.load(application_yaml)['elasticsearch_server']
@@ -12,29 +23,22 @@ end

 LOG = Logger.new(STDERR)

-desc "Load the environment"
+desc 'Load the environment'
 task :environment do
-  environment = ENV["SINATRA_ENV"] || "development"
+  environment = ENV['SINATRA_ENV'] || 'development'
   Sinatra::Base.environment = environment
-  Mongoid.load!("config/mongoid.yml")
+  Mongoid.load!('config/mongoid.yml')
   Mongoid.logger.level = Logger::INFO
   module CommentService
-    class << self; attr_accessor :config; end
+    class << self;
+      attr_accessor :config;
+    end
   end
   CommentService.config = YAML.load(application_yaml)
-  Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file}
-  Dir[File.dirname(__FILE__) + '/models/*.rb'].each {|file| require file}
-  #Dir[File.dirname(__FILE__) + '/models/observers/*.rb'].each {|file| require file}
-  #Mongoid.observers = PostReplyObserver, PostTopicObserver, AtUserObserver
-  #Mongoid.instantiate_observers
-end
-
-def create_test_user(id)
-  User.create!(external_id: id, username: "user#{id}")
+  Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each { |file| require file }
+  Dir[File.dirname(__FILE__) + '/models/*.rb'].each { |file| require file }
 end

 Dir.glob('lib/tasks/*.rake').each { |r| import r }
@@ -42,412 +46,3 @@ Dir.glob('lib/tasks/*.rake').each { |r| import r }

 task :console => :environment do
   binding.pry
 end
namespace :db do
task :init => :environment do
puts "recreating indexes..."
[Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:remove_indexes).each(&:create_indexes)
puts "finished"
end
task :clean => :environment do
Comment.delete_all
CommentThread.delete_all
User.delete_all
Notification.delete_all
Subscription.delete_all
end
THREADS_PER_COMMENTABLE = 20
TOP_COMMENTS_PER_THREAD = 3
ADDITIONAL_COMMENTS_PER_THREAD = 5
COURSE_ID = "MITx/6.002x/2012_Fall"
def generate_comments_for(commentable_id, num_threads=THREADS_PER_COMMENTABLE, num_top_comments=TOP_COMMENTS_PER_THREAD, num_subcomments=ADDITIONAL_COMMENTS_PER_THREAD)
level_limit = CommentService.config["level_limit"]
users = User.all.to_a
puts "Generating threads and comments for #{commentable_id}..."
threads = []
top_comments = []
additional_comments = []
num_threads.times do
inner_top_comments = []
comment_thread = CommentThread.new(commentable_id: commentable_id, body: Faker::Lorem.paragraphs.join("\n\n"), title: Faker::Lorem.sentence(6))
comment_thread.author = users.sample
comment_thread.course_id = COURSE_ID
comment_thread.save!
threads << comment_thread
users.sample(3).each {|user| user.subscribe(comment_thread)}
(1 + rand(num_top_comments)).times do
comment = comment_thread.comments.new(body: Faker::Lorem.paragraph(2))
comment.author = users.sample
comment.endorsed = [true, false].sample
comment.comment_thread = comment_thread
comment.course_id = COURSE_ID
comment.save!
top_comments << comment
inner_top_comments << comment
end
previous_level_comments = inner_top_comments
(level_limit-1).times do
current_level_comments = []
(1 + rand(num_subcomments)).times do
comment = previous_level_comments.sample
sub_comment = comment.children.new(body: Faker::Lorem.paragraph(2))
sub_comment.author = users.sample
sub_comment.endorsed = [true, false].sample
sub_comment.comment_thread = comment_thread
sub_comment.course_id = COURSE_ID
sub_comment.save!
current_level_comments << sub_comment
end
previous_level_comments = current_level_comments
end
end
puts "voting"
(threads + top_comments + additional_comments).each do |c|
users.each do |user|
user.vote(c, [:up, :down].sample)
end
end
puts "finished"
end
task :generate_comments, [:commentable_id, :num_threads, :num_top_comments, :num_subcomments] => :environment do |t, args|
args.with_defaults(:num_threads => THREADS_PER_COMMENTABLE,
:num_top_comments=>TOP_COMMENTS_PER_THREAD,
:num_subcomments=> ADDITIONAL_COMMENTS_PER_THREAD)
generate_comments_for(args[:commentable_id], args[:num_threads], args[:num_top_comments], args[:num_subcomments])
end
task :bulk_seed, [:num] => :environment do |t, args|
Mongoid.configure do |config|
config.connect_to("cs_comments_service_bulk_test")
end
connnection = Mongo::Connection.new("127.0.0.1", "27017")
db = Mongo::Connection.new.db("cs_comments_service_bulk_test")
CommentThread.create_indexes
Comment.create_indexes
Content.delete_all
coll = db.collection("contents")
args[:num].to_i.times do
doc = {"_type" => "CommentThread", "anonymous" => [true, false].sample, "at_position_list" => [],
"tags_array" => [],
"comment_count" => 0, "title" => Faker::Lorem.sentence(6), "author_id" => rand(1..10).to_s,
"body" => Faker::Lorem.paragraphs.join("\n\n"), "course_id" => COURSE_ID, "created_at" => Time.now,
"commentable_id" => COURSE_ID, "closed" => [true, false].sample, "updated_at" => Time.now, "last_activity_at" => Time.now,
"votes" => {"count" => 0, "down" => [], "down_count" => 0, "point" => 0, "up" => [], "up_count" => []}}
coll.insert(doc)
end
Tire.index('comment_threads').delete
CommentThread.create_elasticsearch_index
Tire.index('comment_threads') { import CommentThread.all }
end
task :seed_fast => :environment do
ADDITIONAL_COMMENTS_PER_THREAD = 20
config = YAML.load_file("config/mongoid.yml")[Sinatra::Base.environment]["sessions"]["default"]
connnection = Mongo::Connection.new(config["hosts"][0].split(":")[0], config["hosts"][0].split(":")[1])
db = Mongo::Connection.new.db(config["database"])
coll = db.collection("contents")
Comment.delete_all
CommentThread.each do |thread|
ADDITIONAL_COMMENTS_PER_THREAD.times do
doc = {"_type" => "Comment", "anonymous" => false, "at_position_list" => [],
"author_id" => rand(1..10).to_s, "body" => Faker::Lorem.paragraphs.join("\n\n"),
"comment_thread_id" => BSON::ObjectId.from_string(thread.id.to_s), "course_id" => COURSE_ID,
"created_at" => Time.now,
"endorsed" => [true, false].sample, "parent_ids" => [], "updated_at" => Time.now,
"votes" => {"count" => 0, "down" => [], "down_count" => 0, "point" => 0, "up" => [], "up_count" => []}}
coll.insert(doc)
end
end
end
task :seed => :environment do
Comment.delete_all
CommentThread.delete_all
User.delete_all
Notification.delete_all
Subscription.delete_all
Tire.index 'comment_threads' do delete end
CommentThread.create_elasticsearch_index
beginning_time = Time.now
users = (1..10).map {|id| create_test_user(id)}
# 3.times do
# other_user = users[1..9].sample
# users.first.subscribe(other_user)
# end
# 10.times do
# user = users.sample
# other_user = users.select{|u| u != user}.sample
# user.subscribe(other_user)
# end
generate_comments_for("video_1")
generate_comments_for("lab_1")
generate_comments_for("lab_2")
end_time = Time.now
puts "Number of comments generated: #{Comment.count}"
puts "Number of comment threads generated: #{CommentThread.count}"
puts "Time elapsed #{(end_time - beginning_time)*1000} milliseconds"
end
task :add_anonymous_to_peers => :environment do
Content.collection.find(:anonymous_to_peers=>nil).update_all({"$set" => {'anonymous_to_peers' => false}})
end
end
namespace :search do
def get_es_index
# we are using the same index for two types, which is against the
# grain of Tire's design. This is why this method works for both
# comment_threads and comments.
CommentThread.tire.index
end
def get_number_of_primary_shards(index_name)
res = Tire::Configuration.client.get "#{Tire::Configuration.url}/#{index_name}/_status"
status = JSON.parse res.body
status["indices"].first[1]["shards"].size
end
def create_es_index
# create the new index with a unique name
new_index = Tire.index "#{Content::ES_INDEX_NAME}_#{Time.now.strftime('%Y%m%d%H%M%S')}"
new_index.create
LOG.info "configuring new index: #{new_index.name}"
[CommentThread, Comment].each do |klass|
LOG.info "applying index mappings for #{klass.name}"
klass.put_search_index_mapping new_index
end
new_index
end
def import_from_cursor(cursor, index, opts)
Mongoid.identity_map_enabled = true
tot = cursor.count
cnt = 0
t = Time.now
index.import cursor, {:method => :paginate, :per_page => opts[:batch_size]} do |documents|
if cnt % opts[:batch_size] == 0 then
elapsed_secs = (Time.now - t).round(2)
pct_complete = (100 * (cnt/tot.to_f)).round(2)
LOG.info "#{index.name}: imported #{cnt} of #{tot} (#{pct_complete}% complete after #{elapsed_secs} seconds)"
end
cnt += documents.length
Mongoid::IdentityMap.clear
sleep opts[:sleep_time]
documents
end
LOG.info "#{index.name}: finished importing #{cnt} documents"
cnt
end
def move_alias_to(name, index)
# if there was a previous index, switch over the alias to point to the new index
alias_ = Tire::Alias.find name
if alias_ then
# does the alias already point to this index?
if alias_.indices.include? index.name then
return false
end
# remove the alias from wherever it points to now
LOG.info "alias already exists (will move): #{alias_.indices.to_ary.join(',')}"
alias_.indices.each do |old_index_name|
alias_.indices.delete old_index_name unless old_index_name == name
end
else
# create the alias
LOG.info "alias \"#{name}\" does not yet exist - creating."
alias_ = Tire::Alias.new :name => name
end
# point the alias at our new index
alias_.indices.add index.name
alias_.save
LOG.info "alias \"#{name}\" now points to index #{index.name}."
true
end
def do_reindex (opts, in_place=false)
# get a reference to the model class (and make sure it's a model class with tire hooks)
start_time = Time.now
# create the new index with a unique name
new_index = create_es_index
# unless the user is forcing a rebuild, or the index does not yet exist, we
# can do a Tire api reindex which is much faster than reimporting documents
# from mongo.
#
# Checking if the index exists is tricky. Tire automatically created an index
# for the model class when the app loaded if one did not already exist. However,
# it won't create an alias, which is what our app uses. So if the index exists
# but not the alias, we know that it's auto-created.
old_index = get_es_index
alias_name = old_index.name
alias_ = Tire::Alias.find alias_name
if alias_.nil? then
# edge case.
# the alias doesn't exist, so we know the index was auto-created.
# We will delete it and replace it with an alias.
raise RuntimeError, 'Cannot reindex in-place, no valid source index' if in_place
LOG.warn "deleting auto-created index to make room for the alias"
old_index.delete
# NOTE on the small chance that another process re-auto-creates the index
# we just deleted before we have a chance to create the alias, this next
# call will fail.
move_alias_to(Content::ES_INDEX_NAME, new_index)
end
op = in_place ? "reindex" : "(re)build index"
LOG.info "preparing to #{op}"
if in_place then
# reindex, moving source documents directly from old index to new
LOG.info "copying documents from original index (this may take a while!)"
old_index.reindex new_index.name
LOG.info "done copying!"
else
# fetch all the documents ever, up til start_time
cursor = Content.where(:_type.in => ["Comment", "CommentThread"], :updated_at.lte => start_time)
# import them to the new index
import_from_cursor(cursor, new_index, opts)
end
# move the alias if necessary
did_alias_move = move_alias_to(Content::ES_INDEX_NAME, new_index)
if did_alias_move then
# Reimport any source documents that got updated since start_time,
# while the alias still pointed to the old index.
# Elasticsearch understands our document ids, so re-indexing the same
# document won't create duplicates.
LOG.info "importing any documents that changed between #{start_time} and now"
cursor = Content.where(:_type.in => ["Comment", "CommentThread"], :updated_at.gte => start_time)
import_from_cursor(cursor, new_index, opts)
end
end
desc "Copies contents of MongoDB into Elasticsearch if updated in the last N minutes."
task :catchup, [:minutes, :batch_size, :sleep_time] => :environment do |t, args|
opts = batch_opts args
the_index = get_es_index
alias_ = Tire::Alias.find the_index.name
# this check makes sure we are working with the index to which
# the desired model's alias presently points.
raise RuntimeError, "could not find live index" if alias_.nil?
start_time = Time.now - (args[:minutes].to_i * 60)
cursor = Content.where(:_type.in => ["Comment", "CommentThread"], :updated_at.gte => start_time)
import_from_cursor(cursor, the_index, opts)
end
def batch_opts(args)
args = args.to_hash
{ :batch_size => args[:batch_size].nil? ? 500 : args[:batch_size].to_i,
:sleep_time => args[:sleep_time].nil? ? 0 : args[:sleep_time].to_i }
end
desc "Removes any data from Elasticsearch that no longer exists in MongoDB."
task :prune, [:batch_size, :sleep_time] => :environment do |t, args|
opts = batch_opts args
the_index = get_es_index
puts "pruning #{the_index.name}"
alias_ = Tire::Alias.find the_index.name
raise RuntimeError, "could not find live index" if alias_.nil?
scan_size = opts[:batch_size] / get_number_of_primary_shards(the_index.name)
cnt = 0
[CommentThread, Comment].each do |klass|
doc_type = klass.document_type
# this check makes sure we are working with the index to which
# the desired model's alias presently points.
search = Tire::Search::Scan.new the_index.name, {size: scan_size, type: doc_type}
search.each do |results|
es_ids = results.map(&:id)
mongo_ids = klass.where(:id.in => es_ids).map {|d| d.id.to_s}
to_delete = es_ids - mongo_ids
if to_delete.size > 0
cnt += to_delete.size
puts "deleting #{to_delete.size} orphaned #{doc_type} documents from elasticsearch"
the_index.bulk_delete (to_delete).map {|v| {"type" => doc_type, "id" => v}}
end
puts "#{the_index.name}/#{doc_type}: processed #{search.seen} of #{search.total}"
sleep opts[:sleep_time]
end
end
puts "done pruning #{the_index.name}, deleted a total of #{cnt} orphaned documents"
end
desc "Rebuild the content index from MongoDB data."
task :rebuild, [:batch_size, :sleep_time] => :environment do |t, args|
do_reindex(batch_opts(args))
end
desc "Rebuild the content index from already-indexed data (in place)."
task :reindex, [:batch_size, :sleep_time] => :environment do |t, args|
do_reindex(batch_opts(args), true)
end
desc "Generate a new, empty physical index, without bringing it online."
task :create_index => :environment do
create_es_index
end
end
namespace :jobs do
desc "Clear the delayed_job queue."
task :clear => :environment do
Delayed::Job.delete_all
end
desc "Start a delayed_job worker."
task :work => :environment do
Delayed::Worker.new(:min_priority => ENV['MIN_PRIORITY'], :max_priority => ENV['MAX_PRIORITY'], :queues => (ENV['QUEUES'] || ENV['QUEUE'] || '').split(','), :quiet => false).start
end
end
namespace :i18n do
desc "Push source strings to Transifex for translation"
task :push do
sh("tx push -s")
end
desc "Pull translated strings from Transifex"
task :pull do
sh("tx pull --mode=reviewed --all --minimum-perc=1")
end
desc "Clean the locale directory"
task :clean do
sh("git clean -f locale/")
end
desc "Commit translated strings to the repository"
task :commit => ["i18n:clean", "i18n:pull"] do
sh("git add locale")
sh("git commit -m 'Updated translations (autogenerated message)'")
end
end
@@ -27,9 +27,12 @@ get "#{APIPREFIX}/threads/:thread_id" do |thread_id|
     error 404, [t(:requested_object_not_found)].to_json
   end

-  if params["user_id"] and bool_mark_as_read
+  # user is required to return user-specific fields, such as "read" (even if bool_mark_as_read is False)
+  if params["user_id"]
     user = User.only([:id, :username, :read_states]).find_by(external_id: params["user_id"])
-    user.mark_as_read(thread) if user
+  end
+
+  if user and bool_mark_as_read
+    user.mark_as_read(thread)
   end

   presenter = ThreadPresenter.factory(thread, user || nil)
@@ -47,7 +50,7 @@ get "#{APIPREFIX}/threads/:thread_id" do |thread_id|
   else
     resp_limit = nil
   end
-  presenter.to_hash(true, resp_skip, resp_limit).to_json
+  presenter.to_hash(true, resp_skip, resp_limit, bool_recursive).to_json
 end

 put "#{APIPREFIX}/threads/:thread_id" do |thread_id|
@@ -69,6 +72,7 @@ post "#{APIPREFIX}/threads/:thread_id/comments" do |thread_id|
   comment.anonymous_to_peers = bool_anonymous_to_peers || false
   comment.author = user
   comment.comment_thread = thread
+  comment.child_count = 0
   comment.save
   if comment.errors.any?
     error 400, comment.errors.full_messages.to_json
@@ -47,6 +47,8 @@ post "#{APIPREFIX}/:commentable_id/threads" do |commentable_id|
   else
     user.subscribe(thread) if bool_auto_subscribe
     presenter = ThreadPresenter.factory(thread, nil)
-    presenter.to_hash.to_json
+    thread = presenter.to_hash
+    thread["resp_total"] = 0
+    thread.to_json
   end
 end

 get "#{APIPREFIX}/comments/:comment_id" do |comment_id|
-  comment.to_hash(recursive: bool_recursive).to_json
+  @comment = comment
+  comment_hash = @comment.to_hash(recursive: bool_recursive)
+  verify_or_fix_cached_comment_count(@comment, comment_hash)
+  comment_hash.to_json
 end

 put "#{APIPREFIX}/comments/:comment_id" do |comment_id|
@@ -8,7 +11,11 @@ put "#{APIPREFIX}/comments/:comment_id" do |comment_id|
   if params.has_key?("endorsed")
     new_endorsed_val = Boolean.mongoize(params["endorsed"])
     if new_endorsed_val != comment.endorsed
-      endorsement = {:user_id => params["endorsement_user_id"], :time => DateTime.now}
+      if params["endorsement_user_id"].nil?
+        endorsement = nil
+      else
+        endorsement = {:user_id => params["endorsement_user_id"], :time => DateTime.now}
+      end
       updated_content["endorsement"] = new_endorsed_val ? endorsement : nil
     end
   end
@@ -27,16 +34,31 @@ post "#{APIPREFIX}/comments/:comment_id" do |comment_id|
   sub_comment.anonymous_to_peers = bool_anonymous_to_peers || false
   sub_comment.author = user
   sub_comment.comment_thread = comment.comment_thread
+  sub_comment.child_count = 0
   sub_comment.save
   if sub_comment.errors.any?
     error 400, sub_comment.errors.full_messages.to_json
   else
-    user.subscribe(comment.comment_thread) if bool_auto_subscribe
-    sub_comment.to_hash.to_json
+    comment.update_cached_child_count
+    if comment.errors.any?
+      error 400, comment.errors.full_messages.to_json
+    else
+      user.subscribe(comment.comment_thread) if bool_auto_subscribe
+      sub_comment.to_hash.to_json
+    end
   end
 end

 delete "#{APIPREFIX}/comments/:comment_id" do |comment_id|
+  parent_id = comment.parent_id
   comment.destroy
+  unless parent_id.nil?
+    begin
+      parent_comment = Comment.find(parent_id)
+      parent_comment.update_cached_child_count
+    rescue Mongoid::Errors::DocumentNotFound
+      pass
+    end
+  end
   comment.to_hash.to_json
 end
-require 'new_relic/agent/method_tracer'
 get "#{APIPREFIX}/search/threads" do
   local_params = params # Necessary for params to be available inside blocks
   group_ids = get_group_ids_from_params(local_params)
-require 'new_relic/agent/method_tracer'
 post "#{APIPREFIX}/users" do
   user = User.new(external_id: params["id"])
   user.username = params["username"]
@@ -76,3 +74,8 @@ put "#{APIPREFIX}/users/:user_id" do |user_id|
     user.to_hash.to_json
   end
 end
+
+post "#{APIPREFIX}/users/:user_id/read" do |user_id|
+  user.mark_as_read(source)
+  user.reload.to_hash.to_json
+end
@@ -19,29 +19,6 @@ module CommentService
   API_PREFIX = "/api/#{API_VERSION}"
 end

-if ["staging", "production", "loadtest", "edgestage","edgeprod"].include? environment
-  require 'newrelic_rpm'
-  require 'new_relic/agent/method_tracer'
-  Moped::Session.class_eval do
-    include NewRelic::Agent::MethodTracer
-    add_method_tracer :new
-    add_method_tracer :use
-    add_method_tracer :login
-  end
-  Moped::Cluster.class_eval do
-    include NewRelic::Agent::MethodTracer
-    add_method_tracer :with_primary
-    add_method_tracer :nodes
-  end
-  Moped::Node.class_eval do
-    include NewRelic::Agent::MethodTracer
-    add_method_tracer :command
-    add_method_tracer :connect
-    add_method_tracer :flush
-    add_method_tracer :refresh
-  end
-end
-
 if ENV["ENABLE_GC_PROFILER"]
   GC::Profiler.enable
 end
@@ -56,11 +33,12 @@ end
 Mongoid.load!("config/mongoid.yml", environment)
 Mongoid.logger.level = Logger::INFO
-Moped.logger.level = ENV["ENABLE_MOPED_DEBUGGING"] ? Logger::DEBUG : Logger::INFO
+Mongo::Logger.logger.level = ENV["ENABLE_MONGO_DEBUGGING"] ? Logger::DEBUG : Logger::INFO

 # set up i18n
 I18n.load_path += Dir[File.join(File.dirname(__FILE__), 'locale', '*.yml').to_s]
 I18n.default_locale = CommentService.config[:default_locale]
+I18n.enforce_available_locales = false
 I18n::Backend::Simple.send(:include, I18n::Backend::Fallbacks)
 use Rack::Locale
@@ -97,27 +75,6 @@ before do
   content_type "application/json"
 end

-if ENV["ENABLE_IDMAP_LOGGING"]
-  after do
-    idmap = Mongoid::Threaded.identity_map
-    vals = {
-      "pid" => Process.pid,
-      "dyno" => ENV["DYNO"],
-      "request_id" => params[:request_id]
-    }
-    idmap.each {|k, v| vals["idmap_count_#{k.to_s}"] = v.size }
-    logger.info vals.map{|e| e.join("=") }.join(" ")
-  end
-end
-
-# Enable the identity map. The middleware ensures that the identity map is
-# cleared for every request.
-Mongoid.identity_map_enabled = true
-use Rack::Mongoid::Middleware::IdentityMap
-
 # use yajl implementation for to_json.
 # https://github.com/brianmario/yajl-ruby#json-gem-compatibility-api
 #
@@ -128,16 +85,27 @@ require 'yajl/json_gem'

 # patch json serialization of ObjectIds to work properly with yajl.
 # See https://groups.google.com/forum/#!topic/mongoid/MaXFVw7D_4s
-module Moped
-  module BSON
-    class ObjectId
-      def to_json
-        self.to_s.to_json
-      end
-    end
-  end
-end
+# Note that BSON was moved from Moped::BSON::ObjectId to BSON::ObjectId
+module BSON
+  class ObjectId
+    def to_json
+      self.to_s.to_json
+    end
+  end
+end
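As a quick sanity check of the patch above (an illustrative sketch; the id value is arbitrary, and BSON::ObjectId.from_string is the standard bson-gem constructor):

    oid = BSON::ObjectId.from_string('4e4d66343b39b68407000001')
    oid.to_json  # => "\"4e4d66343b39b68407000001\"" (a JSON string, not an object)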
+# Patch json serialization of Time objects.
+class Time
+  # Render the time as an ISO 8601 UTC string with whole-second resolution.
+  # Note that this is done to keep milliseconds out of the JSON response,
+  # since their presence would break API compatibility for downstream clients.
+  def to_json(*)
+    '"' + utc().strftime("%Y-%m-%dT%H:%M:%SZ") + '"'
+  end
+end
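An illustrative check of the patched Time format (arbitrary timestamp): fractional seconds are dropped and the value is rendered in UTC. Note that Time#utc converts the receiver in place; Time#getutc would produce the same output without mutating the original object:

    Time.at(0.75).utc.to_json  # => "\"1970-01-01T00:00:00Z\"" (no milliseconds)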
 # these files must be required in order
 require './api/search'
@@ -158,7 +126,7 @@ if RACK_ENV.to_s == "development"
   end
 end

-error Moped::Errors::InvalidObjectId do
+error Mongo::Error::InvalidDocument do
   error 400, [t(:requested_object_not_found)].to_json
 end
@@ -170,10 +138,10 @@ error ArgumentError do
   error 400, [env['sinatra.error'].message].to_json
 end

-CommentService.blocked_hashes = Content.mongo_session[:blocked_hash].find.select(hash: 1).each.map {|d| d["hash"]}
+CommentService.blocked_hashes = Content.mongo_client[:blocked_hash].find(nil, projection: {hash: 1}).map {|d| d["hash"]}

 def get_db_is_master
-  Mongoid::Sessions.default.command(isMaster: 1)
+  Mongoid::Clients.default.command(isMaster: 1)
 end

 def get_es_status
@@ -186,7 +154,7 @@ get '/heartbeat' do
   db_ok = false
   begin
     res = get_db_is_master
-    db_ok = ( res["ismaster"] == true and Integer(res["ok"]) == 1 )
+    db_ok = res.ok? && res.documents.first['ismaster'] == true
   rescue
   end
   error 500, JSON.generate({"OK" => false, "check" => "db"}) unless db_ok
@@ -221,4 +189,4 @@ get '/selftest' do
       "#{ex.backtrace.first}: #{ex.message} (#{ex.class})\n\t#{ex.backtrace[1..-1].join("\n\t")}"
     ]
   end
-end
\ No newline at end of file
+end
#!/usr/bin/env ruby
#
# This file was generated by Bundler.
#
# The application 'rake' is installed as part of a gem, and
# this file is here to facilitate running it.
#
require "pathname"
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile",
Pathname.new(__FILE__).realpath)
require "rubygems"
require "bundler/setup"
load Gem.bin_path("rake", "rake")
#!/usr/bin/env ruby
#
# This file was generated by Bundler.
#
# The application 'rspec' is installed as part of a gem, and
# this file is here to facilitate running it.
#
require "pathname"
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile",
Pathname.new(__FILE__).realpath)
require "rubygems"
require "bundler/setup"
load Gem.bin_path("rspec-core", "rspec")
#!/usr/bin/env ruby
#
# This file was generated by Bundler.
#
# The application 'unicorn' is installed as part of a gem, and
# this file is here to facilitate running it.
#
require "pathname"
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile",
Pathname.new(__FILE__).realpath)
require "rubygems"
require "bundler/setup"
load Gem.bin_path("unicorn", "unicorn")
+# It is possible that the rack timeout here is set to a different value than
+# the edx-platform django_comment_client timeout. An attempt was made to
+# move these two values closer together (5s for django_comment_client, 6s for
+# cs_comments_service, down from 20s). This resulted in more reported timeout errors
+# on the cs_comments_service side, which better reflected the timeout errors seen by
+# django_comment_client. On the downside, the shorter timeout left less time
+# for processing longer queries in the background. The timeout has been set back
+# to 20s. Until the slow queries that benefit from being cached in the
+# background are resolved, reducing the timeout is not suggested.
+# More conversation at https://github.com/edx/cs_comments_service/pull/146
+# -Nov 18th, 2015
 require "rack-timeout"
 use Rack::Timeout # Call as early as possible so rack-timeout runs before other middleware.
 Rack::Timeout.timeout = 20

+require "mongoid"
+use Mongoid::QueryCache::Middleware
+
 require './app'
 run Sinatra::Application
+common: &default_client
+  options:
+    write:
+      w: 1
+    read:
+      mode: :primary
+    max_retries: <%= ENV['MONGOID_MAX_RETRIES'] || 1 %>
+    retry_interval: <%= ENV['MONGOID_RETRY_INTERVAL'] || 0 %>
+    timeout: <%= ENV['MONGOID_TIMEOUT'] || 0.5 %>
+    ssl: <%= ENV['MONGOID_USE_SSL'] || false %>
+
+common_uri: &default_uri
+  uri: <%= ENV['MONGOHQ_URL'] %>
+
 development:
-  sessions:
+  clients:
     default:
+      <<: *default_client
       database: cs_comments_service_development
       hosts:
         - localhost:27017
 test:
-  sessions:
+  clients:
     default:
+      <<: *default_client
       database: cs_comments_service_test
       hosts:
         - localhost:27017

-common: &default_session
-  uri: <%= ENV['MONGOHQ_URL'] %>
-  options:
-    skip_version_check: true
-    safe: true
-    consistency: strong
-    max_retries: <%= ENV['MONGOID_MAX_RETRIES'] || 1 %>
-    retry_interval: <%= ENV['MONGOID_RETRY_INTERVAL'] || 0 %>
-    timeout: <%= ENV['MONGOID_TIMEOUT'] || 0.5 %>
-    ssl: <%= ENV['MONGOID_USE_SSL'] || false %>
-
 production:
-  sessions:
+  clients:
     default:
-      <<: *default_session
+      <<: *default_uri
+      <<: *default_client
 edgeprod:
-  sessions:
+  clients:
     default:
-      <<: *default_session
+      <<: *default_uri
+      <<: *default_client
 edgestage:
-  sessions:
+  clients:
     default:
-      <<: *default_session
+      <<: *default_uri
+      <<: *default_client
 staging:
-  sessions:
+  clients:
     default:
-      <<: *default_session
+      <<: *default_uri
+      <<: *default_client
 loadtest:
-  sessions:
+  clients:
     default:
-      <<: *default_session
+      <<: *default_uri
+      <<: *default_client

 defaults: &defaults
   use_utc: false
@@ -13,5 +13,5 @@ after_fork do |server, worker|
   Signal.trap 'TERM' do
     puts 'Unicorn worker intercepting TERM and doing nothing. Waiting for master to send QUIT'
   end
-  ::Mongoid.default_session.disconnect
+  ::Mongoid.default_client.close
 end
@@ -7,5 +7,5 @@ listen "unix:#{data_dir}/forum.sock", :backlog => 512
 pid "#{data_dir}/forum_unicorn.pid"

 after_fork do |server, worker|
-  ::Mongoid.default_session.disconnect
+  ::Mongoid.default_client.close
 end
@@ -11,5 +11,5 @@ data_dir = ENV['DATA_DIR'] || Dir.tmpdir
 pid "#{data_dir}/forum_unicorn.pid"

 after_fork do |server, worker|
-  ::Mongoid.default_session.disconnect
+  ::Mongoid.default_client.close
 end
@@ -19,6 +19,16 @@ helpers do
     @comment ||= Comment.find(params[:comment_id])
   end

+  def verify_or_fix_cached_comment_count(comment, comment_hash)
+    # if the cached child count value gets stale, re-calculate and update it
+    unless comment_hash["children"].nil?
+      if comment_hash["child_count"] != comment_hash["children"].length
+        comment.update_cached_child_count
+        comment_hash["child_count"] = comment.get_cached_child_count
+      end
+    end
+  end
+
   def source
     @source ||= case params["source_type"]
     when "user"
@@ -190,24 +200,21 @@ helpers do
       to_skip = (page - 1) * per_page
       has_more = false
       # batch_size is used to cap the number of documents we might load into memory at any given time
-      # TODO: starting with Mongoid 3.1, you can just do comment_threads.batch_size(size).each()
-      comment_threads.query.batch_size(CommentService.config["manual_pagination_batch_size"].to_i)
-      Mongoid.unit_of_work(disable: :current) do # this is to prevent Mongoid from memoizing every document we look at
-        comment_threads.each do |thread|
-          thread_key = thread._id.to_s
-          if !read_dates.has_key?(thread_key) || read_dates[thread_key] < thread.last_activity_at
-            if skipped >= to_skip
-              if threads.length == per_page
-                has_more = true
-                break
-              end
-              threads << thread
-            else
-              skipped += 1
-            end
+      comment_threads.batch_size(CommentService.config["manual_pagination_batch_size"].to_i).each do |thread|
+        thread_key = thread._id.to_s
+        if !read_dates.has_key?(thread_key) || read_dates[thread_key] < thread.last_activity_at
+          if skipped >= to_skip
+            if threads.length == per_page
+              has_more = true
+              break
+            end
+            threads << thread
+          else
+            skipped += 1
           end
         end
       end
       # The following trick makes frontend pagers work without recalculating
       # the number of all unread threads per user on every request (since the number
       # of threads in a course could be tens or hundreds of thousands). It has the
@@ -219,7 +226,7 @@ helpers do
       # let the installed paginator library handle pagination
       num_pages = [1, (comment_threads.count / per_page.to_f).ceil].max
       page = [1, page].max
-      threads = comment_threads.page(page).per(per_page).to_a
+      threads = comment_threads.paginate(:page => page, :per_page => per_page).to_a
     end

     if threads.length == 0
@@ -228,7 +235,7 @@ helpers do
       pres_threads = ThreadListPresenter.new(threads, request_user, course_id)
       collection = pres_threads.to_hash
     end
-    {collection: collection, num_pages: num_pages, page: page}
+    {collection: collection, num_pages: num_pages, page: page, thread_count: comment_threads.count}
   end
 end
@@ -368,7 +375,7 @@ helpers do
   rescue
     # body was nil, or the hash function failed somehow - never mind
     return
   end
   if CommentService.blocked_hashes.include? hash then
     msg = t(:blocked_content_with_body_hash, :hash => hash)
     logger.warn msg
module TaskHelpers
module ElasticsearchHelper
def self.create_index(name=nil)
name ||= "#{Content::ES_INDEX_NAME}_#{Time.now.strftime('%Y%m%d%H%M%S')}"
index = Tire.index(name)
LOG.info "Creating new index: #{name}..."
index.create
[CommentThread, Comment].each do |model|
LOG.info "Applying index mappings for #{model.name}"
model.put_search_index_mapping(index)
end
LOG.info '...done!'
index
end
def self.delete_index(name)
Tire.index(name).delete
end
def self.get_index
CommentThread.tire.index
end
def self.get_index_shard_count(name)
settings = Tire.index(name).settings
settings['index.number_of_shards']
end
end
end
require 'factory_girl'
namespace :db do
FactoryGirl.find_definitions
def create_test_user(id)
User.create!(external_id: id, username: "user#{id}")
end
task :init => :environment do
puts 'recreating indexes...'
[Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:remove_indexes).each(&:create_indexes)
puts 'finished'
end
task :clean => :environment do
Comment.delete_all
CommentThread.delete_all
User.delete_all
Notification.delete_all
Subscription.delete_all
end
THREADS_PER_COMMENTABLE = 20
TOP_COMMENTS_PER_THREAD = 3
ADDITIONAL_COMMENTS_PER_THREAD = 5
COURSE_ID = 'MITx/6.002x/2012_Fall'
def generate_comments_for(commentable_id, num_threads=THREADS_PER_COMMENTABLE, num_top_comments=TOP_COMMENTS_PER_THREAD, num_subcomments=ADDITIONAL_COMMENTS_PER_THREAD)
level_limit = CommentService.config['level_limit']
users = User.all.to_a
puts "Generating threads and comments for #{commentable_id}..."
threads = []
top_comments = []
additional_comments = []
num_threads.times do
inner_top_comments = []
# Create a new thread
comment_thread = FactoryGirl::create(:comment_thread, commentable_id: commentable_id, author: users.sample, course_id: COURSE_ID)
threads << comment_thread
# Subscribe a few users to the thread
users.sample(3).each { |user| user.subscribe(comment_thread) }
# Create a few top-level comments for the thread
(1 + rand(num_top_comments)).times do
endorsed = [true, false].sample
comment = FactoryGirl::create(:comment, author: users.sample, comment_thread: comment_thread, endorsed: endorsed, course_id: COURSE_ID)
top_comments << comment
inner_top_comments << comment
end
# Create additional nested comments
parent_comments = inner_top_comments
(level_limit-1).times do
current_level_comments = []
(1 + rand(num_subcomments)).times do
parent = parent_comments.sample
endorsed = [true, false].sample
child = FactoryGirl::create(:comment, author: users.sample, parent: parent, endorsed: endorsed)
current_level_comments << child
end
parent_comments = current_level_comments
end
end
puts 'voting'
(threads + top_comments + additional_comments).each do |c|
users.each do |user|
user.vote(c, [:up, :down].sample)
end
end
puts 'finished'
end
task :generate_comments, [:commentable_id, :num_threads, :num_top_comments, :num_subcomments] => :environment do |t, args|
args.with_defaults(num_threads: THREADS_PER_COMMENTABLE,
num_top_comments: TOP_COMMENTS_PER_THREAD,
num_subcomments: ADDITIONAL_COMMENTS_PER_THREAD)
generate_comments_for(args[:commentable_id], args[:num_threads], args[:num_top_comments], args[:num_subcomments])
end
task :seed => [:environment, :clean] do
Tire.index 'comment_threads' do
delete
end
CommentThread.create_elasticsearch_index
beginning_time = Time.now
(1..10).map { |id| create_test_user(id) }
generate_comments_for('video_1')
generate_comments_for('lab_1')
generate_comments_for('lab_2')
end_time = Time.now
puts "Number of comments generated: #{Comment.count}"
puts "Number of comment threads generated: #{CommentThread.count}"
puts "Time elapsed #{(end_time - beginning_time)*1000} milliseconds"
end
task :add_anonymous_to_peers => :environment do
Content.collection.find(:anonymous_to_peers => nil).update_all({'$set' => {anonymous_to_peers: false}})
end
end
namespace :i18n do
desc "Push source strings to Transifex for translation"
task :push do
sh("tx push -s")
end
desc "Pull translated strings from Transifex"
task :pull do
sh("tx pull --mode=reviewed --all --minimum-perc=1")
end
desc "Clean the locale directory"
task :clean do
sh("git clean -f locale/")
end
desc "Commit translated strings to the repository"
task :commit => ["i18n:clean", "i18n:pull"] do
sh("git add locale")
sh("git commit -m 'Updated translations (autogenerated message)'")
end
end
namespace :jobs do
desc "Clear the delayed_job queue."
task :clear => :environment do
Delayed::Job.delete_all
end
desc "Start a delayed_job worker."
task :work => :environment do
Delayed::Worker.new(:min_priority => ENV['MIN_PRIORITY'], :max_priority => ENV['MAX_PRIORITY'], :queues => (ENV['QUEUES'] || ENV['QUEUE'] || '').split(','), :quiet => false).start
end
end
require 'task_helpers'
namespace :search do
def import_from_cursor(cursor, index, opts)
tot = cursor.count
cnt = 0
t = Time.now
index.import cursor, {:method => :paginate, :per_page => opts[:batch_size]} do |documents|
if cnt % opts[:batch_size] == 0 then
elapsed_secs = (Time.now - t).round(2)
pct_complete = (100 * (cnt/tot.to_f)).round(2)
LOG.info "#{index.name}: imported #{cnt} of #{tot} (#{pct_complete}% complete after #{elapsed_secs} seconds)"
end
cnt += documents.length
sleep opts[:sleep_time]
documents
end
LOG.info "#{index.name}: finished importing #{cnt} documents"
cnt
end
def move_alias_to(name, index)
# if there was a previous index, switch over the alias to point to the new index
alias_ = Tire::Alias.find name
if alias_
# does the alias already point to this index?
if alias_.indices.include? index.name
return false
end
# remove the alias from wherever it points to now
LOG.info "alias already exists (will move): #{alias_.indices.to_ary.join(',')}"
alias_.indices.each do |old_index_name|
alias_.indices.delete old_index_name unless old_index_name == name
end
else
# create the alias
LOG.info "alias \"#{name}\" does not yet exist - creating."
alias_ = Tire::Alias.new :name => name
end
# point the alias at our new index
alias_.indices.add index.name
alias_.save
LOG.info "alias \"#{name}\" now points to index #{index.name}."
true
end
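# Illustration (hypothetical names): if the alias "content" currently points at
# "content_20150101", then move_alias_to("content", new_index) for an index named
# "content_20150201" drops the old target and repoints the alias in a single save,
# so searches against the alias never observe a missing index.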
def do_reindex (opts, in_place=false)
start_time = Time.now
# create the new index with a unique name
new_index = TaskHelpers::ElasticsearchHelper.create_index
# Unless the user is forcing a rebuild, or the index does not yet exist, we
# can do a Tire API reindex, which is much faster than reimporting documents
# from mongo.
#
# Checking if the index exists is tricky. Tire automatically creates an index
# for the model class when the app loads if one does not already exist. However,
# it won't create an alias, which is what our app uses. So if the index exists
# but not the alias, we know that it was auto-created.
old_index = TaskHelpers::ElasticsearchHelper.get_index
alias_name = old_index.name
alias_ = Tire::Alias.find alias_name
if alias_.nil?
# edge case.
# the alias doesn't exist, so we know the index was auto-created.
# We will delete it and replace it with an alias.
raise RuntimeError, 'Cannot reindex in-place, no valid source index' if in_place
LOG.warn 'deleting auto-created index to make room for the alias'
old_index.delete
# NOTE on the small chance that another process re-auto-creates the index
# we just deleted before we have a chance to create the alias, this next
# call will fail.
move_alias_to(Content::ES_INDEX_NAME, new_index)
end
op = in_place ? 'reindex' : '(re)build index'
LOG.info "preparing to #{op}"
content_types = %w(Comment CommentThread)
if in_place
# reindex, moving source documents directly from old index to new
LOG.info 'copying documents from original index (this may take a while!)'
old_index.reindex new_index.name
LOG.info 'done copying!'
else
# fetch all the documents ever, up to start_time
cursor = Content.where(:_type.in => content_types, :updated_at.lte => start_time)
# import them to the new index
import_from_cursor(cursor, new_index, opts)
end
# move the alias if necessary
did_alias_move = move_alias_to(Content::ES_INDEX_NAME, new_index)
if did_alias_move
# Reimport any source documents that got updated since start_time,
# while the alias still pointed to the old index.
# Elasticsearch understands our document ids, so re-indexing the same
# document won't create duplicates.
LOG.info "importing any documents that changed between #{start_time} and now"
cursor = Content.where(:_type.in => content_types, :updated_at.gte => start_time)
import_from_cursor(cursor, new_index, opts)
end
end
desc 'Copies contents of MongoDB into Elasticsearch if updated in the last N minutes.'
task :catchup, [:minutes, :batch_size, :sleep_time] => :environment do |t, args|
opts = batch_opts args
the_index = TaskHelpers::ElasticsearchHelper.get_index
alias_ = Tire::Alias.find the_index.name
# this check makes sure we are working with the index to which
# the desired model's alias presently points.
raise RuntimeError, "could not find live index" if alias_.nil?
start_time = Time.now - (args[:minutes].to_i * 60)
cursor = Content.where(:_type.in => %w(Comment CommentThread), :updated_at.gte => start_time)
import_from_cursor(cursor, the_index, opts)
end
def batch_opts(args)
args = args.to_hash
{:batch_size => args[:batch_size].nil? ? 500 : args[:batch_size].to_i,
:sleep_time => args[:sleep_time].nil? ? 0 : args[:sleep_time].to_i}
end
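# batch_size and sleep_time are optional; hypothetical invocations:
#   rake search:catchup[30]         # re-import documents updated in the last 30 minutes
#   rake search:catchup[30,1000,2]  # same, in batches of 1000 with a 2s pause between batches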
desc 'Removes any data from Elasticsearch that no longer exists in MongoDB.'
task :prune, [:batch_size, :sleep_time] => :environment do |t, args|
opts = batch_opts args
the_index = TaskHelpers::ElasticsearchHelper.get_index
puts "pruning #{the_index.name}"
alias_ = Tire::Alias.find the_index.name
raise RuntimeError, 'could not find live index' if alias_.nil?
scan_size = opts[:batch_size] / TaskHelpers::ElasticsearchHelper.get_index_shard_count(the_index.name)
cnt = 0
[CommentThread, Comment].each do |klass|
doc_type = klass.document_type
# this check makes sure we are working with the index to which
# the desired model's alias presently points.
search = Tire::Search::Scan.new the_index.name, {size: scan_size, type: doc_type}
search.each do |results|
es_ids = results.map(&:id)
mongo_ids = klass.where(:id.in => es_ids).map { |d| d.id.to_s }
to_delete = es_ids - mongo_ids
if to_delete.size > 0
cnt += to_delete.size
puts "deleting #{to_delete.size} orphaned #{doc_type} documents from elasticsearch"
the_index.bulk_delete(to_delete.map { |v| {"type" => doc_type, "id" => v} })
end
puts "#{the_index.name}/#{doc_type}: processed #{search.seen} of #{search.total}"
sleep opts[:sleep_time]
end
end
puts "done pruning #{the_index.name}, deleted a total of #{cnt} orphaned documents"
end
desc 'Rebuild the content index from MongoDB data.'
task :rebuild, [:batch_size, :sleep_time] => :environment do |t, args|
do_reindex(batch_opts(args))
end
desc 'Rebuild the content index from already-indexed data (in place).'
task :reindex, [:batch_size, :sleep_time] => :environment do |t, args|
do_reindex(batch_opts(args), true)
end
desc 'Generate a new, empty physical index, without bringing it online.'
task :create_index => :environment do
TaskHelpers::ElasticsearchHelper.create_index
end
end
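# Typical maintenance flow for the tasks above (hypothetical arguments):
#   rake search:rebuild[500,1]  # full reimport from MongoDB into a fresh index
#   rake search:reindex         # in-place copy between ES indices (faster; no mongo round-trip)
#   rake search:prune[500]      # drop ES documents whose MongoDB source is gone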
@@ -5,7 +5,10 @@ class Comment < Content
   include Mongoid::Tree
   include Mongoid::Timestamps
   include Mongoid::MagicCounterCache
+  include ActiveModel::MassAssignmentSecurity
+  include Tire::Model::Search
+  include Tire::Model::Callbacks

   voteable self, :up => +1, :down => -1

   field :course_id, type: String
@@ -14,22 +17,14 @@ class Comment < Content
   field :endorsement, type: Hash
   field :anonymous, type: Boolean, default: false
   field :anonymous_to_peers, type: Boolean, default: false
-  field :commentable_id, type: String
   field :at_position_list, type: Array, default: []
+  field :sk, type: String, default: nil
+  field :child_count, type: Integer

   index({author_id: 1, course_id: 1})
   index({_type: 1, comment_thread_id: 1, author_id: 1, updated_at: 1})
-  field :sk, type: String, default: nil
-  before_save :set_sk
-  def set_sk()
-    # this attribute is explicitly write-once
-    if self.sk.nil?
-      self.sk = (self.parent_ids.dup << self.id).join("-")
-    end
-  end
-  include Tire::Model::Search
-  include Tire::Model::Callbacks

   index_name Content::ES_INDEX_NAME
@@ -43,10 +38,10 @@ class Comment < Content
     indexes :created_at, type: :date, included_in_all: false
     indexes :updated_at, type: :date, included_in_all: false
   end

   belongs_to :comment_thread, index: true
-  belongs_to :author, class_name: "User", index: true
+  belongs_to :author, class_name: 'User', index: true

   attr_accessible :body, :course_id, :anonymous, :anonymous_to_peers, :endorsed, :endorsement
@@ -57,13 +52,13 @@ class Comment < Content
   counter_cache :comment_thread

-  before_destroy :destroy_children # TODO async
+  before_destroy :destroy_children
   before_create :set_thread_last_activity_at
   before_update :set_thread_last_activity_at
+  before_save :set_sk

   def self.hash_tree(nodes)
-    nodes.map{|node, sub_nodes| node.to_hash.merge("children" => hash_tree(sub_nodes).compact)}
+    nodes.map { |node, sub_nodes| node.to_hash.merge('children' => hash_tree(sub_nodes).compact) }
   end

   # This should really go somewhere else, but sticking it here for now. This is
@@ -74,9 +69,9 @@ class Comment < Content
   # actually creates the subtree.
   def self.flatten_subtree(x)
     if x.is_a? Array
-      x.flatten.map{|y| self.flatten_subtree(y)}
+      x.flatten.map { |y| self.flatten_subtree(y) }
     elsif x.is_a? Hash
-      x.to_a.map{|y| self.flatten_subtree(y)}.flatten
+      x.to_a.map { |y| self.flatten_subtree(y) }.flatten
     else
       x
     end
@@ -97,20 +92,31 @@ class Comment < Content
       self.class.hash_tree(subtree_hash).first
     else
       as_document.slice(*%w[body course_id endorsed endorsement anonymous anonymous_to_peers created_at updated_at at_position_list])
                  .merge("id" => _id)
                  .merge("user_id" => author_id)
                  .merge("username" => author_username)
                  .merge("depth" => depth)
-                 .merge("closed" => comment_thread.nil? ? false : comment_thread.closed) # ditto
+                 .merge("closed" => comment_thread.nil? ? false : comment_thread.closed)
                  .merge("thread_id" => comment_thread_id)
                  .merge("parent_id" => parent_ids[-1])
-                 .merge("commentable_id" => comment_thread.nil? ? nil : comment_thread.commentable_id) # ditto
+                 .merge("commentable_id" => comment_thread.nil? ? nil : comment_thread.commentable_id)
                  .merge("votes" => votes.slice(*%w[count up_count down_count point]))
                  .merge("abuse_flaggers" => abuse_flaggers)
                  .merge("type" => "comment")
+                 .merge("child_count" => get_cached_child_count)
     end
   end

+  def get_cached_child_count
+    update_cached_child_count if self.child_count.nil?
+    self.child_count
+  end
+
+  def update_cached_child_count
+    child_comments_count = Comment.where({"parent_id" => self._id}).count()
+    self.set(child_count: child_comments_count)
+  end
+
   def commentable_id
     #we need this to have a universal access point for the flag rake task
     if self.comment_thread_id
@@ -147,16 +153,22 @@ class Comment < Content
   end

   def self.by_date_range_and_thread_ids from_when, to_when, thread_ids
     #return all content between from_when and to_when
     self.where(:created_at.gte => (from_when)).where(:created_at.lte => (to_when)).
       where(:comment_thread_id.in => thread_ids)
   end

   private

   def set_thread_last_activity_at
-    self.comment_thread.update_attributes!(last_activity_at: Time.now.utc)
+    self.comment_thread.update_attribute(:last_activity_at, Time.now.utc)
   end
+
+  def set_sk
+    # this attribute is explicitly write-once
+    if self.sk.nil?
+      self.sk = (self.parent_ids.dup << self.id).join("-")
+    end
+  end
 end
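A quick sketch of the sort key that set_sk materializes above (ids shortened and hypothetical; the real values are BSON object ids):

  parent_ids = ["54d0a1", "54d0b2"]   # oldest ancestor first
  id = "54d0c3"
  sk = (parent_ids.dup << id).join("-")
  # => "54d0a1-54d0b2-54d0c3" -- sorting comments by sk walks a thread depth-first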
@@ -5,6 +5,10 @@ require_relative 'content'
 class CommentThread < Content
   include Mongoid::Timestamps
+  include Mongoid::Attributes::Dynamic
+  include ActiveModel::MassAssignmentSecurity
+  include Tire::Model::Search
+  include Tire::Model::Callbacks
   extend Enumerize

   voteable self, :up => +1, :down => -1
@@ -28,8 +32,6 @@ class CommentThread < Content
   index({author_id: 1, course_id: 1})

-  include Tire::Model::Search
-  include Tire::Model::Callbacks

   index_name Content::ES_INDEX_NAME
@@ -48,12 +50,12 @@ class CommentThread < Content
     indexes :commentable_id, type: :string, index: :not_analyzed, included_in_all: false
     indexes :author_id, type: :string, as: 'author_id', index: :not_analyzed, included_in_all: false
     indexes :group_id, type: :integer, as: 'group_id', index: :not_analyzed, included_in_all: false
     indexes :id, :index => :not_analyzed
-    indexes :thread_id, :analyzer => :keyword, :as => "_id"
+    indexes :thread_id, :analyzer => :keyword, :as => '_id'
   end

-  belongs_to :author, class_name: "User", inverse_of: :comment_threads, index: true#, autosave: true
-  has_many :comments, dependent: :destroy#, autosave: true# Use destroy to envoke callback on the top-level comments TODO async
+  belongs_to :author, class_name: 'User', inverse_of: :comment_threads, index: true
+  has_many :comments, dependent: :destroy # Use destroy to invoke callback on the top-level comments
   has_many :activities, autosave: true

   attr_accessible :title, :body, :course_id, :commentable_id, :anonymous, :anonymous_to_peers, :closed, :thread_type
@@ -69,24 +71,12 @@ class CommentThread < Content
   before_create :set_last_activity_at
   before_update :set_last_activity_at, :unless => lambda { closed_changed? }
   after_update :clear_endorsements
   before_destroy :destroy_subscriptions

   scope :active_since, ->(from_time) { where(:last_activity_at => {:$gte => from_time}) }
   scope :standalone_context, ->() { where(:context => :standalone) }
   scope :course_context, ->() { where(:context => :course) }

-  def self.new_dumb_thread(options={})
-    c = self.new
-    c.title = options[:title] || "title"
-    c.body = options[:body] || "body"
-    c.commentable_id = options[:commentable_id] || "commentable_id"
-    c.course_id = options[:course_id] || "course_id"
-    c.author = options[:author] || User.first
-    c.save!
-    c
-  end

   def activity_since(from_time=nil)
     if from_time
       activities.where(:created_at => {:$gte => from_time})
@@ -95,13 +85,21 @@
     end
   end
   end

-  def activity_today; activity_since(Date.today.to_time); end
+  def activity_today
+    activity_since(Date.today.to_time)
+  end

-  def activity_this_week; activity_since(Date.today.to_time - 1.weeks); end
+  def activity_this_week
+    activity_since(Date.today.to_time - 1.weeks)
+  end

-  def activity_this_month; activity_since(Date.today.to_time - 1.months); end
+  def activity_this_month
+    activity_since(Date.today.to_time - 1.months)
+  end

-  def activity_overall; activity_since(nil); end
+  def activity_overall
+    activity_since(nil)
+  end

   def root_comments
     Comment.roots.where(comment_thread_id: self.id)
@@ -124,25 +122,26 @@ class CommentThread < Content
   end

   def to_hash(params={})
-    as_document.slice(*%w[thread_type title body course_id anonymous anonymous_to_peers commentable_id created_at updated_at at_position_list closed context])
-      .merge("id" => _id, "user_id" => author_id,
-             "username" => author_username,
-             "votes" => votes.slice(*%w[count up_count down_count point]),
-             "abuse_flaggers" => abuse_flaggers,
-             "tags" => [],
-             "type" => "thread",
-             "group_id" => group_id,
-             "pinned" => pinned?,
-             "comments_count" => comment_count)
+    as_document.slice(*%w[thread_type title body course_id anonymous anonymous_to_peers commentable_id created_at updated_at at_position_list closed context last_activity_at])
+      .merge('id' => _id,
+             'user_id' => author_id,
+             'username' => author_username,
+             'votes' => votes.slice(*%w[count up_count down_count point]),
+             'abuse_flaggers' => abuse_flaggers,
+             'tags' => [],
+             'type' => 'thread',
+             'group_id' => group_id,
+             'pinned' => pinned?,
+             'comments_count' => comment_count)
   end

   def comment_thread_id
     #so that we can use the comment thread id as a common attribute for flagging
     self.id
   end

 private

   def set_last_activity_at
     self.last_activity_at = Time.now.utc unless last_activity_at_changed?
@@ -154,8 +153,8 @@ private
   # the last activity time on the thread. Therefore the callbacks would be mutually recursive and we end up with a
   # 'SystemStackError'. The 'set' method skips callbacks and therefore bypasses this issue.
     self.comments.each do |comment|
-      comment.set :endorsed, false
-      comment.set :endorsement, nil
+      comment.set(endorsed: false)
+      comment.set(endorsement: nil)
     end
   end
@@ -163,5 +162,4 @@ private
-
   def destroy_subscriptions
     subscriptions.delete_all
   end
 end
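As the 'SystemStackError' comment above explains, clear_endorsements relies on Mongoid's set, which issues an atomic write and skips callbacks; a minimal sketch of the difference (hypothetical comment document):

  comment.set(endorsed: false)                  # direct $set, no save callbacks fire
  comment.update_attributes!(endorsed: false)   # would run the comment's save callbacks,
                                                # touch the thread, and recurse back here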
 class Content
   include Mongoid::Document
   include Mongo::Voteable

   field :visible, type: Boolean, default: true
   field :abuse_flaggers, type: Array, default: []
   field :historical_abuse_flaggers, type: Array, default: [] #preserve abuse flaggers after a moderator unflags
   field :author_username, type: String, default: nil

-  index({_type: 1, course_id: 1, pinned: -1, created_at: -1 }, {background: true} )
-  index({_type: 1, course_id: 1, pinned: -1, comment_count: -1, created_at: -1}, {background: true})
-  index({_type: 1, course_id: 1, pinned: -1, "votes.point" => -1, created_at: -1}, {background: true})
+  index({_type: 1, course_id: 1, pinned: -1, created_at: -1}, {background: true})
+  index({_type: 1, course_id: 1, pinned: -1, comment_count: -1, created_at: -1}, {background: true})
+  index({_type: 1, course_id: 1, pinned: -1, 'votes.point' => -1, created_at: -1}, {background: true})
+  index({_type: 1, course_id: 1, pinned: -1, last_activity_at: -1, created_at: -1}, {background: true})

   index({comment_thread_id: 1, sk: 1}, {sparse: true})
   index({comment_thread_id: 1, endorsed: 1}, {sparse: true})
   index({commentable_id: 1}, {sparse: true, background: true})
@@ -27,10 +27,7 @@ class Content
   end

   before_save :set_username
-  def set_username
-    # avoid having to look this attribute up later, since it does not change
-    self.author_username = author.username
-  end

   def author_with_anonymity(attr=nil, attr_when_anonymous=nil)
     if not attr
@@ -43,7 +40,7 @@ class Content
   def self.flagged
     #return an array of flagged content
     holder = []
-    Content.where(:abuse_flaggers.ne => [],:abuse_flaggers.exists => true).each do |c|
+    Content.where(:abuse_flaggers.ne => [], :abuse_flaggers.exists => true).each do |c|
       holder << c
     end
     holder
@@ -53,61 +50,65 @@ class Content
     #take a hash of criteria (what) and return a hash of hashes
     #course => user => count
-    contributors = {}
-
-    map = "function(){emit(this.author_id,1)}"
-    reduce = "function(k, vals) { var sum = 0; for(var i in vals) sum += vals[i]; return sum; }"
+    map = 'function(){emit(this.author_id,1)}'
+    reduce = 'function(k, vals) { var sum = 0; for(var i in vals) sum += vals[i]; return sum; }'

     contributors = []
-    self.where(what).map_reduce(map,reduce).out(replace: "results").each do |d|
+    self.where(what).map_reduce(map, reduce).out(replace: 'results').each do |d|
       contributors << d
     end

     #now sort and limit them
     #first sort destructively
-    contributors.sort! {|a,b| -a["value"] <=> -b["value"]}
+    contributors.sort! { |a, b| -a['value'] <=> -b['value'] }
     #then trim it
     contributors = contributors[0..(count - 1)]

     contributors
   end

   def self.summary what
     #take a hash of criteria (what) and return a hash of hashes
     #of total users, votes, comments, endorsements,

     answer = {}
     vote_count = 0
     thread_count = 0
     comment_count = 0
     contributors = []

     content = self.where(what)
     content.each do |c|
       contributors << c.author_id
-      contributors << c["votes"]["up"]
-      contributors << c["votes"]["down"]
-      vote_count += c["votes"]["count"]
-      if c._type == "CommentThread"
+      contributors << c['votes']['up']
+      contributors << c['votes']['down']
+      vote_count += c['votes']['count']
+      if c._type == 'CommentThread'
         thread_count += 1
-      elsif c._type == "Comment"
+      elsif c._type == 'Comment'
         comment_count += 1
       end
     end

     #uniquify contributors
     contributors = contributors.uniq

     #assemble the answer and ship
-    answer["vote_count"] = vote_count
-    answer["thread_count"] = thread_count
-    answer["comment_count"] = comment_count
-    answer["contributor_count"] = contributors.count
+    answer['vote_count'] = vote_count
+    answer['thread_count'] = thread_count
+    answer['comment_count'] = comment_count
+    answer['contributor_count'] = contributors.count

     answer
   end

+  private
+
+  def set_username
+    # avoid having to look this attribute up later, since it does not change
+    self.author_username = author.username
+  end
 end
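A hedged usage sketch for the summary aggregate defined above (the criteria hash is hypothetical):

  stats = Content.summary({"course_id" => "demo-course"})
  stats["vote_count"]         # total votes across matched content
  stats["thread_count"]       # matched CommentThread documents
  stats["comment_count"]      # matched Comment documents
  stats["contributor_count"]  # unique user ids (authors and voters) in the matched content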
 class Notification
   include Mongoid::Document
   include Mongoid::Timestamps
+  include ActiveModel::MassAssignmentSecurity

   field :notification_type, type: String
   field :info, type: Hash
...
@@ -12,7 +12,7 @@ class Subscription
   index({source_id: 1, source_type: 1}, {background: true})

   def to_hash
-    as_document.slice(*%w[subscriber_id source_id source_type])
+    as_document.slice(*%w[subscriber_id source_id source_type]).merge("id" => _id)
   end

   def subscriber
...
@@ -153,8 +153,9 @@ class ReadState
   field :last_read_times, type: Hash, default: {}
   embedded_in :user

-  validates :course_id, uniqueness: true, presence: true
+  validates_presence_of :course_id
+  validates_uniqueness_of :course_id

   def to_hash
     to_json
   end
...
@@ -23,7 +23,7 @@ class ThreadPresenter
     @is_endorsed = is_endorsed
   end

-  def to_hash with_responses=false, resp_skip=0, resp_limit=nil
+  def to_hash with_responses=false, resp_skip=0, resp_limit=nil, recursive=true
     raise ArgumentError unless resp_skip >= 0
     raise ArgumentError unless resp_limit.nil? or resp_limit >= 1
     h = @thread.to_hash
@@ -32,7 +32,11 @@ class ThreadPresenter
     h["endorsed"] = @is_endorsed || false
     if with_responses
       if @thread.thread_type.discussion? && resp_skip == 0 && resp_limit.nil?
-        content = Comment.where(comment_thread_id: @thread._id).order_by({"sk" => 1})
+        if recursive
+          content = Comment.where(comment_thread_id: @thread._id).order_by({"sk" => 1})
+        else
+          content = Comment.where(comment_thread_id: @thread._id, "parent_ids" => []).order_by({"sk" => 1})
+        end
         h["children"] = merge_response_content(content)
         h["resp_total"] = content.to_a.select{|d| d.depth == 0 }.length
       else
@@ -41,18 +45,20 @@
         when "question"
           endorsed_responses = responses.where(endorsed: true)
           non_endorsed_responses = responses.where(endorsed: false)
-          endorsed_response_info = get_paged_merged_responses(@thread._id, endorsed_responses, 0, nil)
+          endorsed_response_info = get_paged_merged_responses(@thread._id, endorsed_responses, 0, nil, recursive)
           non_endorsed_response_info = get_paged_merged_responses(
             @thread._id,
             non_endorsed_responses,
             resp_skip,
-            resp_limit
+            resp_limit,
+            recursive
           )
           h["endorsed_responses"] = endorsed_response_info["responses"]
           h["non_endorsed_responses"] = non_endorsed_response_info["responses"]
           h["non_endorsed_resp_total"] = non_endorsed_response_info["response_count"]
+          h["resp_total"] = non_endorsed_response_info["response_count"] + endorsed_response_info["response_count"]
         when "discussion"
-          response_info = get_paged_merged_responses(@thread._id, responses, resp_skip, resp_limit)
+          response_info = get_paged_merged_responses(@thread._id, responses, resp_skip, resp_limit, recursive)
           h["children"] = response_info["responses"]
           h["resp_total"] = response_info["response_count"]
         end
@@ -67,15 +73,20 @@
   # a hash containing the following:
   #   responses
   #     An array of hashes representing the page of responses (including
-  #     children)
+  #     children, if recursive is true)
   #   response_count
   #     The total number of responses
-  def get_paged_merged_responses(thread_id, responses, skip, limit)
+  def get_paged_merged_responses(thread_id, responses, skip, limit, recursive=false)
     response_ids = responses.only(:_id).sort({"sk" => 1}).to_a.map{|doc| doc["_id"]}
     paged_response_ids = limit.nil? ? response_ids.drop(skip) : response_ids.drop(skip).take(limit)
-    content = Comment.where(comment_thread_id: thread_id).
-      or({:parent_id => {"$in" => paged_response_ids}}, {:id => {"$in" => paged_response_ids}}).
-      sort({"sk" => 1})
+    if recursive
+      content = Comment.where(comment_thread_id: thread_id).
+        or({:parent_id => {"$in" => paged_response_ids}}, {:id => {"$in" => paged_response_ids}}).
+        sort({"sk" => 1})
+    else
+      content = Comment.where(comment_thread_id: thread_id, "parent_ids" => []).
+        where({:id => {"$in" => paged_response_ids}}).sort({"sk" => 1})
+    end
     {"responses" => merge_response_content(content), "response_count" => response_ids.length}
   end
...
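The paging in get_paged_merged_responses is plain array slicing over the sk-sorted response ids; a sketch with hypothetical ids:

  response_ids = %w[r1 r2 r3 r4 r5]   # top-level responses, already sorted by sk
  skip, limit = 1, 2
  paged = limit.nil? ? response_ids.drop(skip) : response_ids.drop(skip).take(limit)
  # => ["r2", "r3"]; "response_count" still reports response_ids.length, i.e. 5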
@@ -5,10 +5,10 @@ module ThreadUtils
     # only threads which are endorsed will have entries, value will always be true.
     endorsed_threads = {}
     thread_ids = threads.collect {|t| t._id}
-    Comment.collection.aggregate(
+    Comment.collection.aggregate([
       {"$match" => {"comment_thread_id" => {"$in" => thread_ids}, "endorsed" => true}},
       {"$group" => {"_id" => "$comment_thread_id"}}
-    ).each do |res|
+    ]).each do |res|
       endorsed_threads[res["_id"].to_s] = true
     end
     endorsed_threads
@@ -26,7 +26,7 @@ module ThreadUtils
       thread_key = t._id.to_s
       if read_dates.has_key? thread_key
         is_read = read_dates[thread_key] >= t.updated_at
-        unread_comment_count = Comment.collection.where(
+        unread_comment_count = Comment.collection.find(
           :comment_thread_id => t._id,
           :author_id => {"$ne" => user.id},
           :updated_at => {"$gte" => read_dates[thread_key]}
...
 require 'spec_helper'

-def create_comment_flag(comment_id, user_id)
-  create_flag("/api/v1/comments/" + comment_id + "/abuse_flag", user_id)
-end
-
-def create_thread_flag(thread_id, user_id)
-  create_flag("/api/v1/threads/" + thread_id + "/abuse_flag", user_id)
-end
-
-def remove_thread_flag(thread_id, user_id)
-  remove_flag("/api/v1/threads/" + thread_id + "/abuse_unflag", user_id)
-end
-
-def remove_comment_flag(comment_id, user_id)
-  remove_flag("/api/v1/comments/" + comment_id + "/abuse_unflag", user_id)
-end
-
-def create_flag(put_command, user_id)
-  if user_id.nil?
-    put put_command
-  else
-    put put_command, user_id: user_id
-  end
-end
-
-def remove_flag(put_command, user_id)
-  if user_id.nil?
-    put put_command
-  else
-    put put_command, user_id: user_id
-  end
-end
-
-describe "app" do
-  describe "abuse" do
-    before(:each) do
-      init_without_subscriptions
-      set_api_key_header
-    end
-    describe "flag a comment as abusive" do
-      it "create or update the abuse_flags on the comment" do
-        comment = Comment.first
-        # We get the count rather than just keeping the array, because the array
-        # will update as the Comment updates since the IdentityMap is enabled.
-        prev_abuse_flaggers_count = comment.abuse_flaggers.length
-        create_comment_flag("#{comment.id}", User.first.id)
-        comment = Comment.find(comment.id)
-        comment.abuse_flaggers.count.should == prev_abuse_flaggers_count + 1
-        # verify that the thread doesn't automatically get flagged
-        comment.comment_thread.abuse_flaggers.length.should == 0
-      end
-      it "returns 400 when the comment does not exist" do
-        create_comment_flag("does_not_exist", User.first.id)
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
-      end
-      it "returns 400 when user_id is not provided" do
-        create_comment_flag("#{Comment.first.id}", nil)
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:user_id_is_required)
-      end
-      #Would like to test the output of to_hash, but not sure how to deal with a Moped::BSON::Document object
-      #it "has a correct hash" do
-      #  create_flag("#{Comment.first.id}", User.first.id)
-      #  Comment.first.to_hash
-      #end
-    end
-    describe "flag a thread as abusive" do
-      it "create or update the abuse_flags on the comment" do
-        comment = Comment.first
-        thread = comment.comment_thread
-        prev_abuse_flaggers_count = thread.abuse_flaggers.count
-        create_thread_flag("#{thread.id}", User.first.id)
-        comment = Comment.find(comment.id)
-        comment.comment_thread.abuse_flaggers.count.should == prev_abuse_flaggers_count + 1
-        # verify that the comment doesn't automatically get flagged
-        comment.abuse_flaggers.length.should == 0
-      end
-      it "returns 400 when the thread does not exist" do
-        create_thread_flag("does_not_exist", User.first.id)
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
-      end
-      it "returns 400 when user_id is not provided" do
-        create_thread_flag("#{Comment.first.comment_thread.id}", nil)
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:user_id_is_required)
-      end
-      #Would like to test the output of to_hash, but not sure how to deal with a Moped::BSON::Document object
-      #it "has a correct hash" do
-      #  create_thread_flag("#{Comment.first.comment_thread.id}", User.first.id)
-      #  Comment.first.comment_thread.to_hash
-      #end
-    end
-    describe "unflag a comment as abusive" do
-      it "removes the user from the existing abuse_flaggers" do
-        comment = Comment.first
-        create_comment_flag("#{comment.id}", User.first.id)
-        comment = Comment.first
-        prev_abuse_flaggers = comment.abuse_flaggers
-        prev_abuse_flaggers_count = prev_abuse_flaggers.count
-        prev_abuse_flaggers.should include User.first.id
-        remove_comment_flag("#{comment.id}", User.first.id)
-        comment = Comment.find(comment.id)
-        comment.abuse_flaggers.count.should == prev_abuse_flaggers_count - 1
-        comment.abuse_flaggers.to_a.should_not include User.first.id
-      end
-      it "returns 400 when the comment does not exist" do
-        remove_comment_flag("does_not_exist", User.first.id)
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
-      end
-      it "returns 400 when the thread does not exist" do
-        remove_thread_flag("does_not_exist", User.first.id)
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
-      end
-      it "returns 400 when user_id is not provided" do
-        remove_thread_flag("#{Comment.first.comment_thread.id}", nil)
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:user_id_is_required)
-      end
-      #Would like to test the output of to_hash, but not sure how to deal with a Moped::BSON::Document object
-      #it "has a correct hash" do
-      #  create_thread_flag("#{Comment.first.comment_thread.id}", User.first.id)
-      #  Comment.first.comment_thread.to_hash
-      #end
-    end
-    describe "unflag a thread as abusive" do
-      it "removes the user from the existing abuse_flaggers" do
-        thread = CommentThread.first
-        create_thread_flag("#{thread.id}", User.first.id)
-        thread = CommentThread.first
-        prev_abuse_flaggers = thread.abuse_flaggers
-        prev_abuse_flaggers_count = prev_abuse_flaggers.count
-        prev_abuse_flaggers.should include User.first.id
-        remove_thread_flag("#{thread.id}", User.first.id)
-        thread = CommentThread.find(thread.id)
-        thread.abuse_flaggers.count.should == prev_abuse_flaggers_count - 1
-        thread.abuse_flaggers.to_a.should_not include User.first.id
-      end
-      it "returns 400 when the thread does not exist" do
-        remove_thread_flag("does_not_exist", User.first.id)
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
-      end
-      it "returns 400 when user_id is not provided" do
-        remove_thread_flag("#{Comment.first.comment_thread.id}", nil)
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:user_id_is_required)
-      end
-      #Would like to test the output of to_hash, but not sure how to deal with a Moped::BSON::Document object
-      #it "has a correct hash" do
-      #  create_thread_flag("#{Comment.first.comment_thread.id}", User.first.id)
-      #  Comment.first.comment_thread.to_hash
-      #end
-    end
-  end
-end
+describe 'Abuse API' do
+  before(:each) { set_api_key_header }
+
+  shared_examples 'an abuse endpoint' do
+    let(:affected_entity_id) { affected_entity.id }
+    let(:user_id) { create(:user).id }
+
+    it { should be_ok }
+
+    it 'updates the abuse flaggers' do
+      subject
+      affected_entity.reload
+      expect(affected_entity.abuse_flaggers).to eq expected_abuse_flaggers
+      expect(non_affected_entity.abuse_flaggers).to have(0).items
+    end
+
+    context 'if the comment does not exist' do
+      let(:affected_entity_id) { 'does_not_exist' }
+      it { should be_bad_request }
+      its(:body) { should eq "[\"#{I18n.t(:requested_object_not_found)}\"]" }
+    end
+
+    context 'if no user_id is provided' do
+      let(:user_id) { nil }
+      it { should be_bad_request }
+      its(:body) { should eq "[\"#{I18n.t(:user_id_is_required)}\"]" }
+    end
+  end
+
+  describe 'comment actions' do
+    let(:affected_entity) { create(:comment, abuse_flaggers: []) }
+    let(:non_affected_entity) { affected_entity.comment_thread }
+
+    context 'when flagging a comment for abuse' do
+      let(:expected_abuse_flaggers) { [user_id] }
+      subject { put "/api/v1/comments/#{affected_entity_id}/abuse_flag", user_id: user_id }
+
+      it_behaves_like 'an abuse endpoint'
+    end
+
+    context 'when un-flagging a comment for abuse' do
+      let(:affected_entity) { create(:comment, abuse_flaggers: [user_id]) }
+      let(:expected_abuse_flaggers) { [] }
+      subject { put "/api/v1/comments/#{affected_entity_id}/abuse_unflag", user_id: user_id }
+
+      it_behaves_like 'an abuse endpoint'
+    end
+  end
+
+  describe 'comment thread actions' do
+    let(:affected_entity) { create(:comment_thread, abuse_flaggers: []) }
+    let(:non_affected_entity) { create(:comment, comment_thread: affected_entity) }
+
+    context 'when flagging a comment thread for abuse' do
+      let(:expected_abuse_flaggers) { [user_id] }
+      subject { put "/api/v1/threads/#{affected_entity_id}/abuse_flag", user_id: user_id }
+
+      it_behaves_like 'an abuse endpoint'
+    end
+
+    context 'when un-flagging a comment thread for abuse' do
+      let(:affected_entity) { create(:comment_thread, abuse_flaggers: [user_id]) }
+      let(:expected_abuse_flaggers) { [] }
+      subject { put "/api/v1/threads/#{affected_entity_id}/abuse_unflag", user_id: user_id }
+
+      it_behaves_like 'an abuse endpoint'
+    end
+  end
+end
 require 'spec_helper'
 require 'unicode_shared_examples'

-describe "app" do
-  before(:each) { set_api_key_header }
-  describe "comments" do
-    before(:each) { init_without_subscriptions }
-    describe "GET /api/v1/comments/:comment_id" do
-      it "returns JSON" do
-        comment = Comment.first
-        get "/api/v1/comments/#{comment.id}"
-        last_response.should be_ok
-        last_response.content_type.should == "application/json;charset=utf-8"
-      end
-      it "retrieve information of a single comment" do
-        comment = Comment.first
-        get "/api/v1/comments/#{comment.id}"
-        last_response.should be_ok
-        retrieved = parse last_response.body
-        retrieved["body"].should == comment.body
-        retrieved["endorsed"].should == comment.endorsed
-        retrieved["id"].should == comment.id.to_s
-        retrieved["children"].should be_nil
-        retrieved["votes"]["point"].should == comment.votes_point
-        retrieved["depth"].should == comment.depth
-        retrieved["parent_id"].should == comment.parent_ids[-1]
-      end
-      it "retrieve information of a single comment with its sub comments" do
-        comment = Comment.first
-        get "/api/v1/comments/#{comment.id}", recursive: true
-        last_response.should be_ok
-        retrieved = parse last_response.body
-        retrieved["body"].should == comment.body
-        retrieved["endorsed"].should == comment.endorsed
-        retrieved["id"].should == comment.id.to_s
-        retrieved["votes"]["point"].should == comment.votes_point
-        retrieved["children"].length.should == comment.children.length
-        retrieved["children"].select{|c| c["body"] == comment.children.first.body}.first.should_not be_nil
-        retrieved["children"].each{|c| c["parent_id"].should == comment.id.to_s}
-      end
-      it "returns 400 when the comment does not exist" do
-        get "/api/v1/comments/does_not_exist"
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
-      end
-      def test_unicode_data(text)
-        comment = make_comment(User.first, CommentThread.first, text)
-        get "/api/v1/comments/#{comment.id}"
-        last_response.should be_ok
-        retrieved = parse last_response.body
-        retrieved["body"].should == text
-      end
-      include_examples "unicode data"
-    end
-    describe "PUT /api/v1/comments/:comment_id" do
-      def test_update_endorsed(true_val, false_val)
-        comment = Comment.first
-        before = DateTime.now
-        put "/api/v1/comments/#{comment.id}", endorsed: true_val, endorsement_user_id: "#{User.first.id}"
-        after = DateTime.now
-        last_response.should be_ok
-        comment.reload
-        comment.endorsed.should == true
-        comment.endorsement.should_not be_nil
-        comment.endorsement["user_id"].should == "#{User.first.id}"
-        comment.endorsement["time"].should be_between(before, after)
-        put "/api/v1/comments/#{comment.id}", endorsed: false_val
-        last_response.should be_ok
-        comment.reload
-        comment.endorsed.should == false
-        comment.endorsement.should be_nil
-      end
-      it "updates endorsed correctly" do
-        test_update_endorsed(true, false)
-      end
-      it "updates endorsed correctly with Pythonic values" do
-        test_update_endorsed("True", "False")
-      end
-      it "updates body correctly" do
-        comment = Comment.first
-        put "/api/v1/comments/#{comment.id}", body: "new body"
-        last_response.should be_ok
-        comment.reload
-        comment.body.should == "new body"
-      end
-      it "can update endorsed and body simultaneously" do
-        comment = Comment.first
-        put "/api/v1/comments/#{comment.id}", body: "new body", endorsed: true
-        last_response.should be_ok
-        comment.reload
-        comment.body.should == "new body"
-        comment.endorsed.should == true
-      end
-      it "returns 400 when the comment does not exist" do
-        put "/api/v1/comments/does_not_exist", body: "new body", endorsed: true
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
-      end
-      it "returns 503 and does not update when the post hash is blocked" do
-        comment = Comment.first
-        original_body = comment.body
-        put "/api/v1/comments/#{comment.id}", body: "BLOCKED POST", endorsed: true
-        last_response.status.should == 503
-        parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => Digest::MD5.hexdigest("blocked post"))
-        comment.reload
-        comment.body.should == original_body
-      end
-      def test_unicode_data(text)
-        comment = Comment.first
-        put "/api/v1/comments/#{comment.id}", body: text
-        last_response.should be_ok
-        comment.body.should == text
-      end
-      include_examples "unicode data"
-    end
-    describe "POST /api/v1/comments/:comment_id" do
-      it "create a sub comment to the comment" do
-        comment = Comment.first.to_hash(recursive: true)
-        user = User.first
-        post "/api/v1/comments/#{comment["id"]}", body: "new comment", course_id: "1", user_id: User.first.id
-        last_response.should be_ok
-        changed_comment = Comment.find(comment["id"]).to_hash(recursive: true)
-        changed_comment["children"].length.should == comment["children"].length + 1
-        subcomment = changed_comment["children"].select{|c| c["body"] == "new comment"}.first
-        subcomment.should_not be_nil
-        subcomment["user_id"].should == user.id
-      end
-      it "returns 400 when the comment does not exist" do
-        post "/api/v1/comments/does_not_exist", body: "new comment", course_id: "1", user_id: User.first.id
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
-      end
-      it "returns 503 and does not create when the post hash is blocked" do
-        comment = Comment.first.to_hash(recursive: true)
-        user = User.first
-        post "/api/v1/comments/#{comment["id"]}", body: "BLOCKED POST", course_id: "1", user_id: User.first.id
-        last_response.status.should == 503
-        parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => Digest::MD5.hexdigest("blocked post"))
-        Comment.where(body: "BLOCKED POST").to_a.should be_empty
-      end
-      def test_unicode_data(text)
-        parent = Comment.first
-        post "/api/v1/comments/#{parent.id}", body: text, course_id: parent.course_id, user_id: User.first.id
-        last_response.should be_ok
-        parent.children.where(body: text).should_not be_empty
-      end
-      include_examples "unicode data"
-    end
-    describe "DELETE /api/v1/comments/:comment_id" do
-      it "delete the comment and its sub comments" do
-        comment = Comment.first
-        cnt_comments = comment.descendants_and_self.length
-        prev_count = Comment.count
-        delete "/api/v1/comments/#{comment.id}"
-        Comment.count.should == prev_count - cnt_comments
-        Comment.all.select{|c| c.id == comment.id}.first.should be_nil
-      end
-      it "can delete a sub comment" do
-        parent = CommentThread.first.comments.first
-        sub_comment = parent.children.first
-        id = sub_comment.id
-        delete "/api/v1/comments/#{id}"
-        Comment.where(:id => id).should be_empty
-        parent.children.where(:id => id).should be_empty
-      end
-      it "returns 400 when the comment does not exist" do
-        delete "/api/v1/comments/does_not_exist"
-        last_response.status.should == 400
-        parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
-      end
-    end
-  end
-end
+BLOCKED_BODY = 'BLOCKED POST'
+
+describe 'Comment API' do
+  before(:each) { set_api_key_header }
+  let(:thread) { create_comment_thread_and_comments }
+
+  describe 'GET /api/v1/comments/:comment_id' do
+    it 'returns JSON' do
+      comment = thread.comments.first
+      get "/api/v1/comments/#{comment.id}"
+      last_response.should be_ok
+      last_response.content_type.should == 'application/json;charset=utf-8'
+    end
+
+    it 'retrieves information of a single comment' do
+      comment = thread.comments.first
+      get "/api/v1/comments/#{comment.id}"
+      last_response.should be_ok
+      retrieved = parse last_response.body
+      retrieved['body'].should == comment.body
+      retrieved['endorsed'].should == comment.endorsed
+      retrieved['id'].should == comment.id.to_s
+      retrieved['children'].should be_nil
+      retrieved['votes']['point'].should == comment.votes_point
+      retrieved['depth'].should == comment.depth
+      retrieved['parent_id'].should == comment.parent_ids.map(&:to_s)[-1]
+      retrieved["child_count"].should == comment.children.length
+    end
+
+    it 'retrieves information of a single comment with its sub comments' do
+      comment = thread.comments.first
+      get "/api/v1/comments/#{comment.id}", recursive: true
+      last_response.should be_ok
+      retrieved = parse last_response.body
+      retrieved['body'].should == comment.body
+      retrieved['endorsed'].should == comment.endorsed
+      retrieved['id'].should == comment.id.to_s
+      retrieved['votes']['point'].should == comment.votes_point
+
+      retrieved_children = retrieved['children']
+      retrieved_children.length.should == comment.children.length
+      retrieved["child_count"].should == comment.children.length
+
+      comment.children.each_with_index do |child, index|
+        expect(retrieved_children[index]).to include('body' => child.body, 'parent_id' => comment.id.to_s)
+      end
+    end
+
+    it 'retrieves information of a single comment and fixes incorrect child count' do
+      comment = thread.comments.first
+      comment.set(child_count: 2000)
+      comment_hash = comment.to_hash(recursive: true)
+      comment_hash["child_count"].should == 2000
+      get "/api/v1/comments/#{comment.id}", recursive: true
+      last_response.should be_ok
+      retrieved = parse last_response.body
+      retrieved["child_count"].should == comment.children.length
+
+      comment.set(child_count: nil)
+      get "/api/v1/comments/#{comment.id}"
+      last_response.should be_ok
+      retrieved = parse last_response.body
+      retrieved["child_count"].should == comment.children.length
+    end
+
+    it 'returns 400 when the comment does not exist' do
+      get '/api/v1/comments/does_not_exist'
+      last_response.status.should == 400
+      parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
+    end
+
+    def test_unicode_data(text)
+      comment = create(:comment, body: text)
+      get "/api/v1/comments/#{comment.id}"
+      last_response.should be_ok
+      parse(last_response.body)['body'].should == text
+    end
+
+    include_examples 'unicode data'
+  end
+
+  describe 'PUT /api/v1/comments/:comment_id' do
+    def test_update_endorsed(true_val, false_val)
+      comment = thread.comments.first
+      before = DateTime.now
+      put "/api/v1/comments/#{comment.id}", endorsed: true_val, endorsement_user_id: "#{User.first.id}"
+      after = DateTime.now
+      last_response.should be_ok
+      comment.reload
+      comment.endorsed.should == true
+      comment.endorsement.should_not be_nil
+      comment.endorsement["user_id"].should == "#{User.first.id}"
+      comment.endorsement["time"].should be_between(before, after)
+      put "/api/v1/comments/#{comment.id}", endorsed: false_val
+      last_response.should be_ok
+      comment.reload
+      comment.endorsed.should == false
+      comment.endorsement.should be_nil
+    end
+
+    it 'updates endorsed correctly' do
+      test_update_endorsed(true, false)
+    end
+
+    it 'updates endorsed correctly with Pythonic values' do
+      test_update_endorsed('True', 'False')
+    end
+
+    it 'updates body correctly' do
+      comment = thread.comments.first
+      put "/api/v1/comments/#{comment.id}", body: 'new body'
+      last_response.should be_ok
+      comment.reload
+      comment.body.should == 'new body'
+    end
+
+    it 'can update endorsed and body simultaneously' do
+      comment = thread.comments.first
+      put "/api/v1/comments/#{comment.id}", body: 'new body', endorsed: true
+      last_response.should be_ok
+      comment.reload
+      comment.body.should == 'new body'
+      comment.endorsed.should == true
+    end
+
+    it 'returns 400 when the comment does not exist' do
+      put '/api/v1/comments/does_not_exist', body: 'new body', endorsed: true
+      last_response.status.should == 400
+      parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
+    end
+
+    it 'returns 503 and does not update when the post hash is blocked' do
+      blocked_hash = block_post_body(BLOCKED_BODY)
+      comment = thread.comments.first
+      original_body = comment.body
+      put "/api/v1/comments/#{comment.id}", body: BLOCKED_BODY, endorsed: true
+      last_response.status.should == 503
+      parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => blocked_hash)
+      comment.reload
+      comment.body.should == original_body
+    end
+
+    def test_unicode_data(text)
+      comment = thread.comments.first
+      put "/api/v1/comments/#{comment.id}", body: text
+      last_response.should be_ok
+      comment.reload
+      comment.body.should == text
+    end
+
+    include_examples 'unicode data'
+  end
+
+  describe 'POST /api/v1/comments/:comment_id' do
+    it 'creates a sub comment to the comment' do
+      comment = thread.comments.first
+      previous_child_count = comment.children.length
+      user = thread.author
+      body = 'new comment'
+      course_id = '1'
+      post "/api/v1/comments/#{comment.id}", body: body, course_id: course_id, user_id: user.id
+      last_response.should be_ok
+
+      comment.reload
+      comment.children.length.should == previous_child_count + 1
+      comment.child_count.should == previous_child_count + 1
+      sub_comment = comment.children.order_by(created_at: :desc).first
+      sub_comment.body.should == body
+      sub_comment.course_id.should == course_id
+      sub_comment.author.should == user
+      sub_comment.child_count.should == 0
+    end
+
+    it 'returns 400 when the comment does not exist' do
+      post '/api/v1/comments/does_not_exist', body: 'new comment', course_id: '1', user_id: thread.author.id
+      last_response.status.should == 400
+      parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
+    end
+
+    it 'returns 503 and does not create when the post hash is blocked' do
+      blocked_hash = block_post_body(BLOCKED_BODY)
+      comment = thread.comments.first
+      user = comment.author
+      post "/api/v1/comments/#{comment.id}", body: BLOCKED_BODY, course_id: '1', user_id: user.id
+      last_response.status.should == 503
+      parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => blocked_hash)
+      Comment.where(body: BLOCKED_BODY).to_a.should be_empty
+    end
+
+    def test_unicode_data(text)
+      parent = thread.comments.first
+      post "/api/v1/comments/#{parent.id}", body: text, course_id: parent.course_id, user_id: User.first.id
+      last_response.should be_ok
+      parent.children.where(body: text).should_not be_empty
+    end
+
+    include_examples 'unicode data'
+  end
+
+  describe 'DELETE /api/v1/comments/:comment_id' do
+    it 'deletes the comment and its sub comments' do
+      comment = thread.comments.first
+      cnt_comments = comment.descendants_and_self.length
+      prev_count = Comment.count
+      delete "/api/v1/comments/#{comment.id}"
+      Comment.count.should == prev_count - cnt_comments
+      Comment.all.select { |c| c.id == comment.id }.first.should be_nil
+    end
+
+    it 'can delete a sub comment' do
+      # Sort to ensure we get the thread's first comment, rather than the child of that comment.
+      parent_comment = thread.comments.sort_by(&:_id).first
+      child_comment = parent_comment.children.first
+      delete "/api/v1/comments/#{child_comment.id}"
+      Comment.where(:id => child_comment.id).should be_empty
+      parent_comment.children.where(:id => child_comment.id).should be_empty
+    end
+
+    it 'returns 400 when the comment does not exist' do
+      delete '/api/v1/comments/does_not_exist'
+      last_response.status.should == 400
+      parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
+    end
+  end
+end
...@@ -18,12 +18,12 @@ describe "app" do ...@@ -18,12 +18,12 @@ describe "app" do
context "when filtering by course" do context "when filtering by course" do
it "returns only threads with matching course id" do it "returns only threads with matching course id" do
[@threads["t1"], @threads["t2"]].each do |t| [@threads["t1"], @threads["t2"]].each do |t|
t.course_id = "abc" t.course_id = "abc"
t.save! t.save!
end end
rs = thread_result course_id: "abc", sort_order: "asc" rs = thread_result course_id: "abc", sort_order: "asc"
rs.length.should == 2 rs.length.should == 2
rs.each_with_index { |res, i| rs.each_with_index { |res, i|
check_thread_result_json(nil, @threads["t#{i+1}"], res) check_thread_result_json(nil, @threads["t#{i+1}"], res)
res["course_id"].should == "abc" res["course_id"].should == "abc"
...@@ -89,7 +89,7 @@ describe "app" do ...@@ -89,7 +89,7 @@ describe "app" do
@threads["t3"].group_id = 100 @threads["t3"].group_id = 100
@threads["t3"].save! @threads["t3"].save!
rs = thread_result course_id: "omg", group_id: 100, sort_order: "asc" rs = thread_result course_id: "omg", group_id: 100, sort_order: "asc"
rs.length.should == 2 rs.length.should == 2
rs.each_with_index { |res, i| rs.each_with_index { |res, i|
check_thread_result_json(nil, @threads["t#{i+1}"], res) check_thread_result_json(nil, @threads["t#{i+1}"], res)
res["course_id"].should == "omg" res["course_id"].should == "omg"
@@ -97,32 +97,32 @@ describe "app" do
end
it "returns an empty result when no threads match course_id" do
rs = thread_result course_id: 99
rs.length.should == 0
end
it "returns only group-less threads when no threads have matching group id" do
@threads["t1"].group_id = 123
@threads["t1"].save!
rs = thread_result course_id: DFLT_COURSE_ID, group_id: 321
-rs.each.map {|res| res["group_id"].should be_nil }
+rs.each.map { |res| res["group_id"].should be_nil }
end
context "when filtering flagged posts" do
it "returns threads that are flagged" do
@threads["t1"].abuse_flaggers = [1]
@threads["t1"].save!
rs = thread_result course_id: DFLT_COURSE_ID, flagged: true
rs.length.should == 1
check_thread_result_json(nil, @threads["t1"], rs.first)
end
it "returns threads that have flagged comments" do
@comments["t2 c3"].abuse_flaggers = [1]
@comments["t2 c3"].save!
rs = thread_result course_id: DFLT_COURSE_ID, flagged: true
rs.length.should == 1
check_thread_result_json(nil, @threads["t2"], rs.first)
end
it "returns an empty result when no posts were flagged" do
rs = thread_result course_id: DFLT_COURSE_ID, flagged: true
rs.length.should == 0
end
end
it "filters unread posts" do
@@ -180,12 +180,12 @@ describe "app" do
end
it "correctly considers read state" do
user = create_test_user(123)
[@threads["t1"], @threads["t2"]].each do |t|
t.course_id = "abc"
t.save!
end
rs = thread_result course_id: "abc", user_id: "123", sort_order: "asc"
rs.length.should == 2
rs.each_with_index { |result, i|
check_thread_result_json(user, @threads["t#{i+1}"], result)
result["course_id"].should == "abc"
@@ -230,20 +230,20 @@ describe "app" do
context "sorting" do
def thread_result_order (sort_key, sort_order)
results = thread_result course_id: DFLT_COURSE_ID, sort_key: sort_key, sort_order: sort_order
results.length.should == 10
-results.map {|t| t["title"]}
+results.map { |t| t["title"] }
end
def move_to_end(ary, *vals)
vals.each do |val|
-ary = ary.select {|v| v!=val } << val
+ary = ary.select { |v| v!=val } << val
end
ary
end
def move_to_front(ary, *vals)
vals.reverse.each do |val|
-ary = ary.select {|v| v!=val }.insert(0, val)
+ary = ary.select { |v| v!=val }.insert(0, val)
end
ary
end
@@ -263,7 +263,7 @@ describe "app" do
t5c.update(body: "changed!")
t5c.save!
actual_order = thread_result_order("activity", "desc")
expected_order = move_to_front(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts using last activity / ascending" do
@@ -271,7 +271,7 @@ describe "app" do
t5c.update(body: "changed!")
t5c.save!
actual_order = thread_result_order("activity", "asc")
expected_order = move_to_end(@default_order.reverse, "t5")
actual_order.should == expected_order
end
it "sorts using vote count / descending" do
@@ -280,7 +280,7 @@ describe "app" do
user.vote(t5, :up)
t5.save!
actual_order = thread_result_order("votes", "desc")
expected_order = move_to_front(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts using vote count / ascending" do
@@ -289,19 +289,19 @@ describe "app" do
user.vote(t5, :up)
t5.save!
actual_order = thread_result_order("votes", "asc")
expected_order = move_to_end(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts using comment count / descending" do
make_comment(@threads["t5"].author, @threads["t5"], "extra comment")
actual_order = thread_result_order("comments", "desc")
expected_order = move_to_front(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts using comment count / ascending" do
make_comment(@threads["t5"].author, @threads["t5"], "extra comment")
actual_order = thread_result_order("comments", "asc")
expected_order = move_to_end(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts pinned items first" do
@@ -332,31 +332,48 @@ describe "app" do
expected_order = move_to_front(@default_order.reverse, "t7", "t8")
actual_order.should == expected_order
end
context "pagination" do
-def thread_result_page (sort_key, sort_order, page, per_page, user_id=nil, unread=false)
+def thread_result_page (sort_key, sort_order, page, per_page, course_id=DFLT_COURSE_ID, user_id=nil, unread=false)
-get "/api/v1/threads", course_id: DFLT_COURSE_ID, sort_key: sort_key, sort_order: sort_order, page: page, per_page: per_page, user_id: user_id, unread: unread
+get "/api/v1/threads", course_id: course_id, sort_key: sort_key, sort_order: sort_order, page: page, per_page: per_page, user_id: user_id, unread: unread
last_response.should be_ok
parse(last_response.body)
end
it "returns single page with no threads in a course" do
result = thread_result_page("date", "desc", 1, 20, "99")
result["collection"].length.should == 0
result["thread_count"].should == 0
result["num_pages"].should == 1
result["page"].should == 1
end
it "returns single page" do it "returns single page" do
result = thread_result_page("date", "desc", 1, 20) result = thread_result_page("date", "desc", 1, 20)
result["collection"].length.should == 10 result["collection"].length.should == 10
result["thread_count"].should == 10
result["num_pages"].should == 1 result["num_pages"].should == 1
result["page"].should == 1 result["page"].should == 1
end end
it "returns multiple pages" do it "returns multiple pages" do
result = thread_result_page("date", "desc", 1, 5) result = thread_result_page("date", "desc", 1, 5)
result["collection"].length.should == 5 result["collection"].length.should == 5
result["thread_count"].should == 10
result["num_pages"].should == 2 result["num_pages"].should == 2
result["page"].should == 1 result["page"].should == 1
result = thread_result_page("date", "desc", 2, 5) result = thread_result_page("date", "desc", 2, 5)
result["collection"].length.should == 5 result["collection"].length.should == 5
result["thread_count"].should == 10
result["num_pages"].should == 2 result["num_pages"].should == 2
result["page"].should == 2 result["page"].should == 2
end end
it "returns page exceeding available pages with no results" do
# TODO: Review whether the pagination endpoint should raise an exception rather than return an empty page.
result = thread_result_page("date", "desc", 3, 5)
result["collection"].length.should == 0
result["thread_count"].should == 10
result["num_pages"].should == 2
result["page"].should == 3
end
def test_paged_order (sort_spec, expected_order, filter_spec=[], user_id=nil)
# sort spec is a hash with keys: sort_key, sort_dir, per_page
@@ -368,12 +385,13 @@ describe "app" do
num_pages.times do |i|
page = i + 1
result = thread_result_page(
sort_spec['sort_key'],
sort_spec['sort_dir'],
page,
per_page,
-user_id,
-filter_spec.include?("unread")
+DFLT_COURSE_ID,
+user_id,
+filter_spec.include?("unread")
)
result["collection"].length.should == (page * per_page <= expected_order.length ? per_page : expected_order.length % per_page)
if filter_spec.include?("unread")
@@ -383,7 +401,7 @@ describe "app" do
result["num_pages"].should == num_pages
end
result["page"].should == page
-actual_order += result["collection"].map {|v| v["title"]}
+actual_order += result["collection"].map { |v| v["title"] }
end
actual_order.should == expected_order
end
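For reference, the collection-length assertion above reduces to simple remainder arithmetic; a quick sketch, assuming the default data set of 10 threads:

# Full pages hold per_page items; the final page holds whatever remains.
per_page = 3
total = 10
num_pages = (total / per_page.to_f).ceil # => 4
sizes = (1..num_pages).map do |page|
  page * per_page <= total ? per_page : total % per_page
end
# sizes => [3, 3, 3, 1]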
@@ -393,7 +411,7 @@ describe "app" do
@threads["t7"].pinned = true
@threads["t7"].save!
expected_order = move_to_front(move_to_end(@default_order, "t5"), "t7")
-test_paged_order({'sort_key'=>'comments', 'sort_dir'=>'asc', 'per_page'=>3}, expected_order)
+test_paged_order({'sort_key' => 'comments', 'sort_dir' => 'asc', 'per_page' => 3}, expected_order)
end
it "orders correctly acrosss pages with unread filter" do it "orders correctly acrosss pages with unread filter" do
@@ -405,21 +423,21 @@ describe "app" do
@threads["t7"].save!
expected_order = move_to_front(move_to_end(@default_order[1..8], "t5"), "t7")
test_paged_order(
-{'sort_key'=>'comments', 'sort_dir'=>'asc', 'per_page'=>3},
+{'sort_key' => 'comments', 'sort_dir' => 'asc', 'per_page' => 3},
expected_order,
["unread"],
user.id
)
end
end
end
end
def test_unicode_data(text)
-course_id = "unicode_course"
+course_id = 'unicode_course'
-thread = make_thread(User.first, text, course_id, "unicode_commentable")
+thread = create(:comment_thread, body: text, course_id: course_id)
-make_comment(User.first, thread, text)
+create(:comment, comment_thread: thread, body: text)
result = thread_result(course_id: course_id).first
check_thread_result_json(nil, thread, result)
end
@@ -427,98 +445,118 @@ describe "app" do
include_examples "unicode data"
end
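The `include_examples "unicode data"` lines pull in a shared group from spec/unicode_shared_examples.rb, which each file drives through its own test_unicode_data helper; presumably it looks roughly like this (the sample strings here are hypothetical, the real list lives in that file):

shared_examples_for "unicode data" do
  # Hypothetical sample inputs exercising non-ASCII text.
  ["テスト", "résumé", "emoji 🍀"].each do |text|
    it "handles unicode data (#{text})" do
      test_unicode_data(text)
    end
  end
end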
describe "GET /api/v1/threads/:thread_id" do describe 'GET /api/v1/threads/:thread_id' do
let(:thread) do
comment = create(:comment)
comment.comment_thread
end
before(:each) { init_without_subscriptions } subject do
it "returns JSON" do
thread = CommentThread.first
get "/api/v1/threads/#{thread.id}" get "/api/v1/threads/#{thread.id}"
last_response.should be_ok
last_response.content_type.should == "application/json;charset=utf-8"
end end
it "get information of a single comment thread" do it { should be_ok }
thread = CommentThread.first
get "/api/v1/threads/#{thread.id}" it 'returns JSON' do
last_response.should be_ok expect(subject.content_type).to eq 'application/json;charset=utf-8'
response_thread = parse last_response.body
check_thread_result_json(nil, thread, response_thread)
end end
it "computes endorsed correctly" do it 'get information of a single comment thread' do
thread = CommentThread.first check_thread_result_json(nil, thread, parse(subject.body))
comment = thread.root_comments[1] end
it 'computes endorsed correctly' do
comment = thread.root_comments[0]
comment.endorsed = true comment.endorsed = true
comment.save! comment.save!
get "/api/v1/threads/#{thread.id}"
last_response.should be_ok
response_thread = parse last_response.body
response_thread["endorsed"].should == true
check_thread_result_json(nil, thread, response_thread)
end
# This is a test to ensure that the username is included even if the expect(subject).to be_ok
# thread's author is the one looking at the comment. This is because of a parsed = parse(subject.body)
# regression in which we used User.only(:id, :read_states). This worked expect(parsed).to include('endorsed' => true)
# before we included the identity map, but afterwards, the user was thread.reload
# missing the username and was not refetched. check_thread_result_json(nil, thread, parsed)
it "includes the username even if the thread is being marked as read for the thread author" do end
thread = CommentThread.first
expected_username = thread.author.username
# We need to clear the IdentityMap after getting the expected data to context 'when marking as read' do
# ensure that this spec fails when it should. If we don't do this, then subject do
# in the cases where the User is fetched without its username, the spec get "/api/v1/threads/#{thread.id}", {:user_id => thread.author.id, :mark_as_read => true}
# won't fail because the User will already be in the identity map. end
Mongoid::IdentityMap.clear
get "/api/v1/threads/#{thread.id}", {:user_id => thread.author_id, :mark_as_read => true} it { should be_ok }
last_response.should be_ok
response_thread = parse last_response.body # This is a test to ensure that the username is included even if the
response_thread["username"].should == expected_username # thread's author is the one looking at the comment. This is because of a
# regression in which we used User.only(:id, :read_states). This worked
# before we included the identity map, but afterwards, the user was
# missing the username and was not refetched.
# BBEGGS - Note 8/4/2015: Identify map has been removed during the mongoid 4.x upgrade.
# Should no longer be an issue.
it 'includes the username even if the thread is being marked as read for the thread author' do
expect(parse(subject.body)).to include('username' => thread.author.username)
end
end end
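The regression described in the comment above stems from Mongoid's partial field selection; a minimal sketch of the failure mode (illustrative only, exact behavior varies by Mongoid version):

# Selecting only some fields loads a partial document; fields that were not
# selected, such as username, are missing on the returned object.
author = User.only(:_id, :read_states).find(thread.author_id)
author.username # nil or a missing-attribute error instead of the real username
# An enabled identity map could hide the bug by returning a previously loaded
# full User, which is why the old spec cleared Mongoid::IdentityMap first.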
it "get information of a single comment thread with its comments" do context 'with comments' do
thread = CommentThread.first subject do
get "/api/v1/threads/#{thread.id}", recursive: true get "/api/v1/threads/#{thread.id}", recursive: true
last_response.should be_ok end
check_thread_result_json(nil, thread, parse(last_response.body))
check_thread_response_paging_json(thread, parse(last_response.body)) it { should be_ok }
it 'get information of a single comment thread with its comments' do
parsed = parse(subject.body)
check_thread_result_json(nil, thread, parsed)
check_thread_response_paging_json(thread, parsed)
end
end end
it "returns 404 when the thread does not exist" do it 'returns 404 when the thread does not exist' do
thread = CommentThread.first
path = "/api/v1/threads/#{thread.id}"
get path
last_response.should be_ok
thread.destroy thread.destroy
get path expect(subject.status).to eq 404
last_response.status.should == 404 expect(parse(last_response.body).first).to eq I18n.t(:requested_object_not_found)
parse(last_response.body).first.should == I18n.t(:requested_object_not_found) end
context 'with user specified' do
let(:user) { create(:user) }
subject do
user.mark_as_read(thread)
get "/api/v1/threads/#{thread.id}", user_id: user.id
last_response
end
it { should be_ok }
it 'marks thread as read and confirms its value on returned response' do
parsed = parse(subject.body)
thread.reload
check_thread_result_json(user, thread, parsed)
expect(parsed).to include('read' => true)
end
end end
def test_unicode_data(text)
-thread = make_thread(User.first, text, "unicode_course", "unicode_commentable")
-make_comment(User.first, thread, text)
+thread = create(:comment_thread, body: text)
+create(:comment, comment_thread: thread, body: text)
get "/api/v1/threads/#{thread.id}", recursive: true
-last_response.should be_ok
-result = parse last_response.body
-check_thread_result_json(nil, thread, result)
-check_thread_response_paging_json(thread, result)
+expect(last_response).to be_ok
+parsed = parse(last_response.body)
+check_thread_result_json(nil, thread, parsed)
+check_thread_response_paging_json(thread, parsed)
end
-include_examples "unicode data"
+include_examples 'unicode data'
context "response pagination" do
before(:each) do
User.all.delete
Content.all.delete
@user = create_test_user(999)
@threads = {}
@comments = {}
-[20,10,3,2,1,0].each do |n|
+[20, 10, 3, 2, 1, 0].each do |n|
thread_key = "t#{n}"
thread = make_thread(@user, thread_key, DFLT_COURSE_ID, "pdq")
@threads[n] = thread
@@ -544,29 +582,29 @@ describe "app" do
it "returns all responses when no skip/limit params given" do
@threads.each do |n, thread|
res = thread_result thread.id, {}
-check_thread_response_paging_json thread, res
+check_thread_response_paging_json thread, res, 0, nil, false
end
end
it "skips the specified number of responses" do
@threads.each do |n, thread|
res = thread_result thread.id, {:resp_skip => 1}
-check_thread_response_paging_json thread, res, 1, nil
+check_thread_response_paging_json thread, res, 1, nil, false
end
end
it "limits the specified number of responses" do
@threads.each do |n, thread|
res = thread_result thread.id, {:resp_limit => 2}
-check_thread_response_paging_json thread, res, 0, 2
+check_thread_response_paging_json thread, res, 0, 2, false
end
end
it "skips and limits responses" do
@threads.each do |n, thread|
res = thread_result thread.id, {:resp_skip => 3, :resp_limit => 5}
-check_thread_response_paging_json thread, res, 3, 5
+check_thread_response_paging_json thread, res, 3, 5, false
end
end
end
@@ -575,8 +613,8 @@ describe "app" do
describe "PUT /api/v1/threads/:thread_id" do
before(:each) { init_without_subscriptions }
-it "update information of comment thread" do
+it "updates information of comment thread" do
thread = CommentThread.first
comment = thread.comments.first
comment.endorsed = true
@@ -592,7 +630,7 @@ describe "app" do
comment.reload
comment.endorsed.should == false
comment.endorsement.should == nil
-check_thread_result_json(nil, changed_thread, parse(last_response.body))
+check_unread_thread_result_json(changed_thread, parse(last_response.body))
end
it "returns 400 when the thread does not exist" do
put "/api/v1/threads/does_not_exist", body: "new body", title: "new title"
@@ -616,6 +654,7 @@ describe "app" do
thread = CommentThread.first
put "/api/v1/threads/#{thread.id}", body: text, title: text
last_response.should be_ok
+thread = CommentThread.find(thread.id)
thread.body.should == text
thread.title.should == text
end
@@ -626,7 +665,7 @@ describe "app" do
before(:each) { init_without_subscriptions }
let :default_params do
{body: "new comment", course_id: "1", user_id: User.first.id}
end
it "creates a comment on the comment thread" do
@@ -635,11 +674,13 @@ describe "app" do
orig_count = thread.comment_count
post "/api/v1/threads/#{thread.id}/comments", default_params
last_response.should be_ok
+retrieved = parse last_response.body
changed_thread = CommentThread.find(thread.id)
changed_thread.comment_count.should == orig_count + 1
-comment = changed_thread.comments.select{|c| c["body"] == "new comment"}.first
+comment = changed_thread.comments.select { |c| c["body"] == "new comment" }.first
comment.should_not be_nil
comment.author_id.should == user.id
+retrieved["child_count"].should == 0
end
it "allows anonymous comment" do
thread = CommentThread.first
@@ -649,7 +690,7 @@ describe "app" do
last_response.should be_ok
changed_thread = CommentThread.find(thread.id)
changed_thread.comment_count.should == orig_count + 1
-comment = changed_thread.comments.select{|c| c["body"] == "new comment"}.first
+comment = changed_thread.comments.select { |c| c["body"] == "new comment" }.first
comment.should_not be_nil
comment.anonymous.should be_true
end
@@ -681,18 +722,29 @@ describe "app" do
include_examples "unicode data"
end
describe "DELETE /api/v1/threads/:thread_id" do
before(:each) { init_without_subscriptions } describe 'DELETE /api/v1/threads/:thread_id' do
it "delete the comment thread and its comments" do let(:thread) { create_comment_thread_and_comments }
thread = CommentThread.first.to_hash
delete "/api/v1/threads/#{thread['id']}" subject { delete "/api/v1/threads/#{thread.id}" }
last_response.should be_ok
CommentThread.where(title: thread["title"]).first.should be_nil it { should be_ok }
it 'deletes the comment thread and its comments' do
expect(CommentThread.where(id: thread.id).count).to eq 1
expect(Comment.where(comment_thread: thread).count).to eq 2
subject
expect(CommentThread.where(id: thread.id).count).to eq 0
expect(Comment.where(comment_thread: thread).count).to eq 0
end end
it "returns 400 when the thread does not exist" do
delete "/api/v1/threads/does_not_exist" context 'when thread does not exist' do
last_response.status.should == 400 subject { delete '/api/v1/threads/does_not_exist' }
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
it 'returns 400 when the thread does not exist' do
expect(subject.status).to eq 400
expect(parse(subject.body).first).to eq I18n.t(:requested_object_not_found)
end
end end
end end
end end
......
require 'spec_helper'
require 'unicode_shared_examples'
-describe "app" do
-describe "commentables" do
-before(:each) do
-init_without_subscriptions
-set_api_key_header
-end
-describe "DELETE /api/v1/:commentable_id/threads" do
-it "delete all associated threads and comments of a commentable" do
-delete '/api/v1/question_1/threads'
-last_response.should be_ok
-Commentable.find("question_1").comment_threads.count.should == 0
-end
-it "handle normally when commentable does not exist" do
-delete '/api/v1/does_not_exist/threads'
-last_response.should be_ok
-end
+describe 'app' do
+describe 'commentables' do
+before(:each) { set_api_key_header }
+let(:commentable_id) { Faker::Lorem.word }
+describe 'DELETE /api/v1/:commentable_id/threads' do
+it 'deletes all associated threads and comments of a commentable' do
+thread_count = 2
+create_list(:comment_thread, thread_count, commentable_id: commentable_id)
+expect(Commentable.find(commentable_id).comment_threads.count).to eq thread_count
+delete "/api/v1/#{commentable_id}/threads"
+expect(last_response).to be_ok
+expect(Commentable.find(commentable_id).comment_threads.count).to eq 0
+end
+context 'if the commentable does not exist' do
+subject { delete '/api/v1/does_not_exist/threads' }
+it { should be_ok }
+end
end
describe "GET /api/v1/:commentable_id/threads" do
def thread_result(commentable_id, params={}) describe 'GET /api/v1/:commentable_id/threads' do
get "/api/v1/#{commentable_id}/threads", params let(:returned_threads) { parse(subject.body)['collection'] }
last_response.should be_ok subject { get "/api/v1/#{commentable_id}/threads" }
parse(last_response.body)["collection"]
end shared_examples_for 'a filterable API endpoint' do
it "get all comment threads associated with a commentable object" do let!(:ignored_threads) { create_list(:comment_thread, 3, commentable_id: commentable_id) }
threads = thread_result "question_1" subject { get "/api/v1/#{commentable_id}/threads", parameters }
threads.length.should == 2
threads.index{|c| c["body"] == "can anyone help me?"}.should_not be_nil it { should be_ok }
threads.index{|c| c["body"] == "it is unsolvable"}.should_not be_nil
end it 'returns the correct CommentThreads' do
it "returns standalone threads if explicitly requested" do expect(returned_threads.length).to eq threads.length
threads = thread_result "question_1", context: "standalone" threads.sort_by!(&:_id).reverse!
threads.length.should == 1 threads.each_with_index do |thread, index|
threads[0]["body"].should == "no one can see us" expect(returned_threads[index]).to include('id' => thread.id.to_s, 'body' => thread.body)
end end
it "filters by course_id" do end
course1_threads = thread_result "question_1", course_id: "1" end
course1_threads.length.should == 1
course2_threads = thread_result "question_1", course_id: "2" context 'without filtering' do
course2_threads.length.should == 1 let(:parameters) { {} }
course1_threads.should_not == course2_threads let!(:threads) { ignored_threads + create_list(:comment_thread, 3, :with_group_id, commentable_id: commentable_id) }
end
it "filters by group_id" do it_behaves_like 'a filterable API endpoint'
group_thread = Commentable.find("question_1").comment_threads.first end
threads = thread_result "question_1", group_id: 42
threads.length.should == 2 context 'when filtering by the standalone context' do
group_thread.group_id = 43 let(:parameters) { {context: :standalone} }
group_thread.save! let!(:threads) { create_list(:comment_thread, 3, commentable_id: commentable_id, context: :standalone) }
threads = thread_result "question_1", group_id: 42
threads.length.should == 1 it_behaves_like 'a filterable API endpoint'
group_thread.group_id = 42 end
group_thread.save!
threads = thread_result "question_1", group_id: 42 context 'when filtering by course_id' do
threads.length.should == 2 let(:course_id) { Faker::Lorem.word }
end let(:parameters) { {course_id: course_id} }
it "filters by group_ids" do let!(:threads) { create_list(:comment_thread, 3, commentable_id: commentable_id, course_id: course_id) }
group_thread = Commentable.find("question_1").comment_threads.first
group_thread.group_id = 42
group_thread.save! it_behaves_like 'a filterable API endpoint'
threads = thread_result "question_1", group_ids: "42,43" end
threads.length.should == 2
group_thread.group_id = 43 context 'when filtering by group_id' do
group_thread.save! let(:group_id) { Faker::Number.number(4) }
threads = thread_result "question_1", group_ids: "42,43" let(:parameters) { {group_id: group_id} }
threads.length.should == 2 let!(:threads) { create_list(:comment_thread, 3, commentable_id: commentable_id, group_id: group_id) }
group_thread.group_id = 44
group_thread.save
threads = thread_result "question_1", group_ids: "42,43" it_behaves_like 'a filterable API endpoint'
threads.length.should == 1 end
end
it "returns an empty array when the commentable object does not exist (no threads)" do context 'when filtering by multiple group_id values' do
threads = thread_result "does_not_exist" let(:group_ids) { [Faker::Number.number(4), Faker::Number.number(4)] }
threads.length.should == 0 let(:parameters) { {group_ids: group_ids.join(',')} }
it_behaves_like 'a filterable API endpoint' do
let!(:threads) do
threads = []
group_ids.each do |group_id|
threads += create_list(:comment_thread, 3, commentable_id: commentable_id, group_id: group_id)
end
threads
end
end
end
context 'when the commentable does not exist' do
subject { get '/api/v1/does_not_exist/threads' }
it { should be_ok }
it 'should not return any results' do
expect(returned_threads.length).to eq 0
end
end end
def test_unicode_data(text) def test_unicode_data(text)
commentable_id = "unicode_commentable" commentable_id = 'unicode_commentable'
thread = make_thread(User.first, text, "unicode_course", commentable_id) thread = create(:comment_thread, commentable_id: commentable_id, body: text)
make_comment(User.first, thread, text) create(:comment, comment_thread: thread, body: text)
get "/api/v1/#{commentable_id}/threads" get "/api/v1/#{commentable_id}/threads"
last_response.should be_ok last_response.should be_ok
result = parse(last_response.body)["collection"] result = parse(last_response.body)['collection']
result.should_not be_empty result.should_not be_empty
check_thread_result_json(nil, thread, result.first) check_thread_result_json(nil, thread, result.first)
end end
include_examples "unicode data" include_examples 'unicode data'
end end
describe "POST /api/v1/:commentable_id/threads" do
let(:default_params) do describe 'POST /api/v1/:commentable_id/threads' do
{title: "Interesting question", body: "cool", course_id: "1", user_id: "1"} let(:commentable_id) { Faker::Lorem.word }
let(:user) { create(:user) }
let(:parameters) { attributes_for(:comment_thread, user_id: user.id) }
subject { post "/api/v1/#{commentable_id}/threads", parameters }
shared_examples_for 'CommentThread creation API' do |context='course'|
it 'creates a new CommentThread' do
expect(CommentThread.count).to eq 0
body = parse(subject.body)
expect(body).to include('read' => false,
'unread_comments_count' => 0,
'endorsed' => false,
'resp_total' => 0)
expect(CommentThread.count).to eq 1
thread = CommentThread.find(body['id'])
expect(thread).to_not be_nil
expect(thread.context).to eq context
end
end end
it "create a new comment thread for the commentable object" do
old_count = CommentThread.count it { should be_ok }
post '/api/v1/question_1/threads', default_params
last_response.should be_ok it_behaves_like 'CommentThread creation API'
result = parse(last_response.body) it_behaves_like 'CommentThread creation API', 'standalone' do
result["read"].should == false let(:parameters) { attributes_for(:comment_thread, user_id: user.id, context: 'standalone') }
result["unread_comments_count"].should == 0
result["endorsed"].should == false
CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first
thread.should_not be_nil
thread.context.should == "course"
end
it "can create a standalone thread" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(:context => "standalone")
CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first
thread.should_not be_nil
thread.context.should == "standalone"
end end
CommentThread.thread_type.values.each do |thread_type| CommentThread.thread_type.values.each do |thread_type|
it "can create a #{thread_type} thread" do it "can create a #{thread_type} thread" do
old_count = CommentThread.where(thread_type: thread_type).count old_count = CommentThread.where(thread_type: thread_type).count
post "/api/v1/question_1/threads", default_params.merge(thread_type: thread_type.to_s) post '/api/v1/question_1/threads', parameters.merge(thread_type: thread_type.to_s)
last_response.should be_ok last_response.should be_ok
parse(last_response.body)["thread_type"].should == thread_type.to_s parse(last_response.body)['thread_type'].should == thread_type.to_s
CommentThread.where(thread_type: thread_type).count.should == old_count + 1 CommentThread.where(thread_type: thread_type).count.should == old_count + 1
end end
end end
it "allows anonymous thread" do
old_count = CommentThread.count it 'allows anonymous thread' do
post '/api/v1/question_1/threads', default_params.merge(anonymous: true) post '/api/v1/question_1/threads', parameters.merge!(anonymous: true)
last_response.should be_ok
CommentThread.count.should == old_count + 1
c = CommentThread.where(title: "Interesting question").first
c.should_not be_nil
c["anonymous"].should be_true
end
it "create a new comment thread for a new commentable object" do
post '/api/v1/does_not_exist/threads', default_params
last_response.should be_ok last_response.should be_ok
Commentable.find("does_not_exist").comment_threads.length.should == 1 body = parse(subject.body)
Commentable.find("does_not_exist").comment_threads.first.body.should == "cool"
thread = CommentThread.find(body['id'])
expect(thread).to_not be_nil
expect(thread['anonymous']).to be_true
end end
it "returns error when title, body or course id does not exist" do
params = default_params.dup it 'returns error when title, body or course id does not exist' do
params.delete(:title) [:title, :body, :course_id].each do |parameter|
post '/api/v1/question_1/threads', params params = parameters.dup
last_response.status.should == 400 params.delete(parameter)
params = default_params.dup post '/api/v1/question_1/threads', params
params.delete(:body) last_response.status.should == 400
post '/api/v1/question_1/threads', params end
last_response.status.should == 400
params = default_params.dup
params.delete(:course_id)
post '/api/v1/question_1/threads', params
last_response.status.should == 400
end end
it "returns error when title or body is blank (only consists of spaces and new lines)" do it "returns error when title or body is blank (only consists of spaces and new lines)" do
post '/api/v1/question_1/threads', default_params.merge(title: " ") post '/api/v1/question_1/threads', parameters.merge(title: " ")
last_response.status.should == 400 last_response.status.should == 400
post '/api/v1/question_1/threads', default_params.merge(body: " \n \n") post '/api/v1/question_1/threads', parameters.merge(body: " \n \n")
last_response.status.should == 400 last_response.status.should == 400
end end
it "returns 503 and does not create when the post content is blocked" do
post '/api/v1/question_1/threads', default_params.merge(body: "BLOCKED POST") it 'returns 503 and does not create when the post content is blocked' do
body = 'BLOCKED POST'
hash = block_post_body
post '/api/v1/question_1/threads', parameters.merge!(body: body)
last_response.status.should == 503 last_response.status.should == 503
parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => Digest::MD5.hexdigest("blocked post")) parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => hash)
CommentThread.where(body: "BLOCKED POST").to_a.should be_empty expect(CommentThread.where(body: body).length).to eq 0
end end
def test_unicode_data(text) def test_unicode_data(text)
commentable_id = "unicode_commentable" commentable_id = 'unicode_commentable'
post "/api/v1/#{commentable_id}/threads", default_params.merge(body: text, title: text) post "/api/v1/#{commentable_id}/threads", parameters.merge!(body: text, title: text)
last_response.should be_ok last_response.should be_ok
CommentThread.where(commentable_id: commentable_id, body: text, title: text).should_not be_empty expect(CommentThread.where(commentable_id: commentable_id, body: text, title: text)).to_not be_empty
end end
include_examples "unicode data" include_examples 'unicode data'
end end
end end
end end
require "spec_helper" require 'spec_helper'
describe "i18n" do describe 'i18n' do
before(:each) { set_api_key_header } before(:each) { set_api_key_header }
it "should respect the Accept-Language header" do it 'should respect the Accept-Language header' do
put "/api/v1/comments/does_not_exist/votes", {}, {"HTTP_ACCEPT_LANGUAGE" => "x-test"} put '/api/v1/comments/does_not_exist/votes', {}, {'HTTP_ACCEPT_LANGUAGE' => 'x-test'}
last_response.status.should == 400 last_response.status.should == 400
parse(last_response.body).first.should == "##x-test## requested object not found" parse(last_response.body).first.should == '##x-test## requested object not found'
end end
end end
@@ -17,7 +17,7 @@ describe "app" do
random_string = (0...8).map{ ('a'..'z').to_a[rand(26)] }.join
thread = CommentThread.new(
title: "Test title", body: "elephant otter", course_id: "1",
-commentable_id: commentable.id, comments_text_dummy: random_string
+commentable_id: commentable.id, body: random_string
)
thread.thread_type = :discussion
thread.author = user
@@ -94,7 +94,8 @@ describe "app" do
subscription = Subscription.create({:subscriber_id => user._id.to_s, :source_id => thread._id.to_s})
-comment = Comment.new(body: "dummy body text", course_id: "1", commentable_id: commentable.id)
+comment = Comment.new(body: "dummy body text", course_id: "1")
+comment.commentable_id = commentable.id
comment.author = user
comment.comment_thread = thread
comment.save!
...
require 'spec_helper'
+require 'faker'
-describe "app" do
-before (:each) { set_api_key_header }
-let(:author) { create_test_user(1) }
-describe "thread search" do
-describe "GET /api/v1/search/threads" do
-it "returns thread with query match" do
-commentable = Commentable.new("question_1")
-random_string = (0...8).map{ ('a'..'z').to_a[rand(26)] }.join
-thread = CommentThread.new(title: "Test title", body: random_string, course_id: "1", commentable_id: commentable.id)
-thread.thread_type = :discussion
-thread.author = author
-thread.save!
-sleep 3
-get "/api/v1/search/threads", text: random_string
-last_response.should be_ok
-threads = parse(last_response.body)['collection']
-check_thread_result_json(nil, thread, threads.select{|t| t["id"] == thread.id.to_s}.first)
-end
+describe 'app' do
+before(:each) { set_api_key_header }
+let(:body) { Faker::Lorem.word }
+describe 'GET /api/v1/search/threads' do
+shared_examples_for 'a search endpoint' do
+subject do
+refresh_es_index
+get '/api/v1/search/threads', text: body
+end
+let(:matched_thread) { parse(subject.body)['collection'].select { |t| t['id'] == thread.id.to_s }.first }
+it { should be_ok }
+it 'returns thread with query match' do
+expect(matched_thread).to_not be_nil
+check_thread_result_json(nil, thread, matched_thread)
+end
+end
end
-end
-describe "comment search" do
-describe "GET /api/v1/search/threads" do
-it "returns thread with comment query match" do
-commentable = Commentable.new("question_1")
-random_string = (0...8).map{ ('a'..'z').to_a[rand(26)] }.join
-thread = CommentThread.new(title: "Test title", body: "elephant otter", course_id: "1", commentable_id: commentable.id)
-thread.thread_type = :discussion
-thread.author = author
-thread.save!
-sleep 3
-comment = Comment.new(body: random_string, course_id: "1", commentable_id: commentable.id)
-comment.author = author
-comment.comment_thread = thread
-comment.save!
-sleep 1
-get "/api/v1/search/threads", text: random_string
-last_response.should be_ok
-threads = parse(last_response.body)['collection']
-check_thread_result_json(nil, thread, threads.select{|t| t["id"] == thread.id.to_s}.first)
-end
+context 'when searching on thread content' do
+let!(:thread) { create(:comment_thread, body: body) }
+it_behaves_like 'a search endpoint'
+end
+context 'when searching on comment content' do
+let!(:thread) do
+comment = create(:comment, body: body)
+thread = comment.comment_thread
+end
+it_behaves_like 'a search endpoint'
+end
end
end
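These examples depend on the new refresh_es_index helper (pulled in via support/elasticsearch) instead of the old `sleep` calls; presumably it forces the index to publish pending documents, along the lines of the helper being removed from spec_helper.rb later in this diff:

def refresh_es_index
  # CommentThread and Comment share one index, so a single refresh makes
  # newly indexed documents of both types searchable immediately.
  CommentThread.tire.index.refresh
end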
@@ -361,5 +361,22 @@ describe "app" do
include_examples "unicode data"
end
describe "POST /api/v1/users/:user_id/read" do
before(:each) { setup_10_threads }
it "marks a thread as read for the user" do
thread = @threads["t0"]
user = create_test_user(42)
post "/api/v1/users/#{user.external_id}/read", source_type: "thread", source_id: thread.id
last_response.should be_ok
user.reload
read_states = user.read_states.where(course_id: thread.course_id).to_a
read_date = read_states.first.last_read_times[thread.id.to_s]
read_date.should >= thread.updated_at
end
end
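The assertion on last_read_times leans on the read-state bookkeeping: each user stores one read state per course, mapping thread ids to the time they were last read. A sketch of the lookup the spec performs, using the field names as they appear above:

read_state = user.read_states.where(course_id: thread.course_id).first
# last_read_times maps thread id strings to timestamps:
read_state.last_read_times[thread.id.to_s] # => time recorded by the POST, >= thread.updated_at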
end
end
@@ -45,8 +45,11 @@ describe "app" do
context "db check" do
def test_db_check(response, is_success)
db = double("db")
-stub_const("Mongoid::Sessions", Class.new).stub(:default).and_return(db)
+stub_const("Mongoid::Clients", Class.new).stub(:default).and_return(db)
-db.should_receive(:command).with({:isMaster => 1}).and_return(response)
+result = double('result')
result.stub(:ok?).and_return(response['ok'] == 1)
result.stub(:documents).and_return([response])
db.should_receive(:command).with({:isMaster => 1}).and_return(result)
get "/heartbeat" get "/heartbeat"
if is_success if is_success
last_response.status.should == 200 last_response.status.should == 200
...@@ -75,7 +78,7 @@ describe "app" do ...@@ -75,7 +78,7 @@ describe "app" do
it "reports failure when db command raises an error" do it "reports failure when db command raises an error" do
db = double("db") db = double("db")
stub_const("Mongoid::Sessions", Class.new).stub(:default).and_return(db) stub_const("Mongoid::Clients", Class.new).stub(:default).and_return(db)
db.should_receive(:command).with({:isMaster => 1}).and_raise(StandardError) db.should_receive(:command).with({:isMaster => 1}).and_raise(StandardError)
get "/heartbeat" get "/heartbeat"
last_response.status.should == 500 last_response.status.should == 500
...@@ -168,4 +171,4 @@ describe "app" do ...@@ -168,4 +171,4 @@ describe "app" do
end end
end end
end end
\ No newline at end of file
require 'faker'
# Reload i18n data for faker
I18n.reload!
FactoryGirl.define do
factory :user do
# Initialize the model with all attributes since we are using a custom _id field.
# See https://github.com/thoughtbot/factory_girl/issues/544.
initialize_with { new(attributes) }
sequence(:username) { |n| "#{Faker::Internet.user_name}_#{n}" }
sequence(:external_id) { username }
end
factory :comment_thread do
title { Faker::Lorem.sentence }
body { Faker::Lorem.paragraph }
course_id { Faker::Lorem.word }
thread_type :discussion
commentable_id { Faker::Lorem.word }
association :author, factory: :user
group_id nil
pinned false
trait :subscribe_author do
after(:create) do |thread|
thread.author.subscribe(thread)
end
end
trait :with_group_id do
group_id { Faker::Number.number(4) }
end
end
factory :comment do
association :author, factory: :user
comment_thread { parent ? parent.comment_thread : create(:comment_thread) }
body { Faker::Lorem.paragraph }
course_id { comment_thread.course_id }
commentable_id { comment_thread.commentable_id }
endorsed false
end
end
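With these definitions in place, specs can build fixture data declaratively; a few representative calls, assuming FactoryGirl's standard create/create_list/attributes_for API:

user = create(:user)                                           # persisted user with a unique username
thread = create(:comment_thread, :with_group_id, author: user) # thread carrying a random group_id
create_list(:comment, 2, comment_thread: thread)               # comments inherit course_id/commentable_id
params = attributes_for(:comment_thread, user_id: user.id)     # plain attribute hash for POST specs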
@@ -69,4 +69,32 @@ describe Comment do
end
end
end
describe '#child_count' do
context 'with course_thread' do
it 'returns cached child count' do
comment = make_comment(author, course_thread, "comment")
child_comment = make_comment(author, comment, "comment")
expect(comment.get_cached_child_count).to eq(1)
end
it 'recomputes cached child count when the cached value is nil' do
comment = make_comment(author, course_thread, "comment")
child_comment = make_comment(author, comment, "comment")
comment.child_count = nil
expect(comment.get_cached_child_count).to eq(1)
end
it 'updates cached child count' do
comment = make_comment(author, course_thread, "comment")
expect(comment.get_cached_child_count).to eq(0)
comment.child_count = 2
expect(comment.get_cached_child_count).to eq(2)
comment.update_cached_child_count
expect(comment.get_cached_child_count).to eq(0)
end
end
end
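These examples pin down the caching contract for child_count; a minimal sketch of what the two model methods presumably do (the real implementation lives in the Comment model, not in this diff):

def get_cached_child_count
  # Recompute lazily when the cached value is missing, then serve the cache.
  update_cached_child_count if child_count.nil?
  child_count
end

def update_cached_child_count
  # Reset the counter from the actual number of child comments.
  self.child_count = children.length
end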
end
@@ -77,19 +77,33 @@ describe ThreadPresenter do
@reader = create_test_user('thread reader')
end
-it "handles with_responses=false" do
+it "handles with_responses=false and recursive has no impact" do
@threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses
-hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash
+# with response=false and recursive=false
+hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(false, 0, nil, false)
check_thread_result(@reader, thread, hash)
['children', 'resp_skip', 'resp_limit', 'resp_total'].each {|k| (hash.has_key? k).should be_false }
+# with response=false and recursive=true
+hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(false, 0, nil, true)
+check_thread_result(@reader, thread, hash)
+['children', 'resp_skip', 'resp_limit', 'resp_total'].each {|k| (hash.has_key? k).should be_false }
end
end
it "handles with_responses=true and recursive=true" do
@threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, nil, true)
check_thread_result(@reader, thread, hash)
check_thread_response_paging(thread, hash, 0, nil, false, true)
end end
end end
it "handles with_responses=true" do it "handles with_responses=true and recursive=false" do
@threads_with_num_comments.each do |thread, num_comments| @threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses is_endorsed = num_comments > 0 && endorse_responses
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash true hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, nil, false)
check_thread_result(@reader, thread, hash) check_thread_result(@reader, thread, hash)
check_thread_response_paging(thread, hash) check_thread_response_paging(thread, hash)
end end
...@@ -99,7 +113,7 @@ describe ThreadPresenter do ...@@ -99,7 +113,7 @@ describe ThreadPresenter do
@threads_with_num_comments.each do |thread, num_comments| @threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses is_endorsed = num_comments > 0 && endorse_responses
[0, 1, 2, 9, 10, 11, 1000].each do |skip| [0, 1, 2, 9, 10, 11, 1000].each do |skip|
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash true, skip hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, skip, nil, true)
check_thread_result(@reader, thread, hash) check_thread_result(@reader, thread, hash)
check_thread_response_paging(thread, hash, skip) check_thread_response_paging(thread, hash, skip)
end end
...@@ -111,7 +125,7 @@ describe ThreadPresenter do ...@@ -111,7 +125,7 @@ describe ThreadPresenter do
is_endorsed = num_comments > 0 && endorse_responses is_endorsed = num_comments > 0 && endorse_responses
[1, 2, 3, 9, 10, 11, 1000].each do |limit| [1, 2, 3, 9, 10, 11, 1000].each do |limit|
[0, 1, 2, 9, 10, 11, 1000].each do |skip| [0, 1, 2, 9, 10, 11, 1000].each do |skip|
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash true, skip, limit hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, skip, limit, true)
check_thread_result(@reader, thread, hash) check_thread_result(@reader, thread, hash)
check_thread_response_paging(thread, hash, skip, limit) check_thread_response_paging(thread, hash, skip, limit)
end end
...@@ -122,9 +136,9 @@ describe ThreadPresenter do ...@@ -122,9 +136,9 @@ describe ThreadPresenter do
it "fails with invalid arguments" do it "fails with invalid arguments" do
@threads_with_num_comments.each do |thread, num_comments| @threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses is_endorsed = num_comments > 0 && endorse_responses
expect{ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, -1, nil)}.to raise_error(ArgumentError) expect{ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, -1, nil, true)}.to raise_error(ArgumentError)
[-1, 0].each do |limit| [-1, 0].each do |limit|
expect{ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, limit)}.to raise_error(ArgumentError) expect{ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, limit, true)}.to raise_error(ArgumentError)
end end
end end
end end
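Taken together, these examples fix the presenter's widened signature: to_hash(with_responses = false, resp_skip = 0, resp_limit = nil, recursive = false), with recursive controlling whether each response embeds its children. A sketch of the argument validation the last example exercises, assuming it mirrors the ArgumentError expectations above:

def to_hash(with_responses = false, resp_skip = 0, resp_limit = nil, recursive = false)
  raise ArgumentError unless resp_skip >= 0
  raise ArgumentError unless resp_limit.nil? || resp_limit >= 1
  # ...build the thread hash, optionally paging/recursing through responses...
end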
...
ENV["SINATRA_ENV"] = "test" ENV["SINATRA_ENV"] = "test"
require 'simplecov' require 'simplecov'
SimpleCov.start SimpleCov.start
if ENV['CI']=='true'
require 'codecov'
SimpleCov.formatter = SimpleCov::Formatter::Codecov
end
require File.join(File.dirname(__FILE__), '..', 'app')
-require 'sinatra'
require 'rack/test'
+require 'sinatra'
require 'yajl'
require 'database_cleaner'
+require 'support/database_cleaner'
+require 'support/elasticsearch'
+require 'support/factory_girl'
# setup test environment
set :environment, :test
@@ -15,6 +23,9 @@ set :run, false
set :raise_errors, true
set :logging, false
Mongoid.logger.level = Logger::WARN
Mongo::Logger.logger.level = ENV["ENABLE_MONGO_DEBUGGING"] ? Logger::DEBUG : Logger::WARN
Delayed::Worker.delay_jobs = false
def app
@@ -28,36 +39,12 @@ def set_api_key_header
current_session.header "X-Edx-Api-Key", TEST_API_KEY
end
def delete_es_index
Tire.index Content::ES_INDEX_NAME do delete end
end
def create_es_index
new_index = Tire.index Content::ES_INDEX_NAME
new_index.create
[CommentThread, Comment].each do |klass|
klass.put_search_index_mapping
end
end
def refresh_es_index
# we are using the same index for two types, which is against the
# grain of Tire's design. This is why this method works for both
# comment_threads and comments.
CommentThread.tire.index.refresh
end
RSpec.configure do |config|
config.include Rack::Test::Methods
config.treat_symbols_as_metadata_keys_with_true_values = true
config.filter_run focus: true
config.run_all_when_everything_filtered = true
config.before(:each) do
Mongoid::IdentityMap.clear
DatabaseCleaner.clean
delete_es_index
create_es_index
end
end
Mongoid.configure do |config|
@@ -72,16 +59,24 @@ def create_test_user(id)
User.create!(external_id: id.to_s, username: "user#{id}")
end
-def init_without_subscriptions
+# Add the given body of text to the list of blocked texts/hashes.
def block_post_body(body='blocked post')
body = body.strip.downcase.gsub(/[^a-z ]/, '').gsub(/\s+/, ' ')
blocked_hash = Digest::MD5.hexdigest(body)
Content.mongo_client[:blocked_hash].insert_one(hash: blocked_hash)
# reload the global holding the blocked hashes
CommentService.blocked_hashes = Content.mongo_client[:blocked_hash].find(nil, projection: {hash: 1}).map do |d|
d['hash']
end
blocked_hash
end
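A typical use, mirroring the POST thread spec earlier in this diff: register the text, then assert that posting it is rejected with the matching hash, e.g.:

hash = block_post_body('BLOCKED POST') # normalizes, hashes, and registers the body
post '/api/v1/question_1/threads', parameters.merge(body: 'BLOCKED POST')
last_response.status.should == 503
parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => hash)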
-[Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:delete_all).each(&:remove_indexes).each(&:create_indexes)
+def init_without_subscriptions
Content.mongo_session[:blocked_hash].drop
delete_es_index
create_es_index
commentable = Commentable.new("question_1")
-users = (1..10).map{|id| create_test_user(id)}
+users = (1..10).map { |id| create_test_user(id) }
user = users.first
thread = CommentThread.new(title: "I can't solve this problem", body: "can anyone help me?", course_id: "1", commentable_id: commentable.id)
@@ -150,63 +145,15 @@ def init_without_subscriptions
Comment.all.each do |c|
user.vote(c, :up) # make the first user always vote up for convenience
-users[2,9].each {|user| user.vote(c, [:up, :down].sample)}
+users[2, 9].each { |user| user.vote(c, [:up, :down].sample) }
end
CommentThread.all.each do |c|
user.vote(c, :up) # make the first user always vote up for convenience
-users[2,9].each {|user| user.vote(c, [:up, :down].sample)}
+users[2, 9].each { |user| user.vote(c, [:up, :down].sample) }
end
-Content.mongo_session[:blocked_hash].insert(hash: Digest::MD5.hexdigest("blocked post"))
+block_post_body
# reload the global holding the blocked hashes
CommentService.blocked_hashes = Content.mongo_session[:blocked_hash].find.select(hash: 1).each.map {|d| d["hash"]}
end
def init_with_subscriptions
[Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:delete_all).each(&:remove_indexes).each(&:create_indexes)
delete_es_index
create_es_index
user1 = create_test_user(1)
user2 = create_test_user(2)
user2.subscribe(user1)
commentable = Commentable.new("question_1")
user1.subscribe(commentable)
user2.subscribe(commentable)
thread = CommentThread.new(title: "I can't solve this problem", body: "can anyone help me?", course_id: "1", commentable_id: commentable.id)
thread.author = user1
user1.subscribe(thread)
user2.subscribe(thread)
thread.save!
thread = thread.reload
comment = thread.comments.new(body: "this problem is so easy", course_id: "1")
comment.author = user2
comment.save!
comment1 = comment.children.new(body: "not for me!", course_id: "1")
comment1.author = user1
comment1.comment_thread = thread
comment1.save!
comment2 = comment1.children.new(body: "not for me neither!", course_id: "1")
comment2.author = user2
comment2.comment_thread = thread
comment2.save!
thread = CommentThread.new(title: "This problem is wrong", body: "it is unsolvable", course_id: "2", commentable_id: commentable.id)
thread.author = user2
user2.subscribe(thread)
thread.save!
thread = CommentThread.new(title: "I don't know what to say", body: "lol", course_id: "2", commentable_id: "something else")
thread.author = user1
thread.save!
end end
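
As a usage sketch (hypothetical spec, not from this commit), the setup helper is typically invoked from a before hook; it seeds ten users plus voted threads and comments for read-only query specs:

describe "thread queries" do
  before(:each) { init_without_subscriptions }

  it "seeds ten test users" do
    # init_without_subscriptions creates users 1..10 via create_test_user
    User.count.should == 10
  end
end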
 # this method is used to test results produced using the helper function handle_threads_query
@@ -215,28 +162,28 @@ def check_thread_result(user, thread, hash, is_json=false)
   expected_keys = %w(id thread_type title body course_id commentable_id created_at updated_at context)
   expected_keys += %w(anonymous anonymous_to_peers at_position_list closed user_id)
   expected_keys += %w(username votes abuse_flaggers tags type group_id pinned)
-  expected_keys += %w(comments_count unread_comments_count read endorsed)
+  expected_keys += %w(comments_count unread_comments_count read endorsed last_activity_at)
   # these keys are checked separately, when desired, using check_thread_response_paging.
   actual_keys = hash.keys - [
     "children", "endorsed_responses", "non_endorsed_responses", "resp_skip",
     "resp_limit", "resp_total", "non_endorsed_resp_total"
   ]
   actual_keys.sort.should == expected_keys.sort
   hash["title"].should == thread.title
   hash["body"].should == thread.body
   hash["course_id"].should == thread.course_id
   hash["anonymous"].should == thread.anonymous
   hash["anonymous_to_peers"].should == thread.anonymous_to_peers
   hash["commentable_id"].should == thread.commentable_id
   hash["at_position_list"].should == thread.at_position_list
   hash["closed"].should == thread.closed
   hash["user_id"].should == thread.author.id
   hash["username"].should == thread.author.username
   hash["votes"]["point"].should == thread.votes["point"]
   hash["votes"]["count"].should == thread.votes["count"]
   hash["votes"]["up_count"].should == thread.votes["up_count"]
   hash["votes"]["down_count"].should == thread.votes["down_count"]
   hash["abuse_flaggers"].should == thread.abuse_flaggers
   hash["tags"].should == []
   hash["type"].should == "thread"
@@ -249,15 +196,17 @@ def check_thread_result(user, thread, hash, is_json=false)
   if is_json
     hash["id"].should == thread._id.to_s
     hash["created_at"].should == thread.created_at.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
     hash["updated_at"].should == thread.updated_at.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
+    hash["last_activity_at"].should == thread.last_activity_at.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
   else
     hash["created_at"].should == thread.created_at
     hash["updated_at"].should == thread.updated_at
+    hash["last_activity_at"].should == thread.last_activity_at
   end
   if user.nil?
     hash["unread_comments_count"].should == thread.comments.length
     hash["read"].should == false
   else
     expected_unread_cnt = thread.comments.length # initially assume nothing has been read
     read_states = user.read_states.where(course_id: thread.course_id).to_a
@@ -265,7 +214,7 @@ def check_thread_result(user, thread, hash, is_json=false)
       read_date = read_states.first.last_read_times[thread.id.to_s]
       if read_date
         thread.comments.each do |c|
-          if c.author != user and c.updated_at < read_date
+          if c.updated_at < read_date
             expected_unread_cnt -= 1
           end
         end
@@ -282,16 +231,22 @@ def check_thread_result_json(user, thread, json_response)
   check_thread_result(user, thread, json_response, true)
 end

-def check_thread_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false)
+def check_unread_thread_result_json(thread, json_response)
+  # When the thread is unread we do not check it against the user's read-state
+  # data; `read` is explicitly asserted to be false, hence user=nil is passed.
+  check_thread_result(nil, thread, json_response, true)
+end
+
+def check_thread_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false, recursive=false)
   case thread.thread_type
     when "discussion"
-      check_discussion_response_paging(thread, hash, resp_skip, resp_limit, is_json)
+      check_discussion_response_paging(thread, hash, resp_skip, resp_limit, is_json, recursive)
     when "question"
-      check_question_response_paging(thread, hash, resp_skip, resp_limit, is_json)
+      check_question_response_paging(thread, hash, resp_skip, resp_limit, is_json, recursive)
   end
 end
-def check_comment(comment, hash, is_json)
+def check_comment(comment, hash, is_json, recursive=false)
   hash["id"].should == (is_json ? comment.id.to_s : comment.id) # Convert from ObjectId if necessary
   hash["body"].should == comment.body
   hash["user_id"].should == comment.author_id
@@ -299,22 +254,27 @@ def check_comment(comment, hash, is_json)
   hash["endorsed"].should == comment.endorsed
   hash["endorsement"].should == comment.endorsement
   children = Comment.where({"parent_id" => comment.id}).sort({"sk" => 1}).to_a
-  hash["children"].length.should == children.length
-  hash["children"].each_with_index do |child_hash, i|
-    check_comment(children[i], child_hash, is_json)
+  hash["child_count"].should == children.length
+  if recursive
+    hash["children"].length.should == children.length
+    hash["children"].each_with_index do |child_hash, i|
+      check_comment(children[i], child_hash, is_json)
+    end
   end
 end
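
To illustrate the new recursive flag (hypothetical calls, not part of this commit): by default only the flat child_count field is verified, while recursive=true also walks the nested children arrays.

check_comment(comment, hash, true)        # JSON mode; verifies child_count only
check_comment(comment, hash, true, true)  # additionally recurses into hash["children"]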
-def check_discussion_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false)
+def check_discussion_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false, recursive=false)
   all_responses = thread.root_comments.sort({"sk" => 1}).to_a
   total_responses = all_responses.length
   hash["resp_total"].should == total_responses
   expected_responses = resp_limit.nil? ?
     all_responses.drop(resp_skip) :
     all_responses.drop(resp_skip).take(resp_limit)
   hash["children"].length.should == expected_responses.length
   hash["children"].each_with_index do |response_hash, i|
-    check_comment(expected_responses[i], response_hash, is_json)
+    check_comment(expected_responses[i], response_hash, is_json, recursive)
   end
   hash["resp_skip"].to_i.should == resp_skip
   if resp_limit.nil?
@@ -324,23 +284,26 @@ def check_discussion_response_paging(thread, hash, resp_skip=0, resp_limit=nil,
   end
 end
-def check_question_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false)
+def check_question_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false, recursive=false)
   all_responses = thread.root_comments.sort({"sk" => 1}).to_a
   endorsed_responses, non_endorsed_responses = all_responses.partition { |resp| resp.endorsed }
   hash["endorsed_responses"].length.should == endorsed_responses.length
   hash["endorsed_responses"].each_with_index do |response_hash, i|
-    check_comment(endorsed_responses[i], response_hash, is_json)
+    check_comment(endorsed_responses[i], response_hash, is_json, recursive)
   end
   hash["non_endorsed_resp_total"] == non_endorsed_responses.length
   expected_non_endorsed_responses = resp_limit.nil? ?
     non_endorsed_responses.drop(resp_skip) :
     non_endorsed_responses.drop(resp_skip).take(resp_limit)
   hash["non_endorsed_responses"].length.should == expected_non_endorsed_responses.length
   hash["non_endorsed_responses"].each_with_index do |response_hash, i|
-    check_comment(expected_non_endorsed_responses[i], response_hash, is_json)
+    check_comment(expected_non_endorsed_responses[i], response_hash, is_json, recursive)
   end
+  total_responses = endorsed_responses.length + non_endorsed_responses.length
+  hash["resp_total"].should == total_responses
   hash["resp_skip"].to_i.should == resp_skip
   if resp_limit.nil?
     hash["resp_limit"].should be_nil
@@ -349,8 +312,8 @@ def check_question_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is
   end
 end

-def check_thread_response_paging_json(thread, hash, resp_skip=0, resp_limit=nil)
-  check_thread_response_paging(thread, hash, resp_skip, resp_limit, true)
+def check_thread_response_paging_json(thread, hash, resp_skip=0, resp_limit=nil, recursive=false)
+  check_thread_response_paging(thread, hash, resp_skip, resp_limit, true, recursive)
 end
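
A usage sketch (hypothetical spec snippet; the GET route and the parse JSON helper are assumed from the rest of the suite, not shown in this diff):

get "/api/v1/threads/#{thread.id}", resp_skip: 5, resp_limit: 5
check_thread_response_paging_json(thread, parse(last_response.body), 5, 5)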
 # general purpose factory helpers
@@ -370,6 +333,7 @@ def make_comment(author, parent, text)
   else
     coll = parent.children
     thread = parent.comment_thread
+    parent.set(child_count: coll.length + 1)
   end
   comment = coll.new(body: text, course_id: parent.course_id)
   comment.author = author
@@ -384,12 +348,12 @@ end
 # AKA this will overwrite "standalone t0" each time it is called.
 def make_standalone_thread_with_comments(author, index=0)
   thread = make_thread(
     author,
     "standalone thread #{index}",
     DFLT_COURSE_ID,
     "pdq",
     :discussion,
     :standalone
   )
   3.times do |i|
@@ -418,5 +382,19 @@ def setup_10_threads
       @comments["t#{i} c#{j}"] = comment
     end
   end
-  @default_order = 10.times.map {|i| "t#{i}"}.reverse
+  @default_order = 10.times.map { |i| "t#{i}" }.reverse
 end
+
+# Creates a CommentThread with a Comment and a nested child Comment.
+# The author of the thread is subscribed to the thread.
+def create_comment_thread_and_comments
+  # Create a new comment thread, and subscribe the author to the thread
+  thread = create(:comment_thread, :subscribe_author)
+  # Create a comment along with a nested child comment
+  comment = create(:comment, comment_thread: thread)
+  create(:comment, parent: comment)
+  comment.set(child_count: 1)
+  thread
+end
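
A usage sketch for the factory-based helper (hypothetical assertions, assuming the comment factory attaches children to the parent's thread):

thread = create_comment_thread_and_comments
thread.root_comments.length.should == 1
thread.root_comments.first.child_count.should == 1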
require 'database_cleaner'

RSpec.configure do |config|
  config.before(:suite) do
    # Mongoid only supports truncation.
    DatabaseCleaner.strategy = :truncation
    DatabaseCleaner.clean_with(:truncation)
  end

  config.around(:each) do |example|
    DatabaseCleaner.cleaning do
      example.run
    end
  end
end
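
For clarity, the around(:each) hook above is roughly equivalent to the following more verbose form (illustration only; DatabaseCleaner.cleaning wraps start and clean in an ensure block):

config.before(:each) { DatabaseCleaner.start }
config.after(:each)  { DatabaseCleaner.clean }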
def delete_es_index
  Tire.index Content::ES_INDEX_NAME do
    delete
  end
end

def create_es_index
  new_index = Tire.index Content::ES_INDEX_NAME
  new_index.create
  [CommentThread, Comment].each do |klass|
    klass.put_search_index_mapping
  end
end

def refresh_es_index
  es_index_name = Content::ES_INDEX_NAME
  Tire.index es_index_name do
    refresh
  end
end
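
A usage sketch (hypothetical spec snippet; the search route and make_thread arguments are assumptions based on the rest of the suite, not shown in this diff): Elasticsearch only exposes newly indexed documents after a refresh, so specs index content and then force one before querying.

make_thread(author, "a searchable phrase", DFLT_COURSE_ID, "topic")
refresh_es_index
get "/api/v1/search/threads", text: "searchable"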
RSpec.configure do |config|
  config.before(:each) do
    delete_es_index
    create_es_index
  end
end
require 'factory_girl'

RSpec.configure do |config|
  config.include FactoryGirl::Syntax::Methods
  FactoryGirl.find_definitions

  config.before(:suite) do
    begin
      DatabaseCleaner.start
      FactoryGirl.lint
    ensure
      DatabaseCleaner.clean
    end
  end
end
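
With FactoryGirl's syntax methods included, specs can build records tersely; the factory names below are assumed to be defined under spec/factories (they are referenced by the helpers above):

thread = create(:comment_thread, :subscribe_author)
comment = create(:comment, comment_thread: thread)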