Commit 1f6978cc by Brian Beggs, committed by GitHub

Merge branch 'master' into bbeggs/revert-context-migration

parents 265ceeab f471f96d
......@@ -26,10 +26,8 @@ doc/
config/benchmark.yml
benchmark_log
# bundler binstubs
bin/
log/
#redcar
.redcar/
/nbproject
.idea/
cs_comments_service
rvm 1.9.3@cs_comments_service --create
sudo: false
language: ruby
rvm:
- "1.9.3"
services:
- elasticsearch
cache: bundler
before_install:
- wget https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-0.90.11.deb && sudo dpkg --force-confnew -i elasticsearch-0.90.11.deb && sudo service elasticsearch restart
# Install mongo 2.6.4 according to http://docs.mongodb.org/manual/tutorial/install-mongodb-on-ubuntu/
# TODO: This won't be necessary when travis switches to 2.6 by default - see https://github.com/travis-ci/travis-ci/issues/2246
- sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv 7F0CEB10
- echo 'deb http://downloads-distro.mongodb.org/repo/ubuntu-upstart dist 10gen' | sudo tee /etc/apt/sources.list.d/mongodb.list
- sudo apt-get update -q
- sudo apt-get install -y mongodb-org=2.6.4 mongodb-org-server=2.6.4 mongodb-org-shell=2.6.4 mongodb-org-mongos=2.6.4 mongodb-org-tools=2.6.4
- mongo --version
- gem update bundler # Ensure we use the latest version of bundler. Travis' default version is outdated.
# Run Elasticsearch as a daemon
- curl -O https://download.elasticsearch.org/elasticsearch/elasticsearch/elasticsearch-0.90.13.zip
- unzip elasticsearch-0.90.13.zip
- elasticsearch-0.90.13/bin/elasticsearch
# Run MongoDB as a daemon
- curl -O https://fastdl.mongodb.org/linux/mongodb-linux-x86_64-2.6.11.tgz
- tar -zxf mongodb-linux-x86_64-2.6.11.tgz
- export PATH=mongodb-linux-x86_64-2.6.11/bin:$PATH
- mkdir -p ./mongo/db
- mkdir -p ./mongo/log
- mongod --fork --dbpath ./mongo/db --logpath ./mongo/log/mongodb.log
script: bundle exec rspec
......@@ -22,3 +22,5 @@ Alan Boudreault <alan@alanb.ca>
Matjaz Gregoric <mtyaka@gmail.com>
Ben McMorran <ben.mcmorran@gmail.com>
Bill DeRusha <bill@edx.org>
Brian Beggs <macdiesel@gmail.com>
Clinton Blackburn <cblackburn@edx.org>
......@@ -14,27 +14,22 @@ gem 'sinatra'
gem 'yajl-ruby'
gem 'ampex'
gem 'mongo'
gem 'moped', "1.5.1"
gem 'mongoid', "3.0.15"
gem 'mongoid', '~> 5.0.0'
gem 'bson', '~>3.1'
gem 'bson_ext'
gem 'protected_attributes'
gem 'delayed_job'
gem 'delayed_job_mongoid', :git => 'https://github.com/dementrock/delayed_job_mongoid.git'
gem "enumerize", "~>0.8.0"
gem 'mongoid-tree', :git => 'https://github.com/dementrock/mongoid-tree.git'
gem 'voteable_mongo', :git => 'https://github.com/dementrock/voteable_mongo.git'
gem 'mongoid_magic_counter_cache', :git => 'https://github.com/dementrock/mongoid-magic-counter-cache.git'
gem 'delayed_job_mongoid'
gem 'kaminari', :require => 'kaminari/sinatra', :git => 'https://github.com/dementrock/kaminari.git'
gem "enumerize"
gem 'mongoid-tree', :git => 'https://github.com/macdiesel/mongoid-tree'
gem 'rs_voteable_mongo', :git => 'https://github.com/navneet35371/voteable_mongo.git'
gem 'mongoid_magic_counter_cache'
gem 'faker'
gem 'will_paginate_mongoid'
gem 'will_paginate_mongoid', "~>2.0"
gem 'rdiscount'
gem 'nokogiri'
gem 'nokogiri', "~>1.6.8"
gem 'tire', "0.6.2"
gem 'tire-contrib'
......@@ -44,17 +39,19 @@ gem 'dalli'
gem 'rest-client'
group :test do
gem 'rspec'
gem 'rack-test', :require => "rack/test"
gem 'codecov', :require => false
gem 'database_cleaner', '~> 1.5.1'
gem 'factory_girl', '~> 4.0'
gem 'faker', '~> 1.6'
gem 'guard'
gem 'guard-unicorn'
gem 'simplecov', :require => false
gem 'database_cleaner'
gem 'rack-test', :require => 'rack/test'
gem 'rspec', '~> 2.11.0'
end
gem 'newrelic_rpm'
gem 'newrelic_moped'
gem 'unicorn'
gem "rack-timeout", "0.1.0beta3"
gem "rack-timeout"
gem "i18n"
gem "rack-contrib", :git => 'https://github.com/rack/rack-contrib.git', :ref => '6ff3ca2b2d988911ca52a2712f6a7da5e064aa27'
GIT
remote: https://github.com/dementrock/delayed_job_mongoid.git
revision: 48b1420d59bc01e0b1aba1c2ad66bda4a5e04b9a
remote: https://github.com/macdiesel/mongoid-tree
revision: b381dd56f1b3b061df8f4b4181d5440dea1602d1
specs:
delayed_job_mongoid (1.0.8)
delayed_job (~> 3.0.0)
mongoid (>= 3.0.0.rc)
mongoid-tree (2.0.0)
mongoid (>= 4.0, <= 5.0)
GIT
remote: https://github.com/dementrock/kaminari.git
revision: 82a38e07db1ca1598c8daf073a8f6be22ae714d6
remote: https://github.com/navneet35371/voteable_mongo.git
revision: 55fcfe76705ab5da1c9e5670594331b33954c545
specs:
kaminari (0.13.0)
actionpack (>= 3.0.0)
activesupport (>= 3.0.0)
GIT
remote: https://github.com/dementrock/mongoid-magic-counter-cache.git
revision: 28bc5e617cab19187b323e7d97d49fe73a7de68a
specs:
mongoid_magic_counter_cache (0.1.1)
mongoid (>= 3.0)
rake
GIT
remote: https://github.com/dementrock/mongoid-tree.git
revision: 5aa7a4ee16cd90dbbcac3ad702446d2119e971df
specs:
mongoid-tree (1.0.0)
mongoid (>= 3.0, <= 4.0)
GIT
remote: https://github.com/dementrock/voteable_mongo.git
revision: 538e86856daa1c180ba80b7c6f2805e531ba420c
specs:
voteable_mongo (0.9.3)
rs_voteable_mongo (1.0.2)
mongoid (>= 3.0, <= 5.0)
GIT
remote: https://github.com/rack/rack-contrib.git
......@@ -46,88 +23,100 @@ GIT
GEM
remote: https://rubygems.org/
specs:
actionpack (3.2.8)
activemodel (= 3.2.8)
activesupport (= 3.2.8)
builder (~> 3.0.0)
erubis (~> 2.7.0)
journey (~> 1.0.4)
rack (~> 1.4.0)
rack-cache (~> 1.2)
rack-test (~> 0.6.1)
sprockets (~> 2.1.3)
activemodel (3.2.8)
activesupport (= 3.2.8)
builder (~> 3.0.0)
activesupport (3.2.8)
i18n (~> 0.6)
multi_json (~> 1.0)
ampex (2.0.0)
blankslate
ansi (1.4.3)
blankslate (2.1.2.4)
bson (1.6.4)
bson_ext (1.6.4)
bson (~> 1.6.4)
builder (3.0.4)
activemodel (4.2.4)
activesupport (= 4.2.4)
builder (~> 3.1)
activesupport (4.2.4)
i18n (~> 0.7)
json (~> 1.7, >= 1.7.7)
minitest (~> 5.1)
thread_safe (~> 0.3, >= 0.3.4)
tzinfo (~> 1.1)
ansi (1.5.0)
bson (3.2.4)
bson_ext (1.5.1)
builder (3.2.2)
codecov (0.1.2)
json
simplecov
url
coderay (1.0.7)
dalli (2.1.0)
database_cleaner (1.2.0)
delayed_job (3.0.3)
activesupport (~> 3.0)
database_cleaner (1.5.1)
delayed_job (4.1.1)
activesupport (>= 3.0, < 5.0)
delayed_job_mongoid (2.2.0)
delayed_job (>= 3.0, < 5)
mongoid (>= 3.0, < 6)
mongoid-compatibility
diff-lcs (1.1.3)
enumerize (0.8.0)
docile (1.1.5)
domain_name (0.5.24)
unf (>= 0.0.5, < 1.0.0)
enumerize (0.11.0)
activesupport (>= 3.2)
erubis (2.7.0)
faker (1.0.1)
i18n (~> 0.4)
factory_girl (4.5.0)
activesupport (>= 3.0.0)
faker (1.6.1)
i18n (~> 0.5)
guard (1.3.2)
listen (>= 0.4.2)
thor (>= 0.14.6)
guard-unicorn (0.0.7)
guard (>= 1.1)
hashr (0.0.22)
hike (1.2.1)
i18n (0.6.9)
journey (1.0.4)
kgio (2.7.4)
http-cookie (1.0.2)
domain_name (~> 0.5)
i18n (0.7.0)
json (1.8.3)
kgio (2.10.0)
listen (0.5.0)
method_source (0.8)
mime-types (2.2)
mongo (1.6.4)
bson (~> 1.6.4)
mongoid (3.0.15)
activemodel (~> 3.1)
moped (~> 1.1)
origin (~> 1.0)
tzinfo (~> 0.3.22)
moped (1.5.1)
multi_json (1.10.0)
newrelic_moped (1.0.0)
moped
newrelic_rpm (>= 3.7)
newrelic_rpm (3.11.2.286)
nokogiri (1.5.5)
origin (1.1.0)
mime-types (2.6.1)
mini_portile2 (2.1.0)
minitest (5.8.1)
mongo (2.1.1)
bson (~> 3.0)
mongoid (5.0.0)
activemodel (~> 4.0)
mongo (~> 2.1)
origin (~> 2.1)
tzinfo (>= 0.3.37)
mongoid-compatibility (0.3.1)
activesupport
mongoid (>= 2.0)
mongoid_magic_counter_cache (1.1.1)
mongoid
rake
multi_json (1.11.2)
netrc (0.10.3)
newrelic_rpm (3.15.0.314)
nokogiri (1.6.8)
mini_portile2 (~> 2.1.0)
pkg-config (~> 1.1.7)
origin (2.1.1)
pkg-config (1.1.7)
protected_attributes (1.1.3)
activemodel (>= 4.0.1, < 5.0)
pry (0.9.10)
coderay (~> 1.0.5)
method_source (~> 0.8)
slop (~> 3.3.1)
pry-nav (0.2.2)
pry (~> 0.9.10)
rack (1.4.1)
rack-cache (1.2)
rack (>= 0.4)
rack (1.6.4)
rack-protection (1.2.0)
rack
rack-test (0.6.1)
rack-test (0.6.3)
rack (>= 1.0)
rack-timeout (0.1.0beta3)
raindrops (0.10.0)
rake (10.3.1)
rack-timeout (0.3.2)
raindrops (0.15.0)
rake (10.4.2)
rdiscount (1.6.8)
rest-client (1.6.7)
mime-types (>= 1.16)
rest-client (1.8.0)
http-cookie (>= 1.0.2, < 2.0)
mime-types (>= 1.16, < 3.0)
netrc (~> 0.7)
rspec (2.11.0)
rspec-core (~> 2.11.0)
rspec-expectations (~> 2.11.0)
......@@ -136,20 +125,18 @@ GEM
rspec-expectations (2.11.2)
diff-lcs (~> 1.1.3)
rspec-mocks (2.11.2)
simplecov (0.7.1)
multi_json (~> 1.0)
simplecov-html (~> 0.7.1)
simplecov-html (0.7.1)
simplecov (0.11.1)
docile (~> 1.1.0)
json (~> 1.8)
simplecov-html (~> 0.10.0)
simplecov-html (0.10.0)
sinatra (1.3.3)
rack (~> 1.3, >= 1.3.6)
rack-protection (~> 1.2)
tilt (~> 1.3, >= 1.3.3)
slop (3.3.2)
sprockets (2.1.3)
hike (~> 1.2)
rack (~> 1.0)
tilt (~> 1.1, != 1.3.0)
thor (0.16.0)
thread_safe (0.3.5)
tilt (1.3.3)
tire (0.6.2)
activemodel (>= 3.0)
......@@ -161,56 +148,59 @@ GEM
rest-client (~> 1.6)
tire-contrib (0.1.1)
tire
tzinfo (0.3.38)
unicorn (4.3.1)
tzinfo (1.2.2)
thread_safe (~> 0.1)
unf (0.1.4)
unf_ext
unf_ext (0.0.7.1)
unicorn (4.9.0)
kgio (~> 2.6)
rack
raindrops (~> 0.7)
will_paginate (3.0.4)
will_paginate_mongoid (1.1.0)
mongoid (>= 2.4)
url (0.3.2)
will_paginate (3.0.7)
will_paginate_mongoid (2.0.1)
mongoid
will_paginate (~> 3.0)
yajl-ruby (1.1.0)
yajl-ruby (1.2.1)
PLATFORMS
ruby
DEPENDENCIES
ampex
bson (~> 3.1)
bson_ext
bundler
codecov
dalli
database_cleaner
database_cleaner (~> 1.5.1)
delayed_job
delayed_job_mongoid!
enumerize (~> 0.8.0)
faker
delayed_job_mongoid
enumerize
factory_girl (~> 4.0)
faker (~> 1.6)
guard
guard-unicorn
i18n
kaminari!
mongo
mongoid (= 3.0.15)
mongoid (~> 5.0.0)
mongoid-tree!
mongoid_magic_counter_cache!
moped (= 1.5.1)
newrelic_moped
mongoid_magic_counter_cache
newrelic_rpm
nokogiri
nokogiri (~> 1.6.8)
protected_attributes
pry
pry-nav
rack-contrib!
rack-test
rack-timeout (= 0.1.0beta3)
rack-timeout
rake
rdiscount
rest-client
rspec
simplecov
rs_voteable_mongo!
rspec (~> 2.11.0)
sinatra
tire (= 0.6.2)
tire-contrib
unicorn
voteable_mongo!
will_paginate_mongoid
will_paginate_mongoid (~> 2.0)
yajl-ruby
......@@ -2,37 +2,46 @@ Part of `edX code`__.
__ http://code.edx.org/
comment_as_a_service
====================
edX Comments Service/Forums |Travis|_ |Codecov|_
==================================================
.. |Travis| image:: https://travis-ci.org/edx/cs_comments_service.svg?branch=master
.. _Travis: https://travis-ci.org/edx/cs_comments_service
.. |Codecov| image:: http://codecov.io/github/edx/cs_comments_service/coverage.svg?branch=master
.. _Codecov: http://codecov.io/github/edx/cs_comments_service?branch=master
An independent comment system which supports voting and nested comments. It
also provides features such as instructor endorsement for education-focused
discussion platforms.
Running The Server
----
Elasticsearch and MongoDB servers need to be available, and correctly referenced
in config/application.yml and config/mongoid.yml, respectively.
Before the server is first run, ensure gems are installed by doing ``bundle install``.
To run the server, do ``ruby app.rb [-p PORT]`` where PORT defaults to 4567.
Running the Server
------------------
If you are running cs_comments_service as part of edx-platform__ development under
devstack, it is strongly recommended to read `those setup documents`__ first. Note that
devstack will take care of just about all of the installation, configuration, and
service management on your behalf.
devstack will take care of just about all of the installation, configuration, and
service management on your behalf. If running outside of devstack, continue reading below.
__ https://github.com/edx/edx-platform
__ https://github.com/edx/configuration/wiki/edX-Developer-Stack
This service relies on Elasticsearch and MongoDB. By default the service will use the Elasticsearch server available at
`http://localhost:9200` and the MongoDB server available at `localhost:27017`. This is suitable for local development;
however, if you wish to change these values, refer to `config/application.yml` and `config/mongoid.yml` for the
environment variables that can be set to override the defaults.
Before the server is first run, ensure gems are installed by doing ``bundle install``.
To run the server, do ``ruby app.rb [-p PORT]`` where PORT defaults to 4567.
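For example, to point the service at non-default servers and start it locally
(``MONGOHQ_URL`` is the variable read by ``config/mongoid.yml``; the
Elasticsearch variable name below is illustrative, so check
``config/application.yml`` for the authoritative name)::

    export MONGOHQ_URL="mongodb://localhost:27017/cs_comments_service_development"
    export SEARCH_SERVER="http://localhost:9200"
    ruby app.rb -p 4567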
Running Tests
----
-------------
To run tests, do ``bundle exec rspec``. Append ``--help`` or see the rspec
documentation for additional options.
Internationalization and Localization
----
Internationalization (i18n) and Localization (l10n)
---------------------------------------------------
To run the comments service in a language other than English, set the
``SERVICE_LANGUAGE`` environment variable to the `language code` for the
......
......@@ -4,7 +4,18 @@ require 'bundler'
Bundler.setup
Bundler.require
application_yaml = ERB.new(File.read("config/application.yml")).result()
application_yaml = ERB.new(File.read('config/application.yml')).result()
begin
require 'rspec/core/rake_task'
RSpec::Core::RakeTask.new(:spec)
task :default => :spec
rescue LoadError
# no rspec available
end
Tire.configure do
url YAML.load(application_yaml)['elasticsearch_server']
......@@ -12,29 +23,22 @@ end
LOG = Logger.new(STDERR)
desc "Load the environment"
desc 'Load the environment'
task :environment do
environment = ENV["SINATRA_ENV"] || "development"
environment = ENV['SINATRA_ENV'] || 'development'
Sinatra::Base.environment = environment
Mongoid.load!("config/mongoid.yml")
Mongoid.load!('config/mongoid.yml')
Mongoid.logger.level = Logger::INFO
module CommentService
class << self; attr_accessor :config; end
class << self
attr_accessor :config
end
end
CommentService.config = YAML.load(application_yaml)
Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file}
Dir[File.dirname(__FILE__) + '/models/*.rb'].each {|file| require file}
#Dir[File.dirname(__FILE__) + '/models/observers/*.rb'].each {|file| require file}
#Mongoid.observers = PostReplyObserver, PostTopicObserver, AtUserObserver
#Mongoid.instantiate_observers
end
def create_test_user(id)
User.create!(external_id: id, username: "user#{id}")
Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each { |file| require file }
Dir[File.dirname(__FILE__) + '/models/*.rb'].each { |file| require file }
end
Dir.glob('lib/tasks/*.rake').each { |r| import r }
......@@ -42,412 +46,3 @@ Dir.glob('lib/tasks/*.rake').each { |r| import r }
task :console => :environment do
binding.pry
end
namespace :db do
task :init => :environment do
puts "recreating indexes..."
[Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:remove_indexes).each(&:create_indexes)
puts "finished"
end
task :clean => :environment do
Comment.delete_all
CommentThread.delete_all
User.delete_all
Notification.delete_all
Subscription.delete_all
end
THREADS_PER_COMMENTABLE = 20
TOP_COMMENTS_PER_THREAD = 3
ADDITIONAL_COMMENTS_PER_THREAD = 5
COURSE_ID = "MITx/6.002x/2012_Fall"
def generate_comments_for(commentable_id, num_threads=THREADS_PER_COMMENTABLE, num_top_comments=TOP_COMMENTS_PER_THREAD, num_subcomments=ADDITIONAL_COMMENTS_PER_THREAD)
level_limit = CommentService.config["level_limit"]
users = User.all.to_a
puts "Generating threads and comments for #{commentable_id}..."
threads = []
top_comments = []
additional_comments = []
num_threads.times do
inner_top_comments = []
comment_thread = CommentThread.new(commentable_id: commentable_id, body: Faker::Lorem.paragraphs.join("\n\n"), title: Faker::Lorem.sentence(6))
comment_thread.author = users.sample
comment_thread.course_id = COURSE_ID
comment_thread.save!
threads << comment_thread
users.sample(3).each {|user| user.subscribe(comment_thread)}
(1 + rand(num_top_comments)).times do
comment = comment_thread.comments.new(body: Faker::Lorem.paragraph(2))
comment.author = users.sample
comment.endorsed = [true, false].sample
comment.comment_thread = comment_thread
comment.course_id = COURSE_ID
comment.save!
top_comments << comment
inner_top_comments << comment
end
previous_level_comments = inner_top_comments
(level_limit-1).times do
current_level_comments = []
(1 + rand(num_subcomments)).times do
comment = previous_level_comments.sample
sub_comment = comment.children.new(body: Faker::Lorem.paragraph(2))
sub_comment.author = users.sample
sub_comment.endorsed = [true, false].sample
sub_comment.comment_thread = comment_thread
sub_comment.course_id = COURSE_ID
sub_comment.save!
current_level_comments << sub_comment
end
previous_level_comments = current_level_comments
end
end
puts "voting"
(threads + top_comments + additional_comments).each do |c|
users.each do |user|
user.vote(c, [:up, :down].sample)
end
end
puts "finished"
end
task :generate_comments, [:commentable_id, :num_threads, :num_top_comments, :num_subcomments] => :environment do |t, args|
args.with_defaults(:num_threads => THREADS_PER_COMMENTABLE,
:num_top_comments=>TOP_COMMENTS_PER_THREAD,
:num_subcomments=> ADDITIONAL_COMMENTS_PER_THREAD)
generate_comments_for(args[:commentable_id], args[:num_threads], args[:num_top_comments], args[:num_subcomments])
end
task :bulk_seed, [:num] => :environment do |t, args|
Mongoid.configure do |config|
config.connect_to("cs_comments_service_bulk_test")
end
connection = Mongo::Connection.new("127.0.0.1", "27017")
db = Mongo::Connection.new.db("cs_comments_service_bulk_test")
CommentThread.create_indexes
Comment.create_indexes
Content.delete_all
coll = db.collection("contents")
args[:num].to_i.times do
doc = {"_type" => "CommentThread", "anonymous" => [true, false].sample, "at_position_list" => [],
"tags_array" => [],
"comment_count" => 0, "title" => Faker::Lorem.sentence(6), "author_id" => rand(1..10).to_s,
"body" => Faker::Lorem.paragraphs.join("\n\n"), "course_id" => COURSE_ID, "created_at" => Time.now,
"commentable_id" => COURSE_ID, "closed" => [true, false].sample, "updated_at" => Time.now, "last_activity_at" => Time.now,
"votes" => {"count" => 0, "down" => [], "down_count" => 0, "point" => 0, "up" => [], "up_count" => 0}}
coll.insert(doc)
end
Tire.index('comment_threads').delete
CommentThread.create_elasticsearch_index
Tire.index('comment_threads') { import CommentThread.all }
end
task :seed_fast => :environment do
ADDITIONAL_COMMENTS_PER_THREAD = 20
config = YAML.load_file("config/mongoid.yml")[Sinatra::Base.environment]["sessions"]["default"]
connection = Mongo::Connection.new(config["hosts"][0].split(":")[0], config["hosts"][0].split(":")[1])
db = Mongo::Connection.new.db(config["database"])
coll = db.collection("contents")
Comment.delete_all
CommentThread.each do |thread|
ADDITIONAL_COMMENTS_PER_THREAD.times do
doc = {"_type" => "Comment", "anonymous" => false, "at_position_list" => [],
"author_id" => rand(1..10).to_s, "body" => Faker::Lorem.paragraphs.join("\n\n"),
"comment_thread_id" => BSON::ObjectId.from_string(thread.id.to_s), "course_id" => COURSE_ID,
"created_at" => Time.now,
"endorsed" => [true, false].sample, "parent_ids" => [], "updated_at" => Time.now,
"votes" => {"count" => 0, "down" => [], "down_count" => 0, "point" => 0, "up" => [], "up_count" => 0}}
coll.insert(doc)
end
end
end
task :seed => :environment do
Comment.delete_all
CommentThread.delete_all
User.delete_all
Notification.delete_all
Subscription.delete_all
Tire.index 'comment_threads' do delete end
CommentThread.create_elasticsearch_index
beginning_time = Time.now
users = (1..10).map {|id| create_test_user(id)}
# 3.times do
# other_user = users[1..9].sample
# users.first.subscribe(other_user)
# end
# 10.times do
# user = users.sample
# other_user = users.select{|u| u != user}.sample
# user.subscribe(other_user)
# end
generate_comments_for("video_1")
generate_comments_for("lab_1")
generate_comments_for("lab_2")
end_time = Time.now
puts "Number of comments generated: #{Comment.count}"
puts "Number of comment threads generated: #{CommentThread.count}"
puts "Time elapsed #{(end_time - beginning_time)*1000} milliseconds"
end
task :add_anonymous_to_peers => :environment do
Content.collection.find(:anonymous_to_peers=>nil).update_all({"$set" => {'anonymous_to_peers' => false}})
end
end
namespace :search do
def get_es_index
# we are using the same index for two types, which is against the
# grain of Tire's design. This is why this method works for both
# comment_threads and comments.
CommentThread.tire.index
end
def get_number_of_primary_shards(index_name)
res = Tire::Configuration.client.get "#{Tire::Configuration.url}/#{index_name}/_status"
status = JSON.parse res.body
status["indices"].first[1]["shards"].size
end
def create_es_index
# create the new index with a unique name
new_index = Tire.index "#{Content::ES_INDEX_NAME}_#{Time.now.strftime('%Y%m%d%H%M%S')}"
new_index.create
LOG.info "configuring new index: #{new_index.name}"
[CommentThread, Comment].each do |klass|
LOG.info "applying index mappings for #{klass.name}"
klass.put_search_index_mapping new_index
end
new_index
end
def import_from_cursor(cursor, index, opts)
Mongoid.identity_map_enabled = true
tot = cursor.count
cnt = 0
t = Time.now
index.import cursor, {:method => :paginate, :per_page => opts[:batch_size]} do |documents|
if cnt % opts[:batch_size] == 0 then
elapsed_secs = (Time.now - t).round(2)
pct_complete = (100 * (cnt/tot.to_f)).round(2)
LOG.info "#{index.name}: imported #{cnt} of #{tot} (#{pct_complete}% complete after #{elapsed_secs} seconds)"
end
cnt += documents.length
Mongoid::IdentityMap.clear
sleep opts[:sleep_time]
documents
end
LOG.info "#{index.name}: finished importing #{cnt} documents"
cnt
end
def move_alias_to(name, index)
# if there was a previous index, switch over the alias to point to the new index
alias_ = Tire::Alias.find name
if alias_ then
# does the alias already point to this index?
if alias_.indices.include? index.name then
return false
end
# remove the alias from wherever it points to now
LOG.info "alias already exists (will move): #{alias_.indices.to_ary.join(',')}"
alias_.indices.each do |old_index_name|
alias_.indices.delete old_index_name unless old_index_name == name
end
else
# create the alias
LOG.info "alias \"#{name}\" does not yet exist - creating."
alias_ = Tire::Alias.new :name => name
end
# point the alias at our new index
alias_.indices.add index.name
alias_.save
LOG.info "alias \"#{name}\" now points to index #{index.name}."
true
end
def do_reindex (opts, in_place=false)
# get a reference to the model class (and make sure it's a model class with tire hooks)
start_time = Time.now
# create the new index with a unique name
new_index = create_es_index
# unless the user is forcing a rebuild, or the index does not yet exist, we
# can do a Tire api reindex which is much faster than reimporting documents
# from mongo.
#
# Checking if the index exists is tricky. Tire automatically created an index
# for the model class when the app loaded if one did not already exist. However,
# it won't create an alias, which is what our app uses. So if the index exists
# but not the alias, we know that it's auto-created.
old_index = get_es_index
alias_name = old_index.name
alias_ = Tire::Alias.find alias_name
if alias_.nil? then
# edge case.
# the alias doesn't exist, so we know the index was auto-created.
# We will delete it and replace it with an alias.
raise RuntimeError, 'Cannot reindex in-place, no valid source index' if in_place
LOG.warn "deleting auto-created index to make room for the alias"
old_index.delete
# NOTE on the small chance that another process re-auto-creates the index
# we just deleted before we have a chance to create the alias, this next
# call will fail.
move_alias_to(Content::ES_INDEX_NAME, new_index)
end
op = in_place ? "reindex" : "(re)build index"
LOG.info "preparing to #{op}"
if in_place then
# reindex, moving source documents directly from old index to new
LOG.info "copying documents from original index (this may take a while!)"
old_index.reindex new_index.name
LOG.info "done copying!"
else
# fetch all the documents ever, up til start_time
cursor = Content.where(:_type.in => ["Comment", "CommentThread"], :updated_at.lte => start_time)
# import them to the new index
import_from_cursor(cursor, new_index, opts)
end
# move the alias if necessary
did_alias_move = move_alias_to(Content::ES_INDEX_NAME, new_index)
if did_alias_move then
# Reimport any source documents that got updated since start_time,
# while the alias still pointed to the old index.
# Elasticsearch understands our document ids, so re-indexing the same
# document won't create duplicates.
LOG.info "importing any documents that changed between #{start_time} and now"
cursor = Content.where(:_type.in => ["Comment", "CommentThread"], :updated_at.gte => start_time)
import_from_cursor(cursor, new_index, opts)
end
end
desc "Copies contents of MongoDB into Elasticsearch if updated in the last N minutes."
task :catchup, [:minutes, :batch_size, :sleep_time] => :environment do |t, args|
opts = batch_opts args
the_index = get_es_index
alias_ = Tire::Alias.find the_index.name
# this check makes sure we are working with the index to which
# the desired model's alias presently points.
raise RuntimeError, "could not find live index" if alias_.nil?
start_time = Time.now - (args[:minutes].to_i * 60)
cursor = Content.where(:_type.in => ["Comment", "CommentThread"], :updated_at.gte => start_time)
import_from_cursor(cursor, the_index, opts)
end
def batch_opts(args)
args = args.to_hash
{ :batch_size => args[:batch_size].nil? ? 500 : args[:batch_size].to_i,
:sleep_time => args[:sleep_time].nil? ? 0 : args[:sleep_time].to_i }
end
desc "Removes any data from Elasticsearch that no longer exists in MongoDB."
task :prune, [:batch_size, :sleep_time] => :environment do |t, args|
opts = batch_opts args
the_index = get_es_index
puts "pruning #{the_index.name}"
alias_ = Tire::Alias.find the_index.name
raise RuntimeError, "could not find live index" if alias_.nil?
scan_size = opts[:batch_size] / get_number_of_primary_shards(the_index.name)
cnt = 0
[CommentThread, Comment].each do |klass|
doc_type = klass.document_type
# this check makes sure we are working with the index to which
# the desired model's alias presently points.
search = Tire::Search::Scan.new the_index.name, {size: scan_size, type: doc_type}
search.each do |results|
es_ids = results.map(&:id)
mongo_ids = klass.where(:id.in => es_ids).map {|d| d.id.to_s}
to_delete = es_ids - mongo_ids
if to_delete.size > 0
cnt += to_delete.size
puts "deleting #{to_delete.size} orphaned #{doc_type} documents from elasticsearch"
the_index.bulk_delete (to_delete).map {|v| {"type" => doc_type, "id" => v}}
end
puts "#{the_index.name}/#{doc_type}: processed #{search.seen} of #{search.total}"
sleep opts[:sleep_time]
end
end
puts "done pruning #{the_index.name}, deleted a total of #{cnt} orphaned documents"
end
desc "Rebuild the content index from MongoDB data."
task :rebuild, [:batch_size, :sleep_time] => :environment do |t, args|
do_reindex(batch_opts(args))
end
desc "Rebuild the content index from already-indexed data (in place)."
task :reindex, [:batch_size, :sleep_time] => :environment do |t, args|
do_reindex(batch_opts(args), true)
end
desc "Generate a new, empty physical index, without bringing it online."
task :create_index => :environment do
create_es_index
end
end
namespace :jobs do
desc "Clear the delayed_job queue."
task :clear => :environment do
Delayed::Job.delete_all
end
desc "Start a delayed_job worker."
task :work => :environment do
Delayed::Worker.new(:min_priority => ENV['MIN_PRIORITY'], :max_priority => ENV['MAX_PRIORITY'], :queues => (ENV['QUEUES'] || ENV['QUEUE'] || '').split(','), :quiet => false).start
end
end
namespace :i18n do
desc "Push source strings to Transifex for translation"
task :push do
sh("tx push -s")
end
desc "Pull translated strings from Transifex"
task :pull do
sh("tx pull --mode=reviewed --all --minimum-perc=1")
end
desc "Clean the locale directory"
task :clean do
sh("git clean -f locale/")
end
desc "Commit translated strings to the repository"
task :commit => ["i18n:clean", "i18n:pull"] do
sh("git add locale")
sh("git commit -m 'Updated translations (autogenerated message)'")
end
end
......@@ -27,9 +27,12 @@ get "#{APIPREFIX}/threads/:thread_id" do |thread_id|
error 404, [t(:requested_object_not_found)].to_json
end
if params["user_id"] and bool_mark_as_read
# the user is needed to return user-specific fields, such as "read", even when bool_mark_as_read is false
if params["user_id"]
user = User.only([:id, :username, :read_states]).find_by(external_id: params["user_id"])
user.mark_as_read(thread) if user
end
if user and bool_mark_as_read
user.mark_as_read(thread)
end
presenter = ThreadPresenter.factory(thread, user || nil)
......@@ -47,7 +50,7 @@ get "#{APIPREFIX}/threads/:thread_id" do |thread_id|
else
resp_limit = nil
end
presenter.to_hash(true, resp_skip, resp_limit).to_json
presenter.to_hash(true, resp_skip, resp_limit, bool_recursive).to_json
end
put "#{APIPREFIX}/threads/:thread_id" do |thread_id|
......@@ -69,6 +72,7 @@ post "#{APIPREFIX}/threads/:thread_id/comments" do |thread_id|
comment.anonymous_to_peers = bool_anonymous_to_peers || false
comment.author = user
comment.comment_thread = thread
comment.child_count = 0
comment.save
if comment.errors.any?
error 400, comment.errors.full_messages.to_json
......
......@@ -47,6 +47,8 @@ post "#{APIPREFIX}/:commentable_id/threads" do |commentable_id|
else
user.subscribe(thread) if bool_auto_subscribe
presenter = ThreadPresenter.factory(thread, nil)
presenter.to_hash.to_json
thread = presenter.to_hash
thread["resp_total"] = 0
thread.to_json
end
end
get "#{APIPREFIX}/comments/:comment_id" do |comment_id|
comment.to_hash(recursive: bool_recursive).to_json
@comment = comment
comment_hash = @comment.to_hash(recursive: bool_recursive)
verify_or_fix_cached_comment_count(@comment, comment_hash)
comment_hash.to_json
end
put "#{APIPREFIX}/comments/:comment_id" do |comment_id|
......@@ -8,7 +11,11 @@ put "#{APIPREFIX}/comments/:comment_id" do |comment_id|
if params.has_key?("endorsed")
new_endorsed_val = Boolean.mongoize(params["endorsed"])
if new_endorsed_val != comment.endorsed
endorsement = {:user_id => params["endorsement_user_id"], :time => DateTime.now}
if params["endorsement_user_id"].nil?
endorsement = nil
else
endorsement = {:user_id => params["endorsement_user_id"], :time => DateTime.now}
end
updated_content["endorsement"] = new_endorsed_val ? endorsement : nil
end
end
......@@ -27,16 +34,31 @@ post "#{APIPREFIX}/comments/:comment_id" do |comment_id|
sub_comment.anonymous_to_peers = bool_anonymous_to_peers || false
sub_comment.author = user
sub_comment.comment_thread = comment.comment_thread
sub_comment.child_count = 0
sub_comment.save
if sub_comment.errors.any?
error 400, sub_comment.errors.full_messages.to_json
else
user.subscribe(comment.comment_thread) if bool_auto_subscribe
sub_comment.to_hash.to_json
comment.update_cached_child_count
if comment.errors.any?
error 400, comment.errors.full_messages.to_json
else
user.subscribe(comment.comment_thread) if bool_auto_subscribe
sub_comment.to_hash.to_json
end
end
end
delete "#{APIPREFIX}/comments/:comment_id" do |comment_id|
parent_id = comment.parent_id
comment.destroy
unless parent_id.nil?
begin
parent_comment = Comment.find(parent_id)
parent_comment.update_cached_child_count
rescue Mongoid::Errors::DocumentNotFound
pass
end
end
comment.to_hash.to_json
end
require 'new_relic/agent/method_tracer'
get "#{APIPREFIX}/search/threads" do
local_params = params # Necessary for params to be available inside blocks
group_ids = get_group_ids_from_params(local_params)
......
require 'new_relic/agent/method_tracer'
post "#{APIPREFIX}/users" do
user = User.new(external_id: params["id"])
user.username = params["username"]
......@@ -76,3 +74,8 @@ put "#{APIPREFIX}/users/:user_id" do |user_id|
user.to_hash.to_json
end
end
post "#{APIPREFIX}/users/:user_id/read" do |user_id|
user.mark_as_read(source)
user.reload.to_hash.to_json
end
......@@ -19,29 +19,6 @@ module CommentService
API_PREFIX = "/api/#{API_VERSION}"
end
if ["staging", "production", "loadtest", "edgestage","edgeprod"].include? environment
require 'newrelic_rpm'
require 'new_relic/agent/method_tracer'
Moped::Session.class_eval do
include NewRelic::Agent::MethodTracer
add_method_tracer :new
add_method_tracer :use
add_method_tracer :login
end
Moped::Cluster.class_eval do
include NewRelic::Agent::MethodTracer
add_method_tracer :with_primary
add_method_tracer :nodes
end
Moped::Node.class_eval do
include NewRelic::Agent::MethodTracer
add_method_tracer :command
add_method_tracer :connect
add_method_tracer :flush
add_method_tracer :refresh
end
end
if ENV["ENABLE_GC_PROFILER"]
GC::Profiler.enable
end
......@@ -56,11 +33,12 @@ end
Mongoid.load!("config/mongoid.yml", environment)
Mongoid.logger.level = Logger::INFO
Moped.logger.level = ENV["ENABLE_MOPED_DEBUGGING"] ? Logger::DEBUG : Logger::INFO
Mongo::Logger.logger.level = ENV["ENABLE_MONGO_DEBUGGING"] ? Logger::DEBUG : Logger::INFO
# set up i18n
I18n.load_path += Dir[File.join(File.dirname(__FILE__), 'locale', '*.yml').to_s]
I18n.default_locale = CommentService.config[:default_locale]
I18n.enforce_available_locales = false
I18n::Backend::Simple.send(:include, I18n::Backend::Fallbacks)
use Rack::Locale
......@@ -97,27 +75,6 @@ before do
content_type "application/json"
end
if ENV["ENABLE_IDMAP_LOGGING"]
after do
idmap = Mongoid::Threaded.identity_map
vals = {
"pid" => Process.pid,
"dyno" => ENV["DYNO"],
"request_id" => params[:request_id]
}
idmap.each {|k, v| vals["idmap_count_#{k.to_s}"] = v.size }
logger.info vals.map{|e| e.join("=") }.join(" ")
end
end
# Enable the identity map. The middleware ensures that the identity map is
# cleared for every request.
Mongoid.identity_map_enabled = true
use Rack::Mongoid::Middleware::IdentityMap
# use yajl implementation for to_json.
# https://github.com/brianmario/yajl-ruby#json-gem-compatibility-api
#
......@@ -128,16 +85,27 @@ require 'yajl/json_gem'
# patch json serialization of ObjectIds to work properly with yajl.
# See https://groups.google.com/forum/#!topic/mongoid/MaXFVw7D_4s
module Moped
module BSON
class ObjectId
def to_json
self.to_s.to_json
end
# Note that the ObjectId class moved from Moped::BSON::ObjectId to BSON::ObjectId
module BSON
class ObjectId
def to_json
self.to_s.to_json
end
end
end
# Patch json serialization of Time Objects
class Time
# Returns a JSON string representing this Time object with seconds precision.
# Note that this was done to prevent milliseconds from showing up in the JSON response
# and breaking API compatibility for downstream clients.
def to_json(*)
'"' + utc().strftime("%Y-%m-%dT%H:%M:%SZ") + '"'
end
end
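# A quick illustration of the patched serialization (the timestamp value is
# illustrative):
#
#   Time.utc(2015, 11, 18, 16, 0, 0).to_json  # => "\"2015-11-18T16:00:00Z\""
#
# i.e. seconds precision only, so downstream clients never see fractional seconds.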
# these files must be required in order
require './api/search'
......@@ -158,7 +126,7 @@ if RACK_ENV.to_s == "development"
end
end
error Moped::Errors::InvalidObjectId do
error Mongo::Error::InvalidDocument do
error 400, [t(:requested_object_not_found)].to_json
end
......@@ -170,10 +138,10 @@ error ArgumentError do
error 400, [env['sinatra.error'].message].to_json
end
CommentService.blocked_hashes = Content.mongo_session[:blocked_hash].find.select(hash: 1).each.map {|d| d["hash"]}
CommentService.blocked_hashes = Content.mongo_client[:blocked_hash].find(nil, projection: {hash: 1}).map {|d| d["hash"]}
def get_db_is_master
Mongoid::Sessions.default.command(isMaster: 1)
Mongoid::Clients.default.command(isMaster: 1)
end
def get_es_status
......@@ -186,7 +154,7 @@ get '/heartbeat' do
db_ok = false
begin
res = get_db_is_master
db_ok = ( res["ismaster"] == true and Integer(res["ok"]) == 1 )
db_ok = res.ok? && res.documents.first['ismaster'] == true
rescue
end
error 500, JSON.generate({"OK" => false, "check" => "db"}) unless db_ok
......@@ -221,4 +189,4 @@ get '/selftest' do
"#{ex.backtrace.first}: #{ex.message} (#{ex.class})\n\t#{ex.backtrace[1..-1].join("\n\t")}"
]
end
end
\ No newline at end of file
end
#!/usr/bin/env ruby
#
# This file was generated by Bundler.
#
# The application 'rake' is installed as part of a gem, and
# this file is here to facilitate running it.
#
require "pathname"
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile",
Pathname.new(__FILE__).realpath)
require "rubygems"
require "bundler/setup"
load Gem.bin_path("rake", "rake")
#!/usr/bin/env ruby
#
# This file was generated by Bundler.
#
# The application 'rspec' is installed as part of a gem, and
# this file is here to facilitate running it.
#
require "pathname"
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile",
Pathname.new(__FILE__).realpath)
require "rubygems"
require "bundler/setup"
load Gem.bin_path("rspec-core", "rspec")
#!/usr/bin/env ruby
#
# This file was generated by Bundler.
#
# The application 'unicorn' is installed as part of a gem, and
# this file is here to facilitate running it.
#
require "pathname"
ENV["BUNDLE_GEMFILE"] ||= File.expand_path("../../Gemfile",
Pathname.new(__FILE__).realpath)
require "rubygems"
require "bundler/setup"
load Gem.bin_path("unicorn", "unicorn")
# It is possible that the rack timeout here is set to a different value than
# the edx-platform django_comment_client timeout. An attempt was made to
# move these two values closer together (5s django_comment_client, 6s
# cs_comments_service, down from 20). This resulted in more reported timeout errors
# on the cs_comments_service side, which better reflected the timeout errors
# seen by the django_comment_client. On the downside, the shorter timeout left less
# time for processing longer queries in the background. The timeout has been set back
# to 20s. Until the slow queries that benefit from being cached in the
# background are resolved, reducing the timeout is not suggested.
# More conversation at https://github.com/edx/cs_comments_service/pull/146
# -Nov 18th, 2015
require "rack-timeout"
use Rack::Timeout # Call as early as possible so rack-timeout runs before other middleware.
Rack::Timeout.timeout = 20
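# A possible refinement, not part of this change: read the timeout from the
# environment so deployments can tune it without editing config.ru.
# RACK_TIMEOUT_SECONDS is a hypothetical variable name, shown for illustration:
#
#   Rack::Timeout.timeout = Integer(ENV['RACK_TIMEOUT_SECONDS'] || 20)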
require "mongoid"
use Mongoid::QueryCache::Middleware
require './app'
run Sinatra::Application
common: &default_client
options:
write:
w: 1
read:
mode: :primary
max_retries: <%= ENV['MONGOID_MAX_RETRIES'] || 1 %>
retry_interval: <%= ENV['MONGOID_RETRY_INTERVAL'] || 0 %>
timeout: <%= ENV['MONGOID_TIMEOUT'] || 0.5 %>
ssl: <%= ENV['MONGOID_USE_SSL'] || false %>
common_uri: &default_uri
uri: <%= ENV['MONGOHQ_URL'] %>
development:
sessions:
clients:
default:
<<: *default_client
database: cs_comments_service_development
hosts:
- localhost:27017
test:
sessions:
clients:
default:
<<: *default_client
database: cs_comments_service_test
hosts:
- localhost:27017
common: &default_session
uri: <%= ENV['MONGOHQ_URL'] %>
options:
skip_version_check: true
safe: true
consistency: strong
max_retries: <%= ENV['MONGOID_MAX_RETRIES'] || 1 %>
retry_interval: <%= ENV['MONGOID_RETRY_INTERVAL'] || 0 %>
timeout: <%= ENV['MONGOID_TIMEOUT'] || 0.5 %>
ssl: <%= ENV['MONGOID_USE_SSL'] || false %>
production:
sessions:
clients:
default:
<<: *default_session
<<: *default_uri
<<: *default_client
edgeprod:
sessions:
clients:
default:
<<: *default_session
<<: *default_uri
<<: *default_client
edgestage:
sessions:
clients:
default:
<<: *default_session
<<: *default_uri
<<: *default_client
staging:
sessions:
clients:
default:
<<: *default_session
<<: *default_uri
<<: *default_client
loadtest:
sessions:
clients:
default:
<<: *default_session
<<: *default_uri
<<: *default_client
defaults: &defaults
use_utc: false
......
......@@ -13,5 +13,5 @@ after_fork do |server, worker|
Signal.trap 'TERM' do
puts 'Unicorn worker intercepting TERM and doing nothing. Waiting for master to send QUIT'
end
::Mongoid.default_session.disconnect
::Mongoid.default_client.close
end
......@@ -7,5 +7,5 @@ listen "unix:#{data_dir}/forum.sock", :backlog => 512
pid "#{data_dir}/forum_unicorn.pid"
after_fork do |server, worker|
::Mongoid.default_session.disconnect
::Mongoid.default_client.close
end
......@@ -11,5 +11,5 @@ data_dir = ENV['DATA_DIR'] || Dir.tmpdir
pid "#{data_dir}/forum_unicorn.pid"
after_fork do |server, worker|
::Mongoid.default_session.disconnect
::Mongoid.default_client.close
end
......@@ -19,6 +19,16 @@ helpers do
@comment ||= Comment.find(params[:comment_id])
end
def verify_or_fix_cached_comment_count(comment, comment_hash)
# If the cached child count is stale, recalculate and update it.
unless comment_hash["children"].nil?
if comment_hash["child_count"] != comment_hash["children"].length
comment.update_cached_child_count
comment_hash["child_count"] = comment.get_cached_child_count
end
end
end
def source
@source ||= case params["source_type"]
when "user"
......@@ -190,24 +200,21 @@ helpers do
to_skip = (page - 1) * per_page
has_more = false
# batch_size is used to cap the number of documents we might load into memory at any given time
# TODO: starting with Mongoid 3.1, you can just do comment_threads.batch_size(size).each()
comment_threads.query.batch_size(CommentService.config["manual_pagination_batch_size"].to_i)
Mongoid.unit_of_work(disable: :current) do # this is to prevent Mongoid from memoizing every document we look at
comment_threads.each do |thread|
thread_key = thread._id.to_s
if !read_dates.has_key?(thread_key) || read_dates[thread_key] < thread.last_activity_at
if skipped >= to_skip
if threads.length == per_page
has_more = true
break
end
threads << thread
else
skipped += 1
comment_threads.batch_size(CommentService.config["manual_pagination_batch_size"].to_i).each do |thread|
thread_key = thread._id.to_s
if !read_dates.has_key?(thread_key) || read_dates[thread_key] < thread.last_activity_at
if skipped >= to_skip
if threads.length == per_page
has_more = true
break
end
threads << thread
else
skipped += 1
end
end
end
# The following trick makes frontend pagers work without recalculating
# the number of all unread threads per user on every request (since the number
# of threads in a course could be tens or hundreds of thousands). It has the
......@@ -219,7 +226,7 @@ helpers do
# let the installed paginator library handle pagination
num_pages = [1, (comment_threads.count / per_page.to_f).ceil].max
page = [1, page].max
threads = comment_threads.page(page).per(per_page).to_a
threads = comment_threads.paginate(:page => page, :per_page => per_page).to_a
end
if threads.length == 0
......@@ -228,7 +235,7 @@ helpers do
pres_threads = ThreadListPresenter.new(threads, request_user, course_id)
collection = pres_threads.to_hash
end
{collection: collection, num_pages: num_pages, page: page}
{collection: collection, num_pages: num_pages, page: page, thread_count: comment_threads.count}
end
end
......@@ -368,7 +375,7 @@ helpers do
rescue
# body was nil, or the hash function failed somehow - never mind
return
end
end
if CommentService.blocked_hashes.include? hash then
msg = t(:blocked_content_with_body_hash, :hash => hash)
logger.warn msg
......
module TaskHelpers
module ElasticsearchHelper
def self.create_index(name=nil)
name ||= "#{Content::ES_INDEX_NAME}_#{Time.now.strftime('%Y%m%d%H%M%S')}"
index = Tire.index(name)
LOG.info "Creating new index: #{name}..."
index.create
[CommentThread, Comment].each do |model|
LOG.info "Applying index mappings for #{model.name}"
model.put_search_index_mapping(index)
end
LOG.info '...done!'
index
end
def self.delete_index(name)
Tire.index(name).delete
end
def self.get_index
CommentThread.tire.index
end
def self.get_index_shard_count(name)
settings = Tire.index(name).settings
settings['index.number_of_shards']
end
end
end
require 'factory_girl'
namespace :db do
FactoryGirl.find_definitions
def create_test_user(id)
User.create!(external_id: id, username: "user#{id}")
end
task :init => :environment do
puts 'recreating indexes...'
[Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:remove_indexes).each(&:create_indexes)
puts 'finished'
end
task :clean => :environment do
Comment.delete_all
CommentThread.delete_all
User.delete_all
Notification.delete_all
Subscription.delete_all
end
THREADS_PER_COMMENTABLE = 20
TOP_COMMENTS_PER_THREAD = 3
ADDITIONAL_COMMENTS_PER_THREAD = 5
COURSE_ID = 'MITx/6.002x/2012_Fall'
def generate_comments_for(commentable_id, num_threads=THREADS_PER_COMMENTABLE, num_top_comments=TOP_COMMENTS_PER_THREAD, num_subcomments=ADDITIONAL_COMMENTS_PER_THREAD)
level_limit = CommentService.config['level_limit']
users = User.all.to_a
puts "Generating threads and comments for #{commentable_id}..."
threads = []
top_comments = []
additional_comments = []
num_threads.times do
inner_top_comments = []
# Create a new thread
comment_thread = FactoryGirl::create(:comment_thread, commentable_id: commentable_id, author: users.sample, course_id: COURSE_ID)
threads << comment_thread
# Subscribe a few users to the thread
users.sample(3).each { |user| user.subscribe(comment_thread) }
# Create a few top-level comments for the thread
(1 + rand(num_top_comments)).times do
endorsed = [true, false].sample
comment = FactoryGirl::create(:comment, author: users.sample, comment_thread: comment_thread, endorsed: endorsed, course_id: COURSE_ID)
top_comments << comment
inner_top_comments << comment
end
# Create additional nested comments
parent_comments = inner_top_comments
(level_limit-1).times do
current_level_comments = []
(1 + rand(num_subcomments)).times do
parent = parent_comments.sample
endorsed = [true, false].sample
child = FactoryGirl::create(:comment, author: users.sample, parent: parent, endorsed: endorsed)
current_level_comments << child
end
parent_comments = current_level_comments
end
end
puts 'voting'
(threads + top_comments + additional_comments).each do |c|
users.each do |user|
user.vote(c, [:up, :down].sample)
end
end
puts 'finished'
end
task :generate_comments, [:commentable_id, :num_threads, :num_top_comments, :num_subcomments] => :environment do |t, args|
args.with_defaults(num_threads: THREADS_PER_COMMENTABLE,
num_top_comments: TOP_COMMENTS_PER_THREAD,
num_subcomments: ADDITIONAL_COMMENTS_PER_THREAD)
generate_comments_for(args[:commentable_id], args[:num_threads], args[:num_top_comments], args[:num_subcomments])
end
task :seed => [:environment, :clean] do
Tire.index 'comment_threads' do
delete
end
CommentThread.create_elasticsearch_index
beginning_time = Time.now
(1..10).map { |id| create_test_user(id) }
generate_comments_for('video_1')
generate_comments_for('lab_1')
generate_comments_for('lab_2')
end_time = Time.now
puts "Number of comments generated: #{Comment.count}"
puts "Number of comment threads generated: #{CommentThread.count}"
puts "Time elapsed #{(end_time - beginning_time)*1000} milliseconds"
end
task :add_anonymous_to_peers => :environment do
Content.collection.find(:anonymous_to_peers => nil).update_all({'$set' => {anonymous_to_peers: false}})
end
end
namespace :i18n do
desc "Push source strings to Transifex for translation"
task :push do
sh("tx push -s")
end
desc "Pull translated strings from Transifex"
task :pull do
sh("tx pull --mode=reviewed --all --minimum-perc=1")
end
desc "Clean the locale directory"
task :clean do
sh("git clean -f locale/")
end
desc "Commit translated strings to the repository"
task :commit => ["i18n:clean", "i18n:pull"] do
sh("git add locale")
sh("git commit -m 'Updated translations (autogenerated message)'")
end
end
namespace :jobs do
desc "Clear the delayed_job queue."
task :clear => :environment do
Delayed::Job.delete_all
end
desc "Start a delayed_job worker."
task :work => :environment do
Delayed::Worker.new(:min_priority => ENV['MIN_PRIORITY'], :max_priority => ENV['MAX_PRIORITY'], :queues => (ENV['QUEUES'] || ENV['QUEUE'] || '').split(','), :quiet => false).start
end
end
require 'task_helpers'
namespace :search do
def import_from_cursor(cursor, index, opts)
tot = cursor.count
cnt = 0
t = Time.now
index.import cursor, {:method => :paginate, :per_page => opts[:batch_size]} do |documents|
if cnt % opts[:batch_size] == 0 then
elapsed_secs = (Time.now - t).round(2)
pct_complete = (100 * (cnt/tot.to_f)).round(2)
LOG.info "#{index.name}: imported #{cnt} of #{tot} (#{pct_complete}% complete after #{elapsed_secs} seconds)"
end
cnt += documents.length
sleep opts[:sleep_time]
documents
end
LOG.info "#{index.name}: finished importing #{cnt} documents"
cnt
end
def move_alias_to(name, index)
# if there was a previous index, switch over the alias to point to the new index
alias_ = Tire::Alias.find name
if alias_
# does the alias already point to this index?
if alias_.indices.include? index.name
return false
end
# remove the alias from wherever it points to now
LOG.info "alias already exists (will move): #{alias_.indices.to_ary.join(',')}"
alias_.indices.each do |old_index_name|
alias_.indices.delete old_index_name unless old_index_name == name
end
else
# create the alias
LOG.info "alias \"#{name}\" does not yet exist - creating."
alias_ = Tire::Alias.new :name => name
end
# point the alias at our new index
alias_.indices.add index.name
alias_.save
LOG.info "alias \"#{name}\" now points to index #{index.name}."
true
end
def do_reindex (opts, in_place=false)
start_time = Time.now
# create the new index with a unique name
new_index = TaskHelpers::ElasticsearchHelper.create_index
# unless the user is forcing a rebuild, or the index does not yet exist, we
# can do a Tire api reindex which is much faster than reimporting documents
# from mongo.
#
# Checking if the index exists is tricky. Tire automatically created an index
# for the model class when the app loaded if one did not already exist. However,
# it won't create an alias, which is what our app uses. So if the index exists
# but not the alias, we know that it's auto-created.
old_index = TaskHelpers::ElasticsearchHelper.get_index
alias_name = old_index.name
alias_ = Tire::Alias.find alias_name
if alias_.nil?
# edge case.
# the alias doesn't exist, so we know the index was auto-created.
# We will delete it and replace it with an alias.
raise RuntimeError, 'Cannot reindex in-place, no valid source index' if in_place
LOG.warn 'deleting auto-created index to make room for the alias'
old_index.delete
# NOTE on the small chance that another process re-auto-creates the index
# we just deleted before we have a chance to create the alias, this next
# call will fail.
move_alias_to(Content::ES_INDEX_NAME, new_index)
end
op = in_place ? 'reindex' : '(re)build index'
LOG.info "preparing to #{op}"
content_types = %w(Comment CommentThread)
if in_place
# reindex, moving source documents directly from old index to new
LOG.info 'copying documents from original index (this may take a while!)'
old_index.reindex new_index.name
LOG.info 'done copying!'
else
# fetch all the documents ever, up til start_time
cursor = Content.where(:_type.in => content_types, :updated_at.lte => start_time)
# import them to the new index
import_from_cursor(cursor, new_index, opts)
end
# move the alias if necessary
did_alias_move = move_alias_to(Content::ES_INDEX_NAME, new_index)
if did_alias_move
# Reimport any source documents that got updated since start_time,
# while the alias still pointed to the old index.
# Elasticsearch understands our document ids, so re-indexing the same
# document won't create duplicates.
LOG.info "importing any documents that changed between #{start_time} and now"
cursor = Content.where(:_type.in => content_types, :updated_at.gte => start_time)
import_from_cursor(cursor, new_index, opts)
end
end
desc 'Copies contents of MongoDB into Elasticsearch if updated in the last N minutes.'
task :catchup, [:minutes, :batch_size, :sleep_time] => :environment do |t, args|
opts = batch_opts args
the_index = TaskHelpers::ElasticsearchHelper.get_index
alias_ = Tire::Alias.find the_index.name
# this check makes sure we are working with the index to which
# the desired model's alias presently points.
raise RuntimeError, "could not find live index" if alias_.nil?
start_time = Time.now - (args[:minutes].to_i * 60)
cursor = Content.where(:_type.in => %w(Comment CommentThread), :updated_at.gte => start_time)
import_from_cursor(cursor, the_index, opts)
end
def batch_opts(args)
args = args.to_hash
{:batch_size => args[:batch_size].nil? ? 500 : args[:batch_size].to_i,
:sleep_time => args[:sleep_time].nil? ? 0 : args[:sleep_time].to_i}
end
desc 'Removes any data from Elasticsearch that no longer exists in MongoDB.'
task :prune, [:batch_size, :sleep_time] => :environment do |t, args|
opts = batch_opts args
the_index = TaskHelpers::ElasticsearchHelper.get_index
puts "pruning #{the_index.name}"
alias_ = Tire::Alias.find the_index.name
raise RuntimeError, 'could not find live index' if alias_.nil?
scan_size = opts[:batch_size] / TaskHelpers::ElasticsearchHelper.get_index_shard_count(the_index.name)
cnt = 0
[CommentThread, Comment].each do |klass|
doc_type = klass.document_type
# this check makes sure we are working with the index to which
# the desired model's alias presently points.
search = Tire::Search::Scan.new the_index.name, {size: scan_size, type: doc_type}
search.each do |results|
es_ids = results.map(&:id)
mongo_ids = klass.where(:id.in => es_ids).map { |d| d.id.to_s }
to_delete = es_ids - mongo_ids
if to_delete.size > 0
cnt += to_delete.size
puts "deleting #{to_delete.size} orphaned #{doc_type} documents from elasticsearch"
the_index.bulk_delete (to_delete).map { |v| {"type" => doc_type, "id" => v} }
end
puts "#{the_index.name}/#{doc_type}: processed #{search.seen} of #{search.total}"
sleep opts[:sleep_time]
end
end
puts "done pruning #{the_index.name}, deleted a total of #{cnt} orphaned documents"
end
desc 'Rebuild the content index from MongoDB data.'
task :rebuild, [:batch_size, :sleep_time] => :environment do |t, args|
do_reindex(batch_opts(args))
end
desc 'Rebuild the content index from already-indexed data (in place).'
task :reindex, [:batch_size, :sleep_time] => :environment do |t, args|
do_reindex(batch_opts(args), true)
end
desc 'Generate a new, empty physical index, without bringing it online.'
task :create_index => :environment do
TaskHelpers::ElasticsearchHelper.create_index
end
end
......@@ -5,7 +5,10 @@ class Comment < Content
include Mongoid::Tree
include Mongoid::Timestamps
include Mongoid::MagicCounterCache
include ActiveModel::MassAssignmentSecurity
include Tire::Model::Search
include Tire::Model::Callbacks
voteable self, :up => +1, :down => -1
field :course_id, type: String
......@@ -14,22 +17,14 @@ class Comment < Content
field :endorsement, type: Hash
field :anonymous, type: Boolean, default: false
field :anonymous_to_peers, type: Boolean, default: false
field :commentable_id, type: String
field :at_position_list, type: Array, default: []
field :sk, type: String, default: nil
field :child_count, type: Integer
index({author_id: 1, course_id: 1})
index({_type: 1, comment_thread_id: 1, author_id: 1, updated_at: 1})
field :sk, type: String, default: nil
before_save :set_sk
def set_sk()
# this attribute is explicitly write-once
if self.sk.nil?
self.sk = (self.parent_ids.dup << self.id).join("-")
end
end
include Tire::Model::Search
include Tire::Model::Callbacks
index_name Content::ES_INDEX_NAME
......@@ -43,10 +38,10 @@ class Comment < Content
indexes :created_at, type: :date, included_in_all: false
indexes :updated_at, type: :date, included_in_all: false
end
belongs_to :comment_thread, index: true
belongs_to :author, class_name: "User", index: true
belongs_to :author, class_name: 'User', index: true
attr_accessible :body, :course_id, :anonymous, :anonymous_to_peers, :endorsed, :endorsement
......@@ -57,13 +52,13 @@ class Comment < Content
counter_cache :comment_thread
before_destroy :destroy_children # TODO async
before_destroy :destroy_children
before_create :set_thread_last_activity_at
before_update :set_thread_last_activity_at
before_save :set_sk
def self.hash_tree(nodes)
nodes.map{|node, sub_nodes| node.to_hash.merge("children" => hash_tree(sub_nodes).compact)}
nodes.map { |node, sub_nodes| node.to_hash.merge('children' => hash_tree(sub_nodes).compact) }
end
# This should really go somewhere else, but sticking it here for now. This is
......@@ -74,9 +69,9 @@ class Comment < Content
# actually creates the subtree.
def self.flatten_subtree(x)
if x.is_a? Array
x.flatten.map{|y| self.flatten_subtree(y)}
x.flatten.map { |y| self.flatten_subtree(y) }
elsif x.is_a? Hash
x.to_a.map{|y| self.flatten_subtree(y)}.flatten
x.to_a.map { |y| self.flatten_subtree(y) }.flatten
else
x
end
......@@ -97,20 +92,31 @@ class Comment < Content
self.class.hash_tree(subtree_hash).first
else
as_document.slice(*%w[body course_id endorsed endorsement anonymous anonymous_to_peers created_at updated_at at_position_list])
.merge("id" => _id)
.merge("user_id" => author_id)
.merge("username" => author_username)
.merge("depth" => depth)
.merge("closed" => comment_thread.nil? ? false : comment_thread.closed) # ditto
.merge("thread_id" => comment_thread_id)
.merge("parent_id" => parent_ids[-1])
.merge("commentable_id" => comment_thread.nil? ? nil : comment_thread.commentable_id) # ditto
.merge("votes" => votes.slice(*%w[count up_count down_count point]))
.merge("abuse_flaggers" => abuse_flaggers)
.merge("type" => "comment")
.merge("id" => _id)
.merge("user_id" => author_id)
.merge("username" => author_username)
.merge("depth" => depth)
.merge("closed" => comment_thread.nil? ? false : comment_thread.closed)
.merge("thread_id" => comment_thread_id)
.merge("parent_id" => parent_ids[-1])
.merge("commentable_id" => comment_thread.nil? ? nil : comment_thread.commentable_id)
.merge("votes" => votes.slice(*%w[count up_count down_count point]))
.merge("abuse_flaggers" => abuse_flaggers)
.merge("type" => "comment")
.merge("child_count" => get_cached_child_count)
end
end
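# NOTE: child_count is denormalized onto the comment. Documents created
# before this field existed return nil, so the getter below lazily recounts
# and stores the value via set(), which skips callbacks.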
def get_cached_child_count
update_cached_child_count if self.child_count.nil?
self.child_count
end
def update_cached_child_count
child_comments_count = Comment.where({"parent_id" => self._id}).count()
self.set(child_count: child_comments_count)
end
def commentable_id
#we need this to have a universal access point for the flag rake task
if self.comment_thread_id
......@@ -147,16 +153,22 @@ class Comment < Content
end
def self.by_date_range_and_thread_ids from_when, to_when, thread_ids
#return all content between from_when and to_when
self.where(:created_at.gte => (from_when)).where(:created_at.lte => (to_when)).
where(:comment_thread_id.in => thread_ids)
end
private
def set_thread_last_activity_at
self.comment_thread.update_attribute(:last_activity_at, Time.now.utc)
end
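# sk is a write-once, materialized-path sort key: the comment's ancestor ids
# followed by its own id, joined with "-". Sorting a thread's comments on sk
# (as ThreadPresenter does) yields a depth-first traversal of the tree.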
def set_sk
# this attribute is explicitly write-once
if self.sk.nil?
self.sk = (self.parent_ids.dup << self.id).join("-")
end
end
end
......@@ -5,6 +5,10 @@ require_relative 'content'
class CommentThread < Content
include Mongoid::Timestamps
include Mongoid::Attributes::Dynamic
include ActiveModel::MassAssignmentSecurity
include Tire::Model::Search
include Tire::Model::Callbacks
extend Enumerize
voteable self, :up => +1, :down => -1
......@@ -28,8 +32,6 @@ class CommentThread < Content
index({author_id: 1, course_id: 1})
index_name Content::ES_INDEX_NAME
......@@ -48,12 +50,12 @@ class CommentThread < Content
indexes :commentable_id, type: :string, index: :not_analyzed, included_in_all: false
indexes :author_id, type: :string, as: 'author_id', index: :not_analyzed, included_in_all: false
indexes :group_id, type: :integer, as: 'group_id', index: :not_analyzed, included_in_all: false
indexes :id, :index => :not_analyzed
indexes :thread_id, :analyzer => :keyword, :as => '_id'
end
belongs_to :author, class_name: "User", inverse_of: :comment_threads, index: true#, autosave: true
has_many :comments, dependent: :destroy#, autosave: true# Use destroy to envoke callback on the top-level comments TODO async
belongs_to :author, class_name: 'User', inverse_of: :comment_threads, index: true
has_many :comments, dependent: :destroy # Use destroy to invoke callback on the top-level comments
has_many :activities, autosave: true
attr_accessible :title, :body, :course_id, :commentable_id, :anonymous, :anonymous_to_peers, :closed, :thread_type
......@@ -69,24 +71,12 @@ class CommentThread < Content
before_create :set_last_activity_at
before_update :set_last_activity_at, :unless => lambda { closed_changed? }
after_update :clear_endorsements
before_destroy :destroy_subscriptions
scope :active_since, ->(from_time) { where(:last_activity_at => {:$gte => from_time}) }
scope :standalone_context, ->() { where(:context => :standalone) }
scope :course_context, ->() { where(:context => :course) }
def activity_since(from_time=nil)
if from_time
activities.where(:created_at => {:$gte => from_time})
......@@ -95,13 +85,21 @@ class CommentThread < Content
end
end
def activity_today
activity_since(Date.today.to_time)
end
def activity_this_week
activity_since(Date.today.to_time - 1.weeks)
end
def activity_this_month
activity_since(Date.today.to_time - 1.months)
end
def activity_overall
activity_since(nil)
end
def root_comments
Comment.roots.where(comment_thread_id: self.id)
......@@ -124,25 +122,26 @@ class CommentThread < Content
end
def to_hash(params={})
as_document.slice(*%w[thread_type title body course_id anonymous anonymous_to_peers commentable_id created_at updated_at at_position_list closed context last_activity_at])
.merge('id' => _id,
'user_id' => author_id,
'username' => author_username,
'votes' => votes.slice(*%w[count up_count down_count point]),
'abuse_flaggers' => abuse_flaggers,
'tags' => [],
'type' => 'thread',
'group_id' => group_id,
'pinned' => pinned?,
'comments_count' => comment_count)
end
def comment_thread_id
#so that we can use the comment thread id as a common attribute for flagging
self.id
end
private
def set_last_activity_at
self.last_activity_at = Time.now.utc unless last_activity_at_changed?
......@@ -154,8 +153,8 @@ private
# the last activity time on the thread. Therefore the callbacks would be mutually recursive and we end up with a
# 'SystemStackError'. The 'set' method skips callbacks and therefore bypasses this issue.
self.comments.each do |comment|
comment.set(endorsed: false)
comment.set(endorsement: nil)
end
end
end
......@@ -163,5 +162,4 @@ private
def destroy_subscriptions
subscriptions.delete_all
end
end
class Content
include Mongoid::Document
include Mongo::Voteable
field :visible, type: Boolean, default: true
field :abuse_flaggers, type: Array, default: []
field :historical_abuse_flaggers, type: Array, default: [] #preserve abuse flaggers after a moderator unflags
field :author_username, type: String, default: nil
index({_type: 1, course_id: 1, pinned: -1, created_at: -1}, {background: true})
index({_type: 1, course_id: 1, pinned: -1, comment_count: -1, created_at: -1}, {background: true})
index({_type: 1, course_id: 1, pinned: -1, 'votes.point' => -1, created_at: -1}, {background: true})
index({_type: 1, course_id: 1, pinned: -1, last_activity_at: -1, created_at: -1}, {background: true})
index({comment_thread_id: 1, sk: 1}, {sparse: true})
index({comment_thread_id: 1, endorsed: 1}, {sparse: true})
index({commentable_id: 1}, {sparse: true, background: true})
......@@ -27,10 +27,7 @@ class Content
end
before_save :set_username
def author_with_anonymity(attr=nil, attr_when_anonymous=nil)
if not attr
......@@ -43,7 +40,7 @@ class Content
def self.flagged
#return an array of flagged content
holder = []
Content.where(:abuse_flaggers.ne => [],:abuse_flaggers.exists => true).each do |c|
Content.where(:abuse_flaggers.ne => [], :abuse_flaggers.exists => true).each do |c|
holder << c
end
holder
......@@ -53,61 +50,65 @@ class Content
#take a hash of criteria (what) and return a hash of hashes
#course => user => count
map = "function(){emit(this.author_id,1)}"
reduce = "function(k, vals) { var sum = 0; for(var i in vals) sum += vals[i]; return sum; }"
map = 'function(){emit(this.author_id,1)}'
reduce = 'function(k, vals) { var sum = 0; for(var i in vals) sum += vals[i]; return sum; }'
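# the map emits (author_id, 1) per document and reduce sums the emissions,
# so each result row is an author's total contribution count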
contributors = []
self.where(what).map_reduce(map, reduce).out(replace: 'results').each do |d|
contributors << d
end
#now sort and limit them
#first sort destructively
contributors.sort! { |a, b| -a['value'] <=> -b['value'] }
#then trim it
contributors = contributors[0..(count - 1)]
contributors
end
def self.summary what
#take a hash of criteria (what) and return a hash of totals:
#votes, threads, comments, and unique contributors
answer = {}
vote_count = 0
thread_count = 0
comment_count = 0
contributors = []
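# contributors gathers author ids plus each document's voter id lists, so
# after the uniq below, contributor_count covers distinct authors and voters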
content = self.where(what)
content.each do |c|
contributors << c.author_id
contributors << c["votes"]["up"]
contributors << c["votes"]["down"]
vote_count += c["votes"]["count"]
if c._type == "CommentThread"
contributors << c['votes']['up']
contributors << c['votes']['down']
vote_count += c['votes']['count']
if c._type == 'CommentThread'
thread_count += 1
elsif c._type == "Comment"
elsif c._type == 'Comment'
comment_count += 1
end
end
#uniquify contributors
contributors = contributors.uniq
#assemble the answer and ship
answer["vote_count"] = vote_count
answer["thread_count"] = thread_count
answer["comment_count"] = comment_count
answer["contributor_count"] = contributors.count
answer['vote_count'] = vote_count
answer['thread_count'] = thread_count
answer['comment_count'] = comment_count
answer['contributor_count'] = contributors.count
answer
end
private
def set_username
# avoid having to look this attribute up later, since it does not change
self.author_username = author.username
end
end
class Notification
include Mongoid::Document
include Mongoid::Timestamps
include ActiveModel::MassAssignmentSecurity
field :notification_type, type: String
field :info, type: Hash
......
......@@ -12,7 +12,7 @@ class Subscription
index({source_id: 1, source_type: 1}, {background: true})
def to_hash
as_document.slice(*%w[subscriber_id source_id source_type]).merge("id" => _id)
end
def subscriber
......
......@@ -153,8 +153,9 @@ class ReadState
field :last_read_times, type: Hash, default: {}
embedded_in :user
validates_presence_of :course_id
validates_uniqueness_of :course_id
def to_hash
to_json
end
......
......@@ -23,7 +23,7 @@ class ThreadPresenter
@is_endorsed = is_endorsed
end
def to_hash with_responses=false, resp_skip=0, resp_limit=nil, recursive=true
raise ArgumentError unless resp_skip >= 0
raise ArgumentError unless resp_limit.nil? or resp_limit >= 1
h = @thread.to_hash
......@@ -32,7 +32,11 @@ class ThreadPresenter
h["endorsed"] = @is_endorsed || false
if with_responses
if @thread.thread_type.discussion? && resp_skip == 0 && resp_limit.nil?
if recursive
content = Comment.where(comment_thread_id: @thread._id).order_by({"sk" => 1})
else
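# non-recursive: fetch only top-level responses, i.e. comments whose
# parent_ids list is empty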
content = Comment.where(comment_thread_id: @thread._id, "parent_ids" => []).order_by({"sk" => 1})
end
h["children"] = merge_response_content(content)
h["resp_total"] = content.to_a.select{|d| d.depth == 0 }.length
else
......@@ -41,18 +45,20 @@ class ThreadPresenter
when "question"
endorsed_responses = responses.where(endorsed: true)
non_endorsed_responses = responses.where(endorsed: false)
endorsed_response_info = get_paged_merged_responses(@thread._id, endorsed_responses, 0, nil, recursive)
non_endorsed_response_info = get_paged_merged_responses(
@thread._id,
non_endorsed_responses,
resp_skip,
resp_limit,
recursive
)
h["endorsed_responses"] = endorsed_response_info["responses"]
h["non_endorsed_responses"] = non_endorsed_response_info["responses"]
h["non_endorsed_resp_total"] = non_endorsed_response_info["response_count"]
h["resp_total"] = non_endorsed_response_info["response_count"] + endorsed_response_info["response_count"]
when "discussion"
response_info = get_paged_merged_responses(@thread._id, responses, resp_skip, resp_limit, recursive)
h["children"] = response_info["responses"]
h["resp_total"] = response_info["response_count"]
end
......@@ -67,15 +73,20 @@ class ThreadPresenter
# a hash containing the following:
# responses
# An array of hashes representing the page of responses (including
# children, if recursive is true)
# response_count
# The total number of responses
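# recursive
# When false, only root responses (those with an empty parent_ids
# array) are queried, and their children are left out of the result.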
def get_paged_merged_responses(thread_id, responses, skip, limit, recursive=false)
response_ids = responses.only(:_id).sort({"sk" => 1}).to_a.map{|doc| doc["_id"]}
paged_response_ids = limit.nil? ? response_ids.drop(skip) : response_ids.drop(skip).take(limit)
if recursive
content = Comment.where(comment_thread_id: thread_id).
or({:parent_id => {"$in" => paged_response_ids}}, {:id => {"$in" => paged_response_ids}}).
sort({"sk" => 1})
else
content = Comment.where(comment_thread_id: thread_id, "parent_ids" => []).
where({:id => {"$in" => paged_response_ids}}).sort({"sk" => 1})
end
{"responses" => merge_response_content(content), "response_count" => response_ids.length}
end
......
......@@ -5,10 +5,10 @@ module ThreadUtils
# only threads which are endorsed will have entries, value will always be true.
endorsed_threads = {}
thread_ids = threads.collect {|t| t._id}
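# the mongo 2.x driver (pulled in by the mongoid 5 upgrade) expects the
# aggregation pipeline as a single array argument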
Comment.collection.aggregate([
{"$match" => {"comment_thread_id" => {"$in" => thread_ids}, "endorsed" => true}},
{"$group" => {"_id" => "$comment_thread_id"}}
]).each do |res|
endorsed_threads[res["_id"].to_s] = true
end
endorsed_threads
......@@ -26,7 +26,7 @@ module ThreadUtils
thread_key = t._id.to_s
if read_dates.has_key? thread_key
is_read = read_dates[thread_key] >= t.updated_at
unread_comment_count = Comment.collection.find(
:comment_thread_id => t._id,
:author_id => {"$ne" => user.id},
:updated_at => {"$gte" => read_dates[thread_key]}
......
require 'spec_helper'
describe 'Abuse API' do
before(:each) { set_api_key_header }
shared_examples 'an abuse endpoint' do
let(:affected_entity_id) { affected_entity.id }
let(:user_id) { create(:user).id }
it { should be_ok }
it 'updates the abuse flaggers' do
subject
affected_entity.reload
expect(affected_entity.abuse_flaggers).to eq expected_abuse_flaggers
expect(non_affected_entity.abuse_flaggers).to have(0).items
end
context 'if the comment does not exist' do
let(:affected_entity_id) { 'does_not_exist' }
it { should be_bad_request }
its(:body) { should eq "[\"#{I18n.t(:requested_object_not_found)}\"]" }
end
context 'if no user_id is provided' do
let(:user_id) { nil }
it { should be_bad_request }
its(:body) { should eq "[\"#{I18n.t(:user_id_is_required)}\"]" }
end
end
describe 'comment actions' do
let(:affected_entity) { create(:comment, abuse_flaggers: []) }
let(:non_affected_entity) { affected_entity.comment_thread }
context 'when flagging a comment for abuse' do
let(:expected_abuse_flaggers) { [user_id] }
subject { put "/api/v1/comments/#{affected_entity_id}/abuse_flag", user_id: user_id }
describe "flag a comment as abusive" do
it "create or update the abuse_flags on the comment" do
comment = Comment.first
# We get the count rather than just keeping the array, because the array
# will update as the Comment updates since the IdentityMap is enabled.
prev_abuse_flaggers_count = comment.abuse_flaggers.length
create_comment_flag("#{comment.id}", User.first.id)
comment = Comment.find(comment.id)
comment.abuse_flaggers.count.should == prev_abuse_flaggers_count + 1
# verify that the thread doesn't automatically get flagged
comment.comment_thread.abuse_flaggers.length.should == 0
end
it "returns 400 when the comment does not exist" do
create_comment_flag("does_not_exist", User.first.id)
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it "returns 400 when user_id is not provided" do
create_comment_flag("#{Comment.first.id}", nil)
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:user_id_is_required)
end
#Would like to test the output of to_hash, but not sure how to deal with a Moped::BSON::Document object
#it "has a correct hash" do
# create_flag("#{Comment.first.id}", User.first.id)
# Comment.first.to_hash
#end
it_behaves_like 'an abuse endpoint'
end
describe "flag a thread as abusive" do
it "create or update the abuse_flags on the comment" do
comment = Comment.first
thread = comment.comment_thread
prev_abuse_flaggers_count = thread.abuse_flaggers.count
create_thread_flag("#{thread.id}", User.first.id)
comment = Comment.find(comment.id)
comment.comment_thread.abuse_flaggers.count.should == prev_abuse_flaggers_count + 1
# verify that the comment doesn't automatically get flagged
comment.abuse_flaggers.length.should == 0
end
it "returns 400 when the thread does not exist" do
create_thread_flag("does_not_exist", User.first.id)
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it "returns 400 when user_id is not provided" do
create_thread_flag("#{Comment.first.comment_thread.id}", nil)
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:user_id_is_required)
end
#Would like to test the output of to_hash, but not sure how to deal with a Moped::BSON::Document object
#it "has a correct hash" do
# create_thread_flag("#{Comment.first.comment_thread.id}", User.first.id)
# Comment.first.comment_thread.to_hash
#end
context 'when un-flagging a comment for abuse' do
let(:affected_entity) { create(:comment, abuse_flaggers: [user_id]) }
let(:expected_abuse_flaggers) { [] }
subject { put "/api/v1/comments/#{affected_entity_id}/abuse_unflag", user_id: user_id }
it_behaves_like 'an abuse endpoint'
end
describe "unflag a comment as abusive" do
it "removes the user from the existing abuse_flaggers" do
comment = Comment.first
create_comment_flag("#{comment.id}", User.first.id)
comment = Comment.first
prev_abuse_flaggers = comment.abuse_flaggers
prev_abuse_flaggers_count = prev_abuse_flaggers.count
prev_abuse_flaggers.should include User.first.id
remove_comment_flag("#{comment.id}", User.first.id)
comment = Comment.find(comment.id)
comment.abuse_flaggers.count.should == prev_abuse_flaggers_count - 1
comment.abuse_flaggers.to_a.should_not include User.first.id
end
it "returns 400 when the comment does not exist" do
remove_comment_flag("does_not_exist", User.first.id)
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it "returns 400 when the thread does not exist" do
remove_thread_flag("does_not_exist", User.first.id)
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it "returns 400 when user_id is not provided" do
remove_thread_flag("#{Comment.first.comment_thread.id}", nil)
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:user_id_is_required)
end
#Would like to test the output of to_hash, but not sure how to deal with a Moped::BSON::Document object
#it "has a correct hash" do
# create_thread_flag("#{Comment.first.comment_thread.id}", User.first.id)
# Comment.first.comment_thread.to_hash
#end
end
describe 'comment thread actions' do
let(:affected_entity) { create(:comment_thread, abuse_flaggers: []) }
let(:non_affected_entity) { create(:comment, comment_thread: affected_entity) }
context 'when flagging a comment thread for abuse' do
let(:expected_abuse_flaggers) { [user_id] }
subject { put "/api/v1/threads/#{affected_entity_id}/abuse_flag", user_id: user_id }
it_behaves_like 'an abuse endpoint'
end
describe "unflag a thread as abusive" do
it "removes the user from the existing abuse_flaggers" do
thread = CommentThread.first
create_thread_flag("#{thread.id}", User.first.id)
thread = CommentThread.first
prev_abuse_flaggers = thread.abuse_flaggers
prev_abuse_flaggers_count = prev_abuse_flaggers.count
prev_abuse_flaggers.should include User.first.id
remove_thread_flag("#{thread.id}", User.first.id)
thread = CommentThread.find(thread.id)
thread.abuse_flaggers.count.should == prev_abuse_flaggers_count - 1
thread.abuse_flaggers.to_a.should_not include User.first.id
end
it "returns 400 when the thread does not exist" do
remove_thread_flag("does_not_exist", User.first.id)
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it "returns 400 when user_id is not provided" do
remove_thread_flag("#{Comment.first.comment_thread.id}", nil)
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:user_id_is_required)
end
#Would like to test the output of to_hash, but not sure how to deal with a Moped::BSON::Document object
#it "has a correct hash" do
# create_thread_flag("#{Comment.first.comment_thread.id}", User.first.id)
# Comment.first.comment_thread.to_hash
#end
context 'when un-flagging a comment thread for abuse' do
let(:affected_entity) { create(:comment_thread, abuse_flaggers: [user_id]) }
let(:expected_abuse_flaggers) { [] }
subject { put "/api/v1/threads/#{affected_entity_id}/abuse_unflag", user_id: user_id }
it_behaves_like 'an abuse endpoint'
end
end
end
require 'spec_helper'
require 'unicode_shared_examples'
describe "app" do
BLOCKED_BODY = 'BLOCKED POST'
describe 'Comment API' do
before(:each) { set_api_key_header }
let(:thread) { create_comment_thread_and_comments }
describe "comments" do
before(:each) { init_without_subscriptions }
describe "GET /api/v1/comments/:comment_id" do
it "returns JSON" do
comment = Comment.first
get "/api/v1/comments/#{comment.id}"
last_response.should be_ok
last_response.content_type.should == "application/json;charset=utf-8"
end
it "retrieve information of a single comment" do
comment = Comment.first
get "/api/v1/comments/#{comment.id}"
last_response.should be_ok
retrieved = parse last_response.body
retrieved["body"].should == comment.body
retrieved["endorsed"].should == comment.endorsed
retrieved["id"].should == comment.id.to_s
retrieved["children"].should be_nil
retrieved["votes"]["point"].should == comment.votes_point
retrieved["depth"].should == comment.depth
retrieved["parent_id"].should == comment.parent_ids[-1]
end
it "retrieve information of a single comment with its sub comments" do
comment = Comment.first
get "/api/v1/comments/#{comment.id}", recursive: true
last_response.should be_ok
retrieved = parse last_response.body
retrieved["body"].should == comment.body
retrieved["endorsed"].should == comment.endorsed
retrieved["id"].should == comment.id.to_s
retrieved["votes"]["point"].should == comment.votes_point
retrieved["children"].length.should == comment.children.length
retrieved["children"].select{|c| c["body"] == comment.children.first.body}.first.should_not be_nil
retrieved["children"].each{|c| c["parent_id"].should == comment.id.to_s}
end
it "returns 400 when the comment does not exist" do
get "/api/v1/comments/does_not_exist"
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
describe 'GET /api/v1/comments/:comment_id' do
it 'returns JSON' do
comment = thread.comments.first
get "/api/v1/comments/#{comment.id}"
last_response.should be_ok
last_response.content_type.should == 'application/json;charset=utf-8'
end
it 'retrieves information of a single comment' do
comment = thread.comments.first
get "/api/v1/comments/#{comment.id}"
last_response.should be_ok
retrieved = parse last_response.body
retrieved['body'].should == comment.body
retrieved['endorsed'].should == comment.endorsed
retrieved['id'].should == comment.id.to_s
retrieved['children'].should be_nil
retrieved['votes']['point'].should == comment.votes_point
retrieved['depth'].should == comment.depth
retrieved['parent_id'].should == comment.parent_ids.map(&:to_s)[-1]
retrieved["child_count"].should == comment.children.length
end
include_examples "unicode data"
end
describe "PUT /api/v1/comments/:comment_id" do
def test_update_endorsed(true_val, false_val)
comment = Comment.first
before = DateTime.now
put "/api/v1/comments/#{comment.id}", endorsed: true_val, endorsement_user_id: "#{User.first.id}"
after = DateTime.now
last_response.should be_ok
comment.reload
comment.endorsed.should == true
comment.endorsement.should_not be_nil
comment.endorsement["user_id"].should == "#{User.first.id}"
comment.endorsement["time"].should be_between(before, after)
put "/api/v1/comments/#{comment.id}", endorsed: false_val
last_response.should be_ok
comment.reload
comment.endorsed.should == false
comment.endorsement.should be_nil
end
it "updates endorsed correctly" do
test_update_endorsed(true, false)
end
it "updates endorsed correctly with Pythonic values" do
test_update_endorsed("True", "False")
end
it "updates body correctly" do
comment = Comment.first
put "/api/v1/comments/#{comment.id}", body: "new body"
last_response.should be_ok
comment.reload
comment.body.should == "new body"
end
it "can update endorsed and body simultaneously" do
comment = Comment.first
put "/api/v1/comments/#{comment.id}", body: "new body", endorsed: true
last_response.should be_ok
comment.reload
comment.body.should == "new body"
comment.endorsed.should == true
end
it "returns 400 when the comment does not exist" do
put "/api/v1/comments/does_not_exist", body: "new body", endorsed: true
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it "returns 503 and does not update when the post hash is blocked" do
comment = Comment.first
original_body = comment.body
put "/api/v1/comments/#{comment.id}", body: "BLOCKED POST", endorsed: true
last_response.status.should == 503
parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => Digest::MD5.hexdigest("blocked post"))
comment.reload
comment.body.should == original_body
end
it 'retrieves information of a single comment with its sub comments' do
comment = thread.comments.first
get "/api/v1/comments/#{comment.id}", recursive: true
last_response.should be_ok
retrieved = parse last_response.body
retrieved['body'].should == comment.body
retrieved['endorsed'].should == comment.endorsed
retrieved['id'].should == comment.id.to_s
retrieved['votes']['point'].should == comment.votes_point
retrieved_children = retrieved['children']
retrieved_children.length.should == comment.children.length
retrieved["child_count"].should == comment.children.length
include_examples "unicode data"
end
describe "POST /api/v1/comments/:comment_id" do
it "create a sub comment to the comment" do
comment = Comment.first.to_hash(recursive: true)
user = User.first
post "/api/v1/comments/#{comment["id"]}", body: "new comment", course_id: "1", user_id: User.first.id
last_response.should be_ok
changed_comment = Comment.find(comment["id"]).to_hash(recursive: true)
changed_comment["children"].length.should == comment["children"].length + 1
subcomment = changed_comment["children"].select{|c| c["body"] == "new comment"}.first
subcomment.should_not be_nil
subcomment["user_id"].should == user.id
end
it "returns 400 when the comment does not exist" do
post "/api/v1/comments/does_not_exist", body: "new comment", course_id: "1", user_id: User.first.id
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it "returns 503 and does not create when the post hash is blocked" do
comment = Comment.first.to_hash(recursive: true)
user = User.first
post "/api/v1/comments/#{comment["id"]}", body: "BLOCKED POST", course_id: "1", user_id: User.first.id
last_response.status.should == 503
parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => Digest::MD5.hexdigest("blocked post"))
Comment.where(body: "BLOCKED POST").to_a.should be_empty
comment.children.each_with_index do |child, index|
expect(retrieved_children[index]).to include('body' => child.body, 'parent_id' => comment.id.to_s)
end
end
it 'retrieves information of a single comment and fixes incorrect child count' do
comment = thread.comments.first
comment.set(child_count: 2000)
comment_hash = comment.to_hash(recursive: true)
comment_hash["child_count"].should == 2000
get "/api/v1/comments/#{comment.id}", recursive: true
last_response.should be_ok
retrieved = parse last_response.body
retrieved["child_count"].should == comment.children.length
include_examples "unicode data"
comment.set(child_count: nil)
get "/api/v1/comments/#{comment.id}"
last_response.should be_ok
retrieved = parse last_response.body
retrieved["child_count"].should == comment.children.length
end
describe "DELETE /api/v1/comments/:comment_id" do
it "delete the comment and its sub comments" do
comment = Comment.first
cnt_comments = comment.descendants_and_self.length
prev_count = Comment.count
delete "/api/v1/comments/#{comment.id}"
Comment.count.should == prev_count - cnt_comments
Comment.all.select{|c| c.id == comment.id}.first.should be_nil
end
it "can delete a sub comment" do
parent = CommentThread.first.comments.first
sub_comment = parent.children.first
id = sub_comment.id
delete "/api/v1/comments/#{id}"
Comment.where(:id => id).should be_empty
parent.children.where(:id => id).should be_empty
end
it "returns 400 when the comment does not exist" do
delete "/api/v1/comments/does_not_exist"
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it 'returns 400 when the comment does not exist' do
get '/api/v1/comments/does_not_exist'
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
def test_unicode_data(text)
comment = create(:comment, body: text)
get "/api/v1/comments/#{comment.id}"
last_response.should be_ok
parse(last_response.body)['body'].should == text
end
include_examples 'unicode data'
end
describe 'PUT /api/v1/comments/:comment_id' do
def test_update_endorsed(true_val, false_val)
comment = thread.comments.first
before = DateTime.now
put "/api/v1/comments/#{comment.id}", endorsed: true_val, endorsement_user_id: "#{User.first.id}"
after = DateTime.now
last_response.should be_ok
comment.reload
comment.endorsed.should == true
comment.endorsement.should_not be_nil
comment.endorsement["user_id"].should == "#{User.first.id}"
comment.endorsement["time"].should be_between(before, after)
put "/api/v1/comments/#{comment.id}", endorsed: false_val
last_response.should be_ok
comment.reload
comment.endorsed.should == false
comment.endorsement.should be_nil
end
it 'updates endorsed correctly' do
test_update_endorsed(true, false)
end
it 'updates endorsed correctly with Pythonic values' do
test_update_endorsed('True', 'False')
end
it 'updates body correctly' do
comment = thread.comments.first
put "/api/v1/comments/#{comment.id}", body: 'new body'
last_response.should be_ok
comment.reload
comment.body.should == 'new body'
end
it 'can update endorsed and body simultaneously' do
comment = thread.comments.first
put "/api/v1/comments/#{comment.id}", body: 'new body', endorsed: true
last_response.should be_ok
comment.reload
comment.body.should == 'new body'
comment.endorsed.should == true
end
it 'returns 400 when the comment does not exist' do
put '/api/v1/comments/does_not_exist', body: 'new body', endorsed: true
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it 'returns 503 and does not update when the post hash is blocked' do
blocked_hash = block_post_body(BLOCKED_BODY)
comment = thread.comments.first
original_body = comment.body
put "/api/v1/comments/#{comment.id}", body: BLOCKED_BODY, endorsed: true
last_response.status.should == 503
parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => blocked_hash)
comment.reload
comment.body.should == original_body
end
def test_unicode_data(text)
comment = thread.comments.first
put "/api/v1/comments/#{comment.id}", body: text
last_response.should be_ok
comment.reload
comment.body.should == text
end
include_examples 'unicode data'
end
describe 'POST /api/v1/comments/:comment_id' do
it 'creates a sub comment under the comment' do
comment = thread.comments.first
previous_child_count = comment.children.length
user = thread.author
body = 'new comment'
course_id = '1'
post "/api/v1/comments/#{comment.id}", body: body, course_id: course_id, user_id: user.id
last_response.should be_ok
comment.reload
comment.children.length.should == previous_child_count + 1
comment.child_count.should == previous_child_count + 1
sub_comment = comment.children.order_by(created_at: :desc).first
sub_comment.body.should == body
sub_comment.course_id.should == course_id
sub_comment.author.should == user
sub_comment.child_count.should == 0
end
it 'returns 400 when the comment does not exist' do
post '/api/v1/comments/does_not_exist', body: 'new comment', course_id: '1', user_id: thread.author.id
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
it 'returns 503 and does not create when the post hash is blocked' do
blocked_hash = block_post_body(BLOCKED_BODY)
comment = thread.comments.first
user = comment.author
post "/api/v1/comments/#{comment.id}", body: BLOCKED_BODY, course_id: '1', user_id: user.id
last_response.status.should == 503
parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => blocked_hash)
Comment.where(body: BLOCKED_BODY).to_a.should be_empty
end
def test_unicode_data(text)
parent = thread.comments.first
post "/api/v1/comments/#{parent.id}", body: text, course_id: parent.course_id, user_id: User.first.id
last_response.should be_ok
parent.children.where(body: text).should_not be_empty
end
include_examples 'unicode data'
end
describe 'DELETE /api/v1/comments/:comment_id' do
it 'deletes the comment and its sub comments' do
comment = thread.comments.first
cnt_comments = comment.descendants_and_self.length
prev_count = Comment.count
delete "/api/v1/comments/#{comment.id}"
Comment.count.should == prev_count - cnt_comments
Comment.all.select { |c| c.id == comment.id }.first.should be_nil
end
it 'can delete a sub comment' do
# Sort to ensure we get the thread's first comment, rather than the child of that comment.
parent_comment = thread.comments.sort_by(&:_id).first
child_comment = parent_comment.children.first
delete "/api/v1/comments/#{child_comment.id}"
Comment.where(:id => child_comment.id).should be_empty
parent_comment.children.where(:id => child_comment.id).should be_empty
end
it 'returns 400 when the comment does not exist' do
delete '/api/v1/comments/does_not_exist'
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
end
end
end
......@@ -18,12 +18,12 @@ describe "app" do
context "when filtering by course" do
it "returns only threads with matching course id" do
[@threads["t1"], @threads["t2"]].each do |t|
[@threads["t1"], @threads["t2"]].each do |t|
t.course_id = "abc"
t.save!
end
rs = thread_result course_id: "abc", sort_order: "asc"
rs.length.should == 2
rs.each_with_index { |res, i|
check_thread_result_json(nil, @threads["t#{i+1}"], res)
res["course_id"].should == "abc"
......@@ -89,7 +89,7 @@ describe "app" do
@threads["t3"].group_id = 100
@threads["t3"].save!
rs = thread_result course_id: "omg", group_id: 100, sort_order: "asc"
rs.length.should == 2
rs.each_with_index { |res, i|
check_thread_result_json(nil, @threads["t#{i+1}"], res)
res["course_id"].should == "omg"
......@@ -97,32 +97,32 @@ describe "app" do
end
it "returns an empty result when no threads match course_id" do
rs = thread_result course_id: 99
rs.length.should == 0
end
it "returns only group-less threads when no threads have matching group id" do
@threads["t1"].group_id = 123
@threads["t1"].save!
rs = thread_result course_id: DFLT_COURSE_ID, group_id: 321
rs.each.map { |res| res["group_id"].should be_nil }
end
context "when filtering flagged posts" do
it "returns threads that are flagged" do
@threads["t1"].abuse_flaggers = [1]
@threads["t1"].save!
rs = thread_result course_id: DFLT_COURSE_ID, flagged: true
rs.length.should == 1
check_thread_result_json(nil, @threads["t1"], rs.first)
end
it "returns threads that have flagged comments" do
@comments["t2 c3"].abuse_flaggers = [1]
@comments["t2 c3"].abuse_flaggers = [1]
@comments["t2 c3"].save!
rs = thread_result course_id: DFLT_COURSE_ID, flagged: true
rs.length.should == 1
check_thread_result_json(nil, @threads["t2"], rs.first)
end
it "returns an empty result when no posts were flagged" do
rs = thread_result course_id: DFLT_COURSE_ID, flagged: true
rs.length.should == 0
end
end
it "filters unread posts" do
......@@ -180,12 +180,12 @@ describe "app" do
end
it "correctly considers read state" do
user = create_test_user(123)
[@threads["t1"], @threads["t2"]].each do |t|
[@threads["t1"], @threads["t2"]].each do |t|
t.course_id = "abc"
t.save!
end
rs = thread_result course_id: "abc", user_id: "123", sort_order: "asc"
rs.length.should == 2
rs.each_with_index { |result, i|
check_thread_result_json(user, @threads["t#{i+1}"], result)
result["course_id"].should == "abc"
......@@ -230,20 +230,20 @@ describe "app" do
context "sorting" do
def thread_result_order (sort_key, sort_order)
results = thread_result course_id: DFLT_COURSE_ID, sort_key: sort_key, sort_order: sort_order
results.length.should == 10
results.map { |t| t["title"] }
end
def move_to_end(ary, *vals)
vals.each do |val|
ary = ary.select { |v| v!=val } << val
end
ary
end
def move_to_front(ary, *vals)
vals.reverse.each do |val|
ary = ary.select { |v| v!=val }.insert(0, val)
end
ary
end
......@@ -263,7 +263,7 @@ describe "app" do
t5c.update(body: "changed!")
t5c.save!
actual_order = thread_result_order("activity", "desc")
expected_order = move_to_front(@default_order, "t5")
expected_order = move_to_front(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts using last activity / ascending" do
......@@ -271,7 +271,7 @@ describe "app" do
t5c.update(body: "changed!")
t5c.save!
actual_order = thread_result_order("activity", "asc")
expected_order = move_to_end(@default_order.reverse, "t5")
expected_order = move_to_end(@default_order.reverse, "t5")
actual_order.should == expected_order
end
it "sorts using vote count / descending" do
......@@ -280,7 +280,7 @@ describe "app" do
user.vote(t5, :up)
t5.save!
actual_order = thread_result_order("votes", "desc")
expected_order = move_to_front(@default_order, "t5")
expected_order = move_to_front(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts using vote count / ascending" do
......@@ -289,19 +289,19 @@ describe "app" do
user.vote(t5, :up)
t5.save!
actual_order = thread_result_order("votes", "asc")
expected_order = move_to_end(@default_order, "t5")
expected_order = move_to_end(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts using comment count / descending" do
make_comment(@threads["t5"].author, @threads["t5"], "extra comment")
actual_order = thread_result_order("comments", "desc")
expected_order = move_to_front(@default_order, "t5")
expected_order = move_to_front(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts using comment count / ascending" do
make_comment(@threads["t5"].author, @threads["t5"], "extra comment")
actual_order = thread_result_order("comments", "asc")
expected_order = move_to_end(@default_order, "t5")
expected_order = move_to_end(@default_order, "t5")
actual_order.should == expected_order
end
it "sorts pinned items first" do
......@@ -332,31 +332,48 @@ describe "app" do
expected_order = move_to_front(@default_order.reverse, "t7", "t8")
actual_order.should == expected_order
end
context "pagination" do
def thread_result_page (sort_key, sort_order, page, per_page, course_id=DFLT_COURSE_ID, user_id=nil, unread=false)
get "/api/v1/threads", course_id: course_id, sort_key: sort_key, sort_order: sort_order, page: page, per_page: per_page, user_id: user_id, unread: unread
last_response.should be_ok
parse(last_response.body)
end
it "returns single page with no threads in a course" do
result = thread_result_page("date", "desc", 1, 20, "99")
result["collection"].length.should == 0
result["thread_count"].should == 0
result["num_pages"].should == 1
result["page"].should == 1
end
it "returns single page" do
result = thread_result_page("date", "desc", 1, 20)
result["collection"].length.should == 10
result["thread_count"].should == 10
result["num_pages"].should == 1
result["page"].should == 1
end
it "returns multiple pages" do
result = thread_result_page("date", "desc", 1, 5)
result["collection"].length.should == 5
result["thread_count"].should == 10
result["num_pages"].should == 2
result["page"].should == 1
result = thread_result_page("date", "desc", 2, 5)
result["collection"].length.should == 5
result["thread_count"].should == 10
result["num_pages"].should == 2
result["page"].should == 2
end
it "returns page exceeding available pages with no results" do
#TODO: Review whether we can switch the pagination endpoint to raise an exception rather than return an empty page
result = thread_result_page("date", "desc", 3, 5)
result["collection"].length.should == 0
result["thread_count"].should == 10
result["num_pages"].should == 2
result["page"].should == 3
end
def test_paged_order (sort_spec, expected_order, filter_spec=[], user_id=nil)
# sort spec is a hash with keys: sort_key, sort_dir, per_page
......@@ -368,12 +385,13 @@ describe "app" do
num_pages.times do |i|
page = i + 1
result = thread_result_page(
sort_spec['sort_key'],
sort_spec['sort_dir'],
page,
per_page,
DFLT_COURSE_ID,
user_id,
filter_spec.include?("unread")
)
result["collection"].length.should == (page * per_page <= expected_order.length ? per_page : expected_order.length % per_page)
if filter_spec.include?("unread")
......@@ -383,7 +401,7 @@ describe "app" do
result["num_pages"].should == num_pages
end
result["page"].should == page
actual_order += result["collection"].map {|v| v["title"]}
actual_order += result["collection"].map { |v| v["title"] }
end
actual_order.should == expected_order
end
......@@ -393,7 +411,7 @@ describe "app" do
@threads["t7"].pinned = true
@threads["t7"].save!
expected_order = move_to_front(move_to_end(@default_order, "t5"), "t7")
test_paged_order({'sort_key' => 'comments', 'sort_dir' => 'asc', 'per_page' => 3}, expected_order)
end
it "orders correctly acrosss pages with unread filter" do
......@@ -405,21 +423,21 @@ describe "app" do
@threads["t7"].save!
expected_order = move_to_front(move_to_end(@default_order[1..8], "t5"), "t7")
test_paged_order(
{'sort_key' => 'comments', 'sort_dir' => 'asc', 'per_page' => 3},
expected_order,
["unread"],
user.id
)
end
end
end
end
def test_unicode_data(text)
course_id = "unicode_course"
thread = make_thread(User.first, text, course_id, "unicode_commentable")
make_comment(User.first, thread, text)
course_id = 'unicode_course'
thread = create(:comment_thread, body: text, course_id: course_id)
create(:comment, comment_thread: thread, body: text)
result = thread_result(course_id: course_id).first
check_thread_result_json(nil, thread, result)
end
......@@ -427,98 +445,118 @@ describe "app" do
include_examples "unicode data"
end
describe "GET /api/v1/threads/:thread_id" do
describe 'GET /api/v1/threads/:thread_id' do
let(:thread) do
comment = create(:comment)
comment.comment_thread
end
subject do
get "/api/v1/threads/#{thread.id}"
end
it "get information of a single comment thread" do
thread = CommentThread.first
get "/api/v1/threads/#{thread.id}"
last_response.should be_ok
response_thread = parse last_response.body
check_thread_result_json(nil, thread, response_thread)
it { should be_ok }
it 'returns JSON' do
expect(subject.content_type).to eq 'application/json;charset=utf-8'
end
it "computes endorsed correctly" do
thread = CommentThread.first
comment = thread.root_comments[1]
it 'get information of a single comment thread' do
check_thread_result_json(nil, thread, parse(subject.body))
end
it 'computes endorsed correctly' do
comment = thread.root_comments[0]
comment.endorsed = true
comment.save!
get "/api/v1/threads/#{thread.id}"
last_response.should be_ok
response_thread = parse last_response.body
response_thread["endorsed"].should == true
check_thread_result_json(nil, thread, response_thread)
end
# This is a test to ensure that the username is included even if the
# thread's author is the one looking at the comment. This is because of a
# regression in which we used User.only(:id, :read_states). This worked
# before we included the identity map, but afterwards, the user was
# missing the username and was not refetched.
it "includes the username even if the thread is being marked as read for the thread author" do
thread = CommentThread.first
expected_username = thread.author.username
expect(subject).to be_ok
parsed = parse(subject.body)
expect(parsed).to include('endorsed' => true)
thread.reload
check_thread_result_json(nil, thread, parsed)
end
context 'when marking as read' do
subject do
get "/api/v1/threads/#{thread.id}", {:user_id => thread.author.id, :mark_as_read => true}
end
get "/api/v1/threads/#{thread.id}", {:user_id => thread.author_id, :mark_as_read => true}
last_response.should be_ok
response_thread = parse last_response.body
response_thread["username"].should == expected_username
it { should be_ok }
# This is a test to ensure that the username is included even if the
# thread's author is the one looking at the comment. This is because of a
# regression in which we used User.only(:id, :read_states). This worked
# before we included the identity map, but afterwards, the user was
# missing the username and was not refetched.
# BBEGGS - Note 8/4/2015: Identify map has been removed during the mongoid 4.x upgrade.
# Should no longer be an issue.
it 'includes the username even if the thread is being marked as read for the thread author' do
expect(parse(subject.body)).to include('username' => thread.author.username)
end
end
it "get information of a single comment thread with its comments" do
thread = CommentThread.first
get "/api/v1/threads/#{thread.id}", recursive: true
last_response.should be_ok
check_thread_result_json(nil, thread, parse(last_response.body))
check_thread_response_paging_json(thread, parse(last_response.body))
context 'with comments' do
subject do
get "/api/v1/threads/#{thread.id}", recursive: true
end
it { should be_ok }
it 'get information of a single comment thread with its comments' do
parsed = parse(subject.body)
check_thread_result_json(nil, thread, parsed)
check_thread_response_paging_json(thread, parsed)
end
end
it "returns 404 when the thread does not exist" do
thread = CommentThread.first
path = "/api/v1/threads/#{thread.id}"
get path
last_response.should be_ok
it 'returns 404 when the thread does not exist' do
thread.destroy
expect(subject.status).to eq 404
expect(parse(last_response.body).first).to eq I18n.t(:requested_object_not_found)
end
context 'with user specified' do
let(:user) { create(:user) }
subject do
user.mark_as_read(thread)
get "/api/v1/threads/#{thread.id}", user_id: user.id
last_response
end
it { should be_ok }
it 'marks thread as read and confirms its value on returned response' do
parsed = parse(subject.body)
thread.reload
check_thread_result_json(user, thread, parsed)
expect(parsed).to include('read' => true)
end
end
def test_unicode_data(text)
thread = create(:comment_thread, body: text)
create(:comment, comment_thread: thread, body: text)
get "/api/v1/threads/#{thread.id}", recursive: true
expect(last_response).to be_ok
parsed = parse(last_response.body)
check_thread_result_json(nil, thread, parsed)
check_thread_response_paging_json(thread, parsed)
end
include_examples "unicode data"
include_examples 'unicode data'
context "response pagination" do
before(:each) do
User.all.delete
Content.all.delete
@user = create_test_user(999)
@threads = {}
@comments = {}
[20, 10, 3, 2, 1, 0].each do |n|
thread_key = "t#{n}"
thread = make_thread(@user, thread_key, DFLT_COURSE_ID, "pdq")
@threads[n] = thread
......@@ -544,29 +582,29 @@ describe "app" do
it "returns all responses when no skip/limit params given" do
@threads.each do |n, thread|
res = thread_result thread.id, {}
check_thread_response_paging_json thread, res, 0, nil, false
end
end
it "skips the specified number of responses" do
@threads.each do |n, thread|
res = thread_result thread.id, {:resp_skip => 1}
check_thread_response_paging_json thread, res, 1, nil, false
end
end
it "limits the specified number of responses" do
@threads.each do |n, thread|
res = thread_result thread.id, {:resp_limit => 2}
check_thread_response_paging_json thread, res, 0, 2, false
end
end
it "skips and limits responses" do
@threads.each do |n, thread|
res = thread_result thread.id, {:resp_skip => 3, :resp_limit => 5}
check_thread_response_paging_json thread, res, 3, 5, false
end
end
end
......@@ -575,8 +613,8 @@ describe "app" do
describe "PUT /api/v1/threads/:thread_id" do
before(:each) { init_without_subscriptions }
it "update information of comment thread" do
it "updates information of comment thread" do
thread = CommentThread.first
comment = thread.comments.first
comment.endorsed = true
......@@ -592,7 +630,7 @@ describe "app" do
comment.reload
comment.endorsed.should == false
comment.endorsement.should == nil
check_unread_thread_result_json(changed_thread, parse(last_response.body))
end
it "returns 400 when the thread does not exist" do
put "/api/v1/threads/does_not_exist", body: "new body", title: "new title"
......@@ -616,6 +654,7 @@ describe "app" do
thread = CommentThread.first
put "/api/v1/threads/#{thread.id}", body: text, title: text
last_response.should be_ok
thread = CommentThread.find(thread.id)
thread.body.should == text
thread.title.should == text
end
......@@ -626,7 +665,7 @@ describe "app" do
before(:each) { init_without_subscriptions }
let :default_params do
{body: "new comment", course_id: "1", user_id: User.first.id}
end
it "create a comment to the comment thread" do
......@@ -635,11 +674,13 @@ describe "app" do
orig_count = thread.comment_count
post "/api/v1/threads/#{thread.id}/comments", default_params
last_response.should be_ok
retrieved = parse last_response.body
changed_thread = CommentThread.find(thread.id)
changed_thread.comment_count.should == orig_count + 1
comment = changed_thread.comments.select { |c| c["body"] == "new comment" }.first
comment.should_not be_nil
comment.author_id.should == user.id
retrieved["child_count"].should == 0
end
it "allows anonymous comment" do
thread = CommentThread.first
......@@ -649,7 +690,7 @@ describe "app" do
last_response.should be_ok
changed_thread = CommentThread.find(thread.id)
changed_thread.comment_count.should == orig_count + 1
comment = changed_thread.comments.select { |c| c["body"] == "new comment" }.first
comment.should_not be_nil
comment.anonymous.should be_true
end
......@@ -681,18 +722,29 @@ describe "app" do
include_examples "unicode data"
end
describe "DELETE /api/v1/threads/:thread_id" do
before(:each) { init_without_subscriptions }
it "delete the comment thread and its comments" do
thread = CommentThread.first.to_hash
delete "/api/v1/threads/#{thread['id']}"
last_response.should be_ok
CommentThread.where(title: thread["title"]).first.should be_nil
describe 'DELETE /api/v1/threads/:thread_id' do
let(:thread) { create_comment_thread_and_comments }
subject { delete "/api/v1/threads/#{thread.id}" }
it { should be_ok }
it 'deletes the comment thread and its comments' do
expect(CommentThread.where(id: thread.id).count).to eq 1
expect(Comment.where(comment_thread: thread).count).to eq 2
subject
expect(CommentThread.where(id: thread.id).count).to eq 0
expect(Comment.where(comment_thread: thread).count).to eq 0
end
it "returns 400 when the thread does not exist" do
delete "/api/v1/threads/does_not_exist"
last_response.status.should == 400
parse(last_response.body).first.should == I18n.t(:requested_object_not_found)
context 'when thread does not exist' do
subject { delete '/api/v1/threads/does_not_exist' }
it 'returns 400 when the thread does not exist' do
expect(subject.status).to eq 400
expect(parse(subject.body).first).to eq I18n.t(:requested_object_not_found)
end
end
end
end
......
require 'spec_helper'
require 'unicode_shared_examples'
describe "app" do
describe "commentables" do
describe 'app' do
describe 'commentables' do
before(:each) { set_api_key_header }
let(:commentable_id) { Faker::Lorem.word }
before(:each) do
init_without_subscriptions
set_api_key_header
end
describe 'DELETE /api/v1/:commentable_id/threads' do
it 'deletes all associated threads and comments of a commentable' do
thread_count = 2
create_list(:comment_thread, thread_count, commentable_id: commentable_id)
expect(Commentable.find(commentable_id).comment_threads.count).to eq thread_count
describe "DELETE /api/v1/:commentable_id/threads" do
it "delete all associated threads and comments of a commentable" do
delete '/api/v1/question_1/threads'
last_response.should be_ok
Commentable.find("question_1").comment_threads.count.should == 0
delete "/api/v1/#{commentable_id}/threads"
expect(last_response).to be_ok
expect(Commentable.find(commentable_id).comment_threads.count).to eq 0
end
it "handle normally when commentable does not exist" do
delete '/api/v1/does_not_exist/threads'
last_response.should be_ok
context 'if the commentable does not exist' do
subject { delete '/api/v1/does_not_exist/threads' }
it { should be_ok }
end
end
describe "GET /api/v1/:commentable_id/threads" do
def thread_result(commentable_id, params={})
get "/api/v1/#{commentable_id}/threads", params
last_response.should be_ok
parse(last_response.body)["collection"]
end
it "get all comment threads associated with a commentable object" do
threads = thread_result "question_1"
threads.length.should == 2
threads.index{|c| c["body"] == "can anyone help me?"}.should_not be_nil
threads.index{|c| c["body"] == "it is unsolvable"}.should_not be_nil
end
it "returns standalone threads if explicitly requested" do
threads = thread_result "question_1", context: "standalone"
threads.length.should == 1
threads[0]["body"].should == "no one can see us"
end
it "filters by course_id" do
course1_threads = thread_result "question_1", course_id: "1"
course1_threads.length.should == 1
course2_threads = thread_result "question_1", course_id: "2"
course2_threads.length.should == 1
course1_threads.should_not == course2_threads
end
it "filters by group_id" do
group_thread = Commentable.find("question_1").comment_threads.first
threads = thread_result "question_1", group_id: 42
threads.length.should == 2
group_thread.group_id = 43
group_thread.save!
threads = thread_result "question_1", group_id: 42
threads.length.should == 1
group_thread.group_id = 42
group_thread.save!
threads = thread_result "question_1", group_id: 42
threads.length.should == 2
end
it "filters by group_ids" do
group_thread = Commentable.find("question_1").comment_threads.first
group_thread.group_id = 42
group_thread.save!
threads = thread_result "question_1", group_ids: "42,43"
threads.length.should == 2
group_thread.group_id = 43
group_thread.save!
threads = thread_result "question_1", group_ids: "42,43"
threads.length.should == 2
group_thread.group_id = 44
group_thread.save
threads = thread_result "question_1", group_ids: "42,43"
threads.length.should == 1
end
it "returns an empty array when the commentable object does not exist (no threads)" do
threads = thread_result "does_not_exist"
threads.length.should == 0
describe 'GET /api/v1/:commentable_id/threads' do
let(:returned_threads) { parse(subject.body)['collection'] }
subject { get "/api/v1/#{commentable_id}/threads" }
shared_examples_for 'a filterable API endpoint' do
let!(:ignored_threads) { create_list(:comment_thread, 3, commentable_id: commentable_id) }
subject { get "/api/v1/#{commentable_id}/threads", parameters }
it { should be_ok }
it 'returns the correct CommentThreads' do
expect(returned_threads.length).to eq threads.length
threads.sort_by!(&:_id).reverse!
threads.each_with_index do |thread, index|
expect(returned_threads[index]).to include('id' => thread.id.to_s, 'body' => thread.body)
end
end
end
context 'without filtering' do
let(:parameters) { {} }
let!(:threads) { ignored_threads + create_list(:comment_thread, 3, :with_group_id, commentable_id: commentable_id) }
it_behaves_like 'a filterable API endpoint'
end
context 'when filtering by the standalone context' do
let(:parameters) { {context: :standalone} }
let!(:threads) { create_list(:comment_thread, 3, commentable_id: commentable_id, context: :standalone) }
it_behaves_like 'a filterable API endpoint'
end
context 'when filtering by course_id' do
let(:course_id) { Faker::Lorem.word }
let(:parameters) { {course_id: course_id} }
let!(:threads) { create_list(:comment_thread, 3, commentable_id: commentable_id, course_id: course_id) }
it_behaves_like 'a filterable API endpoint'
end
context 'when filtering by group_id' do
let(:group_id) { Faker::Number.number(4) }
let(:parameters) { {group_id: group_id} }
let!(:threads) { create_list(:comment_thread, 3, commentable_id: commentable_id, group_id: group_id) }
it_behaves_like 'a filterable API endpoint'
end
context 'when filtering by multiple group_id values' do
let(:group_ids) { [Faker::Number.number(4), Faker::Number.number(4)] }
let(:parameters) { {group_ids: group_ids.join(',')} }
it_behaves_like 'a filterable API endpoint' do
let!(:threads) do
threads = []
group_ids.each do |group_id|
threads += create_list(:comment_thread, 3, commentable_id: commentable_id, group_id: group_id)
end
threads
end
end
end
context 'when the commentable does not exist' do
subject { get '/api/v1/does_not_exist/threads' }
it { should be_ok }
it 'should not return any results' do
expect(returned_threads.length).to eq 0
end
end
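The filtering specs above lean on RSpec shared example groups: it_behaves_like creates a nested example group, so a block passed to it can supply or override let definitions, which is how the multiple-group_ids context injects its own threads. A minimal, self-contained sketch of the pattern (hypothetical names):

shared_examples_for 'a sized collection' do
  it 'has the expected length' do
    expect(items.length).to eq expected_length
  end
end

describe 'it_behaves_like with let overrides' do
  let(:expected_length) { 2 }

  it_behaves_like 'a sized collection' do
    # lets defined in this block supply or override those of the outer group
    let(:items) { [1, 2] }
  end
end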
def test_unicode_data(text)
commentable_id = "unicode_commentable"
thread = make_thread(User.first, text, "unicode_course", commentable_id)
make_comment(User.first, thread, text)
commentable_id = 'unicode_commentable'
thread = create(:comment_thread, commentable_id: commentable_id, body: text)
create(:comment, comment_thread: thread, body: text)
get "/api/v1/#{commentable_id}/threads"
last_response.should be_ok
result = parse(last_response.body)["collection"]
result = parse(last_response.body)['collection']
result.should_not be_empty
check_thread_result_json(nil, thread, result.first)
end
include_examples "unicode data"
include_examples 'unicode data'
end
describe "POST /api/v1/:commentable_id/threads" do
let(:default_params) do
{title: "Interesting question", body: "cool", course_id: "1", user_id: "1"}
describe 'POST /api/v1/:commentable_id/threads' do
let(:commentable_id) { Faker::Lorem.word }
let(:user) { create(:user) }
let(:parameters) { attributes_for(:comment_thread, user_id: user.id) }
subject { post "/api/v1/#{commentable_id}/threads", parameters }
shared_examples_for 'CommentThread creation API' do |context='course'|
it 'creates a new CommentThread' do
expect(CommentThread.count).to eq 0
body = parse(subject.body)
expect(body).to include('read' => false,
'unread_comments_count' => 0,
'endorsed' => false,
'resp_total' => 0)
expect(CommentThread.count).to eq 1
thread = CommentThread.find(body['id'])
expect(thread).to_not be_nil
expect(thread.context).to eq context
end
end
it "create a new comment thread for the commentable object" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params
last_response.should be_ok
result = parse(last_response.body)
result["read"].should == false
result["unread_comments_count"].should == 0
result["endorsed"].should == false
CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first
thread.should_not be_nil
thread.context.should == "course"
end
it "can create a standalone thread" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(:context => "standalone")
CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first
thread.should_not be_nil
thread.context.should == "standalone"
it { should be_ok }
it_behaves_like 'CommentThread creation API'
it_behaves_like 'CommentThread creation API', 'standalone' do
let(:parameters) { attributes_for(:comment_thread, user_id: user.id, context: 'standalone') }
end
CommentThread.thread_type.values.each do |thread_type|
it "can create a #{thread_type} thread" do
old_count = CommentThread.where(thread_type: thread_type).count
post "/api/v1/question_1/threads", default_params.merge(thread_type: thread_type.to_s)
post '/api/v1/question_1/threads', parameters.merge(thread_type: thread_type.to_s)
last_response.should be_ok
parse(last_response.body)["thread_type"].should == thread_type.to_s
parse(last_response.body)['thread_type'].should == thread_type.to_s
CommentThread.where(thread_type: thread_type).count.should == old_count + 1
end
end
it "allows anonymous thread" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(anonymous: true)
last_response.should be_ok
CommentThread.count.should == old_count + 1
c = CommentThread.where(title: "Interesting question").first
c.should_not be_nil
c["anonymous"].should be_true
end
it "create a new comment thread for a new commentable object" do
post '/api/v1/does_not_exist/threads', default_params
it 'allows anonymous thread' do
post '/api/v1/question_1/threads', parameters.merge!(anonymous: true)
last_response.should be_ok
Commentable.find("does_not_exist").comment_threads.length.should == 1
Commentable.find("does_not_exist").comment_threads.first.body.should == "cool"
body = parse(subject.body)
thread = CommentThread.find(body['id'])
expect(thread).to_not be_nil
expect(thread['anonymous']).to be_true
end
it "returns error when title, body or course id does not exist" do
params = default_params.dup
params.delete(:title)
post '/api/v1/question_1/threads', params
last_response.status.should == 400
params = default_params.dup
params.delete(:body)
post '/api/v1/question_1/threads', params
last_response.status.should == 400
params = default_params.dup
params.delete(:course_id)
post '/api/v1/question_1/threads', params
last_response.status.should == 400
it 'returns an error when title, body, or course_id is missing' do
[:title, :body, :course_id].each do |parameter|
params = parameters.dup
params.delete(parameter)
post '/api/v1/question_1/threads', params
last_response.status.should == 400
end
end
it "returns error when title or body is blank (only consists of spaces and new lines)" do
post '/api/v1/question_1/threads', default_params.merge(title: " ")
post '/api/v1/question_1/threads', parameters.merge(title: " ")
last_response.status.should == 400
post '/api/v1/question_1/threads', default_params.merge(body: " \n \n")
post '/api/v1/question_1/threads', parameters.merge(body: " \n \n")
last_response.status.should == 400
end
it "returns 503 and does not create when the post content is blocked" do
post '/api/v1/question_1/threads', default_params.merge(body: "BLOCKED POST")
it 'returns 503 and does not create when the post content is blocked' do
body = 'BLOCKED POST'
hash = block_post_body
post '/api/v1/question_1/threads', parameters.merge!(body: body)
last_response.status.should == 503
parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => Digest::MD5.hexdigest("blocked post"))
CommentThread.where(body: "BLOCKED POST").to_a.should be_empty
parse(last_response.body).first.should == I18n.t(:blocked_content_with_body_hash, :hash => hash)
expect(CommentThread.where(body: body).length).to eq 0
end
def test_unicode_data(text)
commentable_id = "unicode_commentable"
post "/api/v1/#{commentable_id}/threads", default_params.merge(body: text, title: text)
commentable_id = 'unicode_commentable'
post "/api/v1/#{commentable_id}/threads", parameters.merge!(body: text, title: text)
last_response.should be_ok
CommentThread.where(commentable_id: commentable_id, body: text, title: text).should_not be_empty
expect(CommentThread.where(commentable_id: commentable_id, body: text, title: text)).to_not be_empty
end
include_examples "unicode data"
include_examples 'unicode data'
end
end
end
require "spec_helper"
require 'spec_helper'
describe "i18n" do
describe 'i18n' do
before(:each) { set_api_key_header }
it "should respect the Accept-Language header" do
put "/api/v1/comments/does_not_exist/votes", {}, {"HTTP_ACCEPT_LANGUAGE" => "x-test"}
it 'should respect the Accept-Language header' do
put '/api/v1/comments/does_not_exist/votes', {}, {'HTTP_ACCEPT_LANGUAGE' => 'x-test'}
last_response.status.should == 400
parse(last_response.body).first.should == "##x-test## requested object not found"
parse(last_response.body).first.should == '##x-test## requested object not found'
end
end
......@@ -17,7 +17,7 @@ describe "app" do
random_string = (0...8).map{ ('a'..'z').to_a[rand(26)] }.join
thread = CommentThread.new(
title: "Test title", body: "elephant otter", course_id: "1",
commentable_id: commentable.id, comments_text_dummy: random_string
commentable_id: commentable.id, body: random_string
)
thread.thread_type = :discussion
thread.author = user
......@@ -94,7 +94,8 @@ describe "app" do
subscription = Subscription.create({:subscriber_id => user._id.to_s, :source_id => thread._id.to_s})
comment = Comment.new(body: "dummy body text", course_id: "1", commentable_id: commentable.id)
comment = Comment.new(body: "dummy body text", course_id: "1")
comment.commentable_id = commentable.id
comment.author = user
comment.comment_thread = thread
comment.save!
......
require 'spec_helper'
require 'faker'
describe "app" do
before (:each) { set_api_key_header }
describe 'app' do
before(:each) { set_api_key_header }
let(:body) { Faker::Lorem.word }
let(:author) { create_test_user(1) }
describe "thread search" do
describe "GET /api/v1/search/threads" do
it "returns thread with query match" do
commentable = Commentable.new("question_1")
describe 'GET /api/v1/search/threads' do
random_string = (0...8).map{ ('a'..'z').to_a[rand(26)] }.join
shared_examples_for 'a search endpoint' do
subject do
refresh_es_index
get '/api/v1/search/threads', text: body
end
thread = CommentThread.new(title: "Test title", body: random_string, course_id: "1", commentable_id: commentable.id)
thread.thread_type = :discussion
thread.author = author
thread.save!
let(:matched_thread) { parse(subject.body)['collection'].select { |t| t['id'] == thread.id.to_s }.first }
sleep 3
it { should be_ok }
get "/api/v1/search/threads", text: random_string
last_response.should be_ok
threads = parse(last_response.body)['collection']
check_thread_result_json(nil, thread, threads.select{|t| t["id"] == thread.id.to_s}.first)
it 'returns thread with query match' do
expect(matched_thread).to_not be_nil
check_thread_result_json(nil, thread, matched_thread)
end
end
end
describe "comment search" do
describe "GET /api/v1/search/threads" do
it "returns thread with comment query match" do
commentable = Commentable.new("question_1")
random_string = (0...8).map{ ('a'..'z').to_a[rand(26)] }.join
thread = CommentThread.new(title: "Test title", body: "elephant otter", course_id: "1", commentable_id: commentable.id)
thread.thread_type = :discussion
thread.author = author
thread.save!
context 'when searching on thread content' do
let!(:thread) { create(:comment_thread, body: body) }
sleep 3
comment = Comment.new(body: random_string, course_id: "1", commentable_id: commentable.id)
comment.author = author
comment.comment_thread = thread
comment.save!
sleep 1
it_behaves_like 'a search endpoint'
end
get "/api/v1/search/threads", text: random_string
last_response.should be_ok
threads = parse(last_response.body)['collection']
check_thread_result_json(nil, thread, threads.select{|t| t["id"] == thread.id.to_s}.first)
context 'when searching on comment content' do
let!(:thread) do
comment = create(:comment, body: body)
comment.comment_thread
end
it_behaves_like 'a search endpoint'
end
end
end
......@@ -361,5 +361,22 @@ describe "app" do
include_examples "unicode data"
end
describe "POST /api/v1/users/:user_id/read" do
before(:each) { setup_10_threads }
it "marks a thread as read for the user" do
thread = @threads["t0"]
user = create_test_user(42)
post "/api/v1/users/#{user.external_id}/read", source_type: "thread", source_id: thread.id
last_response.should be_ok
user.reload
read_states = user.read_states.where(course_id: thread.course_id).to_a
read_date = read_states.first.last_read_times[thread.id.to_s]
read_date.should >= thread.updated_at
end
end
end
end
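The read-marking assertions above imply the storage shape: per user and per course, a map from thread id (as a string) to the time the thread was last read. Roughly (a sketch; the id and timestamp are hypothetical):

read_state = user.read_states.where(course_id: thread.course_id).first
read_state.last_read_times
# => { "53c9b1..." => 2015-06-01 12:00:00 UTC }  (hypothetical values)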
......@@ -45,8 +45,11 @@ describe "app" do
context "db check" do
def test_db_check(response, is_success)
db = double("db")
stub_const("Mongoid::Sessions", Class.new).stub(:default).and_return(db)
db.should_receive(:command).with({:isMaster => 1}).and_return(response)
stub_const("Mongoid::Clients", Class.new).stub(:default).and_return(db)
result = double('result')
result.stub(:ok?).and_return(response['ok'] == 1)
result.stub(:documents).and_return([response])
db.should_receive(:command).with({:isMaster => 1}).and_return(result)
get "/heartbeat"
if is_success
last_response.status.should == 200
......@@ -75,7 +78,7 @@ describe "app" do
it "reports failure when db command raises an error" do
db = double("db")
stub_const("Mongoid::Sessions", Class.new).stub(:default).and_return(db)
stub_const("Mongoid::Clients", Class.new).stub(:default).and_return(db)
db.should_receive(:command).with({:isMaster => 1}).and_raise(StandardError)
get "/heartbeat"
last_response.status.should == 500
......@@ -168,4 +171,4 @@ describe "app" do
end
end
end
\ No newline at end of file
end
require 'faker'
# Reload i18n data for faker
I18n.reload!
FactoryGirl.define do
factory :user do
# Initialize the model with all attributes since we are using a custom _id field.
# See https://github.com/thoughtbot/factory_girl/issues/544.
initialize_with { new(attributes) }
sequence(:username) { |n| "#{Faker::Internet.user_name}_#{n}" }
sequence(:external_id) { username }
end
factory :comment_thread do
title { Faker::Lorem.sentence }
body { Faker::Lorem.paragraph }
course_id { Faker::Lorem.word }
thread_type :discussion
commentable_id { Faker::Lorem.word }
association :author, factory: :user
group_id nil
pinned false
trait :subscribe_author do
after(:create) do |thread|
thread.author.subscribe(thread)
end
end
trait :with_group_id do
group_id { Faker::Number.number(4) }
end
end
factory :comment do
association :author, factory: :user
comment_thread { parent ? parent.comment_thread : create(:comment_thread) }
body { Faker::Lorem.paragraph }
course_id { comment_thread.course_id }
commentable_id { comment_thread.commentable_id }
endorsed false
end
end
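For reference, the specs exercise these factory definitions roughly as follows (a sketch; FactoryGirl's syntax methods are mixed in by spec/support/factory_girl.rb, shown below):

# Attribute hash for a POST body, without touching the database.
params = FactoryGirl.attributes_for(:comment_thread, user_id: '1')

# A persisted thread whose group_id is filled in by the :with_group_id trait.
thread = FactoryGirl.create(:comment_thread, :with_group_id)

# Three threads for one commentable, as the filtering specs build them.
FactoryGirl.create_list(:comment_thread, 3, commentable_id: 'question_1')

# A child comment inherits comment_thread (and hence course_id) from its parent.
parent = FactoryGirl.create(:comment)
child  = FactoryGirl.create(:comment, parent: parent)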
......@@ -69,4 +69,32 @@ describe Comment do
end
end
end
describe '#child_count' do
context 'with course_thread' do
it 'returns cached child count' do
comment = make_comment(author, course_thread, "comment")
child_comment = make_comment(author, comment, "comment")
expect(comment.get_cached_child_count).to eq(1)
end
it 'recomputes the cached child count when it is nil' do
comment = make_comment(author, course_thread, "comment")
child_comment = make_comment(author, comment, "comment")
comment.child_count = nil
expect(comment.get_cached_child_count).to eq(1)
end
it 'updates cached child count' do
comment = make_comment(author, course_thread, "comment")
expect(comment.get_cached_child_count).to eq(0)
comment.child_count = 2
expect(comment.get_cached_child_count).to eq(2)
comment.update_cached_child_count
expect(comment.get_cached_child_count).to eq(0)
end
end
end
end
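The expectations above pin down the caching contract; a plausible shape for the model methods is sketched below (inferred from the specs, not code from this change):

# Sketch only: inferred from the #child_count specs above.
def get_cached_child_count
  update_cached_child_count if child_count.nil? # recompute on a cache miss
  child_count
end

def update_cached_child_count
  # Recount the actual children and cache the result on the document.
  set(child_count: Comment.where(parent_id: _id).count)
end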
......@@ -77,19 +77,33 @@ describe ThreadPresenter do
@reader = create_test_user('thread reader')
end
it "handles with_responses=false" do
it "handles with_responses=false and recursive has no impact" do
@threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash
# with response=false and recursive=false
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(false, 0, nil, false)
check_thread_result(@reader, thread, hash)
['children', 'resp_skip', 'resp_limit', 'resp_total'].each {|k| (hash.has_key? k).should be_false }
# with response=false and recursive=true
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(false, 0, nil, true)
check_thread_result(@reader, thread, hash)
['children', 'resp_skip', 'resp_limit', 'resp_total'].each {|k| (hash.has_key? k).should be_false }
end
end
it "handles with_responses=true and recursive=true" do
@threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, nil, true)
check_thread_result(@reader, thread, hash)
check_thread_response_paging(thread, hash, 0, nil, false, true)
end
end
it "handles with_responses=true" do
it "handles with_responses=true and recursive=false" do
@threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash true
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, nil, false)
check_thread_result(@reader, thread, hash)
check_thread_response_paging(thread, hash)
end
......@@ -99,7 +113,7 @@ describe ThreadPresenter do
@threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses
[0, 1, 2, 9, 10, 11, 1000].each do |skip|
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash true, skip
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, skip, nil, true)
check_thread_result(@reader, thread, hash)
check_thread_response_paging(thread, hash, skip)
end
......@@ -111,7 +125,7 @@ describe ThreadPresenter do
is_endorsed = num_comments > 0 && endorse_responses
[1, 2, 3, 9, 10, 11, 1000].each do |limit|
[0, 1, 2, 9, 10, 11, 1000].each do |skip|
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash true, skip, limit
hash = ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, skip, limit, true)
check_thread_result(@reader, thread, hash)
check_thread_response_paging(thread, hash, skip, limit)
end
......@@ -122,9 +136,9 @@ describe ThreadPresenter do
it "fails with invalid arguments" do
@threads_with_num_comments.each do |thread, num_comments|
is_endorsed = num_comments > 0 && endorse_responses
expect{ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, -1, nil)}.to raise_error(ArgumentError)
expect{ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, -1, nil, true)}.to raise_error(ArgumentError)
[-1, 0].each do |limit|
expect{ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, limit)}.to raise_error(ArgumentError)
expect{ThreadPresenter.new(thread, @reader, false, num_comments, is_endorsed).to_hash(true, 0, limit, true)}.to raise_error(ArgumentError)
end
end
end
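Taken together, these calls fix the presenter's expanded signature as to_hash(with_responses = false, resp_skip = 0, resp_limit = nil, recursive = false) (inferred from the specs above); a typical paged, non-recursive call looks like:

presenter = ThreadPresenter.new(thread, reader, false, num_comments, is_endorsed)
hash = presenter.to_hash(true, 0, 10, false) # first 10 responses, children omitted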
......
ENV["SINATRA_ENV"] = "test"
require 'simplecov'
SimpleCov.start
if ENV['CI'] == 'true'
require 'codecov'
SimpleCov.formatter = SimpleCov::Formatter::Codecov
end
require File.join(File.dirname(__FILE__), '..', 'app')
require 'sinatra'
require 'rack/test'
require 'sinatra'
require 'yajl'
require 'database_cleaner'
require 'support/database_cleaner'
require 'support/elasticsearch'
require 'support/factory_girl'
# setup test environment
set :environment, :test
......@@ -15,6 +23,9 @@ set :run, false
set :raise_errors, true
set :logging, false
Mongoid.logger.level = Logger::WARN
Mongo::Logger.logger.level = ENV["ENABLE_MONGO_DEBUGGING"] ? Logger::DEBUG : Logger::WARN
Delayed::Worker.delay_jobs = false
def app
......@@ -28,36 +39,12 @@ def set_api_key_header
current_session.header "X-Edx-Api-Key", TEST_API_KEY
end
def delete_es_index
Tire.index Content::ES_INDEX_NAME do delete end
end
def create_es_index
new_index = Tire.index Content::ES_INDEX_NAME
new_index.create
[CommentThread, Comment].each do |klass|
klass.put_search_index_mapping
end
end
def refresh_es_index
# we are using the same index for two types, which is against the
# grain of Tire's design. This is why this method works for both
# comment_threads and comments.
CommentThread.tire.index.refresh
end
RSpec.configure do |config|
config.include Rack::Test::Methods
config.treat_symbols_as_metadata_keys_with_true_values = true
config.filter_run focus: true
config.run_all_when_everything_filtered = true
config.before(:each) do
Mongoid::IdentityMap.clear
DatabaseCleaner.clean
delete_es_index
create_es_index
end
end
Mongoid.configure do |config|
......@@ -72,16 +59,24 @@ def create_test_user(id)
User.create!(external_id: id.to_s, username: "user#{id}")
end
def init_without_subscriptions
# Add the given body of text to the list of blocked texts/hashes.
def block_post_body(body='blocked post')
body = body.strip.downcase.gsub(/[^a-z ]/, '').gsub(/\s+/, ' ')
blocked_hash = Digest::MD5.hexdigest(body)
Content.mongo_client[:blocked_hash].insert_one(hash: blocked_hash)
# reload the global holding the blocked hashes
CommentService.blocked_hashes = Content.mongo_client[:blocked_hash].find(nil, projection: {hash: 1}).map do |d|
d['hash']
end
blocked_hash
end
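Note the normalization step: the helper hashes a canonical form of the body, so differently formatted variants of the same text map to a single blocked hash. For example (a sketch):

require 'digest'

# Punctuation, case, and runs of whitespace are normalized away before
# hashing, so both calls store the same hash:
block_post_body('Blocked POST!!')   # => Digest::MD5.hexdigest('blocked post')
block_post_body("blocked \n post")  # => Digest::MD5.hexdigest('blocked post')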
[Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:delete_all).each(&:remove_indexes).each(&:create_indexes)
Content.mongo_session[:blocked_hash].drop
delete_es_index
create_es_index
def init_without_subscriptions
commentable = Commentable.new("question_1")
users = (1..10).map{|id| create_test_user(id)}
users = (1..10).map { |id| create_test_user(id) }
user = users.first
thread = CommentThread.new(title: "I can't solve this problem", body: "can anyone help me?", course_id: "1", commentable_id: commentable.id)
......@@ -150,63 +145,15 @@ def init_without_subscriptions
Comment.all.each do |c|
user.vote(c, :up) # make the first user always vote up for convenience
users[2,9].each {|user| user.vote(c, [:up, :down].sample)}
users[2, 9].each { |user| user.vote(c, [:up, :down].sample) }
end
CommentThread.all.each do |c|
user.vote(c, :up) # make the first user always vote up for convenience
users[2,9].each {|user| user.vote(c, [:up, :down].sample)}
users[2, 9].each { |user| user.vote(c, [:up, :down].sample) }
end
Content.mongo_session[:blocked_hash].insert(hash: Digest::MD5.hexdigest("blocked post"))
# reload the global holding the blocked hashes
CommentService.blocked_hashes = Content.mongo_session[:blocked_hash].find.select(hash: 1).each.map {|d| d["hash"]}
end
def init_with_subscriptions
[Comment, CommentThread, User, Notification, Subscription, Activity, Delayed::Backend::Mongoid::Job].each(&:delete_all).each(&:remove_indexes).each(&:create_indexes)
delete_es_index
create_es_index
user1 = create_test_user(1)
user2 = create_test_user(2)
user2.subscribe(user1)
commentable = Commentable.new("question_1")
user1.subscribe(commentable)
user2.subscribe(commentable)
thread = CommentThread.new(title: "I can't solve this problem", body: "can anyone help me?", course_id: "1", commentable_id: commentable.id)
thread.author = user1
user1.subscribe(thread)
user2.subscribe(thread)
thread.save!
thread = thread.reload
comment = thread.comments.new(body: "this problem is so easy", course_id: "1")
comment.author = user2
comment.save!
comment1 = comment.children.new(body: "not for me!", course_id: "1")
comment1.author = user1
comment1.comment_thread = thread
comment1.save!
comment2 = comment1.children.new(body: "not for me neither!", course_id: "1")
comment2.author = user2
comment2.comment_thread = thread
comment2.save!
thread = CommentThread.new(title: "This problem is wrong", body: "it is unsolvable", course_id: "2", commentable_id: commentable.id)
thread.author = user2
user2.subscribe(thread)
thread.save!
thread = CommentThread.new(title: "I don't know what to say", body: "lol", course_id: "2", commentable_id: "something else")
thread.author = user1
thread.save!
block_post_body
end
# This method is used to test results produced by the helper function handle_threads_query.
......@@ -215,28 +162,28 @@ def check_thread_result(user, thread, hash, is_json=false)
expected_keys = %w(id thread_type title body course_id commentable_id created_at updated_at context)
expected_keys += %w(anonymous anonymous_to_peers at_position_list closed user_id)
expected_keys += %w(username votes abuse_flaggers tags type group_id pinned)
expected_keys += %w(comments_count unread_comments_count read endorsed)
expected_keys += %w(comments_count unread_comments_count read endorsed last_activity_at)
# these keys are checked separately, when desired, using check_thread_response_paging.
actual_keys = hash.keys - [
"children", "endorsed_responses", "non_endorsed_responses", "resp_skip",
"resp_limit", "resp_total", "non_endorsed_resp_total"
"children", "endorsed_responses", "non_endorsed_responses", "resp_skip",
"resp_limit", "resp_total", "non_endorsed_resp_total"
]
actual_keys.sort.should == expected_keys.sort
hash["title"].should == thread.title
hash["body"].should == thread.body
hash["course_id"].should == thread.course_id
hash["anonymous"].should == thread.anonymous
hash["anonymous_to_peers"].should == thread.anonymous_to_peers
hash["commentable_id"].should == thread.commentable_id
hash["at_position_list"].should == thread.at_position_list
hash["closed"].should == thread.closed
hash["course_id"].should == thread.course_id
hash["anonymous"].should == thread.anonymous
hash["anonymous_to_peers"].should == thread.anonymous_to_peers
hash["commentable_id"].should == thread.commentable_id
hash["at_position_list"].should == thread.at_position_list
hash["closed"].should == thread.closed
hash["user_id"].should == thread.author.id
hash["username"].should == thread.author.username
hash["votes"]["point"].should == thread.votes["point"]
hash["votes"]["count"].should == thread.votes["count"]
hash["votes"]["up_count"].should == thread.votes["up_count"]
hash["votes"]["down_count"].should == thread.votes["down_count"]
hash["votes"]["point"].should == thread.votes["point"]
hash["votes"]["count"].should == thread.votes["count"]
hash["votes"]["up_count"].should == thread.votes["up_count"]
hash["votes"]["down_count"].should == thread.votes["down_count"]
hash["abuse_flaggers"].should == thread.abuse_flaggers
hash["tags"].should == []
hash["type"].should == "thread"
......@@ -249,15 +196,17 @@ def check_thread_result(user, thread, hash, is_json=false)
if is_json
hash["id"].should == thread._id.to_s
hash["created_at"].should == thread.created_at.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
hash["updated_at"].should == thread.updated_at.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
hash["updated_at"].should == thread.updated_at.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
hash["last_activity_at"].should == thread.last_activity_at.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
else
hash["created_at"].should == thread.created_at
hash["updated_at"].should == thread.updated_at
hash["last_activity_at"].should == thread.last_activity_at
end
if user.nil?
hash["unread_comments_count"].should == thread.comments.length
hash["read"].should == false
hash["read"].should == false
else
expected_unread_cnt = thread.comments.length # initially assume nothing has been read
read_states = user.read_states.where(course_id: thread.course_id).to_a
......@@ -265,7 +214,7 @@ def check_thread_result(user, thread, hash, is_json=false)
read_date = read_states.first.last_read_times[thread.id.to_s]
if read_date
thread.comments.each do |c|
if c.author != user and c.updated_at < read_date
if c.updated_at < read_date
expected_unread_cnt -= 1
end
end
......@@ -282,16 +231,22 @@ def check_thread_result_json(user, thread, json_response)
check_thread_result(user, thread, json_response, true)
end
def check_thread_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false)
def check_unread_thread_result_json(thread, json_response)
# When the thread is unread we do not check it against the user's read-state data;
# passing user=nil makes check_thread_result assert `read` == false.
check_thread_result(nil, thread, json_response, true)
end
def check_thread_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false, recursive=false)
case thread.thread_type
when "discussion"
check_discussion_response_paging(thread, hash, resp_skip, resp_limit, is_json)
when "question"
check_question_response_paging(thread, hash, resp_skip, resp_limit, is_json)
when "discussion"
check_discussion_response_paging(thread, hash, resp_skip, resp_limit, is_json, recursive)
when "question"
check_question_response_paging(thread, hash, resp_skip, resp_limit, is_json, recursive)
end
end
def check_comment(comment, hash, is_json)
def check_comment(comment, hash, is_json, recursive=false)
hash["id"].should == (is_json ? comment.id.to_s : comment.id) # Convert from ObjectId if necessary
hash["body"].should == comment.body
hash["user_id"].should == comment.author_id
......@@ -299,22 +254,27 @@ def check_comment(comment, hash, is_json)
hash["endorsed"].should == comment.endorsed
hash["endorsement"].should == comment.endorsement
children = Comment.where({"parent_id" => comment.id}).sort({"sk" => 1}).to_a
hash["children"].length.should == children.length
hash["children"].each_with_index do |child_hash, i|
check_comment(children[i], child_hash, is_json)
hash["child_count"].should == children.length
if recursive
hash["children"].length.should == children.length
hash["children"].each_with_index do |child_hash, i|
check_comment(children[i], child_hash, is_json)
end
end
end
def check_discussion_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false)
def check_discussion_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false, recursive=false)
all_responses = thread.root_comments.sort({"sk" => 1}).to_a
total_responses = all_responses.length
hash["resp_total"].should == total_responses
expected_responses = resp_limit.nil? ?
all_responses.drop(resp_skip) :
all_responses.drop(resp_skip).take(resp_limit)
hash["children"].length.should == expected_responses.length
hash["children"].each_with_index do |response_hash, i|
check_comment(expected_responses[i], response_hash, is_json)
check_comment(expected_responses[i], response_hash, is_json, recursive)
end
hash["resp_skip"].to_i.should == resp_skip
if resp_limit.nil?
......@@ -324,23 +284,26 @@ def check_discussion_response_paging(thread, hash, resp_skip=0, resp_limit=nil,
end
end
def check_question_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false)
def check_question_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false, recursive=false)
all_responses = thread.root_comments.sort({"sk" => 1}).to_a
endorsed_responses, non_endorsed_responses = all_responses.partition { |resp| resp.endorsed }
hash["endorsed_responses"].length.should == endorsed_responses.length
hash["endorsed_responses"].each_with_index do |response_hash, i|
check_comment(endorsed_responses[i], response_hash, is_json)
check_comment(endorsed_responses[i], response_hash, is_json, recursive)
end
hash["non_endorsed_resp_total"] == non_endorsed_responses.length
expected_non_endorsed_responses = resp_limit.nil? ?
non_endorsed_responses.drop(resp_skip) :
non_endorsed_responses.drop(resp_skip).take(resp_limit)
hash["non_endorsed_responses"].length.should == expected_non_endorsed_responses.length
hash["non_endorsed_responses"].each_with_index do |response_hash, i|
check_comment(expected_non_endorsed_responses[i], response_hash, is_json)
check_comment(expected_non_endorsed_responses[i], response_hash, is_json, recursive)
end
total_responses = endorsed_responses.length + non_endorsed_responses.length
hash["resp_total"].should == total_responses
hash["resp_skip"].to_i.should == resp_skip
if resp_limit.nil?
hash["resp_limit"].should be_nil
......@@ -349,8 +312,8 @@ def check_question_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is
end
end
def check_thread_response_paging_json(thread, hash, resp_skip=0, resp_limit=nil)
check_thread_response_paging(thread, hash, resp_skip, resp_limit, true)
def check_thread_response_paging_json(thread, hash, resp_skip=0, resp_limit=nil, recursive=false)
check_thread_response_paging(thread, hash, resp_skip, resp_limit, true, recursive)
end
# general purpose factory helpers
......@@ -370,6 +333,7 @@ def make_comment(author, parent, text)
else
coll = parent.children
thread = parent.comment_thread
parent.set(child_count: coll.length + 1)
end
comment = coll.new(body: text, course_id: parent.course_id)
comment.author = author
......@@ -384,12 +348,12 @@ end
# AKA this will overwrite "standalone t0" each time it is called.
def make_standalone_thread_with_comments(author, index=0)
thread = make_thread(
author,
"standalone thread #{index}",
DFLT_COURSE_ID,
"pdq",
:discussion,
:standalone
)
3.times do |i|
......@@ -418,5 +382,19 @@ def setup_10_threads
@comments["t#{i} c#{j}"] = comment
end
end
@default_order = 10.times.map {|i| "t#{i}"}.reverse
@default_order = 10.times.map { |i| "t#{i}" }.reverse
end
# Creates a CommentThread with a Comment, and nested child Comment.
# The author of the thread is subscribed to the thread.
def create_comment_thread_and_comments
# Create a new comment thread, and subscribe the author to the thread
thread = create(:comment_thread, :subscribe_author)
# Create a comment along with a nested child comment
comment = create(:comment, comment_thread: thread)
create(:comment, parent: comment)
comment.set(child_count: 1)
thread
end
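The resulting object graph is exactly what the DELETE specs assert against (a usage sketch):

thread = create_comment_thread_and_comments
CommentThread.where(id: thread.id).count    # => 1
Comment.where(comment_thread: thread).count # => 2 (root comment plus nested child)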
require 'database_cleaner'
RSpec.configure do |config|
config.before(:suite) do
# Mongoid only supports truncation.
DatabaseCleaner.strategy = :truncation
DatabaseCleaner.clean_with(:truncation)
end
config.around(:each) do |example|
DatabaseCleaner.cleaning do
example.run
end
end
end
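For reference, DatabaseCleaner's block form used above is equivalent to wrapping the example in start/clean (per the gem's documented behavior; sketch):

config.around(:each) do |example|
  DatabaseCleaner.start
  begin
    example.run
  ensure
    DatabaseCleaner.clean # runs even if the example raises
  end
end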
def delete_es_index
Tire.index Content::ES_INDEX_NAME do
delete
end
end
def create_es_index
new_index = Tire.index Content::ES_INDEX_NAME
new_index.create
[CommentThread, Comment].each do |klass|
klass.put_search_index_mapping
end
end
def refresh_es_index
es_index_name = Content::ES_INDEX_NAME
Tire.index es_index_name do
refresh
end
end
RSpec.configure do |config|
config.before(:each) do
delete_es_index
create_es_index
end
end
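Because Elasticsearch only makes documents searchable after an index refresh, the rewritten search specs call refresh_es_index before querying instead of sleeping; the sequencing looks like:

thread = create(:comment_thread, body: 'needle') # indexed via Tire callbacks
refresh_es_index                                 # make the document searchable now
get '/api/v1/search/threads', text: 'needle'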
require 'factory_girl'
RSpec.configure do |config|
config.include FactoryGirl::Syntax::Methods
FactoryGirl.find_definitions
config.before(:suite) do
begin
DatabaseCleaner.start
FactoryGirl.lint
ensure
DatabaseCleaner.clean
end
end
end