Commit fe8ee252 by Jim Abramson

Merge pull request #55 from edx/feature/jsa/new-handle-threads

Feature/jsa/new handle threads
parents 03153453 8e41d39b
...@@ -48,6 +48,7 @@ group :test do ...@@ -48,6 +48,7 @@ group :test do
gem 'rack-test', :require => "rack/test" gem 'rack-test', :require => "rack/test"
gem 'guard' gem 'guard'
gem 'guard-unicorn' gem 'guard-unicorn'
gem 'simplecov', :require => false
end end
gem 'newrelic_rpm' gem 'newrelic_rpm'
......
...@@ -127,6 +127,10 @@ GEM ...@@ -127,6 +127,10 @@ GEM
rspec-expectations (2.11.2) rspec-expectations (2.11.2)
diff-lcs (~> 1.1.3) diff-lcs (~> 1.1.3)
rspec-mocks (2.11.2) rspec-mocks (2.11.2)
simplecov (0.7.1)
multi_json (~> 1.0)
simplecov-html (~> 0.7.1)
simplecov-html (0.7.1)
sinatra (1.3.3) sinatra (1.3.3)
rack (~> 1.3, >= 1.3.6) rack (~> 1.3, >= 1.3.6)
rack-protection (~> 1.2) rack-protection (~> 1.2)
...@@ -186,6 +190,7 @@ DEPENDENCIES ...@@ -186,6 +190,7 @@ DEPENDENCIES
rdiscount rdiscount
rest-client rest-client
rspec rspec
simplecov
sinatra sinatra
tire tire
tire-contrib tire-contrib
......
...@@ -20,7 +20,8 @@ get "#{APIPREFIX}/threads/:thread_id" do |thread_id| ...@@ -20,7 +20,8 @@ get "#{APIPREFIX}/threads/:thread_id" do |thread_id|
user.mark_as_read(thread) if user user.mark_as_read(thread) if user
end end
thread.to_hash(recursive: bool_recursive, user_id: params["user_id"]).to_json presenter = ThreadPresenter.new([thread], user || nil, thread.course_id)
presenter.to_hash_array(true).first.to_json
end end
put "#{APIPREFIX}/threads/:thread_id" do |thread_id| put "#{APIPREFIX}/threads/:thread_id" do |thread_id|
...@@ -34,7 +35,8 @@ put "#{APIPREFIX}/threads/:thread_id" do |thread_id| ...@@ -34,7 +35,8 @@ put "#{APIPREFIX}/threads/:thread_id" do |thread_id|
if thread.errors.any? if thread.errors.any?
error 400, thread.errors.full_messages.to_json error 400, thread.errors.full_messages.to_json
else else
thread.to_hash.to_json presenter = ThreadPresenter.new([thread], nil, thread.course_id)
presenter.to_hash_array.first.to_json
end end
end end
......
...@@ -39,11 +39,8 @@ get "#{APIPREFIX}/users/:user_id/active_threads" do |user_id| ...@@ -39,11 +39,8 @@ get "#{APIPREFIX}/users/:user_id/active_threads" do |user_id|
paged_thread_ids.index(t.id) paged_thread_ids.index(t.id)
end end
# Fetch all the usernames in bulk to save on queries. Since we're using the presenter = ThreadPresenter.new(paged_active_threads.to_a, user, params[:course_id])
# identity map, the users won't need to be fetched again later. collection = presenter.to_hash_array(true)
User.only(:username).find(paged_active_threads.map{|x| x.author_id})
collection = paged_active_threads.map{|t| t.to_hash recursive: true}
collection = author_contents_only(collection, user_id) collection = author_contents_only(collection, user_id)
{ {
......
...@@ -23,14 +23,19 @@ end ...@@ -23,14 +23,19 @@ end
if ["staging", "production", "loadtest", "edgestage","edgeprod"].include? environment if ["staging", "production", "loadtest", "edgestage","edgeprod"].include? environment
require 'newrelic_rpm' require 'newrelic_rpm'
require 'new_relic/agent/method_tracer'
Moped::Session.class_eval do
include NewRelic::Agent::MethodTracer
add_method_tracer :new
add_method_tracer :use
add_method_tracer :login
end
end end
if ENV["ENABLE_GC_PROFILER"] if ENV["ENABLE_GC_PROFILER"]
GC::Profiler.enable GC::Profiler.enable
end end
set :cache, Dalli::Client.new
application_yaml = ERB.new(File.read("config/application.yml")).result() application_yaml = ERB.new(File.read("config/application.yml")).result()
CommentService.config = YAML.load(application_yaml).with_indifferent_access CommentService.config = YAML.load(application_yaml).with_indifferent_access
...@@ -43,6 +48,7 @@ Mongoid.logger.level = Logger::INFO ...@@ -43,6 +48,7 @@ Mongoid.logger.level = Logger::INFO
Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file} Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file}
Dir[File.dirname(__FILE__) + '/models/*.rb'].each {|file| require file} Dir[File.dirname(__FILE__) + '/models/*.rb'].each {|file| require file}
Dir[File.dirname(__FILE__) + '/presenters/*.rb'].each {|file| require file}
# Comment out observers until notifications are actually set up properly. # Comment out observers until notifications are actually set up properly.
#Dir[File.dirname(__FILE__) + '/models/observers/*.rb'].each {|file| require file} #Dir[File.dirname(__FILE__) + '/models/observers/*.rb'].each {|file| require file}
......
level_limit: 3 level_limit: 3
cache_enabled: false
api_key: <%= ENV['API_KEY'] || 'PUT_YOUR_API_KEY_HERE' %> api_key: <%= ENV['API_KEY'] || 'PUT_YOUR_API_KEY_HERE' %>
elasticsearch_server: <%= ENV['SEARCH_SERVER'] || 'http://localhost:9200' %> elasticsearch_server: <%= ENV['SEARCH_SERVER'] || 'http://localhost:9200' %>
cache_timeout:
threads_search: 10
threads_query: 10
max_deep_search_comment_count: 5000 max_deep_search_comment_count: 5000
...@@ -11,20 +11,10 @@ test: ...@@ -11,20 +11,10 @@ test:
hosts: hosts:
- localhost:27017 - localhost:27017
#production:
# sessions:
# default:
# uri: <%= ENV['MONGOHQ_URL'] %>
production: production:
sessions: sessions:
default: default:
hosts: uri: <%= ENV['MONGOHQ_URL'] %>
- hurley.member0.mongohq.com:10000
- hurley.member1.mongohq.com:10000
username: <%= ENV['MONGOHQ_USER'] %>
password: <%= ENV['MONGOHQ_PASS'] %>
database: app6929933
options: options:
skip_version_check: true skip_version_check: true
safe: true safe: true
...@@ -33,11 +23,7 @@ production: ...@@ -33,11 +23,7 @@ production:
edgeprod: edgeprod:
sessions: sessions:
default: default:
hosts: uri: <%= ENV['MONGOHQ_URL'] %>
- sayid.member1.mongohq.com:10001
username: <%= ENV['MONGOHQ_USER'] %>
password: <%= ENV['MONGOHQ_PASS'] %>
database: app10640640
options: options:
skip_version_check: true skip_version_check: true
safe: true safe: true
...@@ -46,11 +32,7 @@ edgeprod: ...@@ -46,11 +32,7 @@ edgeprod:
edgestage: edgestage:
sessions: sessions:
default: default:
hosts: uri: <%= ENV['MONGOHQ_URL'] %>
- vincent.mongohq.com:10001
username: <%= ENV['MONGOHQ_USER'] %>
password: <%= ENV['MONGOHQ_PASS'] %>
database: app10640673
options: options:
skip_version_check: true skip_version_check: true
safe: true safe: true
...@@ -64,19 +46,14 @@ staging: ...@@ -64,19 +46,14 @@ staging:
loadtest: loadtest:
sessions: sessions:
default: default:
hosts: uri: <%= ENV['MONGOHQ_URL'] %>
- sayid.member0.mongohq.com:10002
- sayid.member1.mongohq.com:10002
username: <%= ENV['MONGOHQ_USER'] %>
password: <%= ENV['MONGOHQ_PASS'] %>
database: app16669863
options: options:
skip_version_check: true skip_version_check: true
safe: true safe: true
consistency: strong consistency: strong
max_retries: 1 max_retries: 1
retry_interval: 1 retry_interval: 1
defaults: &defaults defaults: &defaults
use_utc: false use_utc: false
use_activesupport_time_zone: true use_activesupport_time_zone: true
helpers do helpers do
def commentable def commentable
@commentable ||= Commentable.find(params[:commentable_id]) @commentable ||= Commentable.find(params[:commentable_id])
end end
...@@ -115,7 +116,6 @@ helpers do ...@@ -115,7 +116,6 @@ helpers do
def handle_threads_query(comment_threads) def handle_threads_query(comment_threads)
if params[:course_id] if params[:course_id]
comment_threads = comment_threads.where(:course_id=>params[:course_id]) comment_threads = comment_threads.where(:course_id=>params[:course_id])
...@@ -134,18 +134,6 @@ helpers do ...@@ -134,18 +134,6 @@ helpers do
end end
end end
if CommentService.config[:cache_enabled]
query_params = params.slice(*%w[course_id commentable_id sort_key sort_order page per_page user_id])
memcached_key = "threads_query_#{query_params.hash}"
cached_results = Sinatra::Application.cache.get(memcached_key)
if cached_results
return {
collection: cached_results[:collection_ids].map{|id| CommentThread.find(id).to_hash(recursive: bool_recursive, user_id: params["user_id"])},
num_pages: cached_results[:num_pages],
page: cached_results[:page],
}.to_json
end
end
sort_key_mapper = { sort_key_mapper = {
"date" => :created_at, "date" => :created_at,
...@@ -159,8 +147,8 @@ helpers do ...@@ -159,8 +147,8 @@ helpers do
"asc" => :asc, "asc" => :asc,
} }
sort_key = sort_key_mapper[params["sort_key"]] sort_key = sort_key_mapper[params["sort_key"] || "date"]
sort_order = sort_order_mapper[params["sort_order"]] sort_order = sort_order_mapper[params["sort_order"] || "desc"]
sort_keyword_valid = (!params["sort_key"] && !params["sort_order"] || sort_key && sort_order) sort_keyword_valid = (!params["sort_key"] && !params["sort_order"] || sort_key && sort_order)
if not sort_keyword_valid if not sort_keyword_valid
...@@ -168,23 +156,31 @@ helpers do ...@@ -168,23 +156,31 @@ helpers do
else else
page = (params["page"] || DEFAULT_PAGE).to_i page = (params["page"] || DEFAULT_PAGE).to_i
per_page = (params["per_page"] || DEFAULT_PER_PAGE).to_i per_page = (params["per_page"] || DEFAULT_PER_PAGE).to_i
#KChugh turns out we don't need to go through all the extra work on the back end because the client is resorting anyway
#KChugh boy was I wrong, we need to sort for pagination order_clause = "pinned DESC, #{sort_key} #{sort_order}"
comment_threads = comment_threads.order_by("pinned DESC,#{sort_key} #{sort_order}") if sort_key && sort_order if ![:created_at, :last_activity_at].include? sort_key
# make sort order predictable when preceding sorts are non-unique
order_clause = "#{order_clause}, created_at DESC"
end
comment_threads = comment_threads.order_by(order_clause)
num_pages = [1, (comment_threads.count / per_page.to_f).ceil].max num_pages = [1, (comment_threads.count / per_page.to_f).ceil].max
page = [num_pages, [1, page].max].min page = [num_pages, [1, page].max].min
paged_comment_threads = comment_threads.page(page).per(per_page) # actual query happens here (by doing to_a)
if CommentService.config[:cache_enabled] threads = comment_threads.page(page).per(per_page).to_a
cached_results = {
collection_ids: paged_comment_threads.map(&:id),
num_pages: num_pages,
page: page,
}
Sinatra::Application.cache.set(memcached_key, cached_results, CommentService.config[:cache_timeout][:threads_query].to_i)
end
if threads.length == 0
collection = []
else
pres_threads = ThreadPresenter.new(
threads,
params[:user_id] ? user : nil,
params[:course_id] || threads.first.course_id
)
collection = pres_threads.to_hash_array(bool_recursive)
end
{ {
collection: paged_comment_threads.map{|t| t.to_hash(recursive: bool_recursive, user_id: params["user_id"])}, collection: collection,
num_pages: num_pages, num_pages: num_pages,
page: page, page: page,
}.to_json }.to_json
......
...@@ -17,6 +17,7 @@ class Comment < Content ...@@ -17,6 +17,7 @@ class Comment < Content
field :at_position_list, type: Array, default: [] field :at_position_list, type: Array, default: []
index({author_id: 1, course_id: 1}) index({author_id: 1, course_id: 1})
index({_type: 1, comment_thread_id: 1, author_id: 1, updated_at: 1})
field :sk, type: String, default: nil field :sk, type: String, default: nil
before_save :set_sk before_save :set_sk
......
...@@ -25,6 +25,7 @@ class CommentThread < Content ...@@ -25,6 +25,7 @@ class CommentThread < Content
field :pinned, type: Boolean field :pinned, type: Boolean
index({author_id: 1, course_id: 1}) index({author_id: 1, course_id: 1})
index({_type: 1, course_id: 1, pinned: -1, created_at: -1})
include Tire::Model::Search include Tire::Model::Search
include Tire::Model::Callbacks include Tire::Model::Callbacks
...@@ -99,13 +100,6 @@ class CommentThread < Content ...@@ -99,13 +100,6 @@ class CommentThread < Content
per_page = options[:per_page] || 20 per_page = options[:per_page] || 20
sort_key = options[:sort_key] sort_key = options[:sort_key]
sort_order = options[:sort_order] sort_order = options[:sort_order]
if CommentService.config[:cache_enabled]
memcached_key = "threads_search_#{params.merge(options).hash}"
results = Sinatra::Application.cache.get(memcached_key)
if results
return results
end
end
#GET /api/v1/search/threads?user_id=1&recursive=False&sort_key=date&│[2013-06-28 10:16:46,104][INFO ][plugins ] [Glamor] loaded [], sites [] #GET /api/v1/search/threads?user_id=1&recursive=False&sort_key=date&│[2013-06-28 10:16:46,104][INFO ][plugins ] [Glamor] loaded [], sites []
...@@ -203,10 +197,6 @@ class CommentThread < Content ...@@ -203,10 +197,6 @@ class CommentThread < Content
end end
if CommentService.config[:cache_enabled]
Sinatra::Application.cache.set(memcached_key, results, CommentService.config[:cache_timeout][:threads_search].to_i)
end
results results
end end
...@@ -247,86 +237,43 @@ class CommentThread < Content ...@@ -247,86 +237,43 @@ class CommentThread < Content
end end
def to_hash(params={}) def to_hash(params={})
doc = as_document.slice(*%w[title body course_id anonymous anonymous_to_peers commentable_id created_at updated_at at_position_list closed])
# to_hash returns the following model for each thread
# title body course_id anonymous anonymous_to_peers commentable_id
# created_at updated_at at_position_list closed
# (all the above direct from the original document)
# id
# from doc._id
# user_id
# from doc.author_id
# username
# from doc.author_username
# votes
# from subdocument votes - {count, up_count, down_count, point}
# abuse_flaggers
# from original document
# tags
# from orig doc tags_array
# type
# hardcoded "thread"
# group_id
# from orig doc
# pinned
# from orig doc
# comments_count
# count across all comments
as_document.slice(*%w[title body course_id anonymous anonymous_to_peers commentable_id created_at updated_at at_position_list closed])
.merge("id" => _id, "user_id" => author_id, .merge("id" => _id, "user_id" => author_id,
"username" => author.username, "username" => author_username,
"votes" => votes.slice(*%w[count up_count down_count point]), "votes" => votes.slice(*%w[count up_count down_count point]),
"abuse_flaggers" => abuse_flaggers, "abuse_flaggers" => abuse_flaggers,
"tags" => tags_array, "tags" => tags_array,
"type" => "thread", "type" => "thread",
"group_id" => group_id, "group_id" => group_id,
"pinned" => pinned?, "pinned" => pinned?,
"endorsed" => endorsed?) "comments_count" => comment_count)
if params[:recursive]
doc = doc.merge("children" => [])
rs = Comment.where(comment_thread_id: self.id).order_by({"sk"=> 1})
ancestry = [doc]
comments_count = 0
# weave the fetched comments into a single hierarchical doc
rs.each do | comment |
h = comment.to_hash.merge("children" => [])
parent_id = comment.parent_id || self.id
found_parent = false
while ancestry.length > 0 do
if parent_id == ancestry.last["id"] then
# found the children collection to which this comment belongs
ancestry.last["children"] << h
ancestry << h
found_parent = true
comments_count += 1
break
else
# try again with one level back in the ancestry til we find the parent
ancestry.pop
next
end
end
if not found_parent
# if we arrive here, it means a parent_id somewhere in the result set
# is pointing to an invalid place.
msg = "recursion ended: thread_id=#{self.id} comment_id=#{comment.id} parent_ids=#{comment.parent_ids} sk=#{comment.sk}"
logger.warn msg
ancestry = [doc]
end
end
else
comments_count = comments.count
end
if params[:user_id]
user = User.find_or_create_by(external_id: params[:user_id])
read_state = user.read_states.where(course_id: self.course_id).first
last_read_time = read_state.last_read_times[self.id.to_s] if read_state
# comments created by the user are excluded in the count
# this is rather like a hack but it avoids the following situation:
# when you reply to a thread and while you are editing,
# other people also replied to the thread. Now if we simply
# update the last_read_time, then the other people's replies
# will not be included in the unread_count; if we leave it
# that way, then your own comment will be included in the
# unread count
if last_read_time
unread_count = self.comments.where(
:updated_at => {:$gte => last_read_time},
:author_id => {:$ne => params[:user_id]},
).count
read = last_read_time >= self.updated_at
else
unread_count = self.comments.where(:author_id => {:$ne => params[:user_id]}).count
read = false
end
else
# If there's no user, say it's unread and all comments are unread
unread_count = comments_count
read = false
end
doc = doc.merge("unread_comments_count" => unread_count)
.merge("read" => read)
.merge("comments_count" => comments_count)
doc
end end
def self.tag_name_valid?(tag) def self.tag_name_valid?(tag)
......
...@@ -7,6 +7,9 @@ class Content ...@@ -7,6 +7,9 @@ class Content
field :historical_abuse_flaggers, type: Array, default: [] #preserve abuse flaggers after a moderator unflags field :historical_abuse_flaggers, type: Array, default: [] #preserve abuse flaggers after a moderator unflags
field :author_username, type: String, default: nil field :author_username, type: String, default: nil
index({comment_thread_id: 1, sk: 1}, {sparse: true})
index({comment_thread_id: 1, endorsed: 1}, {sparse: true})
before_save :set_username before_save :set_username
def set_username def set_username
# avoid having to look this attribute up later, since it does not change # avoid having to look this attribute up later, since it does not change
......
...@@ -104,7 +104,7 @@ class User ...@@ -104,7 +104,7 @@ class User
def mark_as_read(thread) def mark_as_read(thread)
read_state = read_states.find_or_create_by(course_id: thread.course_id) read_state = read_states.find_or_create_by(course_id: thread.course_id)
read_state.last_read_times[thread.id] = Time.now.utc read_state.last_read_times[thread.id.to_s] = Time.now.utc
read_state.save read_state.save
end end
......
require 'new_relic/agent/method_tracer'

# Presents CommentThread models as API response hashes, batching the
# per-thread read/unread/endorsed lookups into a few bulk queries instead
# of issuing one query per thread (replacing the old per-thread work that
# lived in CommentThread#to_hash).
class ThreadPresenter

# comment_threads - Array of CommentThread documents to present
# user            - User whose read state is consulted, or nil for anonymous
# course_id       - course scope used when looking up the user's read state
def initialize(comment_threads, user, course_id)
@threads = comment_threads
@user = user
@course_id = course_id
@read_dates = nil # Hash, sparse, thread_key (str) => date
@unread_counts = nil # Hash, sparse, thread_key (str) => int
@endorsed_threads = nil # Hash, sparse, thread_key (str) => bool
load_aggregates
end

# Precomputes the aggregates needed by to_hash in bulk: the user's last-read
# date per thread, unread comment counts, and which threads contain at least
# one endorsed comment.
def load_aggregates
@read_dates = {}
if @user
read_state = @user.read_states.where(:course_id => @course_id).first
if read_state
@read_dates = read_state["last_read_times"].to_hash
end
end
@unread_counts = {}
@endorsed_threads = {}
thread_ids = @threads.collect {|t| t._id}
# one aggregation pass marks every thread that has an endorsed comment
Comment.collection.aggregate(
{"$match" => {"comment_thread_id" => {"$in" => thread_ids}, "endorsed" => true}},
{"$group" => {"_id" => "$comment_thread_id"}}
).each do |res|
@endorsed_threads[res["_id"].to_s] = true
end
@threads.each do |t|
thread_key = t._id.to_s
if @read_dates.has_key? thread_key
# count comments by OTHER authors updated since the user's last read;
# the user's own comments are deliberately excluded from the unread count
# NOTE(review): raw driver collections usually expose `find`, not
# `where` -- confirm this call is valid against the driver in use
@unread_counts[thread_key] = Comment.collection.where(
:comment_thread_id => t._id,
:author_id => {"$ne" => @user.id},
:updated_at => {"$gte" => @read_dates[thread_key]}
).count
end
end
end

# Builds the API hash for a single thread, merging the precomputed unread
# count, read flag, and endorsed flag into thread.to_hash. When
# with_comments is true, the full comment tree is nested under "children".
def to_hash thread, with_comments=false
thread_key = thread._id.to_s
h = thread.to_hash
if @user
# a thread absent from @unread_counts was never read: all comments unread
cnt_unread = @unread_counts.fetch(thread_key, thread.comment_count)
h["unread_comments_count"] = cnt_unread
h["read"] = @read_dates.has_key?(thread_key) && @read_dates[thread_key] >= thread.updated_at
else
# anonymous callers see every thread as unread
h["unread_comments_count"] = thread.comment_count
h["read"] = false
end
h["endorsed"] = @endorsed_threads.fetch(thread_key, false)
h = merge_comments_recursive(h) if with_comments
h
end

# Presents every thread passed to the constructor; see to_hash.
def to_hash_array with_comments=false
@threads.map {|t| to_hash(t, with_comments)}
end

# Attaches the thread's comments to thread_hash as a nested "children"
# hierarchy. Relies on the comments arriving pre-sorted by sort key (sk),
# so each comment's parent is already on the ancestry stack when the
# comment is reached.
def merge_comments_recursive thread_hash
thread_id = thread_hash["id"]
root = thread_hash = thread_hash.merge("children" => [])
# Content model is used deliberately here (instead of Comment), to work with sparse index
rs = Content.where(comment_thread_id: thread_id).order_by({"sk"=> 1})
ancestry = [thread_hash]
# weave the fetched comments into a single hierarchical doc
rs.each do | comment |
thread_hash = comment.to_hash.merge("children" => [])
parent_id = comment.parent_id || thread_id
found_parent = false
while ancestry.length > 0 do
if parent_id == ancestry.last["id"] then
# found the children collection to which this comment belongs
ancestry.last["children"] << thread_hash
ancestry << thread_hash
found_parent = true
break
else
# try again with one level back in the ancestry til we find the parent
ancestry.pop
next
end
end
if not found_parent
# if we arrive here, it means a parent_id somewhere in the result set
# is pointing to an invalid place. reset the ancestry search path.
ancestry = [root]
end
end
ancestry.first
end

# instrument the hot paths with New Relic method tracers
include ::NewRelic::Agent::MethodTracer
add_method_tracer :load_aggregates
add_method_tracer :to_hash
add_method_tracer :to_hash_array
add_method_tracer :merge_comments_recursive

end
// Index migration for the new-handle-threads feature (run in the mongo shell).
// "Up" half: create the indexes the presenter and query paths rely on.
db.contents.ensureIndex({ _type: 1, comment_thread_id: 1, author_id: 1, updated_at: 1 }, { background: true })
db.contents.ensureIndex({ comment_thread_id: 1, sk: 1 }, { background: true, sparse: true })
db.contents.ensureIndex({ comment_thread_id: 1, endorsed: 1 }, { background: true, sparse: true })
db.contents.ensureIndex({ _type: 1, course_id: 1, pinned: -1, created_at: -1 }, { background: true })
db.contents.dropIndex({ sk: 1 }) // the new one (created above) supersedes this
// "Down" half (rollback): restore the old plain sk index, drop the new ones.
// NOTE(review): this looks like a separate rollback script pasted below the
// upgrade script -- do not run both halves in the same session. Also confirm
// { comment_thread_id: 1, updated_at: 1 } exists before dropping it; it is
// not created by the "up" half above.
db.contents.ensureIndex({ sk: 1 }, { background: true, safe: true })
db.contents.dropIndex({ comment_thread_id: 1, updated_at: 1 })
db.contents.dropIndex({ comment_thread_id: 1, sk: 1 })
db.contents.dropIndex({ comment_thread_id: 1, endorsed: 1 })
db.contents.dropIndex({ _type: 1, course_id: 1, pinned: -1, created_at: -1 })
...@@ -55,15 +55,17 @@ describe "app" do ...@@ -55,15 +55,17 @@ describe "app" do
describe "POST /api/v1/:commentable_id/threads" do describe "POST /api/v1/:commentable_id/threads" do
default_params = {title: "Interesting question", body: "cool", course_id: "1", user_id: "1"} default_params = {title: "Interesting question", body: "cool", course_id: "1", user_id: "1"}
it "create a new comment thread for the commentable object" do it "create a new comment thread for the commentable object" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params post '/api/v1/question_1/threads', default_params
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 3 CommentThread.count.should == old_count + 1
CommentThread.where(title: "Interesting question").first.should_not be_nil CommentThread.where(title: "Interesting question").first.should_not be_nil
end end
it "allows anonymous thread" do it "allows anonymous thread" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(anonymous: true) post '/api/v1/question_1/threads', default_params.merge(anonymous: true)
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 3 CommentThread.count.should == old_count + 1
c = CommentThread.where(title: "Interesting question").first c = CommentThread.where(title: "Interesting question").first
c.should_not be_nil c.should_not be_nil
c["anonymous"].should be_true c["anonymous"].should be_true
...@@ -99,9 +101,10 @@ describe "app" do ...@@ -99,9 +101,10 @@ describe "app" do
last_response.status.should == 503 last_response.status.should == 503
end end
it "create a new comment thread with tag" do it "create a new comment thread with tag" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(tags: "a, b, c") post '/api/v1/question_1/threads', default_params.merge(tags: "a, b, c")
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 3 CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first thread = CommentThread.where(title: "Interesting question").first
thread.tags_array.length.should == 3 thread.tags_array.length.should == 3
thread.tags_array.should include "a" thread.tags_array.should include "a"
...@@ -109,9 +112,10 @@ describe "app" do ...@@ -109,9 +112,10 @@ describe "app" do
thread.tags_array.should include "c" thread.tags_array.should include "c"
end end
it "strip spaces in tags" do it "strip spaces in tags" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(tags: " a, b ,c ") post '/api/v1/question_1/threads', default_params.merge(tags: " a, b ,c ")
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 3 CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first thread = CommentThread.where(title: "Interesting question").first
thread.tags_array.length.should == 3 thread.tags_array.length.should == 3
thread.tags_array.should include "a" thread.tags_array.should include "a"
...@@ -119,9 +123,10 @@ describe "app" do ...@@ -119,9 +123,10 @@ describe "app" do
thread.tags_array.should include "c" thread.tags_array.should include "c"
end end
it "accepts [a-z 0-9 + # - .]words, numbers, dashes, spaces but no underscores in tags" do it "accepts [a-z 0-9 + # - .]words, numbers, dashes, spaces but no underscores in tags" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(tags: "artificial-intelligence, machine-learning, 7-is-a-lucky-number, interesting problem, interesting problems in c++") post '/api/v1/question_1/threads', default_params.merge(tags: "artificial-intelligence, machine-learning, 7-is-a-lucky-number, interesting problem, interesting problems in c++")
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 3 CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first thread = CommentThread.where(title: "Interesting question").first
thread.tags_array.length.should == 5 thread.tags_array.length.should == 5
end end
......
...@@ -46,5 +46,154 @@ describe "app" do ...@@ -46,5 +46,154 @@ describe "app" do
last_response.status.should == 400 last_response.status.should == 400
end end
end end
# Specs for the per-user activity feed endpoint. Relies on helpers defined
# elsewhere in the spec suite: setup_10_threads, parse, check_thread_result,
# and make_comment -- confirm their contracts there.
describe "GET /api/v1/users/:user_id/active_threads" do

before(:each) { setup_10_threads }

# Fetches the endpoint for user_id with params, asserts 200, and returns
# just the "collection" portion of the parsed JSON body.
def thread_result(user_id, params)
get "/api/v1/users/#{user_id}/active_threads", params
last_response.should be_ok
parse(last_response.body)["collection"]
end

it "requires that a course id be passed" do
get "/api/v1/users/100/active_threads"
# this is silly, but it is the legacy behavior
last_response.should be_ok
last_response.body.should == "{}"
end

it "only returns threads with activity from the specified user" do
rs = thread_result 100, course_id: "xyz"
rs.length.should == 1
check_thread_result(@users["u100"], @threads["t0"], rs.first, true)
rs.first["children"].length.should == 5
end

it "does not include anonymous leaves" do
@comments["t0 c4"].anonymous = true
@comments["t0 c4"].save!
rs = thread_result 100, course_id: "xyz"
rs.length.should == 1
# NOTE(review): key "100" here vs "u100" in the previous example --
# verify against the fixture keys produced by setup_10_threads
check_thread_result(@users["100"], @threads["t0"], rs.first, false)
rs.first["children"].length.should == 4
end

it "does not include anonymous-to-peers leaves" do
@comments["t0 c3"].anonymous_to_peers = true
@comments["t0 c3"].save!
rs = thread_result 100, course_id: "xyz"
rs.length.should == 1
check_thread_result(@users["100"], @threads["t0"], rs.first, false)
rs.first["children"].length.should == 4
end

it "only returns threads from the specified course" do
@threads.each do |k, v|
v.author = @users["u100"]
v.save!
end
# t9 moves to another course and must be filtered out of the results
@threads["t9"].course_id = "zzz"
@threads["t9"].save!
rs = thread_result 100, course_id: "xyz"
rs.length.should == 9
end

it "correctly orders results by most recently updated" do
@threads.each do |k, v|
v.author = @users["u100"]
v.save!
end
# touching t5 should move it to the front of the activity feed
@threads["t5"].updated_at = DateTime.now
@threads["t5"].save!
expected_order = @threads.keys.reverse.select{|k| k!="t5"}.insert(0, "t5")
rs = thread_result 100, course_id: "xyz"
actual_order = rs.map {|v| v["title"]}
actual_order.should == expected_order
end

it "only returns content authored by the specified user, and ancestors of that content" do
# by default, number of comments returned by u100 would be 5
@comments["t0 c2"].author = @users["u101"]
# now 4
make_comment(@users["u100"], @comments["t0 c2"], "should see me")
# now 5
make_comment(@users["u101"], @comments["t0 c2"], "should not see me")
make_comment(@users["u100"], @threads["t1"], "should see me")
# now 6
make_comment(@users["u101"], @threads["t1"], "should not see me")
rs = thread_result 100, course_id: "xyz"
rs.length.should == 2
# the leaf of every subtree in the rs must have author==u100
# and the comment count should match our expectation
expected_comment_count = 6
@actual_comment_count = 0
# recursively walks the result tree; every leaf must be authored by user100
def check_leaf(result)
if !result["children"] or result["children"].length == 0
result["username"].should == "user100"
@actual_comment_count += 1
else
result["children"].each do |child|
check_leaf(child)
end
end
end
rs.each do |r|
check_leaf(r)
end
@actual_comment_count.should == expected_comment_count
end

# TODO: note the checks on result["num_pages"] are disabled.
# there is a bug in GET "#{APIPREFIX}/users/:user_id/active_threads
# and this value is often wrong.
context "pagination" do

# Fetches one page of user 100's activity feed and returns the full
# parsed response (collection, num_pages, page).
def thread_result_page (page, per_page)
get "/api/v1/users/100/active_threads", course_id: "xyz", page: page, per_page: per_page
last_response.should be_ok
parse(last_response.body)
end

before(:each) do
# give user u100 activity (the last comment) on every thread
@threads.each do |k, v|
@comments["#{k} c4"].author = @users["u100"]
@comments["#{k} c4"].save!
end
end

it "returns single page" do
result = thread_result_page(1, 20)
result["collection"].length.should == 10
#result["num_pages"].should == 1
result["page"].should == 1
end

it "returns multiple pages" do
result = thread_result_page(1, 5)
result["collection"].length.should == 5
#result["num_pages"].should == 2
result["page"].should == 1
result = thread_result_page(2, 5)
result["collection"].length.should == 5
#result["num_pages"].should == 2
result["page"].should == 2
end

it "orders correctly across pages" do
expected_order = @threads.keys.reverse
actual_order = []
per_page = 3
# ceiling division: number of pages needed to show all threads
num_pages = (@threads.length + per_page - 1) / per_page
num_pages.times do |i|
page = i + 1
result = thread_result_page(page, per_page)
# the last page may be a partial page
result["collection"].length.should == (page * per_page <= @threads.length ? per_page : @threads.length % per_page)
#result["num_pages"].should == num_pages
result["page"].should == page
actual_order += result["collection"].map {|v| v["title"]}
end
actual_order.should == expected_order
end

end
end
end end
end end
...@@ -45,80 +45,6 @@ describe CommentThread do ...@@ -45,80 +45,6 @@ describe CommentThread do
end end
end end
# Legacy spec for CommentThread#to_hash(recursive: true); the comment-nesting
# behavior it covers moved to ThreadPresenter in this change.
context "#to_hash (recursive=true)" do

before(:each) { @cid_seq = 10 }

# Returns obj with its #each stubbed to yield the elements of ary in
# order, standing in for a Mongoid result set.
def stub_each_from_array(obj, ary)
stub = obj.stub(:each)
ary.each {|v| stub = stub.and_yield(v)}
obj
end

# Stubs Comment.where(comment_thread_id: thread.id).order_by(...) so the
# model under test sees `ary` as its fetched comments.
def set_comment_results(thread, ary)
# avoids an unrelated expectation error
thread.stub(:endorsed?).and_return(true)
rs = stub_each_from_array(double("rs"), ary)
criteria = double("criteria")
criteria.stub(:order_by).and_return(rs)
Comment.stub(:where).with(comment_thread_id: thread.id).and_return(criteria)
end

# Builds an unsaved Comment with a sequential id; pass a parent comment to
# create a reply.
def make_comment(parent=nil)
c = Comment.new
c.id = @cid_seq
@cid_seq += 1
c.parent_id = parent.nil? ? nil : parent.id
c
end

it "nests comments in the correct order" do
thread = CommentThread.new
thread.id = 1
thread.author = User.new
# c0 -> (c00, c01 -> c010)
c0 = make_comment()
c00 = make_comment(c0)
c01 = make_comment(c0)
c010 = make_comment(c01)
set_comment_results(thread, [c0, c00, c01, c010])
h = thread.to_hash({:recursive => true})
h["children"].size.should == 1 # c0
h["children"][0]["id"].should == c0.id
h["children"][0]["children"].size.should == 2 # c00, c01
h["children"][0]["children"][0]["id"].should == c00.id
h["children"][0]["children"][1]["id"].should == c01.id
h["children"][0]["children"][1]["children"].size.should == 1 # c010
h["children"][0]["children"][1]["children"][0]["id"].should == c010.id
h["comments_count"].should == 4
end

it "handles orphaned child comments gracefully" do
thread = CommentThread.new
thread.id = 33
thread.author = User.new
c0 = make_comment()
c00 = make_comment(c0)
c000 = make_comment(c00)
c1 = make_comment()
c10 = make_comment(c1)
c11 = make_comment(c1)
c111 = make_comment(c11)
# lose c0 and c11 from result set. their descendants should
# be silently skipped over.
set_comment_results(thread, [c00, c000, c1, c10, c111])
h = thread.to_hash({:recursive => true})
h["children"].size.should == 1 # c1
h["children"][0]["id"].should == c1.id
h["children"][0]["children"].size.should == 1 # c10
h["children"][0]["children"][0]["id"].should == c10.id
# only the reachable, non-orphaned comments are counted
h["comments_count"].should == 2
end

end
context "sorting" do context "sorting" do
......
require 'spec_helper'
describe ThreadPresenter do
context "#to_hash_array" do
# sequential comment-id generator, reset before each example
before(:each) { @cid_seq = 10 }
# Make `obj` respond to #each by yielding every element of `ary` in
# order, mimicking a Mongoid result set.
def stub_each_from_array(obj, ary)
stub = obj.stub(:each)
ary.each {|v| stub = stub.and_yield(v)}
obj
end
# Stub the content query for `thread` so that iterating its comments
# yields exactly the comments in `ary`, in that order.
def set_comment_results(thread, ary)
# avoids an unrelated expectation error
thread.stub(:endorsed?).and_return(true)
rs = stub_each_from_array(double("rs"), ary)
criteria = double("criteria")
criteria.stub(:order_by).and_return(rs)
# stub Content, not Comment, because that's the model we will be querying against
Content.stub(:where).with(comment_thread_id: thread.id).and_return(criteria)
end
# Build an unsaved Comment with a fresh sequential id, optionally
# parented under another comment.
def make_comment(parent=nil)
c = Comment.new
c.id = @cid_seq
@cid_seq += 1
c.parent_id = parent.nil? ? nil : parent.id
c
end
# Verifies the presenter reassembles a flat, pre-ordered result set
# into the proper nested "children" structure.
it "nests comments in the correct order" do
thread = CommentThread.new
thread.id = 1
thread.author = User.new
# tree under test: c0 -> [c00, c01 -> [c010]]
c0 = make_comment()
c00 = make_comment(c0)
c01 = make_comment(c0)
c010 = make_comment(c01)
set_comment_results(thread, [c0, c00, c01, c010])
h = ThreadPresenter.new([thread], nil, thread.course_id).to_hash_array(true).first
h["children"].size.should == 1 # c0
h["children"][0]["id"].should == c0.id
h["children"][0]["children"].size.should == 2 # c00, c01
h["children"][0]["children"][0]["id"].should == c00.id
h["children"][0]["children"][1]["id"].should == c01.id
h["children"][0]["children"][1]["children"].size.should == 1 # c010
h["children"][0]["children"][1]["children"][0]["id"].should == c010.id
end
# Verifies comments whose parent is absent from the result set are
# skipped (along with their descendants) instead of raising.
it "handles orphaned child comments gracefully" do
thread = CommentThread.new
thread.id = 33
thread.author = User.new
# full tree: c0 -> [c00 -> [c000]], c1 -> [c10, c11 -> [c111]]
c0 = make_comment()
c00 = make_comment(c0)
c000 = make_comment(c00)
c1 = make_comment()
c10 = make_comment(c1)
c11 = make_comment(c1)
c111 = make_comment(c11)
# lose c0 and c11 from result set. their descendants should
# be silently skipped over.
set_comment_results(thread, [c00, c000, c1, c10, c111])
h = ThreadPresenter.new([thread], nil, thread.course_id).to_hash_array(true).first
h["children"].size.should == 1 # c1
h["children"][0]["id"].should == c1.id
h["children"][0]["children"].size.should == 1 # c10
h["children"][0]["children"][0]["id"].should == c10.id
end
end
end
ENV["SINATRA_ENV"] = "test"
require 'simplecov'
SimpleCov.start
require File.join(File.dirname(__FILE__), '..', 'app')
...@@ -47,7 +49,8 @@ def init_without_subscriptions ...@@ -47,7 +49,8 @@ def init_without_subscriptions
commentable = Commentable.new("question_1")
users = (1..10).map{|id| create_test_user(id)}
user = users.first
thread = CommentThread.new(title: "I can't solve this problem", body: "can anyone help me?", course_id: "1", commentable_id: commentable.id)
thread.author = user
...@@ -94,16 +97,23 @@ def init_without_subscriptions ...@@ -94,16 +97,23 @@ def init_without_subscriptions
comment1.comment_thread = thread
comment1.save!

thread = CommentThread.new(title: "I don't know what to say", body: "lol", course_id: "2", commentable_id: "something else")
thread.author = users[1]
thread.save!

comment = thread.comments.new(body: "i am objectionable", course_id: "2")
comment.author = users[2]
comment.abuse_flaggers = [users[3]._id]
comment.save!

Comment.all.each do |c|
  user.vote(c, :up) # make the first user always vote up for convenience
  users[2,9].each {|user| user.vote(c, [:up, :down].sample)}
end
CommentThread.all.each do |c|
  user.vote(c, :up) # make the first user always vote up for convenience
  users[2,9].each {|user| user.vote(c, [:up, :down].sample)}
end
Content.mongo_session[:blocked_hash].insert(hash: Digest::MD5.hexdigest("blocked post"))
...@@ -156,3 +166,123 @@ def init_with_subscriptions ...@@ -156,3 +166,123 @@ def init_with_subscriptions
thread.author = user1
thread.save!
end
# this method is used to test results produced using the helper function handle_threads_query
# which is used in multiple areas of the API
# Assert that `json_response` (a parsed thread hash returned by the API)
# faithfully represents `thread` as seen by `user` (pass nil for an
# anonymous/unauthenticated request). When `check_comments` is true,
# the top-level "children" array is also validated against the thread's
# root comments.
def check_thread_result(user, thread, json_response, check_comments=false)
expected_keys = %w(id title body course_id commentable_id created_at updated_at)
expected_keys += %w(anonymous anonymous_to_peers at_position_list closed user_id)
expected_keys += %w(username votes abuse_flaggers tags type group_id pinned)
expected_keys += %w(comments_count unread_comments_count read endorsed)
# the "children" key is not always present - depends on the invocation + test use case.
# exclude it from this check - if check_comments is set, we'll assert against it later
actual_keys = json_response.keys - ["children"]
actual_keys.sort.should == expected_keys.sort
json_response["title"].should == thread.title
json_response["body"].should == thread.body
json_response["course_id"].should == thread.course_id
json_response["anonymous"].should == thread.anonymous
json_response["anonymous_to_peers"].should == thread.anonymous_to_peers
json_response["commentable_id"].should == thread.commentable_id
# timestamps are serialized as ISO-8601 in UTC
json_response["created_at"].should == thread.created_at.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
json_response["updated_at"].should == thread.updated_at.utc.strftime("%Y-%m-%dT%H:%M:%SZ")
json_response["at_position_list"].should == thread.at_position_list
json_response["closed"].should == thread.closed
json_response["id"].should == thread._id.to_s
json_response["user_id"].should == thread.author.id
json_response["username"].should == thread.author.username
json_response["votes"]["point"].should == thread.votes["point"]
json_response["votes"]["count"].should == thread.votes["count"]
json_response["votes"]["up_count"].should == thread.votes["up_count"]
json_response["votes"]["down_count"].should == thread.votes["down_count"]
json_response["abuse_flaggers"].should == thread.abuse_flaggers
json_response["tags"].should == thread.tags_array
json_response["type"].should == "thread"
json_response["group_id"].should == thread.group_id
json_response["pinned"].should == thread.pinned?
json_response["endorsed"].should == thread.endorsed?
if check_comments
# warning - this only checks top-level comments and may not handle all possible sorting scenarios
# proper composition / ordering of the children is currently covered in models/comment_thread_spec.
# it also does not check for author-only results (e.g. user active threads view)
# author-only is covered by a test in api/user_spec.
root_comments = thread.root_comments.sort(_id:1).to_a
json_response["children"].should_not be_nil
json_response["children"].length.should == root_comments.length
json_response["children"].each_with_index { |v, i|
v["body"].should == root_comments[i].body
v["user_id"].should == root_comments[i].author_id
v["username"].should == root_comments[i].author_username
}
end
json_response["comments_count"].should == thread.comments.length
if user.nil?
# anonymous requests see every comment as unread and the thread as unread
json_response["unread_comments_count"].should == thread.comments.length
json_response["read"].should == false
else
expected_unread_cnt = thread.comments.length # initially assume nothing has been read
read_states = user.read_states.where(course_id: thread.course_id).to_a
if read_states.length == 1
read_date = read_states.first.last_read_times[thread.id.to_s]
if read_date
# comments by other authors updated before the last-read time count as
# read; the user's own comments are never subtracted here
thread.comments.each do |c|
if c.author != user and c.updated_at < read_date
expected_unread_cnt -= 1
end
end
# the thread is "read" only if nothing changed since the last read
json_response["read"].should == (read_date >= thread.updated_at)
else
json_response["read"].should == false
end
end
json_response["unread_comments_count"].should == expected_unread_cnt
end
end
# general purpose factory helpers
# Create and persist a CommentThread authored by `author`, using `text`
# for both the title and the body. Returns the saved thread.
def make_thread(author, text, course_id, commentable_id)
new_thread = CommentThread.new(title: text, body: text, course_id: course_id, commentable_id: commentable_id)
new_thread.author = author
new_thread.save!
new_thread
end
# Create and persist a Comment under `parent`, which may be either a
# CommentThread (top-level comment) or another Comment (nested reply).
# Returns the saved comment.
def make_comment(author, parent, text)
if parent.is_a?(CommentThread)
collection = parent.comments
thread = parent
else
# replying to a comment: attach to its children, share its thread
collection = parent.children
thread = parent.comment_thread
end
new_comment = collection.new(body: text, course_id: parent.course_id)
new_comment.author = author
new_comment.comment_thread = thread
new_comment.save!
new_comment
end
DFLT_COURSE_ID = "xyz"
# Reset the test DB and populate it with 10 threads ("t0".."t9"), each
# with 5 comments, authored by 10 distinct users (u100..u109). Records
# everything in @threads / @comments / @users for lookup by name.
def setup_10_threads
User.all.delete
Content.all.delete
@threads = {}
@comments = {}
@users = {}
(0...10).each do |i|
author = create_test_user(i + 100)
@users["u#{i + 100}"] = author
new_thread = make_thread(author, "t#{i}", DFLT_COURSE_ID, "pdq")
@threads["t#{i}"] = new_thread
(0...5).each do |j|
new_comment = make_comment(author, new_thread, "t#{i} c#{j}")
@comments["t#{i} c#{j}"] = new_comment
end
end
# most recently created thread comes first in the default ordering
@default_order = (0...10).map { |i| "t#{i}" }.reverse
end
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment