Commit 7299378c by jimabramson

refactor to generate thread list views more efficiently.

additionally, remove unused caching features and update mongo indexing.
parent 413246bc
...@@ -20,7 +20,8 @@ get "#{APIPREFIX}/threads/:thread_id" do |thread_id| ...@@ -20,7 +20,8 @@ get "#{APIPREFIX}/threads/:thread_id" do |thread_id|
user.mark_as_read(thread) if user user.mark_as_read(thread) if user
end end
thread.to_hash(recursive: bool_recursive, user_id: params["user_id"]).to_json presenter = ThreadPresenter.new([thread], user || nil, thread.course_id)
presenter.to_hash_array(true).first.to_json
end end
put "#{APIPREFIX}/threads/:thread_id" do |thread_id| put "#{APIPREFIX}/threads/:thread_id" do |thread_id|
...@@ -34,7 +35,8 @@ put "#{APIPREFIX}/threads/:thread_id" do |thread_id| ...@@ -34,7 +35,8 @@ put "#{APIPREFIX}/threads/:thread_id" do |thread_id|
if thread.errors.any? if thread.errors.any?
error 400, thread.errors.full_messages.to_json error 400, thread.errors.full_messages.to_json
else else
thread.to_hash.to_json presenter = ThreadPresenter.new([thread], nil, thread.course_id)
presenter.to_hash_array.first.to_json
end end
end end
......
...@@ -39,11 +39,8 @@ get "#{APIPREFIX}/users/:user_id/active_threads" do |user_id| ...@@ -39,11 +39,8 @@ get "#{APIPREFIX}/users/:user_id/active_threads" do |user_id|
paged_thread_ids.index(t.id) paged_thread_ids.index(t.id)
end end
# Fetch all the usernames in bulk to save on queries. Since we're using the presenter = ThreadPresenter.new(paged_active_threads.to_a, user, params[:course_id])
# identity map, the users won't need to be fetched again later. collection = presenter.to_hash_array(true)
User.only(:username).find(paged_active_threads.map{|x| x.author_id})
collection = paged_active_threads.map{|t| t.to_hash recursive: true}
collection = author_contents_only(collection, user_id) collection = author_contents_only(collection, user_id)
{ {
......
...@@ -23,14 +23,19 @@ end ...@@ -23,14 +23,19 @@ end
if ["staging", "production", "loadtest", "edgestage","edgeprod"].include? environment if ["staging", "production", "loadtest", "edgestage","edgeprod"].include? environment
require 'newrelic_rpm' require 'newrelic_rpm'
require 'new_relic/agent/method_tracer'
Moped::Session.class_eval do
include NewRelic::Agent::MethodTracer
add_method_tracer :new
add_method_tracer :use
add_method_tracer :login
end
end end
if ENV["ENABLE_GC_PROFILER"] if ENV["ENABLE_GC_PROFILER"]
GC::Profiler.enable GC::Profiler.enable
end end
set :cache, Dalli::Client.new
application_yaml = ERB.new(File.read("config/application.yml")).result() application_yaml = ERB.new(File.read("config/application.yml")).result()
CommentService.config = YAML.load(application_yaml).with_indifferent_access CommentService.config = YAML.load(application_yaml).with_indifferent_access
...@@ -43,6 +48,7 @@ Mongoid.logger.level = Logger::INFO ...@@ -43,6 +48,7 @@ Mongoid.logger.level = Logger::INFO
Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file} Dir[File.dirname(__FILE__) + '/lib/**/*.rb'].each {|file| require file}
Dir[File.dirname(__FILE__) + '/models/*.rb'].each {|file| require file} Dir[File.dirname(__FILE__) + '/models/*.rb'].each {|file| require file}
Dir[File.dirname(__FILE__) + '/presenters/*.rb'].each {|file| require file}
# Comment out observers until notifications are actually set up properly. # Comment out observers until notifications are actually set up properly.
#Dir[File.dirname(__FILE__) + '/models/observers/*.rb'].each {|file| require file} #Dir[File.dirname(__FILE__) + '/models/observers/*.rb'].each {|file| require file}
......
level_limit: 3 level_limit: 3
cache_enabled: false
api_key: <%= ENV['API_KEY'] || 'PUT_YOUR_API_KEY_HERE' %> api_key: <%= ENV['API_KEY'] || 'PUT_YOUR_API_KEY_HERE' %>
elasticsearch_server: <%= ENV['SEARCH_SERVER'] || 'http://localhost:9200' %> elasticsearch_server: <%= ENV['SEARCH_SERVER'] || 'http://localhost:9200' %>
cache_timeout:
threads_search: 10
threads_query: 10
max_deep_search_comment_count: 5000 max_deep_search_comment_count: 5000
...@@ -64,12 +64,7 @@ staging: ...@@ -64,12 +64,7 @@ staging:
loadtest: loadtest:
sessions: sessions:
default: default:
hosts: uri: <%= ENV['MONGOHQ_URL'] %>
- sayid.member0.mongohq.com:10002
- sayid.member1.mongohq.com:10002
username: <%= ENV['MONGOHQ_USER'] %>
password: <%= ENV['MONGOHQ_PASS'] %>
database: app16669863
options: options:
skip_version_check: true skip_version_check: true
safe: true safe: true
......
helpers do helpers do
def commentable def commentable
@commentable ||= Commentable.find(params[:commentable_id]) @commentable ||= Commentable.find(params[:commentable_id])
end end
...@@ -115,7 +116,6 @@ helpers do ...@@ -115,7 +116,6 @@ helpers do
def handle_threads_query(comment_threads) def handle_threads_query(comment_threads)
if params[:course_id] if params[:course_id]
comment_threads = comment_threads.where(:course_id=>params[:course_id]) comment_threads = comment_threads.where(:course_id=>params[:course_id])
...@@ -134,18 +134,6 @@ helpers do ...@@ -134,18 +134,6 @@ helpers do
end end
end end
if CommentService.config[:cache_enabled]
query_params = params.slice(*%w[course_id commentable_id sort_key sort_order page per_page user_id])
memcached_key = "threads_query_#{query_params.hash}"
cached_results = Sinatra::Application.cache.get(memcached_key)
if cached_results
return {
collection: cached_results[:collection_ids].map{|id| CommentThread.find(id).to_hash(recursive: bool_recursive, user_id: params["user_id"])},
num_pages: cached_results[:num_pages],
page: cached_results[:page],
}.to_json
end
end
sort_key_mapper = { sort_key_mapper = {
"date" => :created_at, "date" => :created_at,
...@@ -159,8 +147,8 @@ helpers do ...@@ -159,8 +147,8 @@ helpers do
"asc" => :asc, "asc" => :asc,
} }
sort_key = sort_key_mapper[params["sort_key"]] sort_key = sort_key_mapper[params["sort_key"] || "date"]
sort_order = sort_order_mapper[params["sort_order"]] sort_order = sort_order_mapper[params["sort_order"] || "desc"]
sort_keyword_valid = (!params["sort_key"] && !params["sort_order"] || sort_key && sort_order) sort_keyword_valid = (!params["sort_key"] && !params["sort_order"] || sort_key && sort_order)
if not sort_keyword_valid if not sort_keyword_valid
...@@ -168,23 +156,31 @@ helpers do ...@@ -168,23 +156,31 @@ helpers do
else else
page = (params["page"] || DEFAULT_PAGE).to_i page = (params["page"] || DEFAULT_PAGE).to_i
per_page = (params["per_page"] || DEFAULT_PER_PAGE).to_i per_page = (params["per_page"] || DEFAULT_PER_PAGE).to_i
#KChugh turns out we don't need to go through all the extra work on the back end because the client is resorting anyway
#KChugh boy was I wrong, we need to sort for pagination order_clause = "pinned DESC, #{sort_key} #{sort_order}"
comment_threads = comment_threads.order_by("pinned DESC,#{sort_key} #{sort_order}") if sort_key && sort_order if ![:created_at, :last_activity_at].include? sort_key
# make sort order predictable when preceding sorts are non-unique
order_clause = "#{order_clause}, created_at DESC"
end
comment_threads = comment_threads.order_by(order_clause)
num_pages = [1, (comment_threads.count / per_page.to_f).ceil].max num_pages = [1, (comment_threads.count / per_page.to_f).ceil].max
page = [num_pages, [1, page].max].min page = [num_pages, [1, page].max].min
paged_comment_threads = comment_threads.page(page).per(per_page) # actual query happens here (by doing to_a)
if CommentService.config[:cache_enabled] threads = comment_threads.page(page).per(per_page).to_a
cached_results = {
collection_ids: paged_comment_threads.map(&:id),
num_pages: num_pages,
page: page,
}
Sinatra::Application.cache.set(memcached_key, cached_results, CommentService.config[:cache_timeout][:threads_query].to_i)
end
if threads.length == 0
collection = []
else
pres_threads = ThreadPresenter.new(
threads,
params[:user_id] ? user : nil,
params[:course_id] || threads.first.course_id
)
collection = pres_threads.to_hash_array(bool_recursive)
end
{ {
collection: paged_comment_threads.map{|t| t.to_hash(recursive: bool_recursive, user_id: params["user_id"])}, collection: collection,
num_pages: num_pages, num_pages: num_pages,
page: page, page: page,
}.to_json }.to_json
......
...@@ -17,6 +17,7 @@ class Comment < Content ...@@ -17,6 +17,7 @@ class Comment < Content
field :at_position_list, type: Array, default: [] field :at_position_list, type: Array, default: []
index({author_id: 1, course_id: 1}) index({author_id: 1, course_id: 1})
index({_type: 1, comment_thread_id: 1, author_id: 1, updated_at: 1})
field :sk, type: String, default: nil field :sk, type: String, default: nil
before_save :set_sk before_save :set_sk
......
...@@ -25,6 +25,7 @@ class CommentThread < Content ...@@ -25,6 +25,7 @@ class CommentThread < Content
field :pinned, type: Boolean field :pinned, type: Boolean
index({author_id: 1, course_id: 1}) index({author_id: 1, course_id: 1})
index({_type: 1, course_id: 1, pinned: -1, created_at: -1})
include Tire::Model::Search include Tire::Model::Search
include Tire::Model::Callbacks include Tire::Model::Callbacks
...@@ -99,13 +100,6 @@ class CommentThread < Content ...@@ -99,13 +100,6 @@ class CommentThread < Content
per_page = options[:per_page] || 20 per_page = options[:per_page] || 20
sort_key = options[:sort_key] sort_key = options[:sort_key]
sort_order = options[:sort_order] sort_order = options[:sort_order]
if CommentService.config[:cache_enabled]
memcached_key = "threads_search_#{params.merge(options).hash}"
results = Sinatra::Application.cache.get(memcached_key)
if results
return results
end
end
#GET /api/v1/search/threads?user_id=1&recursive=False&sort_key=date&│[2013-06-28 10:16:46,104][INFO ][plugins ] [Glamor] loaded [], sites [] #GET /api/v1/search/threads?user_id=1&recursive=False&sort_key=date&│[2013-06-28 10:16:46,104][INFO ][plugins ] [Glamor] loaded [], sites []
...@@ -203,10 +197,6 @@ class CommentThread < Content ...@@ -203,10 +197,6 @@ class CommentThread < Content
end end
if CommentService.config[:cache_enabled]
Sinatra::Application.cache.set(memcached_key, results, CommentService.config[:cache_timeout][:threads_search].to_i)
end
results results
end end
...@@ -270,100 +260,20 @@ class CommentThread < Content ...@@ -270,100 +260,20 @@ class CommentThread < Content
# from orig doc # from orig doc
# pinned # pinned
# from orig doc # from orig doc
# endorsed
# if any comment within the thread is endorsed, see above (endorsed?)
# children (if recursive=true)
# will do a separate query for all the children
# unread_comments_count
# assuming this is being called on behalf of a specific user U, this counts the
# number of comments in this thread not authored by U AND whose updated_at
# is greater than (U[last_read_dates][this._id] or 0)
# comments_count # comments_count
# count across all comments # count across all comments
# read
# currently this checks doc.updated_at (which is kept up to date by callbacks)
# iow, any comment or the thread itself have been updated since i last read
doc = as_document.slice(*%w[title body course_id anonymous anonymous_to_peers commentable_id created_at updated_at at_position_list closed]) as_document.slice(*%w[title body course_id anonymous anonymous_to_peers commentable_id created_at updated_at at_position_list closed])
.merge("id" => _id, "user_id" => author_id, .merge("id" => _id, "user_id" => author_id,
"username" => author.username, "username" => author_username,
"votes" => votes.slice(*%w[count up_count down_count point]), "votes" => votes.slice(*%w[count up_count down_count point]),
"abuse_flaggers" => abuse_flaggers, "abuse_flaggers" => abuse_flaggers,
"tags" => tags_array, "tags" => tags_array,
"type" => "thread", "type" => "thread",
"group_id" => group_id, "group_id" => group_id,
"pinned" => pinned?, "pinned" => pinned?,
"endorsed" => endorsed?) "comments_count" => comment_count)
if params[:recursive]
doc = doc.merge("children" => [])
rs = Comment.where(comment_thread_id: self.id).order_by({"sk"=> 1})
ancestry = [doc]
comments_count = 0
# weave the fetched comments into a single hierarchical doc
rs.each do | comment |
h = comment.to_hash.merge("children" => [])
parent_id = comment.parent_id || self.id
found_parent = false
while ancestry.length > 0 do
if parent_id == ancestry.last["id"] then
# found the children collection to which this comment belongs
ancestry.last["children"] << h
ancestry << h
found_parent = true
comments_count += 1
break
else
# try again with one level back in the ancestry til we find the parent
ancestry.pop
next
end
end
if not found_parent
# if we arrive here, it means a parent_id somewhere in the result set
# is pointing to an invalid place.
msg = "recursion ended: thread_id=#{self.id} comment_id=#{comment.id} parent_ids=#{comment.parent_ids} sk=#{comment.sk}"
logger.warn msg
ancestry = [doc]
end
end
else
comments_count = comments.count
end
if params[:user_id]
user = User.find_or_create_by(external_id: params[:user_id])
read_state = user.read_states.where(course_id: self.course_id).first
last_read_time = read_state.last_read_times[self.id.to_s] if read_state
# comments created by the user are excluded in the count
# this is rather like a hack but it avoids the following situation:
# when you reply to a thread and while you are editing,
# other people also replied to the thread. Now if we simply
# update the last_read_time, then the other people's replies
# will not be included in the unread_count; if we leave it
# that way, then your own comment will be included in the
# unread count
if last_read_time
unread_count = self.comments.where(
:updated_at => {:$gte => last_read_time},
:author_id => {:$ne => params[:user_id]},
).count
read = last_read_time >= self.updated_at
else
unread_count = self.comments.where(:author_id => {:$ne => params[:user_id]}).count
read = false
end
else
# If there's no user, say it's unread and all comments are unread
unread_count = comments_count
read = false
end
doc = doc.merge("unread_comments_count" => unread_count)
.merge("read" => read)
.merge("comments_count" => comments_count)
doc
end end
def self.tag_name_valid?(tag) def self.tag_name_valid?(tag)
......
...@@ -7,6 +7,9 @@ class Content ...@@ -7,6 +7,9 @@ class Content
field :historical_abuse_flaggers, type: Array, default: [] #preserve abuse flaggers after a moderator unflags field :historical_abuse_flaggers, type: Array, default: [] #preserve abuse flaggers after a moderator unflags
field :author_username, type: String, default: nil field :author_username, type: String, default: nil
index({comment_thread_id: 1, sk: 1}, {sparse: true})
index({comment_thread_id: 1, endorsed: 1}, {sparse: true})
before_save :set_username before_save :set_username
def set_username def set_username
# avoid having to look this attribute up later, since it does not change # avoid having to look this attribute up later, since it does not change
......
...@@ -104,7 +104,7 @@ class User ...@@ -104,7 +104,7 @@ class User
def mark_as_read(thread) def mark_as_read(thread)
read_state = read_states.find_or_create_by(course_id: thread.course_id) read_state = read_states.find_or_create_by(course_id: thread.course_id)
read_state.last_read_times[thread.id] = Time.now.utc read_state.last_read_times[thread.id.to_s] = Time.now.utc
read_state.save read_state.save
end end
......
require 'new_relic/agent/method_tracer'

# Builds JSON-ready hashes for a batch of CommentThreads.
#
# All cross-thread aggregates (per-user read dates, unread comment counts,
# and endorsement flags) are computed up front in load_aggregates, so that
# serializing N threads does not issue N separate queries per piece of state.
class ThreadPresenter

  # comment_threads - Array of CommentThread documents to present.
  # user            - the requesting User, or nil when the caller is anonymous.
  # course_id       - course scope used to locate the user's read state.
  def initialize(comment_threads, user, course_id)
    @threads = comment_threads
    @user = user
    @course_id = course_id
    @read_dates = nil # Hash, sparse, thread_key (str) => date
    @unread_counts = nil # Hash, sparse, thread_key (str) => int
    @endorsed_threads = nil # Hash, sparse, thread_key (str) => bool
    load_aggregates
  end

  # Precomputes the per-thread state consulted by to_hash:
  #   @read_dates       - when @user last read each thread (from their ReadState)
  #   @endorsed_threads - true for threads containing at least one endorsed comment
  #   @unread_counts    - comments by other authors since the user's last read
  # All three hashes are sparse: absent keys fall back to defaults in to_hash.
  def load_aggregates
    @read_dates = {}
    if @user
      read_state = @user.read_states.where(:course_id => @course_id).first
      if read_state
        @read_dates = read_state["last_read_times"].to_hash
      end
    end
    @unread_counts = {}
    @endorsed_threads = {}
    thread_ids = @threads.collect {|t| t._id}
    # Single aggregation pass marks every thread that has an endorsed comment.
    Comment.collection.aggregate(
      {"$match" => {"comment_thread_id" => {"$in" => thread_ids}, "endorsed" => true}},
      {"$group" => {"_id" => "$comment_thread_id"}}
    ).each do |res|
      @endorsed_threads[res["_id"].to_s] = true
    end
    @threads.each do |t|
      thread_key = t._id.to_s
      # Only threads the user has actually read get an entry; unread threads
      # fall back to the full comment_count in to_hash.
      if @read_dates.has_key? thread_key
        # Comments authored by the user are excluded from the unread count.
        # NOTE(review): this calls `where` on the driver-level collection
        # (Comment.collection) rather than on the Comment model; confirm the
        # driver exposes criteria-style `where`, or whether `Comment.where`
        # was intended here.
        @unread_counts[thread_key] = Comment.collection.where(
          :comment_thread_id => t._id,
          :author_id => {"$ne" => @user.id},
          :updated_at => {"$gte" => @read_dates[thread_key]}
        ).count
      end
    end
  end

  # Serializes one thread to a hash annotated with "unread_comments_count",
  # "read", and "endorsed" for @user. When with_comments is true, the full
  # nested comment tree is attached under "children".
  def to_hash thread, with_comments=false
    thread_key = thread._id.to_s
    h = thread.to_hash
    if @user
      # Missing entry in @unread_counts means the user never read the thread,
      # so every comment counts as unread.
      cnt_unread = @unread_counts.fetch(thread_key, thread.comment_count)
      h["unread_comments_count"] = cnt_unread
      h["read"] = @read_dates.has_key?(thread_key) && @read_dates[thread_key] >= thread.updated_at
    else
      # Anonymous callers see everything as unread.
      h["unread_comments_count"] = thread.comment_count
      h["read"] = false
    end
    h["endorsed"] = @endorsed_threads.fetch(thread_key, false)
    h = merge_comments_recursive(h) if with_comments
    h
  end

  # Serializes every thread passed to the constructor; see to_hash.
  def to_hash_array with_comments=false
    @threads.map {|t| to_hash(t, with_comments)}
  end

  # Rebuilds the nested comment tree for thread_hash with a single query
  # ordered by the materialized-path sort key "sk", attaching it under
  # "children". Comments whose parent is absent from the result set are
  # silently skipped, along with their descendants.
  def merge_comments_recursive thread_hash
    thread_id = thread_hash["id"]
    root = thread_hash = thread_hash.merge("children" => [])
    # Content model is used deliberately here (instead of Comment), to work with sparse index
    rs = Content.where(comment_thread_id: thread_id).order_by({"sk"=> 1})
    ancestry = [thread_hash]
    # weave the fetched comments into a single hierarchical doc
    rs.each do | comment |
      thread_hash = comment.to_hash.merge("children" => [])
      parent_id = comment.parent_id || thread_id
      found_parent = false
      while ancestry.length > 0 do
        if parent_id == ancestry.last["id"] then
          # found the children collection to which this comment belongs
          ancestry.last["children"] << thread_hash
          ancestry << thread_hash
          found_parent = true
          break
        else
          # try again with one level back in the ancestry til we find the parent
          ancestry.pop
          next
        end
      end
      if not found_parent
        # if we arrive here, it means a parent_id somewhere in the result set
        # is pointing to an invalid place. reset the ancestry search path.
        ancestry = [root]
      end
    end
    ancestry.first
  end

  # Instrument the hot paths with New Relic method tracers.
  include ::NewRelic::Agent::MethodTracer
  add_method_tracer :load_aggregates
  add_method_tracer :to_hash
  add_method_tracer :to_hash_array
  add_method_tracer :merge_comments_recursive

end
// Forward migration: create the compound indexes required by the thread
// presenter refactor. ensureIndex is idempotent; background:true avoids
// blocking other operations while the indexes build.
db.contents.ensureIndex({ _type: 1, comment_thread_id: 1, author_id: 1, updated_at: 1 }, { background: true })
db.contents.ensureIndex({ comment_thread_id: 1, sk: 1 }, { background: true, sparse: true })
db.contents.ensureIndex({ comment_thread_id: 1, endorsed: 1 }, { background: true, sparse: true })
db.contents.ensureIndex({ _type: 1, course_id: 1, pinned: -1, created_at: -1 }, { background: true })
db.contents.dropIndex({ sk: 1 }) // the new one (created above) supersedes this

// NOTE(review): the statements below exactly undo the ones above (re-create
// { sk: 1 }, drop the four new compound indexes). This reads like a separate
// rollback script concatenated with the forward migration by the diff view —
// confirm these belong in different files before running this as one script.
db.contents.ensureIndex({ sk: 1 }, { background: true, safe: true })
db.contents.dropIndex({ comment_thread_id: 1, updated_at: 1 })
db.contents.dropIndex({ comment_thread_id: 1, sk: 1 })
db.contents.dropIndex({ comment_thread_id: 1, endorsed: 1 })
db.contents.dropIndex({ _type: 1, course_id: 1, pinned: -1, created_at: -1 })
...@@ -55,15 +55,17 @@ describe "app" do ...@@ -55,15 +55,17 @@ describe "app" do
describe "POST /api/v1/:commentable_id/threads" do describe "POST /api/v1/:commentable_id/threads" do
default_params = {title: "Interesting question", body: "cool", course_id: "1", user_id: "1"} default_params = {title: "Interesting question", body: "cool", course_id: "1", user_id: "1"}
it "create a new comment thread for the commentable object" do it "create a new comment thread for the commentable object" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params post '/api/v1/question_1/threads', default_params
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 4 CommentThread.count.should == old_count + 1
CommentThread.where(title: "Interesting question").first.should_not be_nil CommentThread.where(title: "Interesting question").first.should_not be_nil
end end
it "allows anonymous thread" do it "allows anonymous thread" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(anonymous: true) post '/api/v1/question_1/threads', default_params.merge(anonymous: true)
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 4 CommentThread.count.should == old_count + 1
c = CommentThread.where(title: "Interesting question").first c = CommentThread.where(title: "Interesting question").first
c.should_not be_nil c.should_not be_nil
c["anonymous"].should be_true c["anonymous"].should be_true
...@@ -99,9 +101,10 @@ describe "app" do ...@@ -99,9 +101,10 @@ describe "app" do
last_response.status.should == 503 last_response.status.should == 503
end end
it "create a new comment thread with tag" do it "create a new comment thread with tag" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(tags: "a, b, c") post '/api/v1/question_1/threads', default_params.merge(tags: "a, b, c")
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 4 CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first thread = CommentThread.where(title: "Interesting question").first
thread.tags_array.length.should == 3 thread.tags_array.length.should == 3
thread.tags_array.should include "a" thread.tags_array.should include "a"
...@@ -109,9 +112,10 @@ describe "app" do ...@@ -109,9 +112,10 @@ describe "app" do
thread.tags_array.should include "c" thread.tags_array.should include "c"
end end
it "strip spaces in tags" do it "strip spaces in tags" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(tags: " a, b ,c ") post '/api/v1/question_1/threads', default_params.merge(tags: " a, b ,c ")
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 4 CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first thread = CommentThread.where(title: "Interesting question").first
thread.tags_array.length.should == 3 thread.tags_array.length.should == 3
thread.tags_array.should include "a" thread.tags_array.should include "a"
...@@ -119,9 +123,10 @@ describe "app" do ...@@ -119,9 +123,10 @@ describe "app" do
thread.tags_array.should include "c" thread.tags_array.should include "c"
end end
it "accepts [a-z 0-9 + # - .]words, numbers, dashes, spaces but no underscores in tags" do it "accepts [a-z 0-9 + # - .]words, numbers, dashes, spaces but no underscores in tags" do
old_count = CommentThread.count
post '/api/v1/question_1/threads', default_params.merge(tags: "artificial-intelligence, machine-learning, 7-is-a-lucky-number, interesting problem, interesting problems in c++") post '/api/v1/question_1/threads', default_params.merge(tags: "artificial-intelligence, machine-learning, 7-is-a-lucky-number, interesting problem, interesting problems in c++")
last_response.should be_ok last_response.should be_ok
CommentThread.count.should == 4 CommentThread.count.should == old_count + 1
thread = CommentThread.where(title: "Interesting question").first thread = CommentThread.where(title: "Interesting question").first
thread.tags_array.length.should == 5 thread.tags_array.length.should == 5
end end
......
...@@ -48,26 +48,7 @@ describe "app" do ...@@ -48,26 +48,7 @@ describe "app" do
end end
describe "GET /api/v1/users/:user_id/active_threads" do describe "GET /api/v1/users/:user_id/active_threads" do
before(:each) do before(:each) { setup_10_threads }
User.all.delete
Content.all.delete
@users = {}
@threads = {}
@comments = {}
10.times do |i|
author = create_test_user(i+100)
@users["u#{i+100}"] = author
thread = make_thread(author, "t#{i}", "xyz", "pdq")
@threads["t#{i}"] = thread
5.times do |j|
comment = make_comment(author, thread, "t#{i} c#{j}")
@comments["t#{i} c#{j}"] = comment
end
end
@natural_order = 10.times.map {|i| "t#{i}"}
end
def thread_result(user_id, params) def thread_result(user_id, params)
get "/api/v1/users/#{user_id}/active_threads", params get "/api/v1/users/#{user_id}/active_threads", params
...@@ -89,7 +70,7 @@ describe "app" do ...@@ -89,7 +70,7 @@ describe "app" do
rs.first["children"].length.should == 5 rs.first["children"].length.should == 5
end end
it "does not include anonymous threads" do it "does not include anonymous leaves" do
@comments["t0 c4"].anonymous = true @comments["t0 c4"].anonymous = true
@comments["t0 c4"].save! @comments["t0 c4"].save!
rs = thread_result 100, course_id: "xyz" rs = thread_result 100, course_id: "xyz"
...@@ -98,7 +79,7 @@ describe "app" do ...@@ -98,7 +79,7 @@ describe "app" do
rs.first["children"].length.should == 4 rs.first["children"].length.should == 4
end end
it "does not include anonymous-to-peers threads" do it "does not include anonymous-to-peers leaves" do
@comments["t0 c3"].anonymous_to_peers = true @comments["t0 c3"].anonymous_to_peers = true
@comments["t0 c3"].save! @comments["t0 c3"].save!
rs = thread_result 100, course_id: "xyz" rs = thread_result 100, course_id: "xyz"
...@@ -163,11 +144,9 @@ describe "app" do ...@@ -163,11 +144,9 @@ describe "app" do
@actual_comment_count.should == expected_comment_count @actual_comment_count.should == expected_comment_count
end end
# FIXME note the checks on result["num_pages"] are disabled. # TODO: note the checks on result["num_pages"] are disabled.
# turns out there is a bug in GET "#{APIPREFIX}/users/:user_id/active_threads # there is a bug in GET "#{APIPREFIX}/users/:user_id/active_threads
# and this value is often wrong. however, since these tests are being # and this value is often wrong.
# created as a precursor to refactoring, the bug will not be fixed, and the
# checks will stay disabled.
context "pagination" do context "pagination" do
def thread_result_page (page, per_page) def thread_result_page (page, per_page)
get "/api/v1/users/100/active_threads", course_id: "xyz", page: page, per_page: per_page get "/api/v1/users/100/active_threads", course_id: "xyz", page: page, per_page: per_page
...@@ -202,13 +181,14 @@ describe "app" do ...@@ -202,13 +181,14 @@ describe "app" do
it "orders correctly across pages" do it "orders correctly across pages" do
expected_order = @threads.keys.reverse expected_order = @threads.keys.reverse
actual_order = [] actual_order = []
pp = 3 # per page per_page = 3
4.times do |i| num_pages = (@threads.length + per_page - 1) / per_page
n = i + 1 num_pages.times do |i|
result = thread_result_page(n, pp) page = i + 1
result["collection"].length.should == [n * pp, 10].min - ((n - 1) * pp) result = thread_result_page(page, per_page)
#result["num_pages"].should == 4 result["collection"].length.should == (page * per_page <= @threads.length ? per_page : @threads.length % per_page)
result["page"].should == n #result["num_pages"].should == num_pages
result["page"].should == page
actual_order += result["collection"].map {|v| v["title"]} actual_order += result["collection"].map {|v| v["title"]}
end end
actual_order.should == expected_order actual_order.should == expected_order
......
...@@ -45,80 +45,6 @@ describe CommentThread do ...@@ -45,80 +45,6 @@ describe CommentThread do
end end
end end
# Specs for CommentThread#to_hash with recursive=true: verifies that the
# flat, sk-ordered comment result set is woven back into a correctly
# nested "children" hierarchy.
context "#to_hash (recursive=true)" do

  before(:each) { @cid_seq = 10 }

  # Returns obj with :each stubbed to yield the members of ary in order,
  # simulating a Mongoid result set without touching the database.
  def stub_each_from_array(obj, ary)
    stub = obj.stub(:each)
    ary.each {|v| stub = stub.and_yield(v)}
    obj
  end

  # Stubs Comment.where so that serializing `thread` recursively sees
  # exactly the comments in ary, in that order.
  def set_comment_results(thread, ary)
    # avoids an unrelated expectation error
    thread.stub(:endorsed?).and_return(true)
    rs = stub_each_from_array(double("rs"), ary)
    criteria = double("criteria")
    criteria.stub(:order_by).and_return(rs)
    Comment.stub(:where).with(comment_thread_id: thread.id).and_return(criteria)
  end

  # Builds an in-memory Comment with a sequential id, optionally parented.
  def make_comment(parent=nil)
    c = Comment.new
    c.id = @cid_seq
    @cid_seq += 1
    c.parent_id = parent.nil? ? nil : parent.id
    c
  end

  it "nests comments in the correct order" do
    thread = CommentThread.new
    thread.id = 1
    thread.author = User.new

    c0 = make_comment()
    c00 = make_comment(c0)
    c01 = make_comment(c0)
    c010 = make_comment(c01)
    set_comment_results(thread, [c0, c00, c01, c010])

    h = thread.to_hash({:recursive => true})
    h["children"].size.should == 1 # c0
    h["children"][0]["id"].should == c0.id
    h["children"][0]["children"].size.should == 2 # c00, c01
    h["children"][0]["children"][0]["id"].should == c00.id
    h["children"][0]["children"][1]["id"].should == c01.id
    h["children"][0]["children"][1]["children"].size.should == 1 # c010
    h["children"][0]["children"][1]["children"][0]["id"].should == c010.id
    h["comments_count"].should == 4
  end

  it "handles orphaned child comments gracefully" do
    thread = CommentThread.new
    thread.id = 33
    thread.author = User.new

    c0 = make_comment()
    c00 = make_comment(c0)
    c000 = make_comment(c00)
    c1 = make_comment()
    c10 = make_comment(c1)
    c11 = make_comment(c1)
    c111 = make_comment(c11)
    # lose c0 and c11 from result set. their descendants should
    # be silently skipped over.
    set_comment_results(thread, [c00, c000, c1, c10, c111])

    h = thread.to_hash({:recursive => true})
    h["children"].size.should == 1 # c1
    h["children"][0]["id"].should == c1.id
    h["children"][0]["children"].size.should == 1 # c10
    h["children"][0]["children"][0]["id"].should == c10.id
    h["comments_count"].should == 2
  end

end
context "sorting" do context "sorting" do
......
require 'spec_helper'

# Specs for ThreadPresenter#to_hash_array with comments included: verifies
# that the flat, sk-ordered result set is woven back into a correctly
# nested "children" hierarchy, including graceful handling of orphans.
describe ThreadPresenter do

  context "#to_hash_array" do

    before(:each) { @cid_seq = 10 }

    # Returns obj with :each stubbed to yield the members of ary in order,
    # simulating a Mongoid result set without touching the database.
    def stub_each_from_array(obj, ary)
      stub = obj.stub(:each)
      ary.each {|v| stub = stub.and_yield(v)}
      obj
    end

    # Stubs the comment lookup so that presenting `thread` with comments
    # sees exactly the comments in ary, in that order.
    def set_comment_results(thread, ary)
      # avoids an unrelated expectation error
      thread.stub(:endorsed?).and_return(true)
      rs = stub_each_from_array(double("rs"), ary)
      criteria = double("criteria")
      criteria.stub(:order_by).and_return(rs)
      # stub Content, not Comment, because that's the model we will be querying against
      Content.stub(:where).with(comment_thread_id: thread.id).and_return(criteria)
    end

    # Builds an in-memory Comment with a sequential id, optionally parented.
    def make_comment(parent=nil)
      c = Comment.new
      c.id = @cid_seq
      @cid_seq += 1
      c.parent_id = parent.nil? ? nil : parent.id
      c
    end

    it "nests comments in the correct order" do
      thread = CommentThread.new
      thread.id = 1
      thread.author = User.new

      c0 = make_comment()
      c00 = make_comment(c0)
      c01 = make_comment(c0)
      c010 = make_comment(c01)
      set_comment_results(thread, [c0, c00, c01, c010])

      h = ThreadPresenter.new([thread], nil, thread.course_id).to_hash_array(true).first
      h["children"].size.should == 1 # c0
      h["children"][0]["id"].should == c0.id
      h["children"][0]["children"].size.should == 2 # c00, c01
      h["children"][0]["children"][0]["id"].should == c00.id
      h["children"][0]["children"][1]["id"].should == c01.id
      h["children"][0]["children"][1]["children"].size.should == 1 # c010
      h["children"][0]["children"][1]["children"][0]["id"].should == c010.id
    end

    it "handles orphaned child comments gracefully" do
      thread = CommentThread.new
      thread.id = 33
      thread.author = User.new

      c0 = make_comment()
      c00 = make_comment(c0)
      c000 = make_comment(c00)
      c1 = make_comment()
      c10 = make_comment(c1)
      c11 = make_comment(c1)
      c111 = make_comment(c11)
      # lose c0 and c11 from result set. their descendants should
      # be silently skipped over.
      set_comment_results(thread, [c00, c000, c1, c10, c111])

      h = ThreadPresenter.new([thread], nil, thread.course_id).to_hash_array(true).first
      h["children"].size.should == 1 # c1
      h["children"][0]["id"].should == c1.id
      h["children"][0]["children"].size.should == 1 # c10
      h["children"][0]["children"][0]["id"].should == c10.id
    end

  end
end
...@@ -170,6 +170,15 @@ end ...@@ -170,6 +170,15 @@ end
# this method is used to test results produced using the helper function handle_threads_query # this method is used to test results produced using the helper function handle_threads_query
# which is used in multiple areas of the API # which is used in multiple areas of the API
def check_thread_result(user, thread, json_response, check_comments=false) def check_thread_result(user, thread, json_response, check_comments=false)
expected_keys = %w(id title body course_id commentable_id created_at updated_at)
expected_keys += %w(anonymous anonymous_to_peers at_position_list closed user_id)
expected_keys += %w(username votes abuse_flaggers tags type group_id pinned)
expected_keys += %w(comments_count unread_comments_count read endorsed)
# the "children" key is not always present - depends on the invocation + test use case.
# exclude it from this check - if check_comments is set, we'll assert against it later
actual_keys = json_response.keys - ["children"]
actual_keys.sort.should == expected_keys.sort
json_response["title"].should == thread.title json_response["title"].should == thread.title
json_response["body"].should == thread.body json_response["body"].should == thread.body
json_response["course_id"].should == thread.course_id json_response["course_id"].should == thread.course_id
...@@ -192,10 +201,12 @@ def check_thread_result(user, thread, json_response, check_comments=false) ...@@ -192,10 +201,12 @@ def check_thread_result(user, thread, json_response, check_comments=false)
json_response["type"].should == "thread" json_response["type"].should == "thread"
json_response["group_id"].should == thread.group_id json_response["group_id"].should == thread.group_id
json_response["pinned"].should == thread.pinned? json_response["pinned"].should == thread.pinned?
json_response["endorsed"].should == (thread.endorsed? or thread.comments.any? {|c| c.endorsed?}) json_response["endorsed"].should == thread.endorsed?
if check_comments if check_comments
# warning - this only checks top-level comments and may not handle all possible sorting scenarios # warning - this only checks top-level comments and may not handle all possible sorting scenarios
# proper composition / ordering of the children is currently covered in models/comment_thread_spec.
# it also does not check for author-only results (e.g. user active threads view) # it also does not check for author-only results (e.g. user active threads view)
# author-only is covered by a test in api/user_spec.
root_comments = thread.root_comments.sort(_id:1).to_a root_comments = thread.root_comments.sort(_id:1).to_a
json_response["children"].should_not be_nil json_response["children"].should_not be_nil
json_response["children"].length.should == root_comments.length json_response["children"].length.should == root_comments.length
...@@ -212,19 +223,21 @@ def check_thread_result(user, thread, json_response, check_comments=false) ...@@ -212,19 +223,21 @@ def check_thread_result(user, thread, json_response, check_comments=false)
json_response["read"].should == false json_response["read"].should == false
else else
expected_unread_cnt = thread.comments.length # initially assume nothing has been read expected_unread_cnt = thread.comments.length # initially assume nothing has been read
read_states = user.read_states.where(course_id: thread.course_id) read_states = user.read_states.where(course_id: thread.course_id).to_a
if read_states.length == 1 if read_states.length == 1
read_date = read_states.first.last_read_times[thread.id] read_date = read_states.first.last_read_times[thread.id.to_s]
if read_date if read_date
thread.comments.each do |c| thread.comments.each do |c|
if c.author != user and c.updated_at < read_date if c.author != user and c.updated_at < read_date
expected_unread_cnt -= 1 expected_unread_cnt -= 1
end end
end end
json_response["read"].should == (read_date >= thread.updated_at)
else
json_response["read"].should == false
end end
end end
json_response["unread_comments_count"].should == expected_unread_cnt json_response["unread_comments_count"].should == expected_unread_cnt
json_response["read"].should == (expected_unread_cnt == 0)
end end
end end
...@@ -237,17 +250,39 @@ def make_thread(author, text, course_id, commentable_id) ...@@ -237,17 +250,39 @@ def make_thread(author, text, course_id, commentable_id)
thread thread
end end
def make_comment(author, obj, text) def make_comment(author, parent, text)
if obj.is_a?(CommentThread) if parent.is_a?(CommentThread)
coll = obj.comments coll = parent.comments
thread = obj thread = parent
else else
coll = obj.children coll = parent.children
thread = obj.comment_thread thread = parent.comment_thread
end end
comment = coll.new(body: text, course_id: obj.course_id) comment = coll.new(body: text, course_id: parent.course_id)
comment.author = author comment.author = author
comment.comment_thread = thread comment.comment_thread = thread
comment.save! comment.save!
comment comment
end end
# Default course id used by the shared thread fixtures below.
# Frozen so no test can accidentally mutate the shared constant.
DFLT_COURSE_ID = "xyz".freeze
# Wipe users/content and populate 10 threads ("t0".."t9"), each authored
# by a distinct user ("u100".."u109") and carrying 5 comments
# ("tI c0".."tI c4"). Fills the @users, @threads and @comments lookup
# hashes keyed by those labels, and records @default_order (newest first).
def setup_10_threads
  User.all.delete
  Content.all.delete
  @threads = {}
  @comments = {}
  @users = {}
  (0...10).each do |i|
    author = create_test_user(i + 100)
    @users["u#{i + 100}"] = author
    thread = make_thread(author, "t#{i}", DFLT_COURSE_ID, "pdq")
    @threads["t#{i}"] = thread
    (0...5).each do |j|
      label = "t#{i} c#{j}"
      @comments[label] = make_comment(author, thread, label)
    end
  end
  @default_order = (0...10).map { |i| "t#{i}" }.reverse
end
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment