Commit 50a06762 by Rocky Duan

unread count tracking

Conflicts:

	api/users.rb
	models/user.rb
parent bf8d4d5b
@@ -3,7 +3,7 @@ get "#{APIPREFIX}/threads" do # retrieve threads by course
 end
 
 get "#{APIPREFIX}/threads/:thread_id" do |thread_id|
-  CommentThread.find(thread_id).to_hash(recursive: bool_recursive).to_json
+  CommentThread.find(thread_id).to_hash(recursive: bool_recursive, user_id: params["user_id"]).to_json
 end
 
 put "#{APIPREFIX}/threads/:thread_id" do |thread_id|
......
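For illustration, a sketch of how a client might call the updated endpoint once user_id is threaded through (assuming APIPREFIX expands to /api/v1 and the service runs on Sinatra's default port; the host, thread id, and user id are hypothetical, not taken from this diff):

require "net/http"
require "json"

# Sketch only: host, port, and ids are made up for illustration.
uri = URI("http://localhost:4567/api/v1/threads/50a0676200000000000000aa?user_id=42")
thread = JSON.parse(Net::HTTP.get(uri))

thread["comments_count"]         # total replies, as before
thread["unread_comments_count"]  # replies user 42 has not read yet
thread["viewed"]                 # whether user 42 has seen the thread since its last update

Without user_id the handler still responds, but falls back to the non-personalized counts (every reply counted as unread, viewed false), per the else branch added in models/comment_thread.rb below.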
@@ -40,7 +40,7 @@ get "#{APIPREFIX}/search/threads" do
   num_pages = results.total_pages
   page = [num_pages, [1, page].max].min
   {
-    collection: results.map{|t| CommentThread.search_result_to_hash(t, recursive: bool_recursive)},
+    collection: results.map{|t| CommentThread.search_result_to_hash(t, recursive: bool_recursive, user_id: params[:user_id])},
     num_pages: num_pages,
     page: page,
   }.to_json
......
@@ -43,11 +43,27 @@ get "#{APIPREFIX}/users/:user_id/active_threads" do |user_id|
 end
 
-put "#{APIPREFIX}/users/:user_id" do |user_id|
-  user = User.where(external_id: user_id).first
-  if not user
-    user = User.new(external_id: user_id)
-  end
+put "#{APIPREFIX}/users/:user_id/read_states" do |user_id|
+  user = User.find_or_create_by(external_id: user_id)
+  read_state = user.read_states.find_or_create_by(course_id: params["course_id"])
+  # support updating single thread data or bulk update
+  if params["last_read_time"] and params["thread_id"]
+    read_state.last_read_time = read_state.last_read_time.with_indifferent_access.merge({
+      params["thread_id"] => params["last_read_time"]
+    })
+  elsif params["read_states"]
+    read_state.last_read_time = read_state.last_read_time.with_indifferent_access.merge(params["read_states"])
+  end
+  read_state.save
+  if read_state.errors.any?
+    error 400, read_state.errors.full_messages.to_json
+  else
+    read_state.to_hash.to_json
+  end
+end
+
+put "#{APIPREFIX}/users/:user_id" do |user_id|
+  user = User.find_or_create_by(external_id: user_id)
   user.update_attributes(params.slice(*%w[username email default_sort_key]))
   if user.errors.any?
     error 400, user.errors.full_messages.to_json
......
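The new read_states route accepts either a single-thread update (thread_id plus last_read_time) or a bulk hash under read_states, and either form is merged into the per-course ReadState document. A sketch of both forms, assuming APIPREFIX expands to /api/v1; the host, course id, and thread ids are hypothetical:

require "net/http"
require "time"

# Sketch only: host, course_id, and thread ids are made up for illustration.
uri  = URI("http://localhost:4567/api/v1/users/42/read_states")
http = Net::HTTP.new(uri.host, uri.port)
now  = Time.now.utc.iso8601

# Single-thread form: record that user 42 read one thread just now.
single = Net::HTTP::Put.new(uri.path)
single.set_form_data(
  "course_id"      => "demo/course/2013",
  "thread_id"      => "50a0676200000000000000aa",
  "last_read_time" => now
)
puts http.request(single).body

# Bulk form: merge several thread_id => last_read_time pairs in one request.
bulk = Net::HTTP::Put.new(uri.path)
bulk.set_form_data(
  "course_id"                             => "demo/course/2013",
  "read_states[50a0676200000000000000aa]" => now,
  "read_states[50a0676200000000000000ab]" => now
)
puts http.request(bulk).body

Either way the handler replies with the saved read state, or a 400 with validation messages (for example when course_id is missing, since ReadState validates its presence).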
@@ -16,6 +16,10 @@ helpers do
     @comment ||= Comment.find(params[:comment_id])
   end
 
+  def profile
+    @profile ||= user.profiles.find_or_create_by(course_id: params["course_id"])
+  end
+
   def source
     @source ||= case params["source_type"]
     when "user"
@@ -79,7 +83,7 @@ helpers do
     cached_results = Sinatra::Application.cache.get(memcached_key)
     if cached_results
       return {
-        collection: cached_results[:collection_ids].map{|id| CommentThread.find(id).to_hash(recursive: bool_recursive)},
+        collection: cached_results[:collection_ids].map{|id| CommentThread.find(id).to_hash(recursive: bool_recursive, user_id: params["user_id"])},
        num_pages: cached_results[:num_pages],
        page: cached_results[:page],
      }.to_json
@@ -119,7 +123,7 @@ helpers do
       Sinatra::Application.cache.set(memcached_key, cached_results, CommentService.config[:cache_timeout][:threads_query].to_i)
     end
     {
-      collection: paged_comment_threads.map{|t| t.to_hash(recursive: bool_recursive)},
+      collection: paged_comment_threads.map{|t| t.to_hash(recursive: bool_recursive, user_id: params["user_id"])},
       num_pages: num_pages,
       page: page,
     }.to_json
......
@@ -165,6 +165,34 @@ class CommentThread < Content
     if params[:recursive]
       doc = doc.merge("children" => root_comments.map{|c| c.to_hash(recursive: true)})
     end
+    if params[:user_id]
+      user = User.find_or_create_by(external_id: params[:user_id])
+      read_state = user.read_states.where(course_id: self.course_id).first
+      last_read_time = Time.parse(read_state.last_read_time[self.id.to_s]) if read_state
+      # comments created by the user are excluded in the count
+      # this is rather like a hack but it avoids the following situation:
+      # when you reply to a thread and while you are editing,
+      # other people also replied to the thread. Now if we simply
+      # update the last_read_time, then the other people's replies
+      # will not be included in the unread_count; if we leave it
+      # that way, then your own comment will be included in the
+      # unread count
+      if last_read_time
+        unread_count = self.comments.where(
+          :updated_at => {:$gte => last_read_time},
+          :author_id => {:$ne => params[:user_id]},
+        ).count
+        viewed = last_read_time >= self.updated_at
+      else
+        unread_count = self.comments.where(:author_id => {:$ne => params[:user_id]}).count
+        viewed = false
+      end
+      doc = doc.merge("unread_comments_count" => unread_count)
+               .merge("viewed" => viewed)
+    else
+      doc = doc.merge("unread_comments_count" => comments.count)
+               .merge("viewed" => false)
+    end
     doc = doc.merge("comments_count" => comments.count)
     doc
   end
......
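The bookkeeping added above reduces to two rules: a reply counts as unread if it was updated at or after the stored last_read_time and was not written by the reader, and a thread counts as viewed only if the stored last_read_time is not older than the thread's updated_at. A small self-contained sketch with made-up timestamps and ids (not part of the diff) that mirrors those rules:

require "time"

reader_id = "42"

# Shape of ReadState#last_read_time: thread id (string) => ISO8601 timestamp.
last_read_time = { "50a0676200000000000000aa" => "2012-11-12T08:00:00Z" }

last_read         = Time.parse(last_read_time["50a0676200000000000000aa"])
thread_updated_at = Time.parse("2012-11-12T09:30:00Z")

replies = [
  { author_id: "42", updated_at: Time.parse("2012-11-12T09:00:00Z") }, # reader's own reply: excluded
  { author_id: "7",  updated_at: Time.parse("2012-11-12T09:30:00Z") }, # someone else, after last read
  { author_id: "7",  updated_at: Time.parse("2012-11-12T07:00:00Z") }  # someone else, before last read
]

# Mirrors the Mongo criteria: updated_at >= last_read AND author_id != reader.
unread_count = replies.count { |c| c[:updated_at] >= last_read && c[:author_id] != reader_id }
viewed       = last_read >= thread_updated_at

puts unread_count  # => 1
puts viewed        # => false, the thread changed after the recorded read time

Excluding the reader's own replies is what lets the endpoint update last_read_time when the reader posts without hiding other people's replies that arrived while the reader was typing, which is exactly the situation the long comment above describes.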
@@ -8,6 +8,7 @@ class User
   field :email, type: String
   field :default_sort_key, type: String, default: "date"
+  embeds_many :read_states
   has_many :comments, inverse_of: :author
   has_many :comment_threads, inverse_of: :author
   has_many :activities, class_name: "Notification", inverse_of: :actor
@@ -102,3 +103,16 @@ class User
   end
 end
 
+class ReadState
+  include Mongoid::Document
+
+  field :course_id, type: String
+  field :last_read_time, type: Hash, default: {}
+  embedded_in :user
+
+  validates :course_id, uniqueness: true, presence: true
+
+  def to_hash
+    to_json
+  end
+end
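ReadState is embedded in User, so each user carries at most one read state per course, with the per-thread timestamps kept in its last_read_time hash. A sketch of how the pieces fit together from a console with the service's Mongoid models loaded; the external_id, course_id, and thread id are hypothetical:

require "time"

# Sketch only: ids and course key are made up for illustration.
user       = User.find_or_create_by(external_id: "42")
read_state = user.read_states.find_or_create_by(course_id: "demo/course/2013")

# last_read_time is a plain Hash: thread id (string) => last-read timestamp.
read_state.last_read_time = read_state.last_read_time.merge(
  "50a0676200000000000000aa" => Time.now.utc.iso8601
)
read_state.save

# CommentThread#to_hash can then report per-reader counts for that user.
thread = CommentThread.find("50a0676200000000000000aa")
thread.to_hash(user_id: "42")["unread_comments_count"]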