Commit c4f5a1f2 by Greg Price

Merge pull request #105 from edx/gprice/search-spell-correction

Refactor search, fix a bug, and implement spelling correction
parents 4d01f169 5e22e139
require 'new_relic/agent/method_tracer' require 'new_relic/agent/method_tracer'
get "#{APIPREFIX}/search/threads" do get "#{APIPREFIX}/search/threads" do
local_params = params # Necessary for params to be available inside blocks
sort_criteria = get_sort_criteria(local_params)
sort_key_mapper = { search_text = local_params["text"]
"date" => :created_at, if !search_text || !sort_criteria
"activity" => :last_activity_at,
"votes" => :votes_point,
"comments" => :comment_count,
}
sort_order_mapper = {
"desc" => :desc,
"asc" => :asc,
}
sort_key = sort_key_mapper[params["sort_key"]]
sort_order = sort_order_mapper[params["sort_order"]]
sort_keyword_valid = (!params["sort_key"] && !params["sort_order"] || sort_key && sort_order)
if !params["text"] || !sort_keyword_valid
{}.to_json {}.to_json
else else
page = (params["page"] || DEFAULT_PAGE).to_i page = (local_params["page"] || DEFAULT_PAGE).to_i
per_page = (params["per_page"] || DEFAULT_PER_PAGE).to_i per_page = (local_params["per_page"] || DEFAULT_PER_PAGE).to_i
# for multi commentable searching
params["commentable_ids"] = params["commentable_ids"].split(',') if params["commentable_ids"] # Because threads and comments are currently separate unrelated documents in
options = { # Elasticsearch, we must first query for all matching documents, then
sort_key: sort_key, # extract the set of thread ids, and then sort the threads by the specified
sort_order: sort_order, # criteria and paginate. For performance reasons, we currently limit the
page: page, # number of documents considered (ordered by update recency), which means
per_page: per_page, # that matching threads can be missed if the search terms are very common.
}
# Run a single Elasticsearch query (via Tire) for the given text and return
# the Set of thread ids that match, either directly (comment_thread hits) or
# through a matching comment (comment hits map to their parent thread).
# Captures local_params from the enclosing request scope for filtering.
get_matching_thread_ids = lambda do |search_text|
self.class.trace_execution_scoped(["Custom/get_search_threads/es_search"]) do
search = Tire.search Content::ES_INDEX_NAME do
query do
# Require every search term to appear in the title or body.
match [:title, :body], search_text, :operator => "AND"
filtered do
# Each filter is applied only when the corresponding param is present.
filter :term, :commentable_id => local_params["commentable_id"] if local_params["commentable_id"]
filter :terms, :commentable_id => local_params["commentable_ids"].split(",") if local_params["commentable_ids"]
filter :term, :course_id => local_params["course_id"] if local_params["course_id"]
if local_params["group_id"]
# Match ungrouped content OR content in the requested group.
filter :or, [
{:not => {:exists => {:field => :group_id}}},
{:term => {:group_id => local_params["group_id"]}}
]
end
end
end
# Most-recently-updated documents are considered first, because the result
# set is truncated by the size limit below.
sort do
by "updated_at", "desc"
end
# Cap the number of documents examined; very common terms can therefore
# miss older matching threads (documented trade-off).
size CommentService.config["max_deep_search_comment_count"].to_i
end
thread_ids = Set.new
search.results.each do |content|
case content.type
when "comment_thread"
thread_ids.add(content.id)
when "comment"
# A matching comment contributes its parent thread.
thread_ids.add(content.comment_thread_id)
end
end
thread_ids
end
end
result_hash = CommentThread.perform_search(params, options) # Sadly, Elasticsearch does not have a facility for computing suggestions
results = result_hash[:results] # with respect to a filter. It would be expensive to determine the best
total_results = result_hash[:total_results] # suggestion with respect to our filter parameters, so we simply re-query
# with the top suggestion. If that has no results, then we return no results
# and no correction.
thread_ids = get_matching_thread_ids.call(search_text)
corrected_text = nil
if thread_ids.empty?
suggest = Tire.suggest Content::ES_INDEX_NAME do
suggestion "" do
text search_text
phrase :_all
end
end
corrected_text = suggest.results.texts.first
thread_ids = get_matching_thread_ids.call(corrected_text) if corrected_text
corrected_text = nil if thread_ids.empty?
end
if page > results.total_pages #TODO find a better way for this results = nil
result_hash = CommentThread.perform_search(params, options.merge(page: results.total_pages)) self.class.trace_execution_scoped(["Custom/get_search_threads/mongo_sort_page"]) do
results = result_hash[:results] results = CommentThread.
total_results = result_hash[:total_results] where(:id.in => thread_ids.to_a).
order_by(sort_criteria).
page(page).
per(per_page).
to_a
end end
total_results = thread_ids.size
num_pages = (total_results + per_page - 1) / per_page
if results.length == 0 if results.length == 0
collection = [] collection = []
else else
pres_threads = ThreadSearchResultsPresenter.new( pres_threads = ThreadListPresenter.new(
results, results,
params[:user_id] ? user : nil, local_params[:user_id] ? user : nil,
params[:course_id] || results.first.course_id local_params[:course_id] || results.first.course_id
) )
collection = pres_threads.to_hash collection = pres_threads.to_hash
end end
num_pages = results.total_pages
page = [num_pages, [1, page].max].min
json_output = nil json_output = nil
self.class.trace_execution_scoped(['Custom/get_search_threads/json_serialize']) do self.class.trace_execution_scoped(['Custom/get_search_threads/json_serialize']) do
json_output = { json_output = {
collection: collection, collection: collection,
corrected_text: corrected_text,
total_results: total_results, total_results: total_results,
num_pages: num_pages, num_pages: num_pages,
page: page, page: page,
......
...@@ -142,34 +142,15 @@ helpers do ...@@ -142,34 +142,15 @@ helpers do
comment_threads = comment_threads.in(commentable_id: params[:commentable_ids].split(",")) comment_threads = comment_threads.in(commentable_id: params[:commentable_ids].split(","))
end end
sort_key_mapper = { sort_criteria = get_sort_criteria(params)
"date" => :created_at,
"activity" => :last_activity_at,
"votes" => :"votes.point",
"comments" => :comment_count,
}
sort_order_mapper = {
"desc" => :desc,
"asc" => :asc,
}
sort_key = sort_key_mapper[params["sort_key"] || "date"] if not sort_criteria
sort_order = sort_order_mapper[params["sort_order"] || "desc"]
sort_keyword_valid = (!params["sort_key"] && !params["sort_order"] || sort_key && sort_order)
if not sort_keyword_valid
{}.to_json {}.to_json
else else
page = (params["page"] || DEFAULT_PAGE).to_i page = (params["page"] || DEFAULT_PAGE).to_i
per_page = (params["per_page"] || DEFAULT_PER_PAGE).to_i per_page = (params["per_page"] || DEFAULT_PER_PAGE).to_i
order_clause = "pinned DESC, #{sort_key} #{sort_order}" comment_threads = comment_threads.order_by(sort_criteria)
if ![:created_at, :last_activity_at].include? sort_key
# make sort order predictable when preceding sorts are non-unique
order_clause = "#{order_clause}, created_at DESC"
end
comment_threads = comment_threads.order_by(order_clause)
num_pages = [1, (comment_threads.count / per_page.to_f).ceil].max num_pages = [1, (comment_threads.count / per_page.to_f).ceil].max
page = [num_pages, [1, page].max].min page = [num_pages, [1, page].max].min
# actual query happens here (by doing to_a) # actual query happens here (by doing to_a)
...@@ -198,6 +179,35 @@ helpers do ...@@ -198,6 +179,35 @@ helpers do
end end
end end
# Translate the "sort_key"/"sort_order" query params into sort criteria
# suitable for passing to Mongoid's order_by: a list of [field, direction]
# pairs. Pinned threads always sort first, and a created_at tiebreaker is
# appended whenever the primary key is not itself effectively unique.
# Returns nil if either param is unrecognized.
def get_sort_criteria(params)
  key_for = {
    "date" => :created_at,
    "activity" => :last_activity_at,
    "votes" => :"votes.point",
    "comments" => :comment_count,
  }
  order_for = { "desc" => :desc, "asc" => :asc }

  key = key_for[params["sort_key"] || "date"]
  order = order_for[params["sort_order"] || "desc"]
  return nil unless key && order

  criteria = [[:pinned, :desc], [key, order]]
  # Vote/comment counts are frequently tied; keep the ordering predictable.
  criteria << [:created_at, :desc] unless [:created_at, :last_activity_at].include?(key)
  criteria
end
def notifications_by_date_range_and_user_ids start_date_time, end_date_time, user_ids def notifications_by_date_range_and_user_ids start_date_time, end_date_time, user_ids
#given a date range and a user, find all of the notifiable content #given a date range and a user, find all of the notifiable content
#key by thread id, and return notification messages for each user #key by thread id, and return notification messages for each user
......
...@@ -76,107 +76,6 @@ class CommentThread < Content ...@@ -76,107 +76,6 @@ class CommentThread < Content
c c
end end
# Full-text search over threads and comments, merged into a single paginated
# set of threads. Returns {results: <Tire results page>, total_results: <Integer>}.
# Strategy: (1) search threads by title/body, (2) search comments by body and
# map hits to their parent thread ids via Mongo, (3) union the two id sets and
# run a final ES query to fetch/sort/paginate the threads.
def self.perform_search(params, options={})
page = [1, options[:page] || 1].max
per_page = options[:per_page] || 20
sort_key = options[:sort_key]
sort_order = options[:sort_order]
# Example request:
# GET /api/v1/search/threads?user_id=1&recursive=False&sort_key=date&text=response
#   &sort_order=desc&course_id=HarvardX%2FHLS1xD%2FCopyright&per_page=20&api_key=...&page=1
#KChugh - Unfortunately, there's no algorithmically nice way to handle pagination with
#stitching together Comments and CommentThreads, because there is no deterministic relationship
#between the ordinality of comments and threads.
#the best solution is to find all of the thread ids for matching comment hits, and union them
#with the comment thread query, however, Tire does not support ORing a query key with a term filter
#so the 3rd best solution is to run two Tire searches (3 actually, one to query the comments, one to query the threads based on
#thread ids and the original thread search) and merge the results, uniquifying the results in the process.
#so first, find the comment threads associated with comments that hit the query
search = Tire::Search::Search.new Content::ES_INDEX_NAME
search.query {|query| query.match [:title, :body], params["text"]} if params["text"]
# Whole-field highlight fragments, wrapped in <highlight> tags for the presenter.
search.highlight({title: { number_of_fragments: 0 } } , {body: { number_of_fragments: 0 } }, options: { tag: "<highlight>" })
search.filter(:type, value: 'comment_thread')
search.filter(:term, commentable_id: params["commentable_id"]) if params["commentable_id"]
search.filter(:terms, commentable_id: params["commentable_ids"]) if params["commentable_ids"]
search.filter(:term, course_id: params["course_id"]) if params["course_id"]
if params["group_id"]
# Match ungrouped threads OR threads in the requested group.
search.filter :or, [
{:not => {:exists => {:field => :group_id}}},
{:term => {:group_id => params["group_id"]}}
]
end
search.sort {|sort| sort.by sort_key, sort_order} if sort_key && sort_order #TODO should have search option 'auto sort or sth'
#again, b/c there is no relationship in ordinality, we cannot paginate if it's a text query
results = search.results
# Second pass: search comment bodies so threads can be found via their comments.
search = Tire::Search::Search.new Content::ES_INDEX_NAME
search.query {|query| query.match :body, params["text"]} if params["text"]
search.filter(:type, value: 'comment')
search.filter(:term, course_id: params["course_id"]) if params["course_id"]
search.size CommentService.config["max_deep_search_comment_count"].to_i
#unfortunately, we cannot paginate here, b/c the ordinality of comments is
#totally unrelated to that of threads
c_results = comment_ids = comments = thread_ids = nil
# NOTE(review): inside a class method `self.class` is `Class`, while the
# MethodTracer mixin is included into this class's singleton below —
# verify this receiver actually resolves to a tracer.
self.class.trace_execution_scoped(['Custom/perform_search/collect_comment_search_results']) do
c_results = search.results
comment_ids = c_results.collect{|c| c.id}.uniq
end
self.class.trace_execution_scoped(['Custom/perform_search/collect_comment_thread_ids']) do
# Round-trip through Mongo to resolve parent thread ids for comment hits.
comments = Comment.where(:id.in => comment_ids)
thread_ids = comments.collect{|c| c.comment_thread_id.to_s}
end
#thread_ids = c_results.collect{|c| c.comment_thread_id}
#as soon as we can add comment thread id to the ES index, via Tire upgrade, we'll
#use ES instead of mongo to collect the thread ids
#use the elasticsearch index instead to avoid DB hit
self.class.trace_execution_scoped(['Custom/perform_search/collect_unique_thread_ids']) do
original_thread_ids = results.collect{|r| r.id}
#now add the original search thread ids
thread_ids += original_thread_ids
thread_ids = thread_ids.uniq
end
#now run one more search to harvest the threads and filter by group
search = Tire::Search::Search.new Content::ES_INDEX_NAME
search.filter(:type, value: 'comment_thread')
search.filter(:terms, :thread_id => thread_ids)
search.filter(:terms, commentable_id: params["commentable_ids"]) if params["commentable_ids"]
search.filter(:term, course_id: params["course_id"]) if params["course_id"]
# Pagination happens only in this final pass, over the merged id set.
search.size per_page
search.from per_page * (page - 1)
if params["group_id"]
search.filter :or, [
{:not => {:exists => {:field => :group_id}}},
{:term => {:group_id => params["group_id"]}}
]
end
search.sort {|sort| sort.by sort_key, sort_order} if sort_key && sort_order
# total_results reflects the full merged id set, not just the returned page.
{results: search.results, total_results: thread_ids.length}
end
def activity_since(from_time=nil) def activity_since(from_time=nil)
if from_time if from_time
activities.where(:created_at => {:$gte => from_time}) activities.where(:created_at => {:$gte => from_time})
...@@ -268,9 +167,4 @@ private ...@@ -268,9 +167,4 @@ private
subscriptions.delete_all subscriptions.delete_all
end end
# Instrument the class-level perform_search with New Relic method tracing;
# including MethodTracer into the singleton class makes add_method_tracer
# apply to class methods.
class << self
include ::NewRelic::Agent::MethodTracer
add_method_tracer :perform_search, 'Custom/perform_search'
end
end end
require_relative 'thread_list'
# Presents Tire search results as thread hashes, preserving the search result
# order and decorating each hash with highlighted title/body snippets.
class ThreadSearchResultsPresenter < ThreadListPresenter
# Capture the inherited to_hash before we override it below; equivalent to
# calling super, but kept as an alias per the original design.
alias :super_to_hash :to_hash
# search_results: Tire result items (must respond to #id and #highlight).
# user / course_id are passed through to ThreadListPresenter.
def initialize(search_results, user, course_id)
@search_result_map = Hash[search_results.map { |t| [t.id, t] }]
threads = CommentThread.where(id: {"$in" => @search_result_map.keys}).to_a
# reorder fetched threads to match the original result order
threads = Hash[threads.map { |t| [t._id.to_s, t] }].values_at *search_results.map { |t| t.id }
super(threads, user, course_id)
end
# Returns the superclass's array of thread hashes with highlighted_body /
# highlighted_title added; falls back to the plain body/title when
# Elasticsearch produced no highlight for that field.
def to_hash
super_to_hash.each do |thread_hash|
thread_key = thread_hash['id'].to_s
highlight = @search_result_map[thread_key].highlight || {}
# NOTE(review): assumes Tire highlight hashes respond to symbol keys — confirm.
thread_hash["highlighted_body"] = (highlight[:body] || []).first || thread_hash["body"]
thread_hash["highlighted_title"] = (highlight[:title] || []).first || thread_hash["title"]
end
end
end
...@@ -21,7 +21,7 @@ describe "app" do ...@@ -21,7 +21,7 @@ describe "app" do
get "/api/v1/search/threads", text: random_string get "/api/v1/search/threads", text: random_string
last_response.should be_ok last_response.should be_ok
threads = parse(last_response.body)['collection'] threads = parse(last_response.body)['collection']
check_thread_result_json(nil, thread, threads.select{|t| t["id"] == thread.id.to_s}.first, true) check_thread_result_json(nil, thread, threads.select{|t| t["id"] == thread.id.to_s}.first)
end end
end end
...@@ -50,7 +50,7 @@ describe "app" do ...@@ -50,7 +50,7 @@ describe "app" do
get "/api/v1/search/threads", text: random_string get "/api/v1/search/threads", text: random_string
last_response.should be_ok last_response.should be_ok
threads = parse(last_response.body)['collection'] threads = parse(last_response.body)['collection']
check_thread_result_json(nil, thread, threads.select{|t| t["id"] == thread.id.to_s}.first, true) check_thread_result_json(nil, thread, threads.select{|t| t["id"] == thread.id.to_s}.first)
end end
end end
end end
......
...@@ -8,7 +8,228 @@ describe "app" do ...@@ -8,7 +8,228 @@ describe "app" do
let(:author) { create_test_user(42) } let(:author) { create_test_user(42) }
let(:course_id) { "test/course/id" }
# Extract the id of every thread in a parsed search response payload.
def get_result_ids(result)
  result["collection"].map { |thread| thread["id"] }
end
describe "GET /api/v1/search/threads" do describe "GET /api/v1/search/threads" do
# Expect a 200 whose body parses to an empty JSON object — the API's
# response to missing or invalid search parameters.
def assert_empty_response
last_response.should be_ok
result = parse(last_response.body)
result.should == {}
end
# Missing text or invalid sort params must yield an empty JSON object, not an
# error. (Fixed "reuslt" -> "result" typo in the example descriptions.)
it "returns an empty result if text parameter is missing" do
get "/api/v1/search/threads", course_id: course_id
assert_empty_response
end
it "returns an empty result if sort key is invalid" do
get "/api/v1/search/threads", course_id: course_id, text: "foobar", sort_key: "invalid", sort_order: "desc"
assert_empty_response
end
it "returns an empty result if sort order is invalid" do
get "/api/v1/search/threads", course_id: course_id, text: "foobar", sort_key: "date", sort_order: "invalid"
assert_empty_response
end
# Builds 30 threads spread across 2 courses (i % 2), 3 commentables (i % 3),
# and group ids 1-4 (every 5th thread is ungrouped), then verifies each
# search filter in isolation and in combination.
describe "filtering works" do
let!(:threads) do
threads = (0..29).map do |i|
thread = make_thread(author, "text", course_id + (i % 2).to_s, "commentable" + (i % 3).to_s)
if i % 5 != 0
thread.group_id = i % 5
thread.save!
end
thread
end
refresh_es_index
threads
end
# Assert the response contains exactly the threads at the given indexes
# (order-insensitive — filtering tests don't care about sort order).
def assert_response_contains(expected_thread_indexes)
last_response.should be_ok
result = parse(last_response.body)
actual_ids = Set.new get_result_ids(result)
expected_ids = Set.new expected_thread_indexes.map {|i| threads[i].id.to_s}
actual_ids.should == expected_ids
end
it "by course_id" do
get "/api/v1/search/threads", text: "text", course_id: "test/course/id0"
assert_response_contains((0..29).find_all {|i| i % 2 == 0})
end
it "by commentable_id" do
get "/api/v1/search/threads", text: "text", commentable_id: "commentable0"
assert_response_contains((0..29).find_all {|i| i % 3 == 0})
end
it "by commentable_ids" do
get "/api/v1/search/threads", text: "text", commentable_ids: "commentable0,commentable1"
assert_response_contains((0..29).find_all {|i| i % 3 == 0 || i % 3 == 1})
end
it "by group_id" do
# Ungrouped threads (i % 5 == 0) are always visible; group 1 adds i % 5 == 1.
get "/api/v1/search/threads", text: "text", group_id: "1"
assert_response_contains((0..29).find_all {|i| i % 5 == 0 || i % 5 == 1})
end
it "by all filters combined" do
get "/api/v1/search/threads", text: "text", course_id: "test/course/id0", commentable_id: "commentable0", group_id: "1"
assert_response_contains([0, 6])
end
end
# Creates 6 threads, upvotes threads 1 and 2, sets comment_count on 1 and 3,
# and re-saves 4 last, producing distinct orderings for each sort key. Each
# sort is verified in both directions (desc must be the exact reverse of asc).
describe "sorting works" do
let!(:threads) do
threads = (0..5).map {|i| make_thread(author, "text", course_id, "dummy")}
[1, 2].map {|i| author.vote(threads[i], :up)}
[1, 3].map do |i|
threads[i].comment_count = 5
threads[i].save!
end
threads[4].save!
refresh_es_index
threads
end
# Fetch with the given sort params and assert the exact ordering of ids.
def check_sort(sort_key, sort_order, expected_thread_indexes)
get "/api/v1/search/threads", text: "text", course_id: course_id, sort_key: sort_key, sort_order: sort_order
last_response.should be_ok
result = parse(last_response.body)
actual_ids = get_result_ids(result)
expected_ids = expected_thread_indexes.map {|i| threads[i].id.to_s}
actual_ids.should == expected_ids
end
it "by date" do
asc_order = [0, 1, 2, 3, 4, 5]
check_sort("date", "asc", asc_order)
check_sort("date", "desc", asc_order.reverse)
end
it "by activity" do
asc_order = [0, 2, 5, 1, 3, 4]
check_sort("activity", "asc", asc_order)
check_sort("activity", "desc", asc_order.reverse)
end
it "by votes" do
# Ties are broken by created_at desc, so desc is NOT simply asc reversed.
check_sort("votes", "asc", [5, 4, 3, 0, 2, 1])
check_sort("votes", "desc", [2, 1, 5, 4, 3, 0])
end
it "by comments" do
check_sort("comments", "asc", [5, 4, 2, 0, 3, 1])
check_sort("comments", "desc", [3, 1, 5, 4, 2, 0])
end
it "by default" do
# No sort params defaults to date descending.
check_sort(nil, nil, [5, 4, 3, 2, 1, 0])
end
end
# Creates 50 threads and verifies that walking every page (including one past
# the end) yields each thread exactly once, in newest-first order, for
# several page sizes.
describe "pagination" do
let!(:threads) do
threads = (1..50).map {|i| make_thread(author, "text", course_id, "dummy")}
refresh_es_index
threads
end
def check_pagination(per_page, num_pages)
result_ids = []
(1..(num_pages + 1)).each do |i| # Go past the end to make sure non-existent pages are empty
get "/api/v1/search/threads", text: "text", page: i, per_page: per_page
last_response.should be_ok
result = parse(last_response.body)
result_ids += get_result_ids(result)
end
# Concatenated pages must equal the full set, newest first, no dupes/gaps.
result_ids.should == threads.reverse.map {|t| t.id.to_s}
end
it "works correctly with page size 1" do
check_pagination(1, 50)
end
it "works correctly with page size 30" do
check_pagination(30, 2)
end
it "works correctly with default page size" do
# 50 threads at the default page size should span 3 pages.
check_pagination(nil, 3)
end
end
# Exercises the spelling-correction path: a misspelled query should be
# re-run with Elasticsearch's top suggestion, and corrected_text reported;
# suggestions that match no results must be suppressed.
describe "spelling correction" do
let(:commentable_id) {"test_commentable"}
# Search for original_text and assert the API reports corrected_text (nil
# means no correction) while still returning at least one result.
def check_correction(original_text, corrected_text)
get "/api/v1/search/threads", text: original_text
last_response.should be_ok
result = parse(last_response.body)
result["corrected_text"].should == corrected_text
result["collection"].first.should_not be_nil
end
before(:each) do
# One thread and one comment with deliberately distinct vocabularies so
# tests can target words that exist only in one document type.
thread = make_thread(author, "a thread about green artichokes", course_id, commentable_id)
make_comment(author, thread, "a comment about greed pineapples")
refresh_es_index
end
it "can correct a word appearing only in a comment" do
check_correction("pinapples", "pineapples")
end
it "can correct a word appearing only in a thread" do
check_correction("arichokes", "artichokes")
end
it "can correct a word appearing in both a comment and a thread" do
check_correction("abot", "about")
end
it "can correct a word with multiple errors" do
check_correction("artcokes", "artichokes")
end
it "can correct misspellings in different terms in the same search" do
check_correction("comment abot pinapples", "comment about pineapples")
end
it "does not correct a word that appears in a thread but has a correction and no matches in comments" do
# "green" is valid and matches the thread, so no correction should occur.
check_correction("green", nil)
end
it "does not correct a word that appears in a comment but has a correction and no matches in threads" do
check_correction("greed", nil)
end
it "does not return a suggestion with no results" do
# Add documents containing a word that is close to our search term
# but that do not match our filter criteria; because we currently only
# consider the top suggestion returned by Elasticsearch without regard
# to the filter, and that suggestion in this case does not match any
# results, we should get back no results and no correction.
10.times do
thread = make_thread(author, "abbot", "other_course_id", "other_commentable_id")
thread.group_id = 1
thread.save!
end
refresh_es_index
get "/api/v1/search/threads", text: "abot", course_id: course_id
last_response.should be_ok
result = parse(last_response.body)
result["corrected_text"].should be_nil
result["collection"].should be_empty
end
end
it "returns the correct values for total_results and num_pages" do it "returns the correct values for total_results and num_pages" do
course_id = "test/course/id" course_id = "test/course/id"
for i in 1..100 do for i in 1..100 do
...@@ -55,7 +276,7 @@ describe "app" do ...@@ -55,7 +276,7 @@ describe "app" do
last_response.should be_ok last_response.should be_ok
result = parse(last_response.body)["collection"] result = parse(last_response.body)["collection"]
result.length.should == 1 result.length.should == 1
check_thread_result_json(nil, thread, result.first, true) check_thread_result_json(nil, thread, result.first)
end end
include_examples "unicode data" include_examples "unicode data"
......
require 'spec_helper'
# Verifies ThreadSearchResultsPresenter#to_hash: output order must match the
# search result order (not Mongo's fetch order), and highlight fields must
# fall back to the plain body/title when Elasticsearch returns no highlight.
describe ThreadSearchResultsPresenter do
context "#to_hash" do
before(:each) { setup_10_threads }
# NOTE: thorough coverage of search result hash structure is presently provided in spec/presenters/thread_spec
def check_search_result_hash(search_result, hash)
hash["highlighted_body"].should == ((search_result.highlight[:body] || []).first || hash["body"])
hash["highlighted_title"].should == ((search_result.highlight[:title] || []).first || hash["title"])
end
# Assert hashes appear in the same order as search_results and that each
# hash carries the correct highlight (or fallback) values.
def check_search_results_hash(search_results, hashes)
expected_order = search_results.map {|t| t.id}
actual_order = hashes.map {|h| h["id"].to_s}
actual_order.should == expected_order
hashes.each_with_index { |hash, i| check_search_result_hash(search_results[i], hash) }
end
it "presents search results in correct order" do
# Shuffle so a correct result can't come from incidental insertion order.
threads_random_order = @threads.values.shuffle
mock_results = threads_random_order.map do |t|
double(Tire::Results::Item, :id => t._id.to_s, :highlight => {:body => ["foo"], :title => ["bar"]})
end
pres = ThreadSearchResultsPresenter.new(mock_results, nil, DFLT_COURSE_ID)
check_search_results_hash(mock_results, pres.to_hash)
end
it "presents search results with correct default highlights" do
threads_random_order = @threads.values.shuffle
# Empty highlight hashes force the body/title fallback path.
mock_results = threads_random_order.map do |t|
double(Tire::Results::Item, :id => t._id.to_s, :highlight => {})
end
pres = ThreadSearchResultsPresenter.new(mock_results, nil, DFLT_COURSE_ID)
check_search_results_hash(mock_results, pres.to_hash)
end
end
end
...@@ -201,14 +201,11 @@ end ...@@ -201,14 +201,11 @@ end
# this method is used to test results produced using the helper function handle_threads_query # this method is used to test results produced using the helper function handle_threads_query
# which is used in multiple areas of the API # which is used in multiple areas of the API
def check_thread_result(user, thread, hash, is_search=false, is_json=false) def check_thread_result(user, thread, hash, is_json=false)
expected_keys = %w(id title body course_id commentable_id created_at updated_at) expected_keys = %w(id title body course_id commentable_id created_at updated_at)
expected_keys += %w(anonymous anonymous_to_peers at_position_list closed user_id) expected_keys += %w(anonymous anonymous_to_peers at_position_list closed user_id)
expected_keys += %w(username votes abuse_flaggers tags type group_id pinned) expected_keys += %w(username votes abuse_flaggers tags type group_id pinned)
expected_keys += %w(comments_count unread_comments_count read endorsed) expected_keys += %w(comments_count unread_comments_count read endorsed)
if is_search
expected_keys += %w(highlighted_body highlighted_title)
end
# these keys are checked separately, when desired, using check_thread_response_paging. # these keys are checked separately, when desired, using check_thread_response_paging.
actual_keys = hash.keys - ["children", "resp_skip", "resp_limit", "resp_total"] actual_keys = hash.keys - ["children", "resp_skip", "resp_limit", "resp_total"]
actual_keys.sort.should == expected_keys.sort actual_keys.sort.should == expected_keys.sort
...@@ -267,8 +264,8 @@ def check_thread_result(user, thread, hash, is_search=false, is_json=false) ...@@ -267,8 +264,8 @@ def check_thread_result(user, thread, hash, is_search=false, is_json=false)
end end
end end
def check_thread_result_json(user, thread, json_response, is_search=false) def check_thread_result_json(user, thread, json_response)
check_thread_result(user, thread, json_response, is_search, true) check_thread_result(user, thread, json_response, true)
end end
def check_thread_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false) def check_thread_response_paging(thread, hash, resp_skip=0, resp_limit=nil, is_json=false)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment