FEATURE: implement capping of new/unread
We cap new and unread at 2/5ths of SiteSetting.max_tracked_new_unread.

This dynamic capping is applied under two conditions:

1. New capping is applied once every 15 minutes in the periodical job; this effectively ensures that even super-active sites are usually capped at 200 new items.
2. Unread capping is applied when a user hits max_tracked_new_unread; if new + unread == 500, we defer a job that runs within 15 minutes and caps the user at 200 unread.

This logic ensures that, in the worst case, a user sees "bad" numbers for 15 minutes before the system fixes itself up.
This commit is contained in:
parent
35998e1b74
commit
335be272ff
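To make the numbers in the commit message concrete, here is a minimal sketch of the capping arithmetic. It is not part of the diff; it assumes the 500/200 figures quoted above come from a max_tracked_new_unread of 500, and the values are illustrative.

    # Sketch of the 2/5ths capping rule, assuming max_tracked_new_unread = 500.
    max_tracked = 500
    cap = (max_tracked * (2 / 5.0)).to_i   # => 200

    # Path 1 (new): every 15 minutes the periodical job recomputes
    # SiteSetting.min_new_topics_time so that at most ~200 topics can count as new.
    # Path 2 (unread): when a user's tracked report reaches max_tracked, the user
    # is queued via TopicUser.cap_unread_later and a later run of
    # TopicUser.cap_unread_backlog! caps their unread at ~200.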
@@ -85,10 +85,6 @@ const controllerOpts = {
    return this.get('model.filter') === 'new' && this.get('model.topics.length') > 0;
  }.property('model.filter', 'model.topics.length'),

  tooManyTracked: function(){
    return this.topicTrackingState.tooManyTracked();
  }.property(),

  showDismissAtTop: function() {
    return (this.isFilterPage(this.get('model.filter'), 'new') ||
            this.isFilterPage(this.get('model.filter'), 'unread')) &&
@@ -236,7 +236,6 @@ const TopicTrackingState = Discourse.Model.extend({
  },

  countNew(category_id) {
    if (this.tooManyTracked()) { return(0); }
    return _.chain(this.states)
      .where(isNew)
      .where(topic => topic.category_id === category_id || !category_id)
@@ -244,10 +243,6 @@ const TopicTrackingState = Discourse.Model.extend({
      .length;
  },

  tooManyTracked() {
    return this.initialStatesLength >= Discourse.SiteSettings.max_tracked_new_unread;
  },

  resetNew() {
    Object.keys(this.states).forEach(id => {
      if (this.states[id].last_read_post_number === null) {
@@ -257,7 +252,6 @@ const TopicTrackingState = Discourse.Model.extend({
  },

  countUnread(category_id) {
    if (this.tooManyTracked()) { return(0); }
    return _.chain(this.states)
      .where(isUnread)
      .where(topic => topic.category_id === category_id || !category_id)
@@ -266,7 +260,6 @@ const TopicTrackingState = Discourse.Model.extend({
  },

  countCategory(category_id) {
    if (this.tooManyTracked()) { return(0); }
    let sum = 0;
    _.each(this.states, function(topic){
      if (topic.category_id === category_id) {
@@ -1,7 +1,3 @@
{{#if tooManyTracked}}
  <div class="alert alert-info">{{{i18n 'topics.too_many_tracked'}}}</div>
{{/if}}

{{#if redirectedReason}}
  <div class="alert alert-info">{{redirectedReason}}</div>
{{/if}}
@@ -309,7 +309,11 @@ class ApplicationController < ActionController::Base

  def preload_current_user_data
    store_preloaded("currentUser", MultiJson.dump(CurrentUserSerializer.new(current_user, scope: guardian, root: false)))
    serializer = ActiveModel::ArraySerializer.new(TopicTrackingState.report(current_user.id), each_serializer: TopicTrackingStateSerializer)
    report = TopicTrackingState.report(current_user.id)
    if report.length >= SiteSetting.max_tracked_new_unread.to_i
      TopicUser.cap_unread_later(current_user.id)
    end
    serializer = ActiveModel::ArraySerializer.new(report, each_serializer: TopicTrackingStateSerializer)
    store_preloaded("topicTrackingStates", MultiJson.dump(serializer))
  end
@@ -32,6 +32,16 @@ module Jobs
        user_id = hash[:profile].user_id
        Discourse.handle_job_exception(hash[:ex], error_context(args, "Rebaking user id #{user_id}", user_id: user_id))
      end

      TopicUser.cap_unread_backlog!

      offset = (SiteSetting.max_tracked_new_unread * (2/5.0)).to_i
      last_new_topic = Topic.order('created_at desc').offset(offset).select(:created_at).first
      if last_new_topic
        SiteSetting.min_new_topics_time = last_new_topic.created_at.to_i
      end

      nil
    end

  end
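A hedged walk-through of the offset arithmetic in the job hunk above, again assuming max_tracked_new_unread = 500; the numbers are illustrative, not taken from the diff.

    # With max_tracked_new_unread = 500, the offset is 200, so the created_at of
    # the 201st most recently created topic becomes the floor stored in
    # SiteSetting.min_new_topics_time. Topics older than that floor can never be
    # counted as "new" for any user, which caps "new" at roughly 200 site-wide.
    offset = (500 * (2 / 5.0)).to_i   # => 200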
@@ -106,11 +106,12 @@ class TopicTrackingState
           WHEN COALESCE(u.new_topic_duration_minutes, :default_duration) = :always THEN u.created_at
           WHEN COALESCE(u.new_topic_duration_minutes, :default_duration) = :last_visit THEN COALESCE(u.previous_visit_at,u.created_at)
           ELSE (:now::timestamp - INTERVAL '1 MINUTE' * COALESCE(u.new_topic_duration_minutes, :default_duration))
        END, us.new_since)",
        END, us.new_since, :min_date)",
      now: DateTime.now,
      last_visit: User::NewTopicDuration::LAST_VISIT,
      always: User::NewTopicDuration::ALWAYS,
      default_duration: SiteSetting.default_other_new_topic_duration_minutes
      default_duration: SiteSetting.default_other_new_topic_duration_minutes,
      min_date: Time.at(SiteSetting.min_new_topics_time).to_datetime
    ).where_values[0]
  end
@@ -125,19 +126,49 @@ class TopicTrackingState
    # This code needs to be VERY efficient as it is triggered via the message bus and may steal
    # cycles from usual requests
    #

    unread = TopicQuery.unread_filter(Topic).where_values.join(" AND ")
    new = TopicQuery.new_filter(Topic, "xxx").where_values.join(" AND ").gsub!("'xxx'", treat_as_new_topic_clause)
    #
    sql = report_raw_sql(topic_id: topic_id)

    sql = <<SQL
    WITH x AS (
      SELECT u.id AS user_id,
      #{sql}
    ) SELECT * FROM x LIMIT #{SiteSetting.max_tracked_new_unread.to_i}
SQL

    SqlBuilder.new(sql)
      .map_exec(TopicTrackingState, user_id: user_id, topic_id: topic_id)

  end


  def self.report_raw_sql(opts=nil)

    unread =
      if opts && opts[:skip_unread]
        "1=0"
      else
        TopicQuery.unread_filter(Topic).where_values.join(" AND ")
      end

    new =
      if opts && opts[:skip_new]
        "1=0"
      else
        TopicQuery.new_filter(Topic, "xxx").where_values.join(" AND ").gsub!("'xxx'", treat_as_new_topic_clause)
      end

    select = (opts && opts[:select]) || "
      u.id AS user_id,
      topics.id AS topic_id,
      topics.created_at,
      highest_post_number,
      last_read_post_number,
      c.id AS category_id,
      tu.notification_level
      tu.notification_level"


    sql = <<SQL
    SELECT #{select}
    FROM topics
    JOIN users u on u.id = :user_id
    JOIN user_stats AS us ON us.user_id = u.id
@@ -162,15 +193,11 @@ class TopicTrackingState

SQL

    if topic_id
    if opts && opts[:topic_id]
      sql << " AND topics.id = :topic_id"
    end

    sql << " ORDER BY topics.bumped_at DESC ) SELECT * FROM x LIMIT #{SiteSetting.max_tracked_new_unread.to_i}"

    SqlBuilder.new(sql)
      .map_exec(TopicTrackingState, user_id: user_id, topic_id: topic_id)

    sql << " ORDER BY topics.bumped_at DESC"
  end

  end
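The refactor above turns the report query into a parameterized builder. A hedged sketch of the two call shapes that appear in this commit (any other combinations are untested assumptions):

    # Full report for a user — what TopicTrackingState.report wraps in its CTE:
    TopicTrackingState.report_raw_sql(topic_id: topic_id)

    # Only the unread side, selecting just topic ids — the shape that
    # TopicUser.cap_unread! embeds in its UPDATE in the next hunk:
    TopicTrackingState.report_raw_sql(skip_new: true, select: "topics.id")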
@@ -284,6 +284,43 @@ SQL
    builder.exec(action_type_id: PostActionType.types[action_type])
  end

  # cap number of unread topics at count, bumping up highest_seen / last_read if needed
  def self.cap_unread!(user_id, count)
    sql = <<SQL
    UPDATE topic_users tu
    SET last_read_post_number = max_number,
        highest_seen_post_number = max_number
    FROM (
      SELECT MAX(post_number) max_number, p.topic_id FROM posts p
      WHERE deleted_at IS NULL
      GROUP BY p.topic_id
    ) m
    WHERE tu.user_id = :user_id AND
          m.topic_id = tu.topic_id AND
          tu.topic_id IN (
            #{TopicTrackingState.report_raw_sql(skip_new: true, select: "topics.id")}
            offset :count
          )
SQL

    TopicUser.exec_sql(sql, user_id: user_id, count: count)
  end

  def self.unread_cap_key
    "unread_cap_user".freeze
  end

  def self.cap_unread_later(user_id)
    $redis.hset TopicUser.unread_cap_key, user_id, ""
  end

  def self.cap_unread_backlog!
    $redis.hkeys(unread_cap_key).map(&:to_i).each do |user_id|
      cap_unread!(user_id, (SiteSetting.max_tracked_new_unread * (2/5.0)).to_i)
      $redis.hdel unread_cap_key, user_id
    end
  end

  def self.ensure_consistency!(topic_id=nil)
    update_post_action_cache(topic_id: topic_id)
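Putting the three new TopicUser methods together, a hedged sketch of the defer-then-process flow (the user id is illustrative):

    # When a user's preloaded report hits the max (see the ApplicationController
    # hunk above), they are only marked for later processing — no heavy work
    # happens in-request:
    TopicUser.cap_unread_later(42)   # hset "unread_cap_user" 42 ""

    # The periodical job then drains the backlog within roughly 15 minutes:
    TopicUser.cap_unread_backlog!    # runs cap_unread!(42, (max_tracked_new_unread * 2/5.0).to_i)
                                     # and hdel's the redis entry for that user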
@@ -588,14 +588,16 @@ class User < ActiveRecord::Base

  def treat_as_new_topic_start_date
    duration = new_topic_duration_minutes || SiteSetting.default_other_new_topic_duration_minutes.to_i
    [case duration
    times = [case duration
    when User::NewTopicDuration::ALWAYS
      created_at
    when User::NewTopicDuration::LAST_VISIT
      previous_visit_at || user_stat.new_since
    else
      duration.minutes.ago
    end, user_stat.new_since].max
    end, user_stat.new_since, Time.at(SiteSetting.min_new_topics_time).to_datetime]

    times.max
  end

  def readable_name
@@ -962,7 +962,6 @@ en:
      current_user: 'go to your user page'

    topics:
      too_many_tracked: "Warning: you have too many tracked new and unread topics, clear some using <a href='/new'>Dismiss New</a> or <a href='/unread'>Dismiss Posts</a>"
      bulk:
        reset_read: "Reset Read"
        delete: "Delete Topics"
@@ -910,6 +910,12 @@ uncategorized:
    default: false
    hidden: true

  # Nothing past this threshold is ever considered new
  # this is calculated dynamically every 15 minutes
  min_new_topics_time:
    default: 0
    hidden: true

  # Category IDs
  lounge_category_id:
    default: -1
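The comments on min_new_topics_time state what the threshold means; for context, a hedged sketch of where the other hunks in this commit consume it (names are taken from those hunks, the shape is a reading of them rather than verbatim code):

    # The epoch stored in min_new_topics_time becomes a hard floor on "new":
    floor = Time.at(SiteSetting.min_new_topics_time).to_datetime

    # In User#treat_as_new_topic_start_date the floor joins the existing max():
    #   [duration_based_date, user_stat.new_since, floor].max
    # and in TopicTrackingState#treat_as_new_topic_clause it is bound as :min_date.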
@@ -39,6 +39,50 @@ describe TopicTrackingState do
    expect(report.length).to eq(1)
  end


  it "correctly handles capping" do
    $redis.del TopicUser.unread_cap_key

    user = Fabricate(:user)

    post1 = create_post
    Fabricate(:post, topic: post1.topic)

    post2 = create_post
    Fabricate(:post, topic: post2.topic)

    post3 = create_post
    Fabricate(:post, topic: post3.topic)

    tracking = {
      notification_level: TopicUser.notification_levels[:tracking],
      last_read_post_number: 1,
      highest_seen_post_number: 1
    }

    TopicUser.change(user.id, post1.topic_id, tracking)
    TopicUser.change(user.id, post2.topic_id, tracking)
    TopicUser.change(user.id, post3.topic_id, tracking)

    report = TopicTrackingState.report(user.id)
    expect(report.length).to eq(3)

    SiteSetting.max_tracked_new_unread = 5
    # business logic, we allow for 2/5th new .. 2/5th unread ... 1/5th buffer

    TopicUser.cap_unread_backlog!

    report = TopicTrackingState.report(user.id)
    expect(report.length).to eq(3)

    TopicUser.cap_unread_later(user.id)
    TopicUser.cap_unread_backlog!

    report = TopicTrackingState.report(user.id)
    expect(report.length).to eq(2)

  end

  it "correctly gets the tracking state" do
    report = TopicTrackingState.report(user.id)
    expect(report.length).to eq(0)
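The expectations in the capping spec follow directly from the 2/5ths rule; a quick check of the arithmetic behind the final assertion:

    SiteSetting.max_tracked_new_unread = 5
    cap = (5 * (2 / 5.0)).to_i   # => 2

    # Nothing changes until the user is queued, so the first report still holds 3
    # topics; after cap_unread_later + cap_unread_backlog! the user is capped at
    # 2 unread topics, hence expect(report.length).to eq(2).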