https://github.com/discourse/discourse.git
Merge branch 'fix_whisper'
@@ -24,7 +24,7 @@ class Post < ActiveRecord::Base
   rate_limit :limit_posts_per_day
 
   belongs_to :user
-  belongs_to :topic, counter_cache: :posts_count
+  belongs_to :topic
 
   belongs_to :reply_to_user, class_name: "User"
 
@@ -435,8 +435,10 @@ SQL
                 post_action_type: post_action_type_key)
     end
 
-    topic_count = Post.where(topic_id: topic_id).sum(column)
-    Topic.where(id: topic_id).update_all ["#{column} = ?", topic_count]
+    if column == "like_count"
+      topic_count = Post.where(topic_id: topic_id).sum(column)
+      Topic.where(id: topic_id).update_all ["#{column} = ?", topic_count]
+    end
 
     if PostActionType.notify_flag_type_ids.include?(post_action_type_id)
       PostAction.update_flagged_posts_count
@@ -466,23 +466,103 @@ class Topic < ActiveRecord::Base
   end
 
   # Atomically creates the next post number
-  def self.next_post_number(topic_id, reply = false)
+  def self.next_post_number(topic_id, reply = false, whisper = false)
     highest = exec_sql("select coalesce(max(post_number),0) as max from posts where topic_id = ?", topic_id).first['max'].to_i
 
-    reply_sql = reply ? ", reply_count = reply_count + 1" : ""
-    result = exec_sql("UPDATE topics SET highest_post_number = ? + 1#{reply_sql}
-                       WHERE id = ? RETURNING highest_post_number", highest, topic_id)
-    result.first['highest_post_number'].to_i
+    if whisper
+
+      result = exec_sql("UPDATE topics
+                          SET highest_staff_post_number = ? + 1
+                          WHERE id = ?
+                          RETURNING highest_staff_post_number", highest, topic_id)
+
+      result.first['highest_staff_post_number'].to_i
+
+    else
+
+      reply_sql = reply ? ", reply_count = reply_count + 1" : ""
+
+      result = exec_sql("UPDATE topics
+                          SET highest_staff_post_number = :highest + 1,
+                              highest_post_number = :highest + 1#{reply_sql},
+                              posts_count = posts_count + 1
+                          WHERE id = :topic_id
+                          RETURNING highest_post_number", highest: highest, topic_id: topic_id)
+
+      result.first['highest_post_number'].to_i
+
+    end
   end
 
+
+  def self.reset_all_highest!
+    exec_sql <<SQL
+    WITH
+    X as (
+      SELECT topic_id,
+             COALESCE(MAX(post_number), 0) highest_post_number
+      FROM posts
+      WHERE deleted_at IS NULL
+      GROUP BY topic_id
+    ),
+    Y as (
+      SELECT topic_id,
+             coalesce(MAX(post_number), 0) highest_post_number,
+             count(*) posts_count,
+             max(created_at) last_posted_at
+      FROM posts
+      WHERE deleted_at IS NULL AND post_type <> 4
+      GROUP BY topic_id
+    )
+    UPDATE topics
+    SET
+      highest_staff_post_number = X.highest_post_number,
+      highest_post_number = Y.highest_post_number,
+      last_posted_at = Y.last_posted_at,
+      posts_count = Y.posts_count
+    FROM X, Y
+    WHERE
+      X.topic_id = topics.id AND
+      Y.topic_id = topics.id AND (
+        topics.highest_staff_post_number <> X.highest_post_number OR
+        topics.highest_post_number <> Y.highest_post_number OR
+        topics.last_posted_at <> Y.last_posted_at OR
+        topics.posts_count <> Y.posts_count
+      )
+SQL
+  end
+
+
   # If a post is deleted we have to update our highest post counters
   def self.reset_highest(topic_id)
     result = exec_sql "UPDATE topics
-                        SET highest_post_number = (SELECT COALESCE(MAX(post_number), 0) FROM posts WHERE topic_id = :topic_id AND deleted_at IS NULL),
-                            posts_count = (SELECT count(*) FROM posts WHERE deleted_at IS NULL AND topic_id = :topic_id),
-                            last_posted_at = (SELECT MAX(created_at) FROM POSTS WHERE topic_id = :topic_id AND deleted_at IS NULL)
+                        SET
+                        highest_staff_post_number = (
+                          SELECT COALESCE(MAX(post_number), 0) FROM posts
+                          WHERE topic_id = :topic_id AND
+                                deleted_at IS NULL
+                        ),
+                        highest_post_number = (
+                          SELECT COALESCE(MAX(post_number), 0) FROM posts
+                          WHERE topic_id = :topic_id AND
+                                deleted_at IS NULL AND
+                                post_type <> 4
+                        ),
+                        posts_count = (
+                          SELECT count(*) FROM posts
+                          WHERE deleted_at IS NULL AND
+                                topic_id = :topic_id AND
+                                post_type <> 4
+                        ),
+
+                        last_posted_at = (
+                          SELECT MAX(created_at) FROM posts
+                          WHERE topic_id = :topic_id AND
+                                deleted_at IS NULL AND
+                                post_type <> 4
+                        )
                         WHERE id = :topic_id
                         RETURNING highest_post_number", topic_id: topic_id
 
     highest_post_number = result.first['highest_post_number'].to_i
 
     # Update the forum topic user records
@@ -724,10 +804,7 @@ class Topic < ActiveRecord::Base
   end
 
   def update_action_counts
-    PostActionType.types.each_key do |type|
-      count_field = "#{type}_count"
-      update_column(count_field, Post.where(topic_id: id).sum(count_field))
-    end
+    update_column(:like_count, Post.where(topic_id: id).sum(:like_count))
   end
 
   def posters_summary(options = {})
@@ -38,7 +38,7 @@ class TopicTrackingState
     publish_read(topic.id, 1, topic.user_id)
   end
 
-  def self.publish_latest(topic)
+  def self.publish_latest(topic, staff_only=false)
     return unless topic.archetype == "regular"
 
     message = {
@@ -52,15 +52,25 @@ class TopicTrackingState
       }
     }
 
-    group_ids = topic.category && topic.category.secure_group_ids
+    group_ids =
+      if staff_only
+        [Group::AUTO_GROUPS[:staff]]
+      else
+        topic.category && topic.category.secure_group_ids
+      end
     MessageBus.publish("/latest", message.as_json, group_ids: group_ids)
   end
 
   def self.publish_unread(post)
     # TODO at high scale we are going to have to defer this,
    # perhaps cut down to users that are around in the last 7 days as well
    #
-    group_ids = post.topic.category && post.topic.category.secure_group_ids
+    group_ids =
+      if post.post_type == Post.types[:whisper]
+        [Group::AUTO_GROUPS[:staff]]
+      else
+        post.topic.category && post.topic.category.secure_group_ids
+      end
 
     TopicUser
       .tracking(post.topic_id)
@@ -148,7 +158,7 @@ class TopicTrackingState
     ).where_values[0]
   end
 
-  def self.report(user_id, topic_id = nil)
+  def self.report(user, topic_id = nil)
 
     # Sam: this is a hairy report, in particular I need custom joins and fancy conditions
     #  Dropping to sql_builder so I can make sense of it.
@@ -160,12 +170,12 @@ class TopicTrackingState
     #  cycles from usual requests
     #
     #
-    sql = report_raw_sql(topic_id: topic_id, skip_unread: true, skip_order: true)
+    sql = report_raw_sql(topic_id: topic_id, skip_unread: true, skip_order: true, staff: user.staff?)
     sql << "\nUNION ALL\n\n"
-    sql << report_raw_sql(topic_id: topic_id, skip_new: true, skip_order: true)
+    sql << report_raw_sql(topic_id: topic_id, skip_new: true, skip_order: true, staff: user.staff?)
 
     SqlBuilder.new(sql)
-      .map_exec(TopicTrackingState, user_id: user_id, topic_id: topic_id)
+      .map_exec(TopicTrackingState, user_id: user.id, topic_id: topic_id)
 
   end
 
@@ -176,7 +186,7 @@ class TopicTrackingState
       if opts && opts[:skip_unread]
         "1=0"
       else
-        TopicQuery.unread_filter(Topic).where_values.join(" AND ")
+        TopicQuery.unread_filter(Topic, staff: opts && opts[:staff]).where_values.join(" AND ")
       end
 
     new =
@@ -190,7 +200,7 @@ class TopicTrackingState
            u.id AS user_id,
            topics.id AS topic_id,
            topics.created_at,
-           highest_post_number,
+           #{opts && opts[:staff] ? "highest_staff_post_number highest_post_number" : "highest_post_number"},
            last_read_post_number,
            c.id AS category_id,
            tu.notification_level"
@@ -236,6 +236,8 @@ SQL
                   topic_users.notification_level, tu.notification_level old_level, tu.last_read_post_number
   "
 
+  UPDATE_TOPIC_USER_SQL_STAFF = UPDATE_TOPIC_USER_SQL.gsub("highest_post_number", "highest_staff_post_number")
+
   INSERT_TOPIC_USER_SQL = "INSERT INTO topic_users (user_id, topic_id, last_read_post_number, highest_seen_post_number, last_visited_at, first_visited_at, notification_level)
                            SELECT :user_id, :topic_id, :post_number, ft.highest_post_number, :now, :now, :new_status
                            FROM topics AS ft
@@ -245,6 +247,8 @@ SQL
                            FROM topic_users AS ftu
                            WHERE ftu.user_id = :user_id and ftu.topic_id = :topic_id)"
 
+  INSERT_TOPIC_USER_SQL_STAFF = INSERT_TOPIC_USER_SQL.gsub("highest_post_number", "highest_staff_post_number")
+
   def update_last_read(user, topic_id, post_number, msecs, opts={})
     return if post_number.blank?
     msecs = 0 if msecs.to_i < 0
@@ -265,7 +269,11 @@ SQL
     # ... user visited the topic but did not read the posts
     #
     # 86400000 = 1 day
-    rows = exec_sql(UPDATE_TOPIC_USER_SQL,args).values
+    rows = if user.staff?
+      exec_sql(UPDATE_TOPIC_USER_SQL_STAFF,args).values
+    else
+      exec_sql(UPDATE_TOPIC_USER_SQL,args).values
+    end
 
     if rows.length == 1
       before = rows[0][1].to_i
@@ -295,7 +303,11 @@ SQL
       user.update_posts_read!(post_number, mobile: opts[:mobile])
 
       begin
-        exec_sql(INSERT_TOPIC_USER_SQL, args)
+        if user.staff?
+          exec_sql(INSERT_TOPIC_USER_SQL_STAFF, args)
+        else
+          exec_sql(INSERT_TOPIC_USER_SQL, args)
+        end
       rescue PG::UniqueViolation
         # if record is inserted between two statements this can happen
         # we retry once to avoid failing the req