Initial release of Discourse

This commit is contained in:
Robin Ward
2013-02-05 14:16:51 -05:00
commit 21b5628528
2932 changed files with 143949 additions and 0 deletions

10
lib/admin_constraint.rb Normal file
View File

@ -0,0 +1,10 @@
require_dependency 'current_user'
class AdminConstraint
def matches?(request)
return false unless request.session[:current_user_id].present?
User.where(id: request.session[:current_user_id].to_i).where(admin: true).exists?
end
end
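
A constraint object like this is meant to be handed to Rails routing; a minimal sketch of that wiring, assuming a config/routes.rb (the application class name and the admin route are illustrative, not part of this file):

    # config/routes.rb (sketch): the block is only reachable when matches? returns true
    require 'admin_constraint'

    Discourse::Application.routes.draw do
      constraints(AdminConstraint.new) do
        get 'admin' => 'admin#index'
      end
    end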

20
lib/age_words.rb Normal file
View File

@ -0,0 +1,20 @@
module AgeWords
def self.age_words(secs)
return "—" if secs.blank?
mins = (secs / 60.0)
hours = (mins / 60.0)
days = (hours / 24.0)
months = (days / 30.0)
years = (months / 12.0)
return "#{years.floor}y" if years > 1
return "#{months.floor}mo" if months > 1
return "#{days.floor}d" if days > 1
return "#{hours.floor}h" if hours > 1
return "&lt; 1m" if mins < 1
return "#{mins.floor}m"
end
end
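
A few spot checks of the thresholds above, useful when reading the cascade of returns (inputs chosen for illustration):

    AgeWords.age_words(45)          # => "< 1m"
    AgeWords.age_words(90 * 60)     # => "1h"  (90 minutes)
    AgeWords.age_words(3 * 86_400)  # => "3d"
    AgeWords.age_words(nil)         # => "—"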

43
lib/archetype.rb Normal file
View File

@ -0,0 +1,43 @@
class Archetype
include ActiveModel::Serialization
attr_accessor :id, :options
def initialize(id, options)
@id = id
@options = options
end
def attributes
{'id' => @id,
'options' => @options}
end
def self.default
'regular'
end
def self.poll
'poll'
end
def self.private_message
'private_message'
end
def self.list
return [] unless @archetypes.present?
@archetypes.values
end
def self.register(name, options={})
@archetypes ||= {}
@archetypes[name] = Archetype.new(name, options)
end
# By default we have a regular archetype and a private message
register 'regular'
register 'private_message'
end
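
Registration is additive, so a plugin can introduce its own archetype alongside the two defaults; a sketch (the option key is invented for the example):

    Archetype.register('poll', single_vote: true)
    Archetype.list.map(&:id)   # => ["regular", "private_message", "poll"]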

0
lib/assets/.gitkeep Normal file
View File

View File

@ -0,0 +1,8 @@
<table cellspacing="0" cellpadding="0" style="border: 1px solid #eee; -webkit-border-radius: 10px;">
<tr>
<th style="text-align:left; background-color: #eee; padding: 5px">{{{avatarImg}}} {{username}} said:</th>
</tr>
<tr>
<td style="padding: 10px; background-color: #f9f9f9">{{{quote}}}</td>
</tr>
</table>

35
lib/avatar_lookup.rb Normal file
View File

@ -0,0 +1,35 @@
class AvatarLookup
def initialize(user_ids)
@user_ids = user_ids
@user_ids.flatten!
@user_ids.compact! if @user_ids.present?
@user_ids.uniq! if @user_ids.present?
@loaded = false
end
# Lookup a user by id
def [](user_id)
ensure_loaded!
@users_hashed[user_id]
end
protected
def ensure_loaded!
return if @loaded
@users_hashed = {}
# need email for hash
User.where(id: @user_ids).select([:id, :email, :username]).each do |u|
@users_hashed[u.id] = u
end
@loaded = true
end
end
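
The intended pattern is to collect the user ids for a page of posts up front and then index into the lookup, so only one query runs; a sketch assuming a collection of posts:

    lookup = AvatarLookup.new(posts.map(&:user_id))
    posts.each do |post|
      lookup[post.user_id]   # users load lazily, in a single query, on first access
    end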

64
lib/content_buffer.rb Normal file
View File

@ -0,0 +1,64 @@
# this class is used to track changes to an arbitrary buffer
class ContentBuffer
def initialize(initial_content)
@initial_content = initial_content
@lines = @initial_content.split("\n")
end
def apply_transform!(transform)
start_row = transform[:start][:row]
start_col = transform[:start][:col]
finish_row = transform[:finish][:row] if transform[:finish]
finish_col = transform[:finish][:col] if transform[:finish]
text = transform[:text]
if transform[:operation] == :delete
# fix first line
l = @lines[start_row]
l = l[0...start_col]
if (finish_row == start_row)
l << @lines[start_row][finish_col..-1]
@lines[start_row] = l
return
end
@lines[start_row] = l
# remove middle lines
(finish_row - start_row).times do
l = @lines.delete_at start_row + 1
end
# fix last line
@lines[start_row] << @lines[finish_row][finish_col-1..-1]
end
if transform[:operation] == :insert
@lines[start_row].insert(start_col, text)
split = @lines[start_row].split("\n")
if split.length > 1
@lines[start_row] = split[0]
i = 1
split[1..-2].each do |line|
@lines.insert(start_row + i, line)
i += 1
end
@lines.insert(i, "") unless @lines.length > i
@lines[i] = split[-1] + @lines[i]
end
end
end
def to_s
@lines.join("\n")
end
end
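
A small worked example of the transform format the method expects (rows and columns are zero-based):

    buffer = ContentBuffer.new("hello\nworld")
    buffer.apply_transform!(operation: :insert, start: { row: 0, col: 5 }, text: " there")
    buffer.to_s   # => "hello there\nworld"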

View File

@ -0,0 +1,93 @@
# Post processing that we can do after a post has already been cooked. For
# example, inserting the onebox content, or image sizes.
require_dependency 'oneboxer'
class CookedPostProcessor
def initialize(post, opts={})
@dirty = false
@opts = opts
@post = post
@doc = Hpricot(post.cooked)
end
def dirty?
@dirty
end
# Bake onebox content into the post
def post_process_oneboxes
args = {post_id: @post.id}
args[:invalidate_oneboxes] = true if @opts[:invalidate_oneboxes]
Oneboxer.each_onebox_link(@doc) do |url, element|
onebox = Oneboxer.onebox(url, args)
if onebox
element.swap onebox
@dirty = true
end
end
end
# First let's consider the images
def post_process_images
images = @doc.search("img")
if images.present?
# Extract the first image from the first post and use it as the 'topic image'
if @post.post_number == 1
img = images.first
@post.topic.update_column :image_url, img['src'] if img['src'].present?
end
images.each do |img|
if img['src'].present?
# If we provided some image sizes, look those up first
if @opts[:image_sizes].present?
if dim = @opts[:image_sizes][img['src']]
w, h = ImageSizer.resize(dim['width'], dim['height'])
img.set_attribute 'width', w.to_s
img.set_attribute 'height', h.to_s
@dirty = true
end
end
# If the image has no width or height, figure them out.
if img['width'].blank? or img['height'].blank?
dim = CookedPostProcessor.image_dimensions(img['src'])
if dim.present?
img.set_attribute 'width', dim[0].to_s
img.set_attribute 'height', dim[1].to_s
@dirty = true
end
end
end
end
end
end
def post_process
return unless @doc.present?
post_process_images
post_process_oneboxes
end
def html
@doc.try(:to_html)
end
# Retrieve the image dimensions for a url
def self.image_dimensions(url)
return nil unless SiteSetting.crawl_images?
uri = URI.parse(url)
return nil unless %w(http https).include?(uri.scheme)
w, h = FastImage.size(url)
ImageSizer.resize(w, h)
end
end
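
For orientation, this class is driven from Jobs::ProcessPost later in this commit; the call pattern there boils down to:

    cp = CookedPostProcessor.new(post, invalidate_oneboxes: true)
    cp.post_process
    post.update_column(:cooked, cp.html) if cp.dirty?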

32
lib/current_user.rb Normal file
View File

@ -0,0 +1,32 @@
module CurrentUser
def current_user
return @current_user if @current_user || @not_logged_in
if session[:current_user_id].blank?
# maybe we have a cookie?
auth_token = cookies[:_t]
if auth_token && auth_token.length == 32
@current_user = User.where(auth_token: auth_token).first
session[:current_user_id] = @current_user.id if @current_user
end
else
@current_user ||= User.where(id: session[:current_user_id]).first
end
if @current_user && @current_user.is_banned?
@current_user = nil
end
@not_logged_in = session[:current_user_id].blank?
if @current_user
@current_user.update_last_seen!
if @current_user.ip_address != request.remote_ip
@current_user.ip_address = request.remote_ip
User.exec_sql('update users set ip_address = ? where id = ?', request.remote_ip, @current_user.id)
end
end
@current_user
end
end

31
lib/custom_renderer.rb Normal file
View File

@ -0,0 +1,31 @@
class CustomRenderer < AbstractController::Base
include ActiveSupport::Configurable
include AbstractController::Rendering
include AbstractController::Helpers
include AbstractController::Translation
include AbstractController::AssetPaths
include Rails.application.routes.url_helpers
helper ApplicationHelper
self.view_paths = "app/views"
include CurrentUser
def action_name
""
end
def controller_name
""
end
def cookies
@parent.send(:cookies)
end
def session
@parent.send(:session)
end
def initialize(parent)
@parent = parent
end
end

49
lib/discourse.rb Normal file
View File

@ -0,0 +1,49 @@
module Discourse
# When they try to do something they should be logged in for
class NotLoggedIn < Exception; end
# When the input is somehow bad
class InvalidParameters < Exception; end
# When they don't have permission to do something
class InvalidAccess < Exception; end
# When something they want is not found
class NotFound < Exception; end
# Get the current base URL for the current site
def self.current_hostname
RailsMultisite::ConnectionManagement.current_hostname
end
def self.base_url
protocol = "http"
protocol = "https" if SiteSetting.use_ssl?
result = "#{protocol}://#{current_hostname}"
result << ":#{SiteSetting.port}" if SiteSetting.port.present?
result
end
def self.enable_maintenance_mode
$redis.set maintenance_mode_key, 1
true
end
def self.disable_maintenance_mode
$redis.del maintenance_mode_key
true
end
def self.maintenance_mode?
!!$redis.get( maintenance_mode_key )
end
private
def self.maintenance_mode_key
'maintenance_mode'
end
end
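
The maintenance flag is just a shared redis key, so toggling it is cheap and immediately visible to every process; a quick sketch (the hostname in the output is illustrative):

    Discourse.enable_maintenance_mode
    Discourse.maintenance_mode?        # => true
    Discourse.disable_maintenance_mode
    Discourse.base_url                 # e.g. "http://example.com:3000", depending on SiteSetting values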

48
lib/discourse_observer.rb Normal file
View File

@ -0,0 +1,48 @@
#
# Support delegating after_create to an appropriate helper for that class name.
# For example, an observer on post will call after_create_post if that method
# is defined.
#
# It does this after_commit by default, and contains a hack to make this work
# even in test mode.
#
class DiscourseObserver < ActiveRecord::Observer
def after_create_delegator(model)
observer_method = :"after_create_#{model.class.name.underscore}"
send(observer_method, model) if respond_to?(observer_method)
end
def after_destroy_delegator(model)
observer_method = :"after_destroy_#{model.class.name.underscore}"
send(observer_method, model) if respond_to?(observer_method)
end
end
if Rails.env.test?
# In test mode, call the delegator right away
class DiscourseObserver < ActiveRecord::Observer
alias_method :after_create, :after_create_delegator
alias_method :after_destroy, :after_destroy_delegator
end
else
# Outside of test mode, use after_commit
class DiscourseObserver < ActiveRecord::Observer
def after_commit(model)
if model.send(:transaction_include_action?, :create)
after_create_delegator(model)
end
if model.send(:transaction_include_action?, :destroy)
after_destroy_delegator(model)
end
end
end
end

View File

@ -0,0 +1,57 @@
#
# A class that handles interaction between a plugin and the Discourse App.
#
class DiscoursePluginRegistry
class << self
attr_accessor :javascripts
attr_accessor :server_side_javascripts
attr_accessor :stylesheets
end
def register_js(filename, options={})
self.class.javascripts ||= Set.new
self.class.server_side_javascripts ||= Set.new
# If we have a server side option, add that too.
self.class.server_side_javascripts << options[:server_side] if options[:server_side].present?
self.class.javascripts << filename
end
def register_css(filename)
self.class.stylesheets ||= Set.new
self.class.stylesheets << filename
end
def stylesheets
self.class.stylesheets || Set.new
end
def register_archetype(name, options={})
Archetype.register(name, options)
end
def server_side_javascripts
self.class.javascripts || Set.new
end
def javascripts
self.class.javascripts || Set.new
end
def self.clear
self.stylesheets = Set.new
self.server_side_javascripts = Set.new
self.javascripts = Set.new
end
def self.setup(plugin_class)
registry = DiscoursePluginRegistry.new
plugin = plugin_class.new(registry)
plugin.setup
end
end
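
A hypothetical plugin showing how setup hands the registry to the plugin class (the class and asset names are invented for the example):

    class MyPlugin
      def initialize(registry)
        @registry = registry
      end

      def setup
        @registry.register_js  'my_plugin.js'
        @registry.register_css 'my_plugin.css'
      end
    end

    DiscoursePluginRegistry.setup(MyPlugin)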

44
lib/discourse_redis.rb Normal file
View File

@ -0,0 +1,44 @@
#
# A wrapper around redis that namespaces keys with the current site id
#
class DiscourseRedis
def initialize
@config = YAML::load(File.open("#{Rails.root}/config/redis.yml"))[Rails.env]
redis_opts = {:host => @config['host'], :port => @config['port'], :db => @config['db']}
@redis = Redis.new(redis_opts)
end
# prefix the key with the namespace
def method_missing(meth, *args, &block)
if @redis.respond_to?(meth)
@redis.send(meth, *args, &block)
else
super
end
end
# Proxy key methods through, but prefix the keys with the namespace
%w(append blpop brpop brpoplpush decr decrby del exists expire expireat get getbit getrange getset hdel
hexists hget hgetall hincrby hincrbyfloat hkeys hlen hmget hmset hset hsetnx hvals incr incrby incrbyfloat
lindex linsert llen lpop lpush lpushx lrange lrem lset ltrim mget move mset msetnx persist pexpire pexpireat psetex
pttl rename renamenx rpop rpoplpush rpush rpushx sadd scard sdiff set setbit setex setnx setrange sinter
sismember smembers sort spop srandmember srem strlen sunion ttl type watch zadd zcard zcount zincrby
zrange zrangebyscore zrank zrem zremrangebyrank zremrangebyscore zrevrange zrevrangebyscore zrevrank zrangebyscore).each do |m|
class_eval %{
def #{m}(*args)
args[0] = "\#\{DiscourseRedis.namespace\}:\#\{args[0]\}"
@redis.#{m}(*args)
end
}
end
def self.namespace
RailsMultisite::ConnectionManagement.current_db
end
def url
"redis://#{@config['host']}:#{@config['port']}/#{@config['db']}"
end
end
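
Because the generated key methods prepend the namespace, callers use plain key names and still stay isolated per site; a sketch (the site name "default" is illustrative):

    redis = DiscourseRedis.new
    redis.set('maintenance_mode', 1)   # actually writes "default:maintenance_mode"
    redis.get('maintenance_mode')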

36
lib/distributed_hash.rb Normal file
View File

@ -0,0 +1,36 @@
# Like a hash, just does its best to stay in sync across the farm
#
# Redis backed with an allowance for a certain amount of latency
class DistributedHash
@lock = Mutex.new
def self.ensure_subscribed
@lock.synchronize do
unless @subscribed
end
@subscribed = true
end
end
def initialize(key, options={})
@key = key
end
def []=(k,v)
end
def [](k)
end
def delete(k)
end
def clear
end
end

15
lib/email.rb Normal file
View File

@ -0,0 +1,15 @@
require 'mail'
module Email
def self.is_valid?(email)
parser = Mail::RFC2822Parser.new
parser.root = :addr_spec
result = parser.parse(email)
# Don't allow a TLD by itself (e.g. sam@localhost)
# The grammar is: (local_part "@" domain) / local_part ... we need to discard the latter
result && result.respond_to?(:domain) && result.domain.dot_atom_text.elements.size > 1
end
end
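
Roughly, the extra elements.size check is what rejects bare hostnames:

    Email.is_valid?('sam@example.com')  # => true
    Email.is_valid?('sam@localhost')    # => false, single-label domain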

20
lib/email_builder.rb Normal file
View File

@ -0,0 +1,20 @@
# Help us build an email
module EmailBuilder
def build_email(to, email_key, params={})
params[:site_name] = SiteSetting.title
params[:base_url] = Discourse.base_url
params[:user_preferences_url] = "#{Discourse.base_url}/user_preferences"
body = I18n.t("#{email_key}.text_body_template", params)
# Are we appending an unsubscribe link?
if params[:add_unsubscribe_link]
body << "\n"
body << I18n.t("unsubscribe_link", params)
end
mail to: to, subject: I18n.t("#{email_key}.subject_template", params), body: body
end
end

37
lib/email_sender.rb Normal file
View File

@ -0,0 +1,37 @@
#
# A helper class to send an email. It will also handle a nil message, which it considers
# to be "do nothing". This is because some Mailers will decide not to do work for some
# reason, such as emailing a user too frequently. A nil "to" address is also considered
# "do nothing".
#
# It also adds an HTML part for the plain text body using markdown
#
class EmailSender
def initialize(message, email_type, user=nil)
@message = message
@email_type = email_type
@user = user
end
def send
return if @message.blank?
return if @message.to.blank?
return if @message.body.blank?
plain_body = @message.body.to_s
@message.html_part = Mail::Part.new do
content_type 'text/html; charset=UTF-8'
body PrettyText.cook(plain_body, environment: 'email')
end
@message.deliver
to_address = @message.to
to_address = to_address.first if to_address.is_a?(Array)
EmailLog.create!(email_type: @email_type, to_address: to_address, user_id: @user.try(:id))
end
end

35
lib/export/export.rb Normal file
View File

@ -0,0 +1,35 @@
module Export
class UnsupportedExportSource < RuntimeError; end
class FormatInvalidError < RuntimeError; end
class FilenameMissingError < RuntimeError; end
class ExportInProgressError < RuntimeError; end
def self.current_schema_version
ActiveRecord::Migrator.current_version.to_s
end
def self.models_included_in_export
@models_included_in_export ||= begin
Rails.application.eager_load! # So that all models get loaded now
ActiveRecord::Base.descendants
end
end
def self.export_running_key
'exporter_is_running'
end
def self.is_export_running?
$redis.get(export_running_key) == '1'
end
def self.set_export_started
$redis.set export_running_key, '1'
end
def self.set_export_is_not_running
$redis.del export_running_key
end
end

View File

@ -0,0 +1,75 @@
module Export
class SchemaArgumentsError < RuntimeError; end
# TODO: Use yajl-ruby for performance.
# https://github.com/brianmario/yajl-ruby
class JsonEncoder
def initialize
@table_data = {}
end
def tmp_directory
@tmp_directory ||= begin
f = File.join( Rails.root, 'tmp', Time.now.strftime('export%Y%m%d%H%M%S') )
Dir.mkdir(f) unless Dir[f].present?
f
end
end
def json_output_stream
@json_output_stream ||= File.new( File.join( tmp_directory, 'tables.json' ), 'w+b' )
end
def write_schema_info(args)
raise SchemaArgumentsError unless args[:source].present? and args[:version].present?
@schema_data = {
schema: {
source: args[:source],
version: args[:version]
}
}
end
def write_table(table_name, columns)
@table_data[table_name] ||= {}
@table_data[table_name][:fields] = columns.map(&:name)
@table_data[table_name][:rows] ||= []
row_count = 0
begin
rows = yield(row_count)
if rows
row_count += rows.size
@table_data[table_name][:rows] << rows
end
# TODO: write to multiple files as needed.
# one file per table? multiple files per table?
end while rows and rows.size > 0
@table_data[table_name][:rows].flatten!(1)
@table_data[table_name][:row_count] = @table_data[table_name][:rows].size
end
def finish
@schema_data[:schema][:table_count] = @table_data.keys.count
json_output_stream.write( @schema_data.merge(@table_data).to_json )
json_output_stream.close
@filenames = [File.join( tmp_directory, 'tables.json' )]
end
def filenames
@filenames ||= []
end
def cleanup_temp
FileUtils.rm_rf(tmp_directory) if Dir[tmp_directory].present?
end
end
end

View File

@ -0,0 +1,24 @@
class ActiveRecord::Base
# Execute SQL manually
def self.exec_sql(*args)
conn = ActiveRecord::Base.connection
sql = ActiveRecord::Base.send(:sanitize_sql_array, args)
conn.execute(sql)
end
def self.exec_sql_row_count(*args)
exec_sql(*args).cmd_tuples
end
def exec_sql(*args)
ActiveRecord::Base.exec_sql(*args)
end
# Support for psql. If we want to support multiple RDBMs in the future we can
# split this.
def exec_sql_row_count(*args)
exec_sql(*args).cmd_tuples
end
end

View File

@ -0,0 +1,69 @@
# this file can be deleted when we port to rails4
module FreedomPatches
module Rails4
def self.distance_of_time_in_words(from_time, to_time = 0, include_seconds = false, options = {})
options = {
:scope => :'datetime.distance_in_words',
}.merge!(options)
from_time = from_time.to_time if from_time.respond_to?(:to_time)
to_time = to_time.to_time if to_time.respond_to?(:to_time)
distance = (to_time.to_f - from_time.to_f).abs
distance_in_minutes = (distance / 60.0).round
distance_in_seconds = distance.round
I18n.with_options :locale => options[:locale], :scope => options[:scope] do |locale|
case distance_in_minutes
when 0..1
return distance_in_minutes == 0 ?
locale.t(:less_than_x_minutes, :count => 1) :
locale.t(:x_minutes, :count => distance_in_minutes) unless include_seconds
case distance_in_seconds
when 0..4 then locale.t :less_than_x_seconds, :count => 5
when 5..9 then locale.t :less_than_x_seconds, :count => 10
when 10..19 then locale.t :less_than_x_seconds, :count => 20
when 20..39 then locale.t :half_a_minute
when 40..59 then locale.t :less_than_x_minutes, :count => 1
else locale.t :x_minutes, :count => 1
end
when 2..44 then locale.t :x_minutes, :count => distance_in_minutes
when 45..89 then locale.t :about_x_hours, :count => 1
when 90..1439 then locale.t :about_x_hours, :count => (distance_in_minutes.to_f / 60.0).round
when 1440..2519 then locale.t :x_days, :count => 1
when 2520..43199 then locale.t :x_days, :count => (distance_in_minutes.to_f / 1440.0).round
when 43200..86399 then locale.t :about_x_months, :count => 1
when 86400..525599 then locale.t :x_months, :count => (distance_in_minutes.to_f / 43200.0).round
else
fyear = from_time.year
fyear += 1 if from_time.month >= 3
tyear = to_time.year
tyear -= 1 if to_time.month < 3
leap_years = (fyear > tyear) ? 0 : (fyear..tyear).count{|x| Date.leap?(x)}
minute_offset_for_leap_year = leap_years * 1440
# Discount the leap year days when calculating year distance.
# e.g. if there are 20 leap year days between 2 dates having the same day
# and month then the based on 365 days calculation
# the distance in years will come out to over 80 years when in written
# english it would read better as about 80 years.
minutes_with_offset = distance_in_minutes - minute_offset_for_leap_year
remainder = (minutes_with_offset % 525600)
distance_in_years = (minutes_with_offset / 525600)
if remainder < 131400
locale.t(:about_x_years, :count => distance_in_years)
elsif remainder < 394200
locale.t(:over_x_years, :count => distance_in_years)
else
locale.t(:almost_x_years, :count => distance_in_years + 1)
end
end
end
end
def self.time_ago_in_words(from_time, include_seconds = false, options = {})
distance_of_time_in_words(from_time, Time.now, include_seconds, options)
end
end
end

315
lib/guardian.rb Normal file
View File

@ -0,0 +1,315 @@
# The guardian is responsible for confirming access to various site resources and operations
class Guardian
attr_reader :user
def initialize(user=nil)
@user = user
end
def current_user
@user
end
def is_admin?
!@user.nil? && @user.admin?
end
# Can the user see the object?
def can_see?(obj)
return false if obj.blank?
see_method = :"can_see_#{obj.class.name.underscore}?"
return send(see_method, obj) if respond_to?(see_method)
return true
end
# Can the user edit the obj
def can_edit?(obj)
return false if obj.blank?
return false if @user.blank?
edit_method = :"can_edit_#{obj.class.name.underscore}?"
return send(edit_method, obj) if respond_to?(edit_method)
true
end
# Can we delete the object
def can_delete?(obj)
return false if obj.blank?
return false if @user.blank?
delete_method = :"can_delete_#{obj.class.name.underscore}?"
return send(delete_method, obj) if respond_to?(delete_method)
true
end
def can_moderate?(obj)
return false if obj.blank?
return false if @user.blank?
@user.has_trust_level?(:moderator)
end
alias :can_move_posts? :can_moderate?
alias :can_see_flags? :can_moderate?
# Can the user create a topic in the forum
def can_create?(klass, parent=nil)
return false if klass.blank?
return false if @user.blank?
# If no parent is provided, we look for a custom
# can_create_klass? method.
#
# If a parent is provided, we look for a method called
# can_create_klass_on_parent?
target = klass.name.underscore
if parent.present?
return false unless can_see?(parent)
target << "_on_#{parent.class.name.underscore}"
end
create_method = :"can_create_#{target}?"
return send(create_method, parent) if respond_to?(create_method)
true
end
# Can we impersonate this user?
def can_impersonate?(target)
return false if target.blank?
return false if @user.blank?
# You must be an admin to impersonate
return false unless @user.admin?
# You may not impersonate other admins
return false if target.admin?
# You may not impersonate yourself
return false if @user == target
true
end
# Can we approve it?
def can_approve?(target)
return false if target.blank?
return false if @user.blank?
return false if target.approved?
@user.has_trust_level?(:moderator)
end
def can_ban?(user)
return false if user.blank?
return false unless @user.try(:admin?)
return false if user.admin?
true
end
def can_revoke_admin?(admin)
return false unless @user.try(:admin?)
return false if admin.blank?
return false if @user.id == admin.id
return false unless admin.admin?
true
end
def can_grant_admin?(user)
return false unless @user.try(:admin?)
return false if user.blank?
return false if @user.id == user.id
return false if user.admin?
true
end
# Can we see who acted on a post in a particular way?
def can_see_post_actors?(topic, post_action_type_id)
return false unless topic.present?
type_symbol = PostActionType.Types.invert[post_action_type_id]
return false if type_symbol == :bookmark
return can_see_flags?(topic) if PostActionType.is_flag?(type_symbol)
if type_symbol == :vote
# We can see votes if the topic allows for public voting
return false if topic.has_meta_data_boolean?(:private_poll)
end
true
end
def can_see_pending_invites_from?(user)
return false if user.blank?
return false if @user.blank?
return user == @user
end
# For now, can_invite_to is basically can_see?
def can_invite_to?(object)
return false if @user.blank?
return false unless can_see?(object)
return false if SiteSetting.must_approve_users?
@user.has_trust_level?(:moderator)
end
def can_see_deleted_posts?
return true if is_admin?
false
end
def can_see_private_messages?(user_id)
return true if is_admin?
return false if @user.blank?
@user.id == user_id
end
# Support for ensure_{blah}! methods.
def method_missing(method, *args, &block)
if method.to_s =~ /^ensure_(.*)\!$/
can_method = :"#{Regexp.last_match[1]}?"
if respond_to?(can_method)
raise Discourse::InvalidAccess.new("#{can_method} failed") unless send(can_method, *args, &block)
return
end
end
super
end
# Make sure we can see the object. Will raise a NotFound if it's nil
def ensure_can_see!(obj)
raise Discourse::InvalidAccess.new("Can't see #{obj}") unless can_see?(obj)
end
# Creating Methods
def can_create_category?(parent)
@user.has_trust_level?(:moderator)
end
def can_create_post_on_topic?(topic)
return true if @user.has_trust_level?(:moderator)
return false if topic.closed?
return false if topic.archived?
true
end
# Editing Methods
def can_edit_category?(category)
@user.has_trust_level?(:moderator)
end
def can_edit_post?(post)
return true if @user.has_trust_level?(:moderator)
return false if post.topic.archived?
(post.user == @user)
end
def can_edit_user?(user)
return true if user == @user
@user.admin?
end
def can_edit_topic?(topic)
return true if @user.has_trust_level?(:moderator)
return true if topic.user == @user
false
end
# Deleting Methods
def can_delete_post?(post)
# Can't delete the first post
return false if post.post_number == 1
@user.has_trust_level?(:moderator)
end
def can_delete_category?(category)
return false unless @user.has_trust_level?(:moderator)
return category.topic_count == 0
end
def can_delete_topic?(topic)
return false unless @user.has_trust_level?(:moderator)
return false if Category.exists?(topic_id: topic.id)
true
end
def can_delete_post_action?(post_action)
# You can only undo your own actions
return false unless post_action.user == @user
# Make sure they want to delete it within the window
return post_action.created_at > SiteSetting.post_undo_action_window_mins.minutes.ago
end
def can_send_private_message?(target_user)
return false unless User === target_user
return false if @user.blank?
# Can't send message to yourself
return false if @user.id == target_user.id
# Have to be a basic level at least
return false unless @user.has_trust_level?(:basic)
SiteSetting.enable_private_messages
end
def can_reply_as_new_topic?(topic)
return false if @user.blank?
return false if topic.blank?
return false if topic.private_message?
@user.has_trust_level?(:basic)
end
def can_see_topic?(topic)
if topic.private_message?
return false if @user.blank?
return true if topic.allowed_users.include?(@user)
return is_admin?
end
true
end
def can_vote?(post, opts={})
post_can_act?(post,:vote, opts)
end
# Can the user act on the post in a particular way.
# taken_actions = the list of actions the user has already taken
def post_can_act?(post, action_key, opts={})
return false if @user.blank?
return false if post.blank?
return false if post.topic.archived?
taken = opts[:taken_actions]
taken = taken.keys if taken
if PostActionType.is_flag?(action_key)
return false unless @user.has_trust_level?(:basic)
if taken
return false unless (taken & PostActionType.FlagTypes).empty?
end
else
return false if taken && taken.include?(PostActionType.Types[action_key])
end
case action_key
when :like
return false if post.user == @user
when :vote then
return false if opts[:voted_in_topic] and post.topic.has_meta_data_boolean?(:single_vote)
end
return true
end
end
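
Typical call sites elsewhere in the app look like the sketch below; the ensure_* form is provided by the method_missing hook above:

    guardian = Guardian.new(current_user)
    guardian.can_edit?(post)          # => true or false
    guardian.ensure_can_see!(topic)   # raises Discourse::InvalidAccess when can_see? fails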

29
lib/headless-ember.js Normal file
View File

@ -0,0 +1,29 @@
// DOM
var Element = {};
Element.firstChild = function () { return Element; };
Element.innerHTML = function () { return Element; };
var document = { createRange: false, createElement: function() { return Element; } };
var window = this;
this.document = document;
// Console
var console = window.console = {};
console.log = console.info = console.warn = console.error = function(){};
// jQuery
var jQuery = window.jQuery = function() { return jQuery; };
jQuery.ready = function() { return jQuery; };
jQuery.inArray = function() { return jQuery; };
jQuery.event = {
fixHooks: function() {
}
};
jQuery.jquery = "1.7.2";
var $ = jQuery;
// Ember
function precompileEmberHandlebars(string) {
return Ember.Handlebars.precompile(string).toString();
}

17
lib/image_sizer.rb Normal file
View File

@ -0,0 +1,17 @@
module ImageSizer
# Resize an image to the aspect ratio we want
def self.resize(width, height)
max_width = SiteSetting.max_image_width.to_f
return nil if width.blank? or height.blank?
w = width.to_f
h = height.to_f
return [w.floor, h.floor] if w < max_width
# Using the maximum width, resize the height retaining the aspect ratio
[max_width.floor, (h * (max_width / w)).floor]
end
end
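
Assuming SiteSetting.max_image_width is 690 (the value is a site setting, not fixed here), the resize behaves like:

    ImageSizer.resize(1380, 800)   # => [690, 400]  (scaled down, aspect ratio kept)
    ImageSizer.resize(400, 300)    # => [400, 300]  (already narrow enough)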

24
lib/imgur.rb Normal file
View File

@ -0,0 +1,24 @@
require 'rest_client'
require 'image_size'
module Imgur
def self.upload_file(file)
blob = file.read
response = RestClient.post(SiteSetting.imgur_endpoint, key: SiteSetting.imgur_api_key, image: Base64.encode64(blob))
json = JSON.parse(response.body)['upload'] rescue nil
return nil if json.blank?
# Resize the image
json['image']['width'], json['image']['height'] = ImageSizer.resize(json['image']['width'], json['image']['height'])
{url: json['links']['original'],
filesize: json['image']['size'],
width: json['image']['width'],
height: json['image']['height']}
end
end

View File

@ -0,0 +1,31 @@
module Import
module Adapter
class Base
def self.register(opts={})
Import.add_import_adapter self, opts[:version], opts[:tables]
@table_names = opts[:tables]
end
def apply_to_column_names(table_name, column_names)
up_column_names(table_name, column_names)
end
def apply_to_row(table_name, row)
up_row(table_name, row)
end
# Implement the following methods in subclasses:
def up_column_names(table_name, column_names)
column_names
end
def up_row(table_name, row)
row
end
end
end
end

View File

@ -0,0 +1,28 @@
module Import
module Adapter
class MergeMuteOptionsOnTopicUsers < Base
register version: '20130115012140', tables: [:topic_users]
def up_column_names(table_name, column_names)
# rename_column :topic_users, :notifications, :notification_level
# remove_column :topic_users, :muted_at
if table_name.to_sym == :topic_users
column_names.map {|col| col == 'notifications' ? 'notification_level' : col}.reject {|col| col == 'muted_at'}
else
column_names
end
end
def up_row(table_name, row)
# remove_column :topic_users, :muted_at
if table_name.to_sym == :topic_users
row[0..6] + row[8..-1]
else
row
end
end
end
end
end

View File

@ -0,0 +1,27 @@
module Import
module Adapter
class RemoveSubTagFromTopics < Base
register version: '20130116151829', tables: [:topics]
def up_column_names(table_name, column_names)
# remove_column :topics, :sub_tag
if table_name.to_sym == :topics
column_names.reject {|col| col == 'sub_tag'}
else
column_names
end
end
def up_row(table_name, row)
# remove_column :topics, :sub_tag
if table_name.to_sym == :topics
row[0..29] + row[31..-1]
else
row
end
end
end
end
end

54
lib/import/import.rb Normal file
View File

@ -0,0 +1,54 @@
require_dependency 'import/adapter/base'
module Import
class UnsupportedExportSource < RuntimeError; end
class FormatInvalidError < RuntimeError; end
class FilenameMissingError < RuntimeError; end
class ImportInProgressError < RuntimeError; end
class ImportDisabledError < RuntimeError; end
class UnsupportedSchemaVersion < RuntimeError; end
class WrongTableCountError < RuntimeError; end
class WrongFieldCountError < RuntimeError; end
def self.import_running_key
'importer_is_running'
end
def self.is_import_running?
$redis.get(import_running_key) == '1'
end
def self.set_import_started
$redis.set import_running_key, '1'
end
def self.set_import_is_not_running
$redis.del import_running_key
end
def self.clear_adapters
@adapters = {}
@adapter_instances = {}
end
def self.add_import_adapter(klass, version, tables)
@adapters ||= {}
@adapter_instances ||= {}
unless @adapter_instances[klass]
@adapter_instances[klass] = klass.new
tables.each do |table|
@adapters[table.to_s] ||= []
@adapters[table.to_s] << [version, @adapter_instances[klass]]
end
end
end
def self.adapters_for_version(version)
a = Hash.new([])
@adapters.each {|table_name,adapters| a[table_name] = adapters.reject {|i| i[0].to_i <= version.to_i}.map {|j| j[1]} } if defined?(@adapters)
a
end
end

View File

@ -0,0 +1,27 @@
module Import
class JsonDecoder
def initialize(input_filename)
@input_filename = input_filename
end
def input_stream
@input_stream ||= begin
File.open( @input_filename, 'rb' )
end
end
def start( opts )
@json = JSON.parse(input_stream.read)
opts[:callbacks][:schema_info].call( source: @json['schema']['source'], version: @json['schema']['version'], table_count: @json.keys.size - 1)
@json.each do |key, val|
next if key == 'schema'
opts[:callbacks][:table_data].call( key, val['fields'], val['rows'], val['row_count'] )
end
end
end
end

83
lib/jobs.rb Normal file
View File

@ -0,0 +1,83 @@
module Jobs
class Base
include Sidekiq::Worker
def self.delayed_perform(opts={})
self.new.perform(opts)
end
def self.mutex
@mutex ||= Mutex.new
end
def execute(opts={})
raise "Overwrite me!"
end
def perform(opts={})
opts = opts.with_indifferent_access
if opts.delete(:sync_exec)
if opts.has_key?(:current_site_id) and opts[:current_site_id] != RailsMultisite::ConnectionManagement.current_db
raise ArgumentError.new("You can't connect to another database when executing a job synchronously.")
else
return execute(opts)
end
end
dbs =
if opts[:current_site_id]
[opts[:current_site_id]]
else
RailsMultisite::ConnectionManagement.all_dbs
end
dbs.each do |db|
begin
Jobs::Base.mutex.synchronize do
RailsMultisite::ConnectionManagement.establish_connection(:db => db)
execute(opts)
end
ensure
ActiveRecord::Base.connection_handler.clear_active_connections!
end
end
end
end
def self.enqueue(job_name, opts={})
klass_name = "Jobs::#{job_name.to_s.camelcase}"
klass = klass_name.constantize
# Unless we want to work on all sites
unless opts.delete(:all_sites)
opts[:current_site_id] ||= RailsMultisite::ConnectionManagement.current_db
end
# If we are able to queue a job, do it
if SiteSetting.queue_jobs?
if opts[:delay_for].present?
klass.delay_for(opts.delete(:delay_for)).delayed_perform(opts)
else
Sidekiq::Client.enqueue(klass_name.constantize, opts)
end
else
# Otherwise execute the job right away
opts.delete(:delay_for)
opts[:sync_exec] = true
klass.new.perform(opts)
end
end
def self.enqueue_in(secs, job_name, opts={})
enqueue(job_name, opts.merge!(delay_for: secs))
end
end
# Require all jobs
Dir["#{Rails.root}/lib/jobs/*"].each {|file| require_dependency file }

View File

@ -0,0 +1,12 @@
module Jobs
class CalculateAvgTime < Jobs::Base
def execute(args)
Post.calculate_avg_time
Topic.calculate_avg_time
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'score_calculator'
module Jobs
class CalculateScore < Jobs::Base
def execute(args)
ScoreCalculator.new.calculate
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'score_calculator'
module Jobs
class CalculateViewCounts < Jobs::Base
def execute(args)
User.update_view_counts
end
end
end

View File

@ -0,0 +1,11 @@
module Jobs
class CategoryStats < Jobs::Base
def execute(args)
Category.update_stats
end
end
end

View File

@ -0,0 +1,23 @@
module Jobs
# A daily job that will enqueue digest emails to be sent to users
class EnqueueDigestEmails < Jobs::Base
def execute(args)
target_users.each do |u|
Jobs.enqueue(:user_email, type: :digest, user_id: u.id)
end
end
def target_users
# Users who want to receive emails and haven't been emailed in the last day
User
.select(:id)
.where(email_digests: true)
.where("COALESCE(last_emailed_at, '2010-01-01') <= CURRENT_TIMESTAMP - ('1 DAY'::INTERVAL * digest_after_days)")
.where("COALESCE(last_seen_at, '2010-01-01') <= CURRENT_TIMESTAMP - ('1 DAY'::INTERVAL * digest_after_days)")
end
end
end

119
lib/jobs/exporter.rb Normal file
View File

@ -0,0 +1,119 @@
require_dependency 'export/json_encoder'
require_dependency 'export/export'
require_dependency 'import/import'
module Jobs
class Exporter < Jobs::Base
sidekiq_options :retry => false
def execute(args)
raise Import::ImportInProgressError if Import::is_import_running?
raise Export::ExportInProgressError if Export::is_export_running?
@format = args[:format] || :json
@output_base_filename = File.absolute_path( args[:filename] || File.join( Rails.root, 'tmp', "export-#{Time.now.strftime('%Y-%m-%d-%H%M%S')}" ) )
@output_base_filename = @output_base_filename[0...-3] if @output_base_filename[-3..-1] == '.gz'
@output_base_filename = @output_base_filename[0...-4] if @output_base_filename[-4..-1] == '.tar'
@user = args[:user_id] ? User.where(id: args[:user_id].to_i).first : nil
start_export
@encoder.write_schema_info( source: 'discourse', version: Export.current_schema_version )
ordered_models_for_export.each do |model|
log " #{model.table_name}"
column_info = model.columns
order_col = column_info.map(&:name).find {|x| x == 'id'} || order_columns_for(model)
@encoder.write_table(model.table_name, column_info) do |num_rows_written|
if order_col
model.connection.select_rows("select * from #{model.table_name} order by #{order_col} limit #{batch_size} offset #{num_rows_written}")
else
# Take the rows in the order the database returns them
log "WARNING: no order by clause is being used for #{model.name} (#{model.table_name}). Please update Jobs::Exporter order_columns_for for #{model.name}."
model.connection.select_rows("select * from #{model.table_name} limit #{batch_size} offset #{num_rows_written}")
end
end
end
"#{@output_base_filename}.tar.gz"
ensure
finish_export
end
def ordered_models_for_export
Export.models_included_in_export
end
def order_columns_for(model)
@order_columns_for_hash ||= {
'CategoryFeaturedTopic' => 'category_id, topic_id',
'PostOneboxRender' => 'post_id, onebox_render_id',
'PostReply' => 'post_id, reply_id',
'PostTiming' => 'topic_id, post_number, user_id',
'TopicUser' => 'topic_id, user_id',
'View' => 'parent_id, parent_type, ip, viewed_at'
}
@order_columns_for_hash[model.name]
end
def batch_size
1000
end
def start_export
if @format == :json
@encoder = Export::JsonEncoder.new
else
raise Export::FormatInvalidError
end
Export.set_export_started
Discourse.enable_maintenance_mode
end
def finish_export
if @encoder
@encoder.finish
create_tar_file
@encoder.cleanup_temp
end
ensure
Export.set_export_is_not_running
Discourse.disable_maintenance_mode
send_notification
end
def create_tar_file
filenames = @encoder.filenames
FileUtils.cd( File.dirname(filenames.first) ) do
`tar cvf #{@output_base_filename}.tar #{File.basename(filenames.first)}`
end
FileUtils.cd( File.join(Rails.root, 'public') ) do
Upload.find_each do |upload|
`tar rvf #{@output_base_filename}.tar #{upload.url[1..-1]}` unless upload.url[0,4] == 'http'
end
end
`gzip #{@output_base_filename}.tar`
true
end
def send_notification
SystemMessage.new(@user).create('export_succeeded') if @user
true
end
def log(*args)
puts args
args.each do |arg|
Rails.logger.info "#{Time.now.to_formatted_s(:db)}: [EXPORTER] #{arg}"
end
true
end
end
end

View File

@ -0,0 +1,11 @@
module Jobs
class FeatureTopics < Jobs::Base
def execute(args)
CategoryFeaturedTopic.feature_topics
end
end
end

View File

@ -0,0 +1,35 @@
module Jobs
class FeatureTopicUsers < Jobs::Base
def execute(args)
topic = Topic.where(id: args[:topic_id]).first
raise Discourse::InvalidParameters.new(:topic_id) unless topic.present?
to_feature = topic.posts
# Don't include the OP or the last poster
to_feature = to_feature.where('user_id <> ?', topic.user_id)
to_feature = to_feature.where('user_id <> ?', topic.last_post_user_id)
# Exclude a given post if supplied (in the case of deletes)
to_feature = to_feature.where("id <> ?", args[:except_post_id]) if args[:except_post_id].present?
# Clear the featured users by default
Topic::FEATURED_USERS.times do |i|
topic.send("featured_user#{i+1}_id=", nil)
end
# Assign the featured_user{x} columns
to_feature = to_feature.group(:user_id).order('count_all desc').limit(Topic::FEATURED_USERS)
to_feature.count.keys.each_with_index do |user_id, i|
topic.send("featured_user#{i+1}_id=", user_id)
end
topic.save
end
end
end

289
lib/jobs/importer.rb Normal file
View File

@ -0,0 +1,289 @@
require_dependency 'import/json_decoder'
require_dependency 'import/import'
require_dependency 'import/adapter/base'
(Dir.entries(File.join( Rails.root, 'lib', 'import', 'adapter' )) - ['.', '..', 'base.rb']).each do |f|
require_dependency "import/adapter/#{f}"
end
module Jobs
class Importer < Jobs::Base
sidekiq_options :retry => false
BACKUP_SCHEMA = 'backup'
def initialize
@index_definitions = {}
@format = :json
@warnings = []
end
def execute(args)
ordered_models_for_import.each { |model| model.primary_key } # a HACK to workaround cache problems
raise Import::ImportDisabledError unless SiteSetting.allow_import?
raise Import::ImportInProgressError if Import::is_import_running?
raise Export::ExportInProgressError if Export::is_export_running?
# Disable printing of NOTICE, DETAIL and other unimportant messages from postgresql
User.exec_sql("SET client_min_messages TO WARNING")
@format = args[:format] || :json
@archive_filename = args[:filename]
if args[:user_id]
# After the import is done, we'll need to reload the user record and make sure it's the same person
# before sending a notification
user = User.where(id: args[:user_id].to_i).first
@user_info = { user_id: user.id, email: user.email }
else
@user_info = nil
end
start_import
backup_tables
begin
load_data
create_indexes
extract_uploads
rescue
log "Performing a ROLLBACK because something went wrong!"
rollback
raise
end
ensure
finish_import
end
def ordered_models_for_import
Export.models_included_in_export
end
def start_import
if @format != :json
raise Import::FormatInvalidError
elsif @archive_filename.nil?
raise Import::FilenameMissingError
else
extract_files
@decoder = Import::JsonDecoder.new( File.join(tmp_directory, 'tables.json') )
Import.set_import_started
Discourse.enable_maintenance_mode
end
self
end
def tmp_directory
@tmp_directory ||= begin
f = File.join( Rails.root, 'tmp', Time.now.strftime('import%Y%m%d%H%M%S') )
Dir.mkdir(f) unless Dir[f].present?
f
end
end
def extract_files
FileUtils.cd( tmp_directory ) do
`tar xvzf #{@archive_filename} tables.json`
end
end
def backup_tables
log " Backing up tables"
ActiveRecord::Base.transaction do
create_backup_schema
ordered_models_for_import.each do |model|
backup_and_setup_table( model )
end
end
self
end
def create_backup_schema
User.exec_sql("DROP SCHEMA IF EXISTS #{BACKUP_SCHEMA} CASCADE")
User.exec_sql("CREATE SCHEMA #{BACKUP_SCHEMA}")
self
end
def backup_and_setup_table( model )
log " #{model.table_name}"
@index_definitions[model.table_name] = model.exec_sql("SELECT indexdef FROM pg_indexes WHERE tablename = '#{model.table_name}' and schemaname = 'public';").map { |x| x['indexdef'] }
model.exec_sql("ALTER TABLE #{model.table_name} SET SCHEMA #{BACKUP_SCHEMA}")
model.exec_sql("CREATE TABLE #{model.table_name} (LIKE #{BACKUP_SCHEMA}.#{model.table_name} INCLUDING DEFAULTS INCLUDING CONSTRAINTS INCLUDING COMMENTS INCLUDING STORAGE);")
self
end
def load_data
log " Importing data"
@decoder.start(
callbacks: {
schema_info: method(:set_schema_info),
table_data: method(:load_table)
}
)
self
end
def set_schema_info(arg)
if arg[:source] and arg[:source].downcase == 'discourse'
if arg[:version] and arg[:version] <= Export.current_schema_version
@export_schema_version = arg[:version]
if arg[:table_count] == ordered_models_for_import.size
true
else
raise Import::WrongTableCountError.new("Expected to find #{ordered_models_for_import.size} tables, but export file has #{arg[:table_count]} tables!")
end
elsif arg[:version].nil?
raise ArgumentError.new("The schema version must be provided.")
else
raise Import::UnsupportedSchemaVersion.new("Export file is from a newer version of Discourse. Upgrade and run migrations to import this file.")
end
else
raise Import::UnsupportedExportSource
end
end
def load_table(table_name, fields_arg, row_data, row_count)
fields = fields_arg.dup
model = Export::models_included_in_export.find { |m| m.table_name == table_name }
if model
@adapters ||= Import.adapters_for_version( @export_schema_version )
log " #{table_name}: #{row_count} rows"
if @adapters[table_name]
@adapters[table_name].each do |adapter|
fields = adapter.apply_to_column_names(table_name, fields)
end
end
if fields.size > model.columns.size
raise Import::WrongFieldCountError.new("Table #{table_name} is expected to have #{model.columns.size} fields, but got #{fields.size}! Maybe your Discourse server is older than the server that this export file comes from?")
end
# If there are fewer fields in the data than the model has, then insert only those fields and
# hope that the table uses default values or allows null for the missing columns.
# If the table doesn't have defaults or is not nullable, then a migration adapter should have been created
# along with the migration.
column_info = model.columns
col_num = -1
rows = row_data.map do |row|
if @adapters[table_name]
@adapters[table_name].each do |adapter|
row = adapter.apply_to_row(table_name, row)
end
end
row
end.transpose.map do |col_values|
col_num += 1
case column_info[col_num].type
when :boolean
col_values.map { |v| v.nil? ? nil : (v == 'f' ? false : true) }
else
col_values
end
end.transpose
parameter_markers = fields.map {|x| "?"}.join(',')
sql_stmt = "INSERT INTO #{table_name} (#{fields.join(',')}) VALUES (#{parameter_markers})"
rows.each do |row|
User.exec_sql(sql_stmt, *row)
end
true
else
add_warning "Export file contained an unrecognized table named: #{table_name}! It was ignored."
end
end
def create_indexes
log " Creating indexes"
ordered_models_for_import.each do |model|
log " #{model.table_name}"
@index_definitions[model.table_name].each do |indexdef|
model.exec_sql( indexdef )
end
# The indexdef statements don't create the primary keys, so we need to find the primary key and do it ourselves.
pkey_index_def = @index_definitions[model.table_name].find { |ixdef| ixdef =~ / ([\S]{1,}_pkey) / }
if pkey_index_def and pkey_index_name = / ([\S]{1,}_pkey) /.match(pkey_index_def)[1]
model.exec_sql( "ALTER TABLE ONLY #{model.table_name} ADD PRIMARY KEY USING INDEX #{pkey_index_name}" )
end
if model.columns.map(&:name).include?('id')
max_id = model.exec_sql("SELECT MAX(id) AS max FROM #{model.table_name}")[0]['max'].to_i + 1
seq_name = "#{model.table_name}_id_seq"
model.exec_sql("CREATE SEQUENCE #{seq_name} START WITH #{max_id} INCREMENT BY 1 NO MINVALUE NO MAXVALUE CACHE 1")
model.exec_sql("ALTER SEQUENCE #{seq_name} OWNED BY #{model.table_name}.id")
model.exec_sql("ALTER TABLE #{model.table_name} ALTER COLUMN id SET DEFAULT nextval('#{seq_name}')")
end
end
self
end
def extract_uploads
if `tar tf #{@archive_filename} | grep "uploads/"`.present?
FileUtils.cd( File.join(Rails.root, 'public') ) do
`tar -xz --keep-newer-files -f #{@archive_filename} uploads/`
end
end
end
def rollback
ordered_models_for_import.each do |model|
log " #{model.table_name}"
model.exec_sql("DROP TABLE IF EXISTS #{model.table_name}") rescue nil
begin
model.exec_sql("ALTER TABLE #{BACKUP_SCHEMA}.#{model.table_name} SET SCHEMA public")
rescue => e
log " Failed to restore. #{e.class.name}: #{e.message}"
end
end
end
def finish_import
Import.set_import_is_not_running
Discourse.disable_maintenance_mode
FileUtils.rm_rf(tmp_directory) if Dir[tmp_directory].present?
if @warnings.size > 0
log "WARNINGS:"
@warnings.each do |message|
log " #{message}"
end
end
# send_notification
end
def send_notification
# Doesn't work. "WARNING: Can't mass-assign protected attributes: created_at"
# Still a problem with the activerecord schema_cache I think.
# if @user_info and @user_info[:user_id]
# user = User.where(id: @user_info[:user_id]).first
# if user and user.email == @user_info[:email]
# SystemMessage.new(user).create('import_succeeded')
# end
# end
true
end
def add_warning(message)
@warnings << message
end
def log(*args)
puts args
args.each do |arg|
Rails.logger.info "#{Time.now.to_formatted_s(:db)}: [IMPORTER] #{arg}"
end
true
end
end
end

18
lib/jobs/invite_email.rb Normal file
View File

@ -0,0 +1,18 @@
require_dependency 'email_sender'
module Jobs
# Asynchronously send an email
class InviteEmail < Jobs::Base
def execute(args)
raise Discourse::InvalidParameters.new(:invite_id) unless args[:invite_id].present?
invite = Invite.where(id: args[:invite_id]).first
message = InviteMailer.send_invite(invite)
EmailSender.new(message, :invite).send
end
end
end

View File

@ -0,0 +1,31 @@
module Jobs
class NotifyMovedPosts < Jobs::Base
def execute(args)
raise Discourse::InvalidParameters.new(:post_ids) if args[:post_ids].blank?
raise Discourse::InvalidParameters.new(:moved_by_id) if args[:moved_by_id].blank?
# Make sure we don't notify the same user twice (in case multiple posts were moved at once.)
users_notified = Set.new
posts = Post.where(id: args[:post_ids]).where('user_id <> ?', args[:moved_by_id]).includes(:user, :topic)
if posts.present?
moved_by = User.where(id: args[:moved_by_id]).first
posts.each do |p|
unless users_notified.include?(p.user_id)
p.user.notifications.create(notification_type: Notification.Types[:moved_post],
topic_id: p.topic_id,
post_number: p.post_number,
data: {topic_title: p.topic.title,
display_username: moved_by.username}.to_json)
users_notified << p.user_id
end
end
end
end
end
end

25
lib/jobs/process_post.rb Normal file
View File

@ -0,0 +1,25 @@
require 'image_sizer'
require_dependency 'cooked_post_processor'
module Jobs
class ProcessPost < Jobs::Base
def execute(args)
post = Post.where(id: args[:post_id]).first
return unless post.present?
if args[:cook].present?
post.update_column(:cooked, post.cook(post.raw, topic_id: post.topic_id))
end
cp = CookedPostProcessor.new(post, args)
cp.post_process
# If we changed the document, save it
post.update_column(:cooked, cp.html) if cp.dirty?
end
end
end

View File

@ -0,0 +1,21 @@
require 'image_sizer'
require_dependency 'system_message'
module Jobs
class SendSystemMessage < Jobs::Base
def execute(args)
raise Discourse::InvalidParameters.new(:user_id) unless args[:user_id].present?
raise Discourse::InvalidParameters.new(:message_type) unless args[:message_type].present?
user = User.where(id: args[:user_id]).first
return if user.blank?
system_message = SystemMessage.new(user)
system_message.create(args[:message_type])
end
end
end

18
lib/jobs/test_email.rb Normal file
View File

@ -0,0 +1,18 @@
require_dependency 'email_sender'
module Jobs
# Asynchronously send an email
class TestEmail < Jobs::Base
def execute(args)
raise Discourse::InvalidParameters.new(:to_address) unless args[:to_address].present?
message = TestMailer.send_test(args[:to_address])
EmailSender.new(message, :test_message).send
end
end
end

71
lib/jobs/user_email.rb Normal file
View File

@ -0,0 +1,71 @@
require_dependency 'email_sender'
module Jobs
# Asynchronously send an email to a user
class UserEmail < Jobs::Base
def execute(args)
# Required parameters
raise Discourse::InvalidParameters.new(:user_id) unless args[:user_id].present?
raise Discourse::InvalidParameters.new(:type) unless args[:type].present?
# Find the user
user = User.where(id: args[:user_id]).first
return unless user.present?
seen_recently = (user.last_seen_at.present? and user.last_seen_at > SiteSetting.email_time_window_mins.minutes.ago)
email_args = {}
if args[:post_id]
# Don't email a user about a post when we've seen them recently.
return if seen_recently
post = Post.where(id: args[:post_id]).first
return unless post.present?
# Don't send the email if the user has read the post
return if PostTiming.where(topic_id: post.topic_id, post_number: post.post_number, user_id: user.id).present?
email_args[:post] = post
end
email_args[:email_token] = args[:email_token] if args[:email_token].present?
notification = nil
notification = Notification.where(id: args[:notification_id]).first if args[:notification_id].present?
if notification.present?
# Don't email a user about a post when we've seen them recently.
return if seen_recently
# Load the post if present
email_args[:post] ||= notification.post if notification.post.present?
email_args[:notification] = notification
# Don't send email if the notification this email is about has already been read
return if notification.read?
end
# Make sure that mailer exists
raise Discourse::InvalidParameters.new(:type) unless UserNotifications.respond_to?(args[:type])
message = UserNotifications.send(args[:type], user, email_args)
# Update the to address if we have a custom one
if args[:to_address].present?
message.to = [args[:to_address]]
end
EmailSender.new(message, args[:type], user).send
end
end
end

27
lib/markdown_linker.rb Normal file
View File

@ -0,0 +1,27 @@
# Helps create links using markdown (where references are at the bottom)
class MarkdownLinker
def initialize(base_url)
@base_url = base_url
@index = 1
@markdown_links = {}
@rendered = 1
end
def create(title, url)
@markdown_links[@index] = "#{@base_url}#{url}"
result = "[#{title}][#{@index}]"
@index += 1
result
end
def references
result = ""
(@rendered..@index-1).each do |i|
result << " [#{i}]: #{@markdown_links[i]}\n"
end
@rendered = @index
result
end
end
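
A short usage sketch (the topic path in the example is illustrative):

    linker = MarkdownLinker.new(Discourse.base_url)
    linker.create('A topic', '/t/a-topic/1')   # => "[A topic][1]"
    linker.references                          # => " [1]: http://.../t/a-topic/1\n"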

58
lib/mothership.rb Normal file
View File

@ -0,0 +1,58 @@
require_dependency 'rest_client'
module Mothership
class NicknameUnavailable < RuntimeError; end
def self.nickname_available?(nickname)
response = get('/users/nickname_available', {nickname: nickname})
[response['available'], response['suggestion']]
end
def self.nickname_match?(nickname, email)
response = get('/users/nickname_match', {nickname: nickname, email: email})
[response['match'], response['available'] || false, response['suggestion']]
end
def self.register_nickname(nickname, email)
json = post('/users', {nickname: nickname, email: email})
if json.has_key?('success')
true
else
raise NicknameUnavailable # json['failed'] == -200
end
end
def self.current_discourse_version
get('current_version')['version']
end
private
def self.get(rel_url, params={})
response = RestClient.get( "#{mothership_base_url}#{rel_url}", {params: {access_token: access_token}.merge(params), accept: accepts } )
JSON.parse(response)
end
def self.post(rel_url, params={})
response = RestClient.post( "#{mothership_base_url}#{rel_url}", {access_token: access_token}.merge(params), content_type: :json, accept: accepts )
JSON.parse(response)
end
def self.mothership_base_url
if Rails.env == 'production'
'http://api.discourse.org/api'
else
'http://local.mothership:3000/api'
end
end
def self.access_token
@access_token ||= SiteSetting.discourse_org_access_key
end
def self.accepts
[:json, 'application/vnd.discoursehub.v1']
end
end

140
lib/oneboxer.rb Normal file
View File

@ -0,0 +1,140 @@
require 'open-uri'
require_dependency 'oneboxer/base'
require_dependency 'oneboxer/whitelist'
Dir["#{Rails.root}/lib/oneboxer/*_onebox.rb"].each {|f|
require_dependency(f.split('/')[-2..-1].join('/'))
}
module Oneboxer
extend Oneboxer::Base
Dir["#{Rails.root}/lib/oneboxer/*_onebox.rb"].each do |f|
add_onebox "Oneboxer::#{Pathname.new(f).basename.to_s.gsub(/\.rb$/, '').classify}".constantize
end
def self.default_expiry
1.month
end
# Return a oneboxer for a given URL
def self.onebox_for_url(url)
matchers.each do |regexp, oneboxer|
return oneboxer.new(url) if url =~ regexp
end
nil
end
# Retrieve the onebox for a url without caching
def self.onebox_nocache(url)
oneboxer = onebox_for_url(url)
return oneboxer.onebox if oneboxer.present?
if Whitelist.allowed?(url)
page_html = open(url).read
if page_html.present?
doc = Hpricot(page_html)
# See if it has an oembed thing we can use
(doc/"link[@type='application/json+oembed']").each do |oembed|
return OembedOnebox.new(oembed[:href]).onebox
end
(doc/"link[@type='text/json+oembed']").each do |oembed|
return OembedOnebox.new(oembed[:href]).onebox
end
# Check for opengraph
open_graph = Oneboxer.parse_open_graph(doc)
return OpenGraphOnebox.new(url, open_graph).onebox if open_graph.present?
end
end
nil
end
# Parse URLs out of HTML, returning the document when finished.
def self.each_onebox_link(string_or_doc)
doc = string_or_doc
doc = Hpricot(doc) if doc.is_a?(String)
onebox_links = doc.search("a.onebox")
if onebox_links.present?
onebox_links.each do |link|
if link['href'].present?
yield link['href'], link
end
end
end
doc
end
def self.create_post_reference(result, args={})
result.post_onebox_renders.create(post_id: args[:post_id]) if args[:post_id].present?
rescue ActiveRecord::RecordNotUnique
end
def self.render_from_cache(url, args={})
result = OneboxRender.where(url: url).first
# Return the result but also create a reference to it
if result.present?
create_post_reference(result, args)
return result
end
nil
end
# Cache results from a onebox call
def self.fetch_and_cache(url, args)
cooked, preview = onebox_nocache(url)
return nil if cooked.blank?
# Store a cooked version in the database
OneboxRender.transaction do
begin
render = OneboxRender.create(url: url, preview: preview, cooked: cooked, expires_at: Oneboxer.default_expiry.from_now)
create_post_reference(render, args)
rescue ActiveRecord::RecordNotUnique
end
end
[cooked, preview]
end
# Retrieve a preview of a onebox, caching the result for performance
def self.preview(url, args={})
cached = render_from_cache(url, args) unless args[:no_cache].present?
# If we have a preview stored, return that. Otherwise return cooked content.
if cached.present?
return cached.preview if cached.preview.present?
return cached.cooked
end
cooked, preview = fetch_and_cache(url, args)
return preview if preview.present?
cooked
end
def self.invalidate(url)
OneboxRender.destroy_all(url: url)
end
# Return the cooked content for a url, caching the result for performance
def self.onebox(url, args={})
if args[:invalidate_oneboxes].present?
# Remove the onebox from the cache
Oneboxer.invalidate(url)
else
cached = render_from_cache(url, args) unless args[:no_cache].present?
return cached.cooked if cached.present?
end
cooked, preview = fetch_and_cache(url, args)
cooked
end
end
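To make the dispatch and caching flow above concrete, here is a minimal usage sketch; the URLs are examples and `post` stands for any persisted Post record.

# Uncached expansion: the first registered onebox whose matcher hits the URL wins,
# otherwise whitelisted pages fall back to oembed / OpenGraph discovery.
html = Oneboxer.onebox_nocache("https://gist.github.com/1289635")

# Cached expansion, also recording which post referenced the render:
cooked  = Oneboxer.onebox("http://example.com/article", post_id: post.id)
preview = Oneboxer.preview("http://example.com/article", post_id: post.id)

# Drop a stale render so the next call re-fetches and re-caches it:
Oneboxer.invalidate("http://example.com/article")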

View File

@ -0,0 +1,44 @@
require_dependency 'oneboxer/handlebars_onebox'
module Oneboxer
class AmazonOnebox < HandlebarsOnebox
matcher /^https?:\/\/(?:www\.)?amazon.(com|ca)\/.*$/
favicon 'amazon.png'
def template
template_path("simple_onebox")
end
# Use the mobile version of the site
def translate_url
# If we're already mobile don't translate the url
return @url if @url =~ /https?:\/\/www\.amazon\.com\/gp\/aw\/d\//
m = @url.match(/(?:d|g)p\/(?:product\/)?(?<id>[^\/]+)(?:\/|$)/mi)
return "http://www.amazon.com/gp/aw/d/" + URI::encode(m[:id]) if m.present?
@url
end
def parse(data)
hp = Hpricot(data)
result = {}
result[:title] = hp.at("h1")
result[:title] = result[:title].inner_html if result[:title].present?
image = hp.at(".main-image img")
result[:image] = image['src'] if image
result[:by_info] = hp.at("#by-line")
result[:by_info] = BaseOnebox.remove_whitespace(result[:by_info].inner_html) if result[:by_info].present?
summary = hp.at("#description-and-details-content")
result[:text] = summary.inner_html if summary.present?
result
end
end
end

View File

@ -0,0 +1,35 @@
require_dependency 'oneboxer/handlebars_onebox'
module Oneboxer
class AndroidAppStoreOnebox < HandlebarsOnebox
matcher /^https?:\/\/play\.google\.com\/.+$/
favicon 'google_play.png'
def template
template_path('simple_onebox')
end
def parse(data)
hp = Hpricot(data)
result = {}
m = hp.at("h1.doc-banner-title")
result[:title] = m.inner_text if m
m = hp.at("div#doc-original-text")
if m
result[:text] = BaseOnebox.replace_tags_with_spaces(m.inner_html)
result[:text] = result[:text][0..MAX_TEXT]
end
m = hp.at("div.doc-banner-icon img")
result[:image] = m['src'] if m
result
end
end
end

View File

@ -0,0 +1,37 @@
require_dependency 'oneboxer/handlebars_onebox'
module Oneboxer
class AppleAppOnebox < HandlebarsOnebox
matcher /^https?:\/\/itunes\.apple\.com\/.+$/
favicon 'apple.png'
# Don't masquerade as mobile
def http_params
{}
end
def template
template_path('simple_onebox')
end
def parse(data)
hp = Hpricot(data)
result = {}
m = hp.at("h1")
result[:title] = m.inner_text if m
m = hp.at("h4 ~ p")
result[:text] = m.inner_text[0..MAX_TEXT] if m
m = hp.at(".product img.artwork")
result[:image] = m['src'] if m
result
end
end
end

45
lib/oneboxer/base.rb Normal file
View File

@ -0,0 +1,45 @@
module Oneboxer
class << self
def parse_open_graph(doc)
result = {}
%w(title type image url description).each do |prop|
node = doc.at("/html/head/meta[@property='og:#{prop}']")
result[prop] = (node['content'] || node['value']) if node
end
# If there's no description, try and get one from the meta tags
if result['description'].blank?
node = doc.at("/html/head/meta[@name='description']")
result['description'] = node['content'] if node
end
if result['description'].blank?
node = doc.at("/html/head/meta[@name='Description']")
result['description'] = node['content'] if node
end
result
end
end
module Base
def matchers
@matchers ||= {}
@matchers
end
# Add a matcher
def add_matcher(regexp, klass)
matchers[regexp] = klass
end
def add_onebox(klass)
matchers[klass.regexp] = klass
end
end
end

View File

@ -0,0 +1,48 @@
require 'open-uri'
module Oneboxer
class BaseOnebox
class << self
attr_accessor :regexp
attr_accessor :favicon_file
def matcher(regexp)
self.regexp = regexp
end
def favicon(favicon_file)
self.favicon_file = "favicons/#{favicon_file}"
end
def remove_whitespace(s)
s.gsub /\n/, ''
end
def image_html(url, title, page_url)
"<a href='#{page_url}' target='_blank'><img src='#{url}' alt='#{title}'></a>"
end
def replace_tags_with_spaces(s)
s.gsub /<[^>]+>/, ' '
end
def uriencode(val)
return URI.escape(val, Regexp.new("[^#{URI::PATTERN::UNRESERVED}]"))
end
end
def initialize(url, opts={})
@url = url
@opts = opts
end
def translate_url
@url
end
end
end
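The `matcher`/`favicon` class macros above are all a new onebox needs. A hypothetical subclass (not part of this commit) would look like the sketch below; files under lib/oneboxer/*_onebox.rb are picked up automatically by the glob in lib/oneboxer.rb, so the explicit `add_onebox` call is only needed for a class defined elsewhere.

module Oneboxer
  class ExampleOnebox < BaseOnebox
    matcher /^https?:\/\/example\.com\/.+$/
    favicon 'example.png' # assumes an icon exists in the app's assets under favicons/example.png

    def onebox
      "<a href='#{@url}' target='_blank'>#{@url}</a>"
    end
  end
end

Oneboxer.add_onebox(Oneboxer::ExampleOnebox)
Oneboxer.onebox_for_url("http://example.com/page") # => #<Oneboxer::ExampleOnebox ...>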

View File

@ -0,0 +1,13 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class BliptvOnebox < OembedOnebox
matcher /^https?\:\/\/blip\.tv\/.+$/
def oembed_endpoint
"http://blip.tv/oembed/?url=#{BaseOnebox.uriencode(@url)}&width=300"
end
end
end

View File

@ -0,0 +1,14 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class ClikthroughOnebox < OembedOnebox
matcher /clikthrough\.com\/theater\/video\/\d+$/
def oembed_endpoint
"http://clikthrough.com/services/oembed?url=#{BaseOnebox.uriencode(@url)}"
end
end
end

View File

@ -0,0 +1,14 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class CollegeHumorOnebox < OembedOnebox
matcher /^https?\:\/\/www\.collegehumor\.com\/video\/.*$/
def oembed_endpoint
"http://www.collegehumor.com/oembed.json?url=#{BaseOnebox.uriencode(@url)}"
end
end
end

View File

@ -0,0 +1,14 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class DailymotionOnebox < OembedOnebox
matcher /dailymotion\.com\/.+$/
def oembed_endpoint
"http://www.dailymotion.com/api/oembed/?url=#{BaseOnebox.uriencode(@url)}"
end
end
end

View File

@ -0,0 +1,88 @@
require_dependency 'oneboxer/oembed_onebox'
require_dependency 'freedom_patches/rails4'
module Oneboxer
class DiscourseOnebox < BaseOnebox
include ActionView::Helpers::DateHelper
# TODO: we need to remove these hardcoded urls ASAP
matcher /^https?\:\/\/(dev.discourse.org|localhost\:3000|l.discourse|discuss.emberjs.com)\/.*$/
def onebox
uri = URI::parse(@url)
route = Rails.application.routes.recognize_path(uri.path)
args = {original_url: @url}
# Figure out what kind of onebox to show based on the URL
case route[:controller]
when 'users'
user = User.where(username_lower: route[:username].downcase).first
Guardian.new.ensure_can_see!(user)
args.merge! avatar: PrettyText.avatar_img(user.username, 'tiny'), username: user.username
args[:bio] = user.bio_cooked if user.bio_cooked.present?
@template = 'user'
when 'topics'
if route[:post_number].present? and route[:post_number].to_i > 1
# Post Link
post = Post.where(topic_id: route[:topic_id], post_number: route[:post_number].to_i).first
Guardian.new.ensure_can_see!(post)
topic = post.topic
slug = Slug.for(topic.title)
excerpt = post.excerpt(SiteSetting.post_onebox_maxlength)
excerpt.gsub!("\n"," ")
# hack to make it render for now
excerpt.gsub!("[/quote]", "[quote]")
quote = "[quote=\"#{post.user.username}, topic:#{topic.id}, slug:#{slug}, post:#{post.post_number}\"]#{excerpt}[/quote]"
cooked = PrettyText.cook(quote)
return cooked
else
# Topic Link
topic = Topic.where(id: route[:topic_id].to_i).includes(:user).first
post = topic.posts.first
Guardian.new(nil).ensure_can_see!(topic)
posters = topic.posters_summary.map do |p|
{username: p[:user][:username],
avatar: PrettyText.avatar_img(p[:user][:username], 'tiny'),
description: p[:description],
extras: p[:extras]}
end
category = topic.category
if category
category = "<a href=\"/category/#{category.name}\" class=\"badge badge-category excerptable\" data-excerpt-size=\"medium\" style=\"background-color: ##{category.color}\">#{category.name}</a>"
end
quote = post.excerpt(SiteSetting.post_onebox_maxlength)
args.merge! title: topic.title,
avatar: PrettyText.avatar_img(topic.user.username, 'tiny'),
posts_count: topic.posts_count,
last_post: FreedomPatches::Rails4.time_ago_in_words(topic.last_posted_at, false, scope: :'datetime.distance_in_words_verbose'),
age: FreedomPatches::Rails4.time_ago_in_words(topic.created_at, false, scope: :'datetime.distance_in_words_verbose'),
views: topic.views,
posters: posters,
quote: quote,
category: category,
topic: topic.id
@template = 'topic'
end
end
return nil unless @template
Mustache.render(File.read("#{Rails.root}/lib/oneboxer/templates/discourse_#{@template}_onebox.hbrs"), args)
rescue ActionController::RoutingError
nil
end
end
end

View File

@ -0,0 +1,14 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class DotsubOnebox < OembedOnebox
matcher /dotsub\.com\/.+$/
def oembed_endpoint
"http://dotsub.com/services/oembed?url=#{BaseOnebox.uriencode(@url)}"
end
end
end

View File

@ -0,0 +1,24 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class FlickrOnebox < BaseOnebox
matcher /^https?\:\/\/.*\.flickr\.com\/.*$/
def onebox
page_html = open(@url).read
return nil if page_html.blank?
doc = Hpricot(page_html)
# Flickr's oembed just stopped returning images for no reason. Let's use OpenGraph instead.
open_graph = Oneboxer.parse_open_graph(doc)
# A site is supposed to supply all the basic og attributes, but some don't (like deviant art)
# If it just has image and no title, embed it as an image.
return BaseOnebox.image_html(open_graph['image'], nil, @url) if open_graph['image'].present?
nil
end
end
end

View File

@ -0,0 +1,10 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class FunnyOrDieOnebox < OembedOnebox
matcher /^https?\:\/\/(www\.)?funnyordie\.com\/videos\/.*$/
def oembed_endpoint
"http://www.funnyordie.com/oembed.json?url=#{BaseOnebox.uriencode(@url)}"
end
end
end

View File

@ -0,0 +1,30 @@
require_dependency 'oneboxer/handlebars_onebox'
module Oneboxer
class GistOnebox < HandlebarsOnebox
matcher /^https?:\/\/gist\.github\.com/
favicon 'github.png'
def translate_url
m = @url.match(/gist\.github\.com\/(?<id>[0-9a-f]+)/mi)
return "https://api.github.com/gists/#{m[:id]}" if m
@url
end
def parse(data)
parsed = JSON.parse(data)
result = {files: [], title: parsed['description']}
parsed['files'].each do |filename, attrs|
result[:files] << {filename: filename}.merge!(attrs)
end
result
end
end
end

View File

@ -0,0 +1,49 @@
require_dependency 'oneboxer/handlebars_onebox'
module Oneboxer
class GithubBlobOnebox < HandlebarsOnebox
matcher /github\.com\/[^\/]+\/[^\/]+\/blob\/.*/
favicon 'github.png'
def translate_url
m = @url.match(/github\.com\/(?<user>[^\/]+)\/(?<repo>[^\/]+)\/blob\/(?<sha1>[^\/]+)\/(?<file>[^#]+)(#(L(?<from>[^-]*)(-L(?<to>.*))?))?/mi)
if m
@from = (m[:from] || -1).to_i
@to = (m[:to] || -1).to_i
@file = m[:file]
return "https://raw.github.com/#{m[:user]}/#{m[:repo]}/#{m[:sha1]}/#{m[:file]}"
end
nil
end
def parse(data)
if @from > 0
if @to < 0
@from = @from - 10
@to = @from + 20
end
if @to > @from
data = data.split("\n")[@from..@to].join("\n")
end
end
extension = @file.split(".")[-1]
@lang = case extension
when "rb" then "ruby"
when "js" then "javascript"
else extension
end
truncated = false
if data.length > SiteSetting.onebox_max_chars
data = data[0..SiteSetting.onebox_max_chars-1]
truncated = true
end
{content: data, truncated: truncated}
end
end
end

View File

@ -0,0 +1,51 @@
require 'open-uri'
require_dependency 'oneboxer/base_onebox'
module Oneboxer
class HandlebarsOnebox < BaseOnebox
MAX_TEXT = 500
def template_path(template_name)
"#{Rails.root}/lib/oneboxer/templates/#{template_name}.hbrs"
end
def template
template_name = self.class.name.underscore
template_name.gsub!(/oneboxer\//, '')
template_path(template_name)
end
def default_url
"<a href='#{@url}' target='_blank'>#{@url}</a>"
end
def http_params
{'User-Agent' => 'Mozilla/5.0 (iPhone; CPU iPhone OS 5_0_1 like Mac OS X) AppleWebKit/534.46 (KHTML, like Gecko) Version/5.1 Mobile/9A405 Safari/7534.48.3'}
end
def onebox
html = open(translate_url, http_params).read
args = parse(html)
return default_url unless args.present?
args[:original_url] = @url
args[:lang] = @lang || ""
args[:favicon] = ActionController::Base.helpers.image_path(self.class.favicon_file) if self.class.favicon_file.present?
begin
parsed = URI.parse(@url)
args[:host] = parsed.host.split('.').last(2).join('.')
rescue URI::InvalidURIError
# In case there is a problem with the URL, we just won't set the host
end
Mustache.render(File.read(template), args)
rescue => ex
# If there's an exception, just embed the link
raise ex if Rails.env.development?
default_url
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class HuluOnebox < OembedOnebox
matcher /^https?\:\/\/www\.hulu\.com\/watch\/.*$/
def oembed_endpoint
"http://www.hulu.com/api/oembed.json?url=#{BaseOnebox.uriencode(@url)}"
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'oneboxer/base_onebox'
module Oneboxer
class ImageOnebox < BaseOnebox
matcher /^https?:\/\/.*\.(jpg|png|gif|jpeg)$/
def onebox
"<a href='#{@url}' target='_blank'><img src='#{@url}'></a>"
end
end
end

View File

@ -0,0 +1,29 @@
require 'open-uri'
require_dependency 'oneboxer/base_onebox'
module Oneboxer
class ImgurOnebox < BaseOnebox
matcher /^https?\:\/\/imgur\.com\/.*$/
def translate_url
m = @url.match(/\/gallery\/(?<hash>[^\/]+)/mi)
return "http://api.imgur.com/2/image/#{URI::encode(m[:hash])}.json" if m.present?
m = @url.match(/imgur\.com\/(?<hash>[^\/]+)/mi)
return "http://api.imgur.com/2/image/#{URI::encode(m[:hash])}.json" if m.present?
nil
end
def onebox
url = translate_url
return @url if url.blank?
parsed = JSON.parse(open(translate_url).read)
image = parsed['image']
BaseOnebox.image_html(image['links']['original'], image['image']['caption'], image['links']['imgur_page'])
end
end
end

View File

@ -0,0 +1,14 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class KinomapOnebox < OembedOnebox
matcher /kinomap\.com/
def oembed_endpoint
"http://www.kinomap.com/oembed?url=#{BaseOnebox.uriencode(@url)}&format=json"
end
end
end

View File

@ -0,0 +1,14 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class NfbOnebox < OembedOnebox
matcher /nfb\.ca\/film\/[-\w]+\/?/
def oembed_endpoint
"http://www.nfb.ca/remote/services/oembed/?url=#{BaseOnebox.uriencode(@url)}&format=json"
end
end
end

View File

@ -0,0 +1,52 @@
require 'open-uri'
require_dependency 'oneboxer/handlebars_onebox'
module Oneboxer
class OembedOnebox < HandlebarsOnebox
MAX_TEXT = 500
def oembed_endpoint
@url
end
def template
template_path('oembed_onebox')
end
def onebox
parsed = JSON.parse(open(oembed_endpoint).read)
# If it's a video, just embed the iframe
if %w(video rich).include?(parsed['type'])
# Return a preview of the thumbnail url, since iframes don't do well on previews
preview = nil
preview = "<img src='#{parsed['thumbnail_url']}'>" if parsed['thumbnail_url'].present?
return [parsed['html'], preview]
end
if %w(image photo).include?(parsed['type'])
return BaseOnebox.image_html(parsed['url'] || parsed['thumbnail_url'], parsed['title'], parsed['web_page'] || @url)
end
parsed['html'] ||= parsed['abstract']
begin
parsed_uri = URI.parse(@url)
parsed['host'] = parsed_uri.host.split('.').last(2).join('.')
rescue URI::InvalidURIError
# In case there is a problem with the URL, we just won't set the host
end
Mustache.render(File.read(template), parsed)
rescue OpenURI::HTTPError
nil
end
end
end

View File

@ -0,0 +1,35 @@
require_dependency 'oneboxer/handlebars_onebox'
module Oneboxer
class OpenGraphOnebox < HandlebarsOnebox
def template
template_path('simple_onebox')
end
def onebox
# We expect to have the options we need already
return nil unless @opts.present?
# A site is supposed to supply all the basic og attributes, but some don't (like deviant art)
# If it just has image and no title, embed it as an image.
return BaseOnebox.image_html(@opts['image'], nil, @url) if @opts['image'].present? and @opts['title'].blank?
@opts['title'] ||= @opts['description']
return nil if @opts['title'].blank?
@opts[:original_url] = @url
@opts[:text] = @opts['description']
begin
parsed = URI.parse(@url)
@opts[:host] = parsed.host.split('.').last(2).join('.')
rescue URI::InvalidURIError
# In case there is a problem with the URL, we just won't set the host
end
Mustache.render(File.read(template), @opts)
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class QikOnebox < OembedOnebox
matcher /^https?\:\/\/qik\.com\/video\/.*$/
def oembed_endpoint
"http://qik.com/api/oembed.json?url=#{BaseOnebox.uriencode(@url)}"
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class RevisionOnebox < OembedOnebox
matcher /^http\:\/\/(.*\.)?revision3\.com\/.*$/
def oembed_endpoint
"http://revision3.com/api/oembed/?url=#{BaseOnebox.uriencode(@url)}&format=json"
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class SmugmugOnebox < OembedOnebox
matcher /^https?\:\/\/.*\.smugmug\.com\/.*$/
def oembed_endpoint
"http://api.smugmug.com/services/oembed/?url=#{BaseOnebox.uriencode(@url)}&format=json"
end
end
end

View File

@ -0,0 +1,10 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class TedOnebox < OembedOnebox
matcher /^https?\:\/\/(www\.)?ted\.com\/talks\/.*$/
def oembed_endpoint
"http://www.ted.com/talks/oembed.json?url=#{BaseOnebox.uriencode(@url)}"
end
end
end

View File

@ -0,0 +1,23 @@
<aside class='quote' data-post="1" data-topic="{{topic}}">
<div class='title'>
<div class='quote-controls'></div>
{{{avatar}}}
<a href="{{original_url}}">{{title}}</a> {{{category}}} </div>
<blockquote>{{{quote}}}
<div class='topic-info'>
<div class='info-line'>
{{posts_count}} posts, last post {{last_post}}, created {{age}}, {{views}} views
</div>
<div class='posters'>
{{#posters}}
{{{avatar}}}
{{/posters}}
</div>
<div class='clearfix'>
</div>
</div>
</blockquote>
</aside>

View File

@ -0,0 +1,8 @@
<div class='onebox-result'>
{{{avatar}}}
<h3><a href="{{original_url}}">{{username}}</a></h3>
{{#bio}}<p>{{bio}}</p>{{/bio}}
<div class='clearfix'></div>
</div>

View File

@ -0,0 +1,16 @@
<div class='onebox-result'>
{{#host}}
<a href='{{original_url}}' class='source' target="_blank">
{{#favicon}}<img class='favicon' src="{{favicon}}"> {{/favicon}}{{host}}
</a>
{{/host}}
<div class='onebox-result-body'>
{{#title}}
<h3><a href="{{original_url}}" target="_blank">{{title}}</a></h3>
{{/title}}
{{#files}}
<h4>{{filename}}</h4>
<pre><code>{{content}}</code></pre>
{{/files}}
</div>
</div>

View File

@ -0,0 +1,15 @@
<div class='onebox-result'>
{{#host}}
<a href='{{original_url}}' class='source' target="_blank">
{{#favicon}}<img class='favicon' src="{{favicon}}"> {{/favicon}}{{host}}
</a>
{{/host}}
<div class='onebox-result-body'>
<h4><a href="{{original_url}}" target="_blank">{{original_url}}</a></h4>
<pre><code class='{{lang}}'>{{content}}</code></pre>
{{#truncated}}
This file has been truncated. <a href="{{original_url}}" target="_blank">show original</a>
{{/truncated}}
</div>
</div>

View File

@ -0,0 +1,17 @@
<div class='onebox-result'>
{{#host}}
<div class="source">
<div class="info">
<a href='{{original_url}}' class='source' target="_blank">
{{#favicon}}<img class='favicon' src="{{favicon}}"> {{/favicon}}{{host}}
</a>
</div>
</div>
{{/host}}
<div class='onebox-result-body'>
<h3><a href="{{original_url}}" target="_blank">{{title}}</a></h3>
{{#author_info}}<h4>{{author_info}}</h4>{{/author_info}}
{{{html}}}
</div>
<div class='clearfix'></div>
</div>

View File

@ -0,0 +1,18 @@
<div class='onebox-result'>
{{#host}}
<div class='source'>
<div class='info'>
<a href='{{original_url}}' target="_blank">
{{#favicon}}<img class='favicon' src="{{favicon}}"> {{/favicon}}{{host}}
</a>
</div>
</div>
{{/host}}
<div class='onebox-result-body'>
{{#image}}<img src="{{image}}" class="thumbnail">{{/image}}
<h3><a href="{{original_url}}" target="_blank">{{title}}</a></h3>
{{#by_info}}<h4>{{by_info}}</h4>{{/by_info}}
{{{text}}}
</div>
<div class='clearfix'></div>
</div>

View File

@ -0,0 +1,24 @@
<div class='onebox-result'>
{{#host}}
<div class="source">
<div class="info">
<a href='{{original_url}}' target="_blank">
{{#favicon}}<img class='favicon' src="{{favicon}}"> {{/favicon}}{{host}}
</a>
</div>
</div>
{{/host}}
<div class='onebox-result-body'>
{{#user.profile_image_url}}<img src="{{user.profile_image_url}}">{{/user.profile_image_url}}
<h4>@{{user.screen_name}}</h4>
{{{text}}}
<div class='date'>
<a href="{{original_url}}" target="_blank">{{created_at}}</a>
</div>
</div>
<div class='clearfix'></div>
</div>

View File

@ -0,0 +1,30 @@
require_dependency 'oneboxer/handlebars_onebox'
module Oneboxer
class TwitterOnebox < HandlebarsOnebox
matcher /^https?:\/\/(?:www\.)?twitter.com\/.*$/
favicon 'twitter.png'
def translate_url
m = @url.match(/\/(?<user>[^\/]+)\/status\/(?<id>\d+)/mi)
return "http://api.twitter.com/1/statuses/show/#{URI::encode(m[:id])}.json" if m.present?
@url
end
def parse(data)
result = JSON.parse(data)
result["created_at"] = Time.parse(result["created_at"]).strftime("%I:%M%p - %d %b %y")
# Hyperlink URLs
URI.extract(result['text'], %w(http https)).each do |url|
result['text'].gsub!(url, "<a href='#{url}' target='_blank'>#{url}</a>")
end
result
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class ViddlerOnebox < OembedOnebox
matcher /viddler\.com\/.+$/
def oembed_endpoint
"http://lab.viddler.com/services/oembed/?url=#{BaseOnebox.uriencode(@url)}"
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class VimeoOnebox < OembedOnebox
matcher /^https?\:\/\/vimeo\.com\/.*$/
def oembed_endpoint
"http://vimeo.com/api/oembed.json?url=#{BaseOnebox.uriencode(@url)}&width=600"
end
end
end

86
lib/oneboxer/whitelist.rb Normal file
View File

@ -0,0 +1,86 @@
module Oneboxer
module Whitelist
def self.entries
[/cnn\.com\/.+/,
/washingtonpost\.com\/.+/,
/\/\d{4}\/\d{2}\/\d{2}\//, # wordpress
/funnyordie\.com\/.+/,
/youtube\.com\/.+/,
/youtu\.be\/.+/,
/500px\.com\/.+/,
/scribd\.com\/.+/,
/photobucket\.com\/.+/,
/ebay\.(com|ca|co\.uk)\/.+/,
/nytimes\.com\/.+/,
/tumblr\.com\/.+/,
/pinterest\.com\/.+/,
/imdb\.com\/.+/,
/bbc\.co\.uk\/.+/,
/ask\.com\/.+/,
/huffingtonpost\.com\/.+/,
/aol\.(com|ca)\/.+/,
/espn\.go\.com\/.+/,
/about\.com\/.+/,
/cnet\.com\/.+/,
/ehow\.com\/.+/,
/dailymail\.co\.uk\/.+/,
/indiatimes\.com\/.+/,
/answers\.com\/.+/,
/instagr\.am\/.+/,
/battle\.net\/.+/,
/sourceforge\.net\/.+/,
/myspace\.com\/.+/,
/wikia\.com\/.+/,
/etsy\.com\/.+/,
/walmart\.com\/.+/,
/reference\.com\/.+/,
/yelp\.com\/.+/,
/foxnews\.com\/.+/,
/guardian\.co\.uk\/.+/,
/digg\.com\/.+/,
/squidoo\.com\/.+/,
/wsj\.com\/.+/,
/archive\.org\/.+/,
/nba\.com\/.+/,
/samsung\.com\/.+/,
/mashable\.com\/.+/,
/forbes\.com\/.+/,
/soundcloud\.com\/.+/,
/thefreedictionary\.com\/.+/,
/groupon\.com\/.+/,
/ikea\.com\/.+/,
/dell\.com\/.+/,
/mlb\.com\/.+/,
/bestbuy\.(com|ca)\/.+/,
/bloomberg\.com\/.+/,
/ign\.com\/.+/,
/twitpic\.com\/.+/,
/techcrunch\.com\/.+/,
/usatoday\.com\/.+/,
/go\.com\/.+/,
/businessinsider\.com\/.+/,
/zillow\.com\/.+/,
/tmz\.com\/.+/,
/thesun\.co\.uk\/.+/,
/thestar\.(com|ca)\/.+/,
/theglobeandmail\.com\/.+/,
/torontosun\.com\/.+/,
/kickstarter\.com\/.+/,
/wired\.com\/.+/,
/time\.com\/.+/,
/npr\.org\/.+/,
/cracked\.com\/.+/,
/deadline\.com\/.+/
]
end
def self.allowed?(url)
#return true
entries.each {|e| return true if url =~ e }
false
end
end
end
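A quick illustration of the whitelist check; the URLs are examples only.

Oneboxer::Whitelist.allowed?("http://www.cnn.com/2013/02/05/some-story") # => true (cnn.com entry)
Oneboxer::Whitelist.allowed?("http://example.com/not-listed")            # => false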

View File

@ -0,0 +1,59 @@
require_dependency 'oneboxer/handlebars_onebox'
module Oneboxer
class WikipediaOnebox < HandlebarsOnebox
matcher /^https?:\/\/.*wikipedia.(com|org)\/.*$/
favicon 'wikipedia.png'
def template
template_path('simple_onebox')
end
def translate_url
m = @url.match(/wiki\/(?<identifier>[^#\/]+)/mi)
return @url unless m
article_id = CGI::unescape(m[:identifier])
"http://en.m.wikipedia.org/w/index.php?title=#{URI::encode(article_id)}"
end
def parse(data)
hp = Hpricot(data)
result = {}
title = hp.at('title').inner_html
result[:title] = title.gsub(/ - Wikipedia, the free encyclopedia/, '') if title.present?
# get the first image > 150 pix high
images = hp.search("img").select { |img| img['height'].to_i > 150 }
result[:image] = "http:#{images[0]["src"]}" unless images.empty?
# remove the table from mobile layout, as it can contain paras in some rare cases
hp.search("table").remove
# get all the paras
paras = hp.search("p")
text = ""
unless paras.empty?
cnt = 0
while text.length < MAX_TEXT and cnt <= 3
text << " " unless cnt == 0
paragraph = paras[cnt].inner_text[0..MAX_TEXT]
paragraph.gsub!(/\[\d+\]/mi, "")
text << paragraph
cnt += 1
end
end
text = "#{text[0..MAX_TEXT]}..." if text.length > MAX_TEXT
result[:text] = text
result
end
end
end

View File

@ -0,0 +1,13 @@
require_dependency 'oneboxer/oembed_onebox'
module Oneboxer
class YfrogOnebox < OembedOnebox
matcher /yfrog\.(com|ru|com\.tr|it|fr|co\.il|co\.uk|com\.pl|pl|eu|us)\/[a-zA-Z0-9]+/
def oembed_endpoint
"http://www.yfrog.com/api/oembed/?url=#{BaseOnebox.uriencode(@url)}&format=json"
end
end
end

98
lib/post_creator.rb Normal file
View File

@ -0,0 +1,98 @@
# Responsible for creating posts and topics
#
require_dependency 'rate_limiter'
class PostCreator
# Errors when creating the post
attr_reader :errors
# Acceptable options:
#
# raw - raw text of post
# image_sizes - We can pass a list of the sizes of images in the post as a shortcut.
#
# When replying to a topic:
# topic_id - topic we're replying to
# reply_to_post_number - post number we're replying to
#
# When creating a topic:
# title - New topic title
# archetype - Topic archetype
# category - Category to assign to topic
# target_usernames - comma delimited list of usernames for membership (private message)
# meta_data - Topic meta data hash
def initialize(user, opts)
@user = user
@opts = opts
raise Discourse::InvalidParameters.new(:raw) if @opts[:raw].blank?
end
def guardian
@guardian ||= Guardian.new(@user)
end
def create
topic = nil
post = nil
Post.transaction do
if @opts[:topic_id].blank?
topic_params = {title: @opts[:title], user_id: @user.id, last_post_user_id: @user.id}
topic_params[:archetype] = @opts[:archetype] if @opts[:archetype].present?
guardian.ensure_can_create!(Topic)
category = Category.where(name: @opts[:category]).first
topic_params[:category_id] = category.id if category.present?
topic_params[:meta_data] = @opts[:meta_data] if @opts[:meta_data].present?
topic = Topic.new(topic_params)
if @opts[:archetype] == Archetype.private_message
usernames = @opts[:target_usernames].split(',')
User.where(:username => usernames).each do |u|
unless guardian.can_send_private_message?(u)
topic.errors.add(:archetype, :cant_send_pm)
@errors = topic.errors
raise ActiveRecord::Rollback.new
end
topic.topic_allowed_users.build(user_id: u.id)
end
topic.topic_allowed_users.build(user_id: @user.id)
end
unless topic.save
@errors = topic.errors
raise ActiveRecord::Rollback.new
end
else
topic = Topic.where(id: @opts[:topic_id]).first
guardian.ensure_can_create!(Post, topic)
end
post = topic.posts.new(raw: @opts[:raw],
user: @user,
reply_to_post_number: @opts[:reply_to_post_number])
post.image_sizes = @opts[:image_sizes] if @opts[:image_sizes].present?
unless post.save
@errors = post.errors
raise ActiveRecord::Rollback.new
end
# Extract links
TopicLink.extract_from(post)
end
post
end
# Shortcut
def self.create(user, opts)
PostCreator.new(user, opts).create
end
end
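Given the options documented in the class header, typical calls look like the sketch below; `user`, `topic` and the "meta" category name are placeholders.

# New topic:
post = PostCreator.create(user,
  title:    "Welcome to the forum",
  raw:      "First post body",
  category: "meta")

# Reply to an existing topic:
reply = PostCreator.create(user,
  topic_id:             topic.id,
  reply_to_post_number: 1,
  raw:                  "Replying to the first post")

# When validation fails the transaction rolls back and the errors are exposed:
creator = PostCreator.new(user, raw: "body with no title")
creator.create
creator.errors # => the failing Topic/Post errors collected above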

266
lib/pretty_text.rb Normal file
View File

@ -0,0 +1,266 @@
require 'coffee_script'
require 'v8'
require 'nokogiri'
module PrettyText
def self.whitelist
{
:elements => %w[
a abbr aside b bdo blockquote br caption cite code col colgroup dd div del dfn dl
dt em hr figcaption figure h1 h2 h3 h4 h5 h6 hgroup i img ins kbd li mark
ol p pre q rp rt ruby s samp small span strike strong sub sup table tbody td
tfoot th thead time tr u ul var wbr
],
:attributes => {
:all => ['dir', 'lang', 'title', 'class'],
'aside' => ['data-post', 'data-full', 'data-topic'],
'a' => ['href'],
'blockquote' => ['cite'],
'col' => ['span', 'width'],
'colgroup' => ['span', 'width'],
'del' => ['cite', 'datetime'],
'img' => ['align', 'alt', 'height', 'src', 'width'],
'ins' => ['cite', 'datetime'],
'ol' => ['start', 'reversed', 'type'],
'q' => ['cite'],
'span' => ['style'],
'table' => ['summary', 'width', 'style', 'cellpadding', 'cellspacing'],
'td' => ['abbr', 'axis', 'colspan', 'rowspan', 'width', 'style'],
'th' => ['abbr', 'axis', 'colspan', 'rowspan', 'scope', 'width', 'style'],
'time' => ['datetime', 'pubdate'],
'ul' => ['type']
},
:protocols => {
'a' => {'href' => ['ftp', 'http', 'https', 'mailto', :relative]},
'blockquote' => {'cite' => ['http', 'https', :relative]},
'del' => {'cite' => ['http', 'https', :relative]},
'img' => {'src' => ['http', 'https', :relative]},
'ins' => {'cite' => ['http', 'https', :relative]},
'q' => {'cite' => ['http', 'https', :relative]}
}
}
end
class Helpers
# functions here are available to v8
def avatar_template(username)
return "" unless username
user = User.where(username_lower: username.downcase).first
if user
user.avatar_template
end
end
def is_username_valid(username)
return false unless username
username = username.downcase
return User.exec_sql('select 1 from users where username_lower = ?', username).values.length == 1
end
end
@mutex = Mutex.new
def self.mention_matcher
/(\@[a-zA-Z0-9\-]+)/
end
def self.app_root
Rails.root
end
def self.v8
return @ctx unless @ctx.nil?
@ctx = V8::Context.new
@ctx["helpers"] = Helpers.new
@ctx.load(app_root + "app/assets/javascripts/external/Markdown.Converter.js")
@ctx.load(app_root + "app/assets/javascripts/external/twitter-text-1.5.0.js")
@ctx.load(app_root + "lib/headless-ember.js")
@ctx.load(app_root + "app/assets/javascripts/external/rsvp.js")
@ctx.load(Rails.configuration.ember.handlebars_location)
#@ctx.load(Rails.configuration.ember.ember_location)
@ctx.load(app_root + "app/assets/javascripts/external/sugar-1.3.5.js")
@ctx.eval("var Discourse = {}; Discourse.SiteSettings = #{SiteSetting.client_settings_json};")
@ctx.eval("var window = {}; window.devicePixelRatio = 2;") # hack to make code think stuff is retina
@ctx.eval(CoffeeScript.compile(File.read(app_root + "app/assets/javascripts/discourse/components/bbcode.js.coffee")))
@ctx.eval(CoffeeScript.compile(File.read(app_root + "app/assets/javascripts/discourse/components/utilities.coffee")))
# Load server side javascripts
if DiscoursePluginRegistry.server_side_javascripts.present?
DiscoursePluginRegistry.server_side_javascripts.each do |ssjs|
@ctx.load(ssjs)
end
end
@ctx['quoteTemplate'] = File.open(app_root + 'app/assets/javascripts/discourse/templates/quote.js.shbrs') {|f| f.read}
@ctx['quoteEmailTemplate'] = File.open(app_root + 'lib/assets/quote_email.js.shbrs') {|f| f.read}
@ctx.eval("HANDLEBARS_TEMPLATES = {
'quote': Handlebars.compile(quoteTemplate),
'quote_email': Handlebars.compile(quoteEmailTemplate),
};")
@ctx
end
def self.markdown(text, opts=nil)
# we use the exact same markdown converter as the client
# TODO: use the same extensions on both client and server (in particular the template for mentions)
baked = nil
@mutex.synchronize do
# we need to do this to work in a multisite environment: many sites, many settings
v8.eval("Discourse.SiteSettings = #{SiteSetting.client_settings_json};")
v8.eval("Discourse.BaseUrl = 'http://#{RailsMultisite::ConnectionManagement.current_hostname}';")
v8['opts'] = opts || {}
v8['raw'] = text
v8.eval('opts["mentionLookup"] = function(u){return helpers.is_username_valid(u);}')
v8.eval('opts["lookupAvatar"] = function(p){return Discourse.Utilities.avatarImg({username: p, size: "tiny", avatarTemplate: helpers.avatar_template(p)});}')
baked = v8.eval('Discourse.Utilities.markdownConverter(opts).makeHtml(raw)')
end
# we need some minimal server-side processing: apply the CDN and (TODO) filter disallowed markup
baked = apply_cdn(baked, Rails.configuration.action_controller.asset_host)
baked
end
# leaving this here because it invokes v8; we don't want to implement it twice
def self.avatar_img(username, size)
r = nil
@mutex.synchronize do
v8['username'] = username
v8['size'] = size
v8.eval("Discourse.SiteSettings = #{SiteSetting.client_settings_json};")
v8.eval("Discourse.CDN = '#{Rails.configuration.action_controller.asset_host}';")
v8.eval("Discourse.BaseUrl = '#{RailsMultisite::ConnectionManagement.current_hostname}';")
r = v8.eval("Discourse.Utilities.avatarImg({ username: username, size: size });")
end
r
end
def self.apply_cdn(html, url)
return html unless url
image = /\.(jpg|jpeg|gif|png|tiff|tif)$/
doc = Nokogiri::HTML.fragment(html)
doc.css("a").each do |l|
href = l.attributes["href"].to_s
if href[0] == '/' && href =~ image
l["href"] = url + href
end
end
doc.css("img").each do |l|
src = l.attributes["src"].to_s
if src[0] == '/'
l["src"] = url + src
end
end
doc.to_s
end
def self.cook(text, opts={})
cloned = opts.dup
# we have a minor inconsistency
cloned[:topicId] = opts[:topic_id]
Sanitize.clean(markdown(text.dup, cloned), PrettyText.whitelist)
end
def self.extract_links(html)
doc = Nokogiri::HTML.fragment(html)
links = []
doc.css("a").each do |l|
links << l.attributes["href"].to_s
end
links
end
class ExcerptParser < Nokogiri::XML::SAX::Document
class DoneException < StandardError; end
attr_reader :excerpt
def initialize(length)
@length = length
@excerpt = ""
@current_length = 0
end
def self.get_excerpt(html, length)
me = self.new(length)
parser = Nokogiri::HTML::SAX::Parser.new(me)
begin
copy = "<div>"
copy << html unless html.nil?
copy << "</div>"
parser.parse(copy) unless html.nil?
rescue DoneException
# we are done
end
me.excerpt
end
def start_element(name, attributes=[])
case name
when "img"
attributes = Hash[*attributes.flatten]
if attributes["alt"]
characters("[#{attributes["alt"]}]")
elsif attributes["title"]
characters("[#{attributes["title"]}]")
else
characters("[image]")
end
when "a"
c = "<a "
c << attributes.map{|k,v| "#{k}='#{v}'"}.join(' ')
c << ">"
characters(c, false, false, false)
@in_a = true
when "aside"
@in_quote = true
end
end
def end_element(name)
case name
when "a"
characters("</a>",false, false, false)
@in_a = false
when "p", "br"
characters(" ")
when "aside"
@in_quote = false
end
end
def characters(string, truncate = true, count_it = true, encode = true)
return if @in_quote
encode = encode ? lambda{|s| ERB::Util.html_escape(s)} : lambda {|s| s}
if @current_length + string.length > @length && count_it
@excerpt << encode.call(string[0..(@length-@current_length)-1]) if truncate
@excerpt << "&hellip;"
@excerpt << "</a>" if @in_a
raise DoneException.new
end
@excerpt << encode.call(string)
@current_length += string.length if count_it
end
end
def self.excerpt(html, length)
ExcerptParser.get_excerpt(html, length)
end
end
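A short sketch of the public surface above; these calls assume the Rails app environment (site settings and the V8 context) is loaded, and the input text is an example.

html = PrettyText.cook("hello **world**, see [example](http://example.com)", topic_id: 1)

PrettyText.extract_links(html)       # => ["http://example.com"]
PrettyText.excerpt(html, 100)        # truncated excerpt, quotes skipped, images become [alt]
PrettyText.avatar_img("sam", "tiny") # avatar <img> markup rendered via the shared JS helper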

35
lib/promotion.rb Normal file
View File

@ -0,0 +1,35 @@
#
# Check whether a user is ready for a new trust level.
#
class Promotion
def initialize(user)
@user = user
end
# Review a user for a promotion. Delegates work to a review_#{trust_level} method.
# Returns true if the user was promoted, false otherwise.
def review
# nil users are never promoted
return false if @user.blank?
trust_key = TrustLevel.Levels.invert[@user.trust_level]
review_method = :"review_#{trust_key.to_s}"
return send(review_method) if respond_to?(review_method)
false
end
def review_new
return false if @user.topics_entered < SiteSetting.basic_requires_topics_entered
return false if @user.posts_read_count < SiteSetting.basic_requires_read_posts
return false if (@user.time_read / 60) < SiteSetting.basic_requires_time_spent_mins
@user.trust_level = TrustLevel.Levels[:basic]
@user.save
true
end
end
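Usage is a single call; the thresholds come from the basic_requires_* site settings referenced above.

Promotion.new(user).review # => true if the user was promoted to :basic
Promotion.new(nil).review  # => false, nil users are never promoted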

53
lib/rate_limiter.rb Normal file
View File

@ -0,0 +1,53 @@
require_dependency 'rate_limiter/limit_exceeded'
require_dependency 'rate_limiter/on_create_record'
# A redis backed rate limiter.
class RateLimiter
# We don't observe rate limits in test mode
def self.disabled?
Rails.env.test?
end
def initialize(user, key, max, secs)
@user = user
@key = "rate-limit:#{@user.id}:#{key}"
@max = max
@secs = secs
end
def clear!
$redis.del(@key)
end
def can_perform?
return true if RateLimiter.disabled?
return true if @user.has_trust_level?(:moderator)
result = $redis.get(@key)
return true if result.blank?
return true if result.to_i < @max
false
end
def performed!
return if RateLimiter.disabled?
return if @user.has_trust_level?(:moderator)
result = $redis.incr(@key).to_i
$redis.expire(@key, @secs) if result == 1
if result > @max
# In case we go over, clamp it to the maximum
$redis.decr(@key)
raise LimitExceeded.new($redis.ttl(@key))
end
end
def rollback!
return if RateLimiter.disabled?
$redis.decr(@key)
end
end
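A sketch of the limiter in isolation; the key, maximum and window below are arbitrary examples.

limiter = RateLimiter.new(user, "create_topic", 3, 1.day.to_i)

limiter.can_perform? # => true while the redis counter is under the max (test mode and moderators skip limits)

begin
  limiter.performed! # increments the counter and sets the expiry on first use
rescue RateLimiter::LimitExceeded => e
  e.available_in     # seconds until the key expires
end

limiter.rollback!    # undo the increment if the guarded operation failed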

View File

@ -0,0 +1,11 @@
class RateLimiter
# A rate limit has been exceeded.
class LimitExceeded < Exception
attr_accessor :available_in
def initialize(available_in)
@available_in = available_in
end
end
end

View File

@ -0,0 +1,61 @@
class RateLimiter
# A mixin we can use on ActiveRecord Models to automatically rate limit them
# based on a SiteSetting.
#
# It expects a SiteSetting called `rate_limit_create_{model_name}` where
# `model_name` is the class name of your model, underscored.
#
module OnCreateRecord
# Override to define your own rate limiter
def default_rate_limiter
return @rate_limiter if @rate_limiter.present?
limit_key = "create_#{self.class.name.underscore}"
max_setting = SiteSetting.send("rate_limit_#{limit_key}")
@rate_limiter = RateLimiter.new(user, limit_key, 1, max_setting)
end
def self.included(base)
base.extend(ClassMethods)
end
module ClassMethods
def rate_limit(limiter_method=nil)
limiter_method = limiter_method || :default_rate_limiter
self.after_create do
rate_limiter = send(limiter_method)
return unless rate_limiter.present?
rate_limiter.performed!
@performed ||= {}
@performed[limiter_method] = true
end
self.after_destroy do
rate_limiter = send(limiter_method)
return unless rate_limiter.present?
rate_limiter.rollback!
end
self.after_rollback do
rate_limiter = send(limiter_method)
return unless rate_limiter.present?
if @performed.present? and @performed[limiter_method]
rate_limiter.rollback!
@performed[limiter_method] = false
end
end
end
end
end
end
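A hypothetical model wiring for the mixin above. It assumes a rate_limit_create_example_thing site setting exists, per the naming convention described in the comment; none of the names below are part of this commit.

class ExampleThing < ActiveRecord::Base
  include RateLimiter::OnCreateRecord

  belongs_to :user

  # Uses default_rate_limiter, i.e. SiteSetting.rate_limit_create_example_thing
  rate_limit

  # Or point at a custom limiter instead:
  # rate_limit :things_per_hour_limiter
  # def things_per_hour_limiter
  #   RateLimiter.new(user, "things-per-hour", 10, 1.hour.to_i)
  # end
end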

129
lib/remote_ip_improved.rb Normal file
View File

@ -0,0 +1,129 @@
# https://github.com/rails/rails/pull/7234
class RemoteIpImproved
class IpSpoofAttackError < StandardError ; end
# IP addresses that are "trusted proxies" that can be stripped from
# the comma-delimited list in the X-Forwarded-For header. See also:
# http://en.wikipedia.org/wiki/Private_network#Private_IPv4_address_spaces
# http://en.wikipedia.org/wiki/Private_network#Private_IPv6_addresses.
TRUSTED_PROXIES = %r{
^127\.0\.0\.1$ | # localhost
^::1$ |
^(10 | # private IP 10.x.x.x
172\.(1[6-9]|2[0-9]|3[0-1]) | # private IP in the range 172.16.0.0 .. 172.31.255.255
192\.168 | # private IP 192.168.x.x
fc00:: # private IP fc00
)\.
}x
attr_reader :check_ip, :proxies
def initialize(app, check_ip_spoofing = true, custom_proxies = nil)
@app = app
@check_ip = check_ip_spoofing
@proxies = case custom_proxies
when Regexp
custom_proxies
when nil
TRUSTED_PROXIES
else
Regexp.union(TRUSTED_PROXIES, custom_proxies)
end
end
def call(env)
env["action_dispatch.remote_ip"] = GetIp.new(env, self)
@app.call(env)
end
class GetIp
# IP v4 and v6 (with compression) validation regexp
# https://gist.github.com/1289635
VALID_IP = %r{
(^(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9]{1,2})){3}$) | # ip v4
(^(
(([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4}) | # ip v6 not abbreviated
(([0-9A-Fa-f]{1,4}:){6}:[0-9A-Fa-f]{1,4}) | # ip v6 with double colon in the end
(([0-9A-Fa-f]{1,4}:){5}:([0-9A-Fa-f]{1,4}:)?[0-9A-Fa-f]{1,4}) | # - ip addresses v6
(([0-9A-Fa-f]{1,4}:){4}:([0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4}) | # - with
(([0-9A-Fa-f]{1,4}:){3}:([0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4}) | # - double colon
(([0-9A-Fa-f]{1,4}:){2}:([0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4}) | # - in the middle
(([0-9A-Fa-f]{1,4}:){6} ((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3} (\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)) | # ip v6 with compatible to v4
(([0-9A-Fa-f]{1,4}:){1,5}:((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)) | # ip v6 with compatible to v4
(([0-9A-Fa-f]{1,4}:){1}:([0-9A-Fa-f]{1,4}:){0,4}((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)) | # ip v6 with compatible to v4
(([0-9A-Fa-f]{1,4}:){0,2}:([0-9A-Fa-f]{1,4}:){0,3}((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)) | # ip v6 with compatible to v4
(([0-9A-Fa-f]{1,4}:){0,3}:([0-9A-Fa-f]{1,4}:){0,2}((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)) | # ip v6 with compatible to v4
(([0-9A-Fa-f]{1,4}:){0,4}:([0-9A-Fa-f]{1,4}:){1}((\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)) | # ip v6 with compatible to v4
(::([0-9A-Fa-f]{1,4}:){0,5}((\b((25[0-5])|(1\d{2})|(2[0-4]\d) |(\d{1,2}))\b)\.){3}(\b((25[0-5])|(1\d{2})|(2[0-4]\d)|(\d{1,2}))\b)) | # ip v6 with compatible to v4
([0-9A-Fa-f]{1,4}::([0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4}) | # ip v6 with compatible to v4
(::([0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4}) | # ip v6 with double colon at the beginning
(([0-9A-Fa-f]{1,4}:){1,7}:) # ip v6 without ending
)$)
}x
def initialize(env, middleware)
@env = env
@middleware = middleware
@calculated_ip = false
end
# Determines originating IP address. REMOTE_ADDR is the standard
# but will be wrong if the user is behind a proxy. Proxies will set
# HTTP_CLIENT_IP and/or HTTP_X_FORWARDED_FOR, so we prioritize those.
# HTTP_X_FORWARDED_FOR may be a comma-delimited list in the case of
# multiple chained proxies. The first address in this list that is not a
# known proxy will be the originating IP.
# Format of HTTP_X_FORWARDED_FOR:
# client_ip, proxy_ip1, proxy_ip2...
# http://en.wikipedia.org/wiki/X-Forwarded-For
def calculate_ip
client_ip = @env['HTTP_CLIENT_IP']
forwarded_ip = ips_from('HTTP_X_FORWARDED_FOR').first
remote_addrs = ips_from('REMOTE_ADDR')
check_ip = client_ip && @middleware.check_ip
if check_ip && forwarded_ip != client_ip
# We don't know which came from the proxy, and which from the user
raise IpSpoofAttackError, "IP spoofing attack?!" \
"HTTP_CLIENT_IP=#{@env['HTTP_CLIENT_IP'].inspect}" \
"HTTP_X_FORWARDED_FOR=#{@env['HTTP_X_FORWARDED_FOR'].inspect}"
end
client_ips = remove_proxies [client_ip, forwarded_ip, remote_addrs].flatten
if client_ips.present?
client_ips.first
else
# If there is no client IP we can return the first valid proxy IP from REMOTE_ADDR etc.
[remote_addrs, client_ip, forwarded_ip].flatten.find { |ip| valid_ip? ip }
end
end
def to_s
return @ip if @calculated_ip
@calculated_ip = true
@ip = calculate_ip
end
private
def ips_from(header)
@env[header] ? @env[header].strip.split(/[,\s]+/) : []
end
def valid_ip?(ip)
ip =~ VALID_IP
end
def not_a_proxy?(ip)
ip !~ @middleware.proxies
end
def remove_proxies(ips)
ips.select { |ip| valid_ip?(ip) && not_a_proxy?(ip) }
end
end
end
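The class mirrors ActionDispatch::RemoteIp's constructor, so one plausible wiring (not shown in this commit) is to swap it into the middleware stack from the application config.

# config/application.rb, hypothetical; assumes lib/remote_ip_improved.rb is required or autoloaded
config.middleware.swap ActionDispatch::RemoteIp, RemoteIpImproved

# Downstream, the address is resolved lazily:
# env["action_dispatch.remote_ip"].to_s # => "203.0.113.7" (documentation address)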

58
lib/score_calculator.rb Normal file
View File

@ -0,0 +1,58 @@
class ScoreCalculator
def self.default_score_weights
{
reply_count: 5,
like_count: 15,
incoming_link_count: 5,
bookmark_count: 2,
avg_time: 0.05,
reads: 0.2
}
end
def initialize(weightings=nil)
@weightings = weightings || ScoreCalculator.default_score_weights
end
# Calculate the score for all posts based on the weightings
def calculate
# First update the scores of the posts
exec_sql(post_score_sql, @weightings)
# Update the best of flag
exec_sql "
UPDATE topics SET has_best_of =
CASE
WHEN like_count >= :likes_required AND
posts_count >= :posts_required AND
EXISTS(SELECT * FROM posts AS p
WHERE p.topic_id = topics.id
AND p.score >= :score_required) THEN true
ELSE false
END",
likes_required: SiteSetting.best_of_likes_required,
posts_required: SiteSetting.best_of_posts_required,
score_required: SiteSetting.best_of_score_threshold
end
private
def exec_sql(sql, params)
ActiveRecord::Base.exec_sql(sql, params)
end
# Generate a SQL statement to update the scores of all posts
def post_score_sql
"UPDATE posts SET score = ".tap do |sql|
components = []
@weightings.keys.each do |k|
components << "COALESCE(#{k.to_s}, 0) * :#{k.to_s}"
end
sql << components.join(" + ")
end
end
end
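Typical invocation, with and without custom weightings; the custom numbers are illustrative.

# Recalculate post scores and the topics.has_best_of flag with the default weights:
ScoreCalculator.new.calculate

# Or emphasise likes and bookmarks instead:
ScoreCalculator.new(like_count: 30, bookmark_count: 10, reply_count: 5).calculate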

168
lib/search.rb Normal file
View File

@ -0,0 +1,168 @@
module Search
def self.min_search_term_length
3
end
def self.per_facet
5
end
def self.facets
%w(topic category user)
end
def self.user_query_sql
"SELECT 'user' AS type,
u.username_lower AS id,
'/users/' || u.username_lower AS url,
u.username AS title,
u.email,
NULL AS color
FROM users AS u
JOIN users_search s on s.id = u.id
WHERE s.search_data @@ TO_TSQUERY(:query)
ORDER BY last_posted_at desc
"
end
def self.topic_query_sql
"SELECT 'topic' AS type,
CAST(ft.id AS VARCHAR),
'/t/slug/' || ft.id AS url,
ft.title,
NULL AS email,
NULL AS color
FROM topics AS ft
JOIN posts AS p ON p.topic_id = ft.id AND p.post_number = 1
JOIN posts_search s on s.id = p.id
WHERE s.search_data @@ TO_TSQUERY(:query)
AND ft.deleted_at IS NULL
AND ft.visible
AND ft.archetype <> '#{Archetype.private_message}'
ORDER BY
TS_RANK_CD(TO_TSVECTOR('english', ft.title), TO_TSQUERY(:query)) desc,
TS_RANK_CD(search_data, TO_TSQUERY(:query)) desc,
bumped_at desc"
end
def self.post_query_sql
"SELECT cast('topic' as varchar) AS type,
CAST(ft.id AS VARCHAR),
'/t/slug/' || ft.id || '/' || p.post_number AS url,
ft.title,
NULL AS email,
NULL AS color
FROM topics AS ft
JOIN posts AS p ON p.topic_id = ft.id AND p.post_number <> 1
JOIN posts_search s on s.id = p.id
WHERE s.search_data @@ TO_TSQUERY(:query)
AND ft.deleted_at IS NULL and p.deleted_at IS NULL
AND ft.visible
AND ft.archetype <> '#{Archetype.private_message}'
ORDER BY
TS_RANK_CD(TO_TSVECTOR('english', ft.title), TO_TSQUERY(:query)) desc,
TS_RANK_CD(search_data, TO_TSQUERY(:query)) desc,
bumped_at desc"
end
def self.category_query_sql
"SELECT 'category' AS type,
c.name AS id,
'/category/' || c.slug AS url,
c.name AS title,
NULL AS email,
c.color
FROM categories AS c
JOIN categories_search s on s.id = c.id
WHERE s.search_data @@ TO_TSQUERY(:query)
ORDER BY topics_month desc
"
end
def self.query(term, type_filter=nil)
return nil if term.blank?
sanitized_term = term.gsub(/[^0-9a-zA-Z_ ]/, '')
# really short terms are totally pointless
return nil if sanitized_term.blank? || sanitized_term.length < self.min_search_term_length
terms = sanitized_term.split
terms.map! {|t| "#{t}:*"}
if type_filter.present?
raise Discourse::InvalidAccess.new("invalid type filter") unless Search.facets.include?(type_filter)
sql = Search.send("#{type_filter}_query_sql") << " LIMIT #{Search.per_facet * Search.facets.size}"
db_result = ActiveRecord::Base.exec_sql(sql , query: terms.join(" & "))
else
db_result = []
[user_query_sql, category_query_sql, topic_query_sql].each do |sql|
sql << " limit " << Search.per_facet.to_s
db_result += ActiveRecord::Base.exec_sql(sql , query: terms.join(" & ")).to_a
end
end
db_result = db_result.to_a
expected_topics = 0
expected_topics = Search.facets.size unless type_filter.present?
expected_topics = Search.per_facet * Search.facets.size if type_filter == 'topic'
if expected_topics > 0
db_result.each do |row|
expected_topics -= 1 if row['type'] == 'topic'
end
end
if expected_topics > 0
tmp = ActiveRecord::Base.exec_sql "#{post_query_sql} limit :per_facet",
query: terms.join(" & "), per_facet: expected_topics * 3
topic_ids = Set.new db_result.map{|r| r["id"]}
tmp = tmp.to_a
tmp = tmp.reject{ |i|
if topic_ids.include? i["id"]
true
else
topic_ids << i["id"]
false
end
}
db_result += tmp[0..expected_topics-1]
end
# Group the results by type
grouped = {}
db_result.each do |row|
type = row.delete('type')
# Add the slug for topics
row['url'].gsub!('slug', Slug.for(row['title'])) if type == 'topic'
# Remove attributes when we know they don't matter
row.delete('id')
if type == 'user'
row['avatar_template'] = User.avatar_template(row['email'])
end
row.delete('email')
row.delete('color') unless type == 'category'
grouped[type] ||= []
grouped[type] << row
end
result = grouped.map do |type, results|
{type: type,
name: I18n.t("search.types.#{type}"),
more: type_filter.blank? && (results.size == Search.per_facet),
results: results}
end
result
end
end
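A sketch of how the query interface behaves, based on the constraints above; the search terms are examples.

Search.query("ember")          # => grouped results, one hash per facet that matched
Search.query("em")             # => nil, below min_search_term_length
Search.query("ember", 'topic') # topic facet only, up to per_facet * facets.size rows
Search.query("ember", 'blog')  # raises Discourse::InvalidAccess (not a valid facet)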

Some files were not shown because too many files have changed in this diff.