DEV: Apply syntax_tree formatting to script/*
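This commit mechanically reformats the file with syntax_tree; no behavior changes. The recurring edits visible in the diff below: single-quoted strings become double-quoted, multi-line calls and hash literals gain trailing commas and move onto an indented continuation line, slash-heavy regexes switch to %r{} delimiters, bare rescue becomes rescue StandardError, and long receiver chains with blocks are split one call per line. A minimal before/after sketch of those conventions (connect and fetch_rows are hypothetical stand-ins, not code from this commit):

  # Before formatting (illustrative only):
  client = connect(
    host: 'localhost',
    retries: 3
  )
  rows = (fetch_rows(client) rescue [])

  # After syntax_tree:
  client =
    connect(
      host: "localhost",
      retries: 3,
    )
  rows =
    begin
      fetch_rows(client)
    rescue StandardError
      []
    end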
script/bulk_import/vanilla.rb
@@ -8,7 +8,6 @@ require "htmlentities"
 # NOTE: this importer expects a MySQL DB to directly connect to

 class BulkImport::Vanilla < BulkImport::Base
-
   VANILLA_DB = "dbname"
   TABLE_PREFIX = "GDN_"
   ATTACHMENTS_BASE_DIR = "/my/absolute/path/to/from_vanilla/uploads"
@@ -20,13 +19,14 @@ class BulkImport::Vanilla < BulkImport::Base
   def initialize
     super
     @htmlentities = HTMLEntities.new
-    @client = Mysql2::Client.new(
-      host: "localhost",
-      username: "root",
-      database: VANILLA_DB,
-      password: "",
-      reconnect: true
-    )
+    @client =
+      Mysql2::Client.new(
+        host: "localhost",
+        username: "root",
+        database: VANILLA_DB,
+        password: "",
+        reconnect: true,
+      )

     @import_tags = false
     begin
@@ -88,10 +88,10 @@ class BulkImport::Vanilla < BulkImport::Base
   end

   def import_users
-    puts '', "Importing users..."
+    puts "", "Importing users..."

     username = nil
-    total_count = mysql_query("SELECT count(*) count FROM #{TABLE_PREFIX}User;").first['count']
+    total_count = mysql_query("SELECT count(*) count FROM #{TABLE_PREFIX}User;").first["count"]

     users = mysql_stream <<-SQL
         SELECT UserID, Name, Title, Location, Email,
@@ -103,26 +103,32 @@ class BulkImport::Vanilla < BulkImport::Base
     SQL

     create_users(users) do |row|
-      next if row['Email'].blank?
-      next if row['Name'].blank?
+      next if row["Email"].blank?
+      next if row["Name"].blank?

-      if ip_address = row['InsertIPAddress']&.split(',').try(:[], 0)
-        ip_address = nil unless (IPAddr.new(ip_address) rescue false)
+      if ip_address = row["InsertIPAddress"]&.split(",").try(:[], 0)
+        ip_address = nil unless (
+          begin
+            IPAddr.new(ip_address)
+          rescue StandardError
+            false
+          end
+        )
       end

       u = {
-        imported_id: row['UserID'],
-        email: row['Email'],
-        username: row['Name'],
-        name: row['Name'],
-        created_at: row['DateInserted'] == nil ? 0 : Time.zone.at(row['DateInserted']),
+        imported_id: row["UserID"],
+        email: row["Email"],
+        username: row["Name"],
+        name: row["Name"],
+        created_at: row["DateInserted"] == nil ? 0 : Time.zone.at(row["DateInserted"]),
         registration_ip_address: ip_address,
-        last_seen_at: row['DateLastActive'] == nil ? 0 : Time.zone.at(row['DateLastActive']),
-        location: row['Location'],
-        admin: row['Admin'] > 0
+        last_seen_at: row["DateLastActive"] == nil ? 0 : Time.zone.at(row["DateLastActive"]),
+        location: row["Location"],
+        admin: row["Admin"] > 0,
       }
       if row["Banned"] > 0
-        u[:suspended_at] = Time.zone.at(row['DateInserted'])
+        u[:suspended_at] = Time.zone.at(row["DateInserted"])
         u[:suspended_till] = SUSPENDED_TILL
       end
       u
@@ -130,7 +136,7 @@ class BulkImport::Vanilla < BulkImport::Base
   end

   def import_user_emails
-    puts '', 'Importing user emails...'
+    puts "", "Importing user emails..."

     users = mysql_stream <<-SQL
         SELECT UserID, Name, Email, DateInserted
@@ -141,20 +147,20 @@ class BulkImport::Vanilla < BulkImport::Base
     SQL

     create_user_emails(users) do |row|
-      next if row['Email'].blank?
-      next if row['Name'].blank?
+      next if row["Email"].blank?
+      next if row["Name"].blank?

       {
         imported_id: row["UserID"],
         imported_user_id: row["UserID"],
         email: row["Email"],
-        created_at: Time.zone.at(row["DateInserted"])
+        created_at: Time.zone.at(row["DateInserted"]),
       }
     end
   end

   def import_user_profiles
-    puts '', 'Importing user profiles...'
+    puts "", "Importing user profiles..."

     user_profiles = mysql_stream <<-SQL
         SELECT UserID, Name, Email, Location, About
@@ -165,19 +171,19 @@ class BulkImport::Vanilla < BulkImport::Base
     SQL

     create_user_profiles(user_profiles) do |row|
-      next if row['Email'].blank?
-      next if row['Name'].blank?
+      next if row["Email"].blank?
+      next if row["Name"].blank?

       {
         user_id: user_id_from_imported_id(row["UserID"]),
         location: row["Location"],
-        bio_raw: row["About"]
+        bio_raw: row["About"],
       }
     end
   end

   def import_user_stats
-    puts '', "Importing user stats..."
+    puts "", "Importing user stats..."

     users = mysql_stream <<-SQL
         SELECT UserID, CountDiscussions, CountComments, DateInserted
@@ -190,14 +196,14 @@ class BulkImport::Vanilla < BulkImport::Base
     now = Time.zone.now

     create_user_stats(users) do |row|
-      next unless @users[row['UserID'].to_i] # shouldn't need this but it can be NULL :<
+      next unless @users[row["UserID"].to_i] # shouldn't need this but it can be NULL :<

       {
-        imported_id: row['UserID'],
-        imported_user_id: row['UserID'],
-        new_since: Time.zone.at(row['DateInserted'] || now),
-        post_count: row['CountComments'] || 0,
-        topic_count: row['CountDiscussions'] || 0
+        imported_id: row["UserID"],
+        imported_user_id: row["UserID"],
+        new_since: Time.zone.at(row["DateInserted"] || now),
+        post_count: row["CountComments"] || 0,
+        topic_count: row["CountDiscussions"] || 0,
       }
     end
   end
@@ -215,7 +221,10 @@ class BulkImport::Vanilla < BulkImport::Base

       next unless u.custom_fields["import_id"]

-      r = mysql_query("SELECT photo FROM #{TABLE_PREFIX}User WHERE UserID = #{u.custom_fields['import_id']};").first
+      r =
+        mysql_query(
+          "SELECT photo FROM #{TABLE_PREFIX}User WHERE UserID = #{u.custom_fields["import_id"]};",
+        ).first
       next if r.nil?
       photo = r["photo"]
       next unless photo.present?
@@ -229,9 +238,9 @@ class BulkImport::Vanilla < BulkImport::Base
       photo_real_filename = nil
       parts = photo.squeeze("/").split("/")
       if parts[0] =~ /^[a-z0-9]{2}:/
-        photo_path = "#{ATTACHMENTS_BASE_DIR}/#{parts[2..-2].join('/')}".squeeze("/")
+        photo_path = "#{ATTACHMENTS_BASE_DIR}/#{parts[2..-2].join("/")}".squeeze("/")
       elsif parts[0] == "~cf"
-        photo_path = "#{ATTACHMENTS_BASE_DIR}/#{parts[1..-2].join('/')}".squeeze("/")
+        photo_path = "#{ATTACHMENTS_BASE_DIR}/#{parts[1..-2].join("/")}".squeeze("/")
       else
         puts "UNKNOWN FORMAT: #{photo}"
         next
@@ -272,75 +281,86 @@ class BulkImport::Vanilla < BulkImport::Base
     count = 0

     # https://us.v-cdn.net/1234567/uploads/editor/xyz/image.jpg
-    cdn_regex = /https:\/\/us.v-cdn.net\/1234567\/uploads\/(\S+\/(\w|-)+.\w+)/i
+    cdn_regex = %r{https://us.v-cdn.net/1234567/uploads/(\S+/(\w|-)+.\w+)}i
     # [attachment=10109:Screen Shot 2012-04-01 at 3.47.35 AM.png]
     attachment_regex = /\[attachment=(\d+):(.*?)\]/i

-    Post.where("raw LIKE '%/us.v-cdn.net/%' OR raw LIKE '%[attachment%'").find_each do |post|
-      count += 1
-      print "\r%7d - %6d/sec" % [count, count.to_f / (Time.now - start)]
-      new_raw = post.raw.dup
+    Post
+      .where("raw LIKE '%/us.v-cdn.net/%' OR raw LIKE '%[attachment%'")
+      .find_each do |post|
+        count += 1
+        print "\r%7d - %6d/sec" % [count, count.to_f / (Time.now - start)]
+        new_raw = post.raw.dup

-      new_raw.gsub!(attachment_regex) do |s|
-        matches = attachment_regex.match(s)
-        attachment_id = matches[1]
-        file_name = matches[2]
-        next unless attachment_id
+        new_raw.gsub!(attachment_regex) do |s|
+          matches = attachment_regex.match(s)
+          attachment_id = matches[1]
+          file_name = matches[2]
+          next unless attachment_id

-        r = mysql_query("SELECT Path, Name FROM #{TABLE_PREFIX}Media WHERE MediaID = #{attachment_id};").first
-        next if r.nil?
-        path = r["Path"]
-        name = r["Name"]
-        next unless path.present?
+          r =
+            mysql_query(
+              "SELECT Path, Name FROM #{TABLE_PREFIX}Media WHERE MediaID = #{attachment_id};",
+            ).first
+          next if r.nil?
+          path = r["Path"]
+          name = r["Name"]
+          next unless path.present?

-        path.gsub!("s3://content/", "")
-        path.gsub!("s3://uploads/", "")
-        file_path = "#{ATTACHMENTS_BASE_DIR}/#{path}"
+          path.gsub!("s3://content/", "")
+          path.gsub!("s3://uploads/", "")
+          file_path = "#{ATTACHMENTS_BASE_DIR}/#{path}"

-        if File.exist?(file_path)
-          upload = create_upload(post.user.id, file_path, File.basename(file_path))
-          if upload && upload.errors.empty?
-            # upload.url
-            filename = name || file_name || File.basename(file_path)
-            html_for_upload(upload, normalize_text(filename))
-          else
-            puts "Error: Upload did not persist for #{post.id} #{attachment_id}!"
-          end
-        else
-          puts "Couldn't find file for #{attachment_id}. Skipping."
-          next
-        end
-      end
+          if File.exist?(file_path)
+            upload = create_upload(post.user.id, file_path, File.basename(file_path))
+            if upload && upload.errors.empty?
+              # upload.url
+              filename = name || file_name || File.basename(file_path)
+              html_for_upload(upload, normalize_text(filename))
+            else
+              puts "Error: Upload did not persist for #{post.id} #{attachment_id}!"
+            end
+          else
+            puts "Couldn't find file for #{attachment_id}. Skipping."
+            next
+          end
+        end

-      new_raw.gsub!(cdn_regex) do |s|
-        matches = cdn_regex.match(s)
-        attachment_id = matches[1]
+        new_raw.gsub!(cdn_regex) do |s|
+          matches = cdn_regex.match(s)
+          attachment_id = matches[1]

-        file_path = "#{ATTACHMENTS_BASE_DIR}/#{attachment_id}"
+          file_path = "#{ATTACHMENTS_BASE_DIR}/#{attachment_id}"

-        if File.exist?(file_path)
-          upload = create_upload(post.user.id, file_path, File.basename(file_path))
-          if upload && upload.errors.empty?
-            upload.url
-          else
-            puts "Error: Upload did not persist for #{post.id} #{attachment_id}!"
-          end
-        else
-          puts "Couldn't find file for #{attachment_id}. Skipping."
-          next
-        end
-      end
+          if File.exist?(file_path)
+            upload = create_upload(post.user.id, file_path, File.basename(file_path))
+            if upload && upload.errors.empty?
+              upload.url
+            else
+              puts "Error: Upload did not persist for #{post.id} #{attachment_id}!"
+            end
+          else
+            puts "Couldn't find file for #{attachment_id}. Skipping."
+            next
+          end
+        end

-      if new_raw != post.raw
-        begin
-          PostRevisor.new(post).revise!(post.user, { raw: new_raw }, skip_revision: true, skip_validations: true, bypass_bump: true)
-        rescue
-          puts "PostRevisor error for #{post.id}"
-          post.raw = new_raw
-          post.save(validate: false)
-        end
-      end
-    end
+        if new_raw != post.raw
+          begin
+            PostRevisor.new(post).revise!(
+              post.user,
+              { raw: new_raw },
+              skip_revision: true,
+              skip_validations: true,
+              bypass_bump: true,
+            )
+          rescue StandardError
+            puts "PostRevisor error for #{post.id}"
+            post.raw = new_raw
+            post.save(validate: false)
+          end
+        end
+      end
   end
@@ -352,7 +372,7 @@ class BulkImport::Vanilla < BulkImport::Base

     # Otherwise, the file exists but with a prefix:
     # The p prefix seems to be the full file, so try to find that one first.
-    ['p', 't', 'n'].each do |prefix|
+    %w[p t n].each do |prefix|
       full_guess = File.join(path, "#{prefix}#{base_guess}")
       return full_guess if File.exist?(full_guess)
     end
@@ -364,26 +384,30 @@ class BulkImport::Vanilla < BulkImport::Base
   def import_categories
     puts "", "Importing categories..."

-    categories = mysql_query("
+    categories =
+      mysql_query(
+        "
       SELECT CategoryID, ParentCategoryID, Name, Description, Sort
       FROM #{TABLE_PREFIX}Category
       WHERE CategoryID > 0
       ORDER BY Sort, CategoryID
-    ").to_a
+    ",
+      ).to_a

     # Throw the -1 level categories away since they contain no topics.
     # Use the next level as root categories.

-    top_level_categories = categories.select { |c| c["ParentCategoryID"].blank? || c['ParentCategoryID'] == -1 }
+    top_level_categories =
+      categories.select { |c| c["ParentCategoryID"].blank? || c["ParentCategoryID"] == -1 }

     # Depth = 2
     create_categories(top_level_categories) do |category|
-      next if category_id_from_imported_id(category['CategoryID'])
+      next if category_id_from_imported_id(category["CategoryID"])
       {
-        imported_id: category['CategoryID'],
-        name: CGI.unescapeHTML(category['Name']),
-        description: category['Description'] ? CGI.unescapeHTML(category['Description']) : nil,
-        position: category['Sort']
+        imported_id: category["CategoryID"],
+        name: CGI.unescapeHTML(category["Name"]),
+        description: category["Description"] ? CGI.unescapeHTML(category["Description"]) : nil,
+        position: category["Sort"],
       }
     end

@@ -393,39 +417,39 @@ class BulkImport::Vanilla < BulkImport::Base

     # Depth = 3
     create_categories(subcategories) do |category|
-      next if category_id_from_imported_id(category['CategoryID'])
+      next if category_id_from_imported_id(category["CategoryID"])
       {
-        imported_id: category['CategoryID'],
-        parent_category_id: category_id_from_imported_id(category['ParentCategoryID']),
-        name: CGI.unescapeHTML(category['Name']),
-        description: category['Description'] ? CGI.unescapeHTML(category['Description']) : nil,
-        position: category['Sort']
+        imported_id: category["CategoryID"],
+        parent_category_id: category_id_from_imported_id(category["ParentCategoryID"]),
+        name: CGI.unescapeHTML(category["Name"]),
+        description: category["Description"] ? CGI.unescapeHTML(category["Description"]) : nil,
+        position: category["Sort"],
       }
     end

-    subcategory_ids = Set.new(subcategories.map { |c| c['CategoryID'] })
+    subcategory_ids = Set.new(subcategories.map { |c| c["CategoryID"] })

     # Depth 4 and 5 need to be tags

     categories.each do |c|
-      next if c['ParentCategoryID'] == -1
-      next if top_level_category_ids.include?(c['CategoryID'])
-      next if subcategory_ids.include?(c['CategoryID'])
+      next if c["ParentCategoryID"] == -1
+      next if top_level_category_ids.include?(c["CategoryID"])
+      next if subcategory_ids.include?(c["CategoryID"])

       # Find a depth 3 category for topics in this category
       parent = c
-      while !parent.nil? && !subcategory_ids.include?(parent['CategoryID'])
-        parent = categories.find { |subcat| subcat['CategoryID'] == parent['ParentCategoryID'] }
+      while !parent.nil? && !subcategory_ids.include?(parent["CategoryID"])
+        parent = categories.find { |subcat| subcat["CategoryID"] == parent["ParentCategoryID"] }
       end

       if parent
-        tag_name = DiscourseTagging.clean_tag(c['Name'])
-        @category_mappings[c['CategoryID']] = {
-          category_id: category_id_from_imported_id(parent['CategoryID']),
-          tag: Tag.find_by_name(tag_name) || Tag.create(name: tag_name)
+        tag_name = DiscourseTagging.clean_tag(c["Name"])
+        @category_mappings[c["CategoryID"]] = {
+          category_id: category_id_from_imported_id(parent["CategoryID"]),
+          tag: Tag.find_by_name(tag_name) || Tag.create(name: tag_name),
         }
       else
-        puts '', "Couldn't find a category for #{c['CategoryID']} '#{c['Name']}'!"
+        puts "", "Couldn't find a category for #{c["CategoryID"]} '#{c["Name"]}'!"
       end
     end
   end
@@ -433,7 +457,8 @@ class BulkImport::Vanilla < BulkImport::Base
   def import_topics
     puts "", "Importing topics..."

-    topics_sql = "SELECT DiscussionID, CategoryID, Name, Body, DateInserted, InsertUserID, Announce, Format
+    topics_sql =
+      "SELECT DiscussionID, CategoryID, Name, Body, DateInserted, InsertUserID, Announce, Format
        FROM #{TABLE_PREFIX}Discussion
        WHERE DiscussionID > #{@last_imported_topic_id}
        ORDER BY DiscussionID ASC"
@@ -442,11 +467,12 @@ class BulkImport::Vanilla < BulkImport::Base
       data = {
         imported_id: row["DiscussionID"],
         title: normalize_text(row["Name"]),
-        category_id: category_id_from_imported_id(row["CategoryID"]) ||
-          @category_mappings[row["CategoryID"]].try(:[], :category_id),
+        category_id:
+          category_id_from_imported_id(row["CategoryID"]) ||
+            @category_mappings[row["CategoryID"]].try(:[], :category_id),
         user_id: user_id_from_imported_id(row["InsertUserID"]),
-        created_at: Time.zone.at(row['DateInserted']),
-        pinned_at: row['Announce'] == 0 ? nil : Time.zone.at(row['DateInserted'])
+        created_at: Time.zone.at(row["DateInserted"]),
+        pinned_at: row["Announce"] == 0 ? nil : Time.zone.at(row["DateInserted"]),
       }
       (data[:user_id].present? && data[:title].present?) ? data : false
     end
@@ -455,46 +481,45 @@ class BulkImport::Vanilla < BulkImport::Base

     create_posts(mysql_stream(topics_sql)) do |row|
       data = {
-        imported_id: "d-" + row['DiscussionID'].to_s,
-        topic_id: topic_id_from_imported_id(row['DiscussionID']),
+        imported_id: "d-" + row["DiscussionID"].to_s,
+        topic_id: topic_id_from_imported_id(row["DiscussionID"]),
         user_id: user_id_from_imported_id(row["InsertUserID"]),
-        created_at: Time.zone.at(row['DateInserted']),
-        raw: clean_up(row['Body'], row['Format'])
+        created_at: Time.zone.at(row["DateInserted"]),
+        raw: clean_up(row["Body"], row["Format"]),
       }
       data[:topic_id].present? ? data : false
     end

-    puts '', 'converting deep categories to tags...'
+    puts "", "converting deep categories to tags..."

     create_topic_tags(mysql_stream(topics_sql)) do |row|
-      next unless mapping = @category_mappings[row['CategoryID']]
+      next unless mapping = @category_mappings[row["CategoryID"]]

-      {
-        tag_id: mapping[:tag].id,
-        topic_id: topic_id_from_imported_id(row["DiscussionID"])
-      }
+      { tag_id: mapping[:tag].id, topic_id: topic_id_from_imported_id(row["DiscussionID"]) }
     end
   end

   def import_posts
     puts "", "Importing posts..."

-    posts = mysql_stream(
-      "SELECT CommentID, DiscussionID, Body, DateInserted, InsertUserID, Format
+    posts =
+      mysql_stream(
+        "SELECT CommentID, DiscussionID, Body, DateInserted, InsertUserID, Format
        FROM #{TABLE_PREFIX}Comment
        WHERE CommentID > #{@last_imported_post_id}
-       ORDER BY CommentID ASC")
+       ORDER BY CommentID ASC",
+      )

     create_posts(posts) do |row|
-      next unless topic_id = topic_id_from_imported_id(row['DiscussionID'])
-      next if row['Body'].blank?
+      next unless topic_id = topic_id_from_imported_id(row["DiscussionID"])
+      next if row["Body"].blank?

       {
-        imported_id: row['CommentID'],
+        imported_id: row["CommentID"],
         topic_id: topic_id,
-        user_id: user_id_from_imported_id(row['InsertUserID']),
-        created_at: Time.zone.at(row['DateInserted']),
-        raw: clean_up(row['Body'], row['Format'])
+        user_id: user_id_from_imported_id(row["InsertUserID"]),
+        created_at: Time.zone.at(row["DateInserted"]),
+        raw: clean_up(row["Body"], row["Format"]),
       }
     end
   end
@@ -505,31 +530,31 @@ class BulkImport::Vanilla < BulkImport::Base
     tag_mapping = {}

     mysql_query("SELECT TagID, Name FROM #{TABLE_PREFIX}Tag").each do |row|
-      tag_name = DiscourseTagging.clean_tag(row['Name'])
+      tag_name = DiscourseTagging.clean_tag(row["Name"])
       tag = Tag.find_by_name(tag_name) || Tag.create(name: tag_name)
-      tag_mapping[row['TagID']] = tag.id
+      tag_mapping[row["TagID"]] = tag.id
     end

-    tags = mysql_query(
-      "SELECT TagID, DiscussionID
+    tags =
+      mysql_query(
+        "SELECT TagID, DiscussionID
        FROM #{TABLE_PREFIX}TagDiscussion
        WHERE DiscussionID > #{@last_imported_topic_id}
-       ORDER BY DateInserted")
+       ORDER BY DateInserted",
+      )

     create_topic_tags(tags) do |row|
-      next unless topic_id = topic_id_from_imported_id(row['DiscussionID'])
+      next unless topic_id = topic_id_from_imported_id(row["DiscussionID"])

-      {
-        topic_id: topic_id,
-        tag_id: tag_mapping[row['TagID']]
-      }
+      { topic_id: topic_id, tag_id: tag_mapping[row["TagID"]] }
     end
   end

   def import_private_topics
     puts "", "Importing private topics..."

-    topics_sql = "SELECT c.ConversationID, c.Subject, m.MessageID, m.Body, c.DateInserted, c.InsertUserID
+    topics_sql =
+      "SELECT c.ConversationID, c.Subject, m.MessageID, m.Body, c.DateInserted, c.InsertUserID
       FROM #{TABLE_PREFIX}Conversation c, #{TABLE_PREFIX}ConversationMessage m
       WHERE c.FirstMessageID = m.MessageID
         AND c.ConversationID > #{@last_imported_private_topic_id - PRIVATE_OFFSET}
@@ -539,9 +564,10 @@ class BulkImport::Vanilla < BulkImport::Base
       {
         archetype: Archetype.private_message,
         imported_id: row["ConversationID"] + PRIVATE_OFFSET,
-        title: row["Subject"] ? normalize_text(row["Subject"]) : "Conversation #{row["ConversationID"]}",
+        title:
+          row["Subject"] ? normalize_text(row["Subject"]) : "Conversation #{row["ConversationID"]}",
         user_id: user_id_from_imported_id(row["InsertUserID"]),
-        created_at: Time.zone.at(row['DateInserted'])
+        created_at: Time.zone.at(row["DateInserted"]),
       }
     end
   end
@@ -549,7 +575,8 @@ class BulkImport::Vanilla < BulkImport::Base
   def import_topic_allowed_users
     puts "", "importing topic_allowed_users..."

-    topic_allowed_users_sql = "
+    topic_allowed_users_sql =
+      "
       SELECT ConversationID, UserID
       FROM #{TABLE_PREFIX}UserConversation
       WHERE Deleted = 0
@@ -559,45 +586,43 @@ class BulkImport::Vanilla < BulkImport::Base
     added = 0

     create_topic_allowed_users(mysql_stream(topic_allowed_users_sql)) do |row|
-      next unless topic_id = topic_id_from_imported_id(row['ConversationID'] + PRIVATE_OFFSET)
+      next unless topic_id = topic_id_from_imported_id(row["ConversationID"] + PRIVATE_OFFSET)
       next unless user_id = user_id_from_imported_id(row["UserID"])
       added += 1
-      {
-        topic_id: topic_id,
-        user_id: user_id,
-      }
+      { topic_id: topic_id, user_id: user_id }
     end

-    puts '', "Added #{added} topic_allowed_users records."
+    puts "", "Added #{added} topic_allowed_users records."
   end

   def import_private_posts
     puts "", "importing private replies..."

-    private_posts_sql = "
+    private_posts_sql =
+      "
       SELECT ConversationID, MessageID, Body, InsertUserID, DateInserted, Format
       FROM GDN_ConversationMessage
       WHERE ConversationID > #{@last_imported_private_topic_id - PRIVATE_OFFSET}
       ORDER BY ConversationID ASC, MessageID ASC"

     create_posts(mysql_stream(private_posts_sql)) do |row|
-      next unless topic_id = topic_id_from_imported_id(row['ConversationID'] + PRIVATE_OFFSET)
+      next unless topic_id = topic_id_from_imported_id(row["ConversationID"] + PRIVATE_OFFSET)

       {
-        imported_id: row['MessageID'] + PRIVATE_OFFSET,
+        imported_id: row["MessageID"] + PRIVATE_OFFSET,
         topic_id: topic_id,
-        user_id: user_id_from_imported_id(row['InsertUserID']),
-        created_at: Time.zone.at(row['DateInserted']),
-        raw: clean_up(row['Body'], row['Format'])
+        user_id: user_id_from_imported_id(row["InsertUserID"]),
+        created_at: Time.zone.at(row["DateInserted"]),
+        raw: clean_up(row["Body"], row["Format"]),
       }
     end
   end

   # TODO: too slow
   def create_permalinks
-    puts '', 'Creating permalinks...', ''
+    puts "", "Creating permalinks...", ""

-    puts ' User pages...'
+    puts " User pages..."

     start = Time.now
     count = 0
@@ -606,21 +631,23 @@ class BulkImport::Vanilla < BulkImport::Base
     sql = "COPY permalinks (url, created_at, updated_at, external_url) FROM STDIN"

     @raw_connection.copy_data(sql, @encoder) do
-      User.includes(:_custom_fields).find_each do |u|
-        count += 1
-        ucf = u.custom_fields
-        if ucf && ucf["import_id"]
-          vanilla_username = ucf["import_username"] || u.username
-          @raw_connection.put_copy_data(
-            ["profile/#{vanilla_username}", now, now, "/users/#{u.username}"]
-          )
-        end
+      User
+        .includes(:_custom_fields)
+        .find_each do |u|
+          count += 1
+          ucf = u.custom_fields
+          if ucf && ucf["import_id"]
+            vanilla_username = ucf["import_username"] || u.username
+            @raw_connection.put_copy_data(
+              ["profile/#{vanilla_username}", now, now, "/users/#{u.username}"],
+            )
+          end

-        print "\r%7d - %6d/sec" % [count, count.to_f / (Time.now - start)] if count % 5000 == 0
-      end
+          print "\r%7d - %6d/sec" % [count, count.to_f / (Time.now - start)] if count % 5000 == 0
+        end
     end

-    puts '', '', ' Topics and posts...'
+    puts "", "", " Topics and posts..."

     start = Time.now
     count = 0
@@ -628,38 +655,36 @@ class BulkImport::Vanilla < BulkImport::Base
     sql = "COPY permalinks (url, topic_id, post_id, created_at, updated_at) FROM STDIN"

     @raw_connection.copy_data(sql, @encoder) do
-      Post.includes(:_custom_fields).find_each do |post|
-        count += 1
-        pcf = post.custom_fields
-        if pcf && pcf["import_id"]
-          topic = post.topic
-          if topic.present?
-            id = pcf["import_id"].split('-').last
-            if post.post_number == 1
-              slug = Slug.for(topic.title) # probably matches what vanilla would do...
-              @raw_connection.put_copy_data(
-                ["discussion/#{id}/#{slug}", topic.id, nil, now, now]
-              )
-            else
-              @raw_connection.put_copy_data(
-                ["discussion/comment/#{id}", nil, post.id, now, now]
-              )
-            end
-          end
-        end
+      Post
+        .includes(:_custom_fields)
+        .find_each do |post|
+          count += 1
+          pcf = post.custom_fields
+          if pcf && pcf["import_id"]
+            topic = post.topic
+            if topic.present?
+              id = pcf["import_id"].split("-").last
+              if post.post_number == 1
+                slug = Slug.for(topic.title) # probably matches what vanilla would do...
+                @raw_connection.put_copy_data(["discussion/#{id}/#{slug}", topic.id, nil, now, now])
+              else
+                @raw_connection.put_copy_data(["discussion/comment/#{id}", nil, post.id, now, now])
+              end
+            end
+          end

-        print "\r%7d - %6d/sec" % [count, count.to_f / (Time.now - start)] if count % 5000 == 0
-      end
+          print "\r%7d - %6d/sec" % [count, count.to_f / (Time.now - start)] if count % 5000 == 0
+        end
     end
   end

   def clean_up(raw, format)
     raw.encode!("utf-8", "utf-8", invalid: :replace, undef: :replace, replace: "")

-    raw.gsub!(/<(.+)> <\/\1>/, "\n\n")
+    raw.gsub!(%r{<(.+)> </\1>}, "\n\n")

     html =
-      if format == 'Html'
+      if format == "Html"
         raw
       else
         markdown = Redcarpet::Markdown.new(Redcarpet::Render::HTML, autolink: true, tables: true)
@@ -668,29 +693,23 @@ class BulkImport::Vanilla < BulkImport::Base

     doc = Nokogiri::HTML5.fragment(html)

-    doc.css("blockquote").each do |bq|
-      name = bq["rel"]
-      user = User.find_by(name: name)
-      bq.replace %{<br>[QUOTE="#{user&.username || name}"]\n#{bq.inner_html}\n[/QUOTE]<br>}
-    end
+    doc
+      .css("blockquote")
+      .each do |bq|
+        name = bq["rel"]
+        user = User.find_by(name: name)
+        bq.replace %{<br>[QUOTE="#{user&.username || name}"]\n#{bq.inner_html}\n[/QUOTE]<br>}
+      end

-    doc.css("font").reverse.each do |f|
-      f.replace f.inner_html
-    end
+    doc.css("font").reverse.each { |f| f.replace f.inner_html }

-    doc.css("span").reverse.each do |f|
-      f.replace f.inner_html
-    end
+    doc.css("span").reverse.each { |f| f.replace f.inner_html }

-    doc.css("sub").reverse.each do |f|
-      f.replace f.inner_html
-    end
+    doc.css("sub").reverse.each { |f| f.replace f.inner_html }

-    doc.css("u").reverse.each do |f|
-      f.replace f.inner_html
-    end
+    doc.css("u").reverse.each { |f| f.replace f.inner_html }

-    markdown = format == 'Html' ? ReverseMarkdown.convert(doc.to_html) : doc.to_html
+    markdown = format == "Html" ? ReverseMarkdown.convert(doc.to_html) : doc.to_html
     markdown.gsub!(/\[QUOTE="([^;]+);c-(\d+)"\]/i) { "[QUOTE=#{$1};#{$2}]" }

     markdown = process_raw_text(markdown)
@@ -702,31 +721,31 @@ class BulkImport::Vanilla < BulkImport::Base
     text = raw.dup
     text = CGI.unescapeHTML(text)

-    text.gsub!(/:(?:\w{8})\]/, ']')
+    text.gsub!(/:(?:\w{8})\]/, "]")

     # Some links look like this: <!-- m --><a class="postlink" href="http://www.onegameamonth.com">http://www.onegameamonth.com</a><!-- m -->
-    text.gsub!(/<!-- \w --><a(?:.+)href="(\S+)"(?:.*)>(.+)<\/a><!-- \w -->/i, '[\2](\1)')
+    text.gsub!(%r{<!-- \w --><a(?:.+)href="(\S+)"(?:.*)>(.+)</a><!-- \w -->}i, '[\2](\1)')

     # phpBB shortens link text like this, which breaks our markdown processing:
     # [http://answers.yahoo.com/question/index ... 223AAkkPli](http://answers.yahoo.com/question/index?qid=20070920134223AAkkPli)
     #
     # Work around it for now:
-    text.gsub!(/\[http(s)?:\/\/(www\.)?/i, '[')
+    text.gsub!(%r{\[http(s)?://(www\.)?}i, "[")

     # convert list tags to ul and list=1 tags to ol
     # list=a is not supported, so handle it like list=1
     # list=9 and list=x have the same result as list=1 and list=a
-    text.gsub!(/\[list\](.*?)\[\/list:u\]/mi, '[ul]\1[/ul]')
-    text.gsub!(/\[list=.*?\](.*?)\[\/list:o\]/mi, '[ol]\1[/ol]')
+    text.gsub!(%r{\[list\](.*?)\[/list:u\]}mi, '[ul]\1[/ul]')
+    text.gsub!(%r{\[list=.*?\](.*?)\[/list:o\]}mi, '[ol]\1[/ol]')

     # convert *-tags to li-tags so bbcode-to-md can do its magic on phpBB's lists:
-    text.gsub!(/\[\*\](.*?)\[\/\*:m\]/mi, '[li]\1[/li]')
+    text.gsub!(%r{\[\*\](.*?)\[/\*:m\]}mi, '[li]\1[/li]')

     # [QUOTE="<username>"] -- add newline
     text.gsub!(/(\[quote="[a-zA-Z\d]+"\])/i) { "#{$1}\n" }

     # [/QUOTE] -- add newline
-    text.gsub!(/(\[\/quote\])/i) { "\n#{$1}" }
+    text.gsub!(%r{(\[/quote\])}i) { "\n#{$1}" }

     text
   end
@@ -742,7 +761,6 @@ class BulkImport::Vanilla < BulkImport::Base
   def mysql_query(sql)
     @client.query(sql)
   end
-
 end

 BulkImport::Vanilla.new.start