# frozen_string_literal: true

require_relative "base"

require "mysql2"
require "htmlentities"
require "parallel"

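# Example invocation (a sketch; the script path and values below are assumptions,
# adjust the env vars, read at the top of the class and in #initialize, to your setup):
#
#   DB_HOST=localhost DB_USERNAME=root DB_PASSWORD=secret DB_NAME=vbulletin \
#   DB_CHARSET=utf8 TABLE_PREFIX=vb_ \
#   ATTACHMENT_DIR=/shared/import/data/attachments \
#   AVATAR_DIR=/shared/import/data/customavatars \
#   bundle exec ruby script/bulk_import/vbulletin.rb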
class BulkImport::VBulletin < BulkImport::Base
  TABLE_PREFIX ||= ENV["TABLE_PREFIX"] || "vb_"
  # sentinel "forever" date used for permanent suspensions
  SUSPENDED_TILL ||= Date.new(3000, 1, 1)
  ATTACHMENT_DIR ||= ENV["ATTACHMENT_DIR"] || "/shared/import/data/attachments"
  AVATAR_DIR ||= ENV["AVATAR_DIR"] || "/shared/import/data/customavatars"

  def initialize
    super

    host = ENV["DB_HOST"] || "localhost"
    username = ENV["DB_USERNAME"] || "root"
    password = ENV["DB_PASSWORD"]
    database = ENV["DB_NAME"] || "vbulletin"
    charset = ENV["DB_CHARSET"] || "utf8"

    @html_entities = HTMLEntities.new
    @encoding = CHARSET_MAP[charset]

    @client =
      Mysql2::Client.new(
        host: host,
        username: username,
        password: password,
        database: database,
        encoding: charset,
        reconnect: true,
      )

    @client.query_options.merge!(as: :array, cache_rows: false)

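    # Detect the optional "post thanks" add-on by looking for its columns on the user
    # table; when present, thanks are imported as likes (see import_likes).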
    @has_post_thanks = mysql_query(<<-SQL).to_a.count > 0
      SELECT `COLUMN_NAME`
        FROM `INFORMATION_SCHEMA`.`COLUMNS`
       WHERE `TABLE_SCHEMA` = '#{database}'
         AND `TABLE_NAME` = 'user'
         AND `COLUMN_NAME` LIKE 'post_thanks_%'
    SQL

    @user_ids_by_email = {}
  end

  def execute
    # enable these site settings as needed for the import:
    # SiteSetting.automatic_backups_enabled = false
    # SiteSetting.disable_emails = "non-staff"
    # SiteSetting.authorized_extensions = '*'
    # SiteSetting.max_image_size_kb = 102400
    # SiteSetting.max_attachment_size_kb = 102400
    # SiteSetting.clean_up_uploads = false
    # SiteSetting.clean_orphan_uploads_grace_period_hours = 43200

    import_groups
    import_users
    import_group_users

    import_user_emails
    import_user_stats

    import_user_passwords
    import_user_salts
    import_user_profiles

    import_categories
    import_topics
    import_posts

    import_likes

    import_private_topics
    import_topic_allowed_users
    import_private_posts

    create_permalink_file
    import_attachments
    import_avatars
    import_signatures
  end

  def execute_after
    # temporarily lift the age limit so even long-standing accounts can be merged
    max_age = SiteSetting.delete_user_max_post_age
    SiteSetting.delete_user_max_post_age = 50 * 365

    merge_duplicated_users

    SiteSetting.delete_user_max_post_age = max_age
  end

  def import_groups
    puts "", "Importing groups..."

    # @last_imported_group_id comes from BulkImport::Base and makes the import
    # resumable: only rows newer than the previous run are selected.
    groups = mysql_stream <<-SQL
      SELECT usergroupid, title, description, usertitle
        FROM #{TABLE_PREFIX}usergroup
       WHERE usergroupid > #{@last_imported_group_id}
       ORDER BY usergroupid
    SQL

    create_groups(groups) do |row|
      {
        imported_id: row[0],
        name: normalize_text(row[1]),
        bio_raw: normalize_text(row[2]),
        title: normalize_text(row[3]),
      }
    end
  end

  def import_users
    puts "", "Importing users..."

    users = mysql_stream <<-SQL
      SELECT u.userid, username, email, joindate, birthday, ipaddress, u.usergroupid, bandate, liftdate
        FROM #{TABLE_PREFIX}user u
             LEFT JOIN #{TABLE_PREFIX}userban ub ON ub.userid = u.userid
       WHERE u.userid > #{@last_imported_user_id}
       ORDER BY u.userid
    SQL

    create_users(users) do |row|
      u = {
        imported_id: row[0],
        username: normalize_text(row[1]),
        name: normalize_text(row[1]),
        email: row[2],
        created_at: Time.zone.at(row[3]),
        date_of_birth: parse_birthday(row[4]),
        primary_group_id: group_id_from_imported_id(row[6]),
      }
      # keep only the first IPv4 address, if any
      u[:ip_address] = row[5][/\b(?:\d{1,3}\.){3}\d{1,3}\b/] if row[5].present?
      # bandate/liftdate come from the userban join; a liftdate of 0 is treated
      # as a permanent suspension
      if row[7]
        u[:suspended_at] = Time.zone.at(row[7])
        u[:suspended_till] = row[8] > 0 ? Time.zone.at(row[8]) : SUSPENDED_TILL
      end
      u
    end
  end

  def import_user_emails
    puts "", "Importing user emails..."

    users = mysql_stream <<-SQL
      SELECT u.userid, email, joindate
        FROM #{TABLE_PREFIX}user u
       WHERE u.userid > #{@last_imported_user_id}
       ORDER BY u.userid
    SQL

    create_user_emails(users) do |row|
      user_id, email = row[0..1]

      @user_ids_by_email[email.downcase] ||= []
      user_ids = @user_ids_by_email[email.downcase] << user_id

      if user_ids.count > 1
        # fudge the email to avoid conflicts; accounts from the 2nd one on will be
        # merged back into the first by merge_duplicated_users in execute_after
        # NOTE: gsub! is used to avoid creating a new (frozen) string
        email.gsub!(/^/, SecureRandom.hex)
      end

      {
        imported_id: user_id,
        imported_user_id: user_id,
        email: email,
        created_at: Time.zone.at(row[2]),
      }
    end

    # for debugging purposes; not used operationally
    save_duplicated_users
  end

  def import_user_stats
    puts "", "Importing user stats..."

    users = mysql_stream <<-SQL
      SELECT u.userid, joindate, posts, COUNT(t.threadid) AS threads, p.dateline
             #{", post_thanks_user_amount, post_thanks_thanked_times" if @has_post_thanks}
        FROM #{TABLE_PREFIX}user u
             LEFT OUTER JOIN #{TABLE_PREFIX}post p ON p.postid = u.lastpostid
             LEFT OUTER JOIN #{TABLE_PREFIX}thread t ON u.userid = t.postuserid
       WHERE u.userid > #{@last_imported_user_id}
       GROUP BY u.userid
       ORDER BY u.userid
    SQL

    create_user_stats(users) do |row|
      user = {
        imported_id: row[0],
        imported_user_id: row[0],
        new_since: Time.zone.at(row[1]),
        post_count: row[2],
        topic_count: row[3],
        first_post_created_at: row[4] && Time.zone.at(row[4]),
      }

      if @has_post_thanks
        user[:likes_given] = row[5]
        user[:likes_received] = row[6]
      end

      user
    end
  end

  def import_group_users
    puts "", "Importing group users..."

    group_users = mysql_stream <<-SQL
      SELECT usergroupid, userid
        FROM #{TABLE_PREFIX}user
       WHERE userid > #{@last_imported_user_id}
    SQL

    create_group_users(group_users) do |row|
      { group_id: group_id_from_imported_id(row[0]), user_id: user_id_from_imported_id(row[1]) }
    end
  end

  def import_user_passwords
    puts "", "Importing user passwords..."

    user_passwords = mysql_stream <<-SQL
      SELECT userid, password
        FROM #{TABLE_PREFIX}user
       WHERE userid > #{@last_imported_user_id}
       ORDER BY userid
    SQL

    create_custom_fields("user", "password", user_passwords) do |row|
      { record_id: user_id_from_imported_id(row[0]), value: row[1] }
    end
  end

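  # The original password hashes and salts are kept as custom fields so that a
  # migration-era login plugin can verify old credentials; vBulletin 3/4 typically
  # stores md5(md5(password) + salt), but confirm the scheme for your version.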
  def import_user_salts
    puts "", "Importing user salts..."

    user_salts = mysql_stream <<-SQL
      SELECT userid, salt
        FROM #{TABLE_PREFIX}user
       WHERE userid > #{@last_imported_user_id}
         AND LENGTH(COALESCE(salt, '')) > 0
       ORDER BY userid
    SQL

    create_custom_fields("user", "salt", user_salts) do |row|
      { record_id: user_id_from_imported_id(row[0]), value: row[1] }
    end
  end

  def import_user_profiles
    puts "", "Importing user profiles..."

    user_profiles = mysql_stream <<-SQL
      SELECT userid, homepage, profilevisits
        FROM #{TABLE_PREFIX}user
       WHERE userid > #{@last_imported_user_id}
       ORDER BY userid
    SQL

    create_user_profiles(user_profiles) do |row|
      {
        user_id: user_id_from_imported_id(row[0]),
        website:
          (
            begin
              URI.parse(row[1]).to_s
            rescue StandardError
              nil
            end
          ),
        views: row[2],
      }
    end
  end

  def import_categories
    puts "", "Importing categories..."

    categories = mysql_query(<<-SQL).to_a
      SELECT forumid,
             parentid,
             CASE
               WHEN forumid IN (
                 SELECT DISTINCT forumid FROM (
                   SELECT forumid, title, COUNT(title)
                     FROM #{TABLE_PREFIX}forum
                    GROUP BY REPLACE(REPLACE(title, ':', ''), '&', '')
                   HAVING COUNT(title) > 1
                 ) AS duplicated_forum_ids
               )
               THEN
                 -- deduplicate by fudging the title; these categories will need to be merged manually later
                 CONCAT(title, '_DUPLICATE_', forumid)
               ELSE
                 title
             END AS title,
             description,
             displayorder
        FROM #{TABLE_PREFIX}forum
       ORDER BY forumid
    SQL

    return if categories.empty?

    parent_categories = categories.select { |c| c[1] == -1 }
    children_categories = categories.select { |c| c[1] != -1 }

    parent_category_ids = Set.new parent_categories.map { |c| c[0] }

    # cut the tree down to only 2 levels of categories: walk each child up its
    # ancestor chain until its parent is a top-level forum
    children_categories.each do |cc|
      cc[1] = categories.find { |c| c[0] == cc[1] }[1] until parent_category_ids.include?(cc[1])
    end

    puts "", "Importing parent categories..."
    create_categories(parent_categories) do |row|
      {
        imported_id: row[0],
        name: normalize_text(row[2]),
        description: normalize_text(row[3]),
        position: row[4],
      }
    end

    puts "", "Importing child categories..."
    create_categories(children_categories) do |row|
      {
        imported_id: row[0],
        name: normalize_text(row[2]),
        description: normalize_text(row[3]),
        position: row[4],
        parent_category_id: category_id_from_imported_id(row[1]),
      }
    end
  end

  def import_topics
    puts "", "Importing topics..."

    topics = mysql_stream <<-SQL
      SELECT threadid, title, forumid, postuserid, open, dateline, views, visible, sticky
        FROM #{TABLE_PREFIX}thread t
       WHERE threadid > #{@last_imported_topic_id}
         AND EXISTS (SELECT 1 FROM #{TABLE_PREFIX}post p WHERE p.threadid = t.threadid)
       ORDER BY threadid
    SQL

    create_topics(topics) do |row|
      created_at = Time.zone.at(row[5])

      t = {
        imported_id: row[0],
        title: normalize_text(row[1]),
        category_id: category_id_from_imported_id(row[2]),
        user_id: user_id_from_imported_id(row[3]),
        closed: row[4] == 0,
        created_at: created_at,
        views: row[6],
        visible: row[7] == 1,
      }

      t[:pinned_at] = created_at if row[8] == 1

      t
    end
  end

  def import_posts
    puts "", "Importing posts..."

    posts = mysql_stream <<-SQL
      SELECT postid, p.threadid, parentid, userid, p.dateline, p.visible, pagetext
             #{", post_thanks_amount" if @has_post_thanks}
        FROM #{TABLE_PREFIX}post p
        JOIN #{TABLE_PREFIX}thread t ON t.threadid = p.threadid
       WHERE postid > #{@last_imported_post_id}
       ORDER BY postid
    SQL

    create_posts(posts) do |row|
      topic_id = topic_id_from_imported_id(row[1])
      # only keep the reply chain when the parent post belongs to the same topic
      replied_post_topic_id = topic_id_from_imported_post_id(row[2])
      reply_to_post_number =
        topic_id == replied_post_topic_id ? post_number_from_imported_id(row[2]) : nil

      post = {
        imported_id: row[0],
        topic_id: topic_id,
        reply_to_post_number: reply_to_post_number,
        user_id: user_id_from_imported_id(row[3]),
        created_at: Time.zone.at(row[4]),
        hidden: row[5] != 1,
        raw: normalize_text(row[6]),
      }

      post[:like_count] = row[7] if @has_post_thanks
      post
    end
  end

  def import_likes
    return unless @has_post_thanks
    puts "", "Importing likes..."

    @imported_likes = Set.new
    # likes aren't resumable the way posts are, so always rescan from the start
    @last_imported_post_id = 0

    post_thanks = mysql_stream <<-SQL
      SELECT postid, userid, date
        FROM #{TABLE_PREFIX}post_thanks
       WHERE postid > #{@last_imported_post_id}
       ORDER BY postid
    SQL

    create_post_actions(post_thanks) do |row|
      post_id = post_id_from_imported_id(row[0])
      user_id = user_id_from_imported_id(row[1])

      next if post_id.nil? || user_id.nil?
      # Set#add? returns nil when the element was already present, which skips duplicates
      next if @imported_likes.add?([post_id, user_id]).nil?

      {
        post_id: post_id,
        user_id: user_id,
        post_action_type_id: 2, # like
        created_at: Time.zone.at(row[2]),
      }
    end
  end

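  # Private messages live in their own id space (pmtext), so imported ids are shifted
  # by PRIVATE_OFFSET (inherited from BulkImport::Base) to keep them from colliding
  # with regular topic and post ids.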
  def import_private_topics
    puts "", "Importing private topics..."

    @imported_topics = {}

    topics = mysql_stream <<-SQL
      SELECT pmtextid, title, fromuserid, touserarray, dateline
        FROM #{TABLE_PREFIX}pmtext
       WHERE pmtextid > (#{@last_imported_private_topic_id - PRIVATE_OFFSET})
       ORDER BY pmtextid
    SQL

    create_topics(topics) do |row|
      title = extract_pm_title(row[1])
      # group all messages with the same title and participants into a single topic
      user_ids = [row[2], row[3].scan(/i:(\d+)/)].flatten.map(&:to_i).sort
      key = [title, user_ids]

      next if @imported_topics.has_key?(key)
      @imported_topics[key] = row[0] + PRIVATE_OFFSET
      {
        archetype: Archetype.private_message,
        imported_id: row[0] + PRIVATE_OFFSET,
        title: title,
        user_id: user_id_from_imported_id(row[2]),
        created_at: Time.zone.at(row[4]),
      }
    end
  end

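  # touserarray holds a PHP-serialized recipient list; user ids appear as "i:<id>"
  # tokens, which is all the /i:(\d+)/ scans in this file rely on.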
  def import_topic_allowed_users
    puts "", "Importing topic allowed users..."

    allowed_users = Set.new

    mysql_stream(<<-SQL).each do |row|
      SELECT pmtextid, touserarray
        FROM #{TABLE_PREFIX}pmtext
       WHERE pmtextid > (#{@last_imported_private_topic_id - PRIVATE_OFFSET})
       ORDER BY pmtextid
    SQL
      next unless topic_id = topic_id_from_imported_id(row[0] + PRIVATE_OFFSET)
      row[1]
        .scan(/i:(\d+)/)
        .flatten
        .each do |id|
          next unless user_id = user_id_from_imported_id(id)
          allowed_users << [topic_id, user_id]
        end
    end

    create_topic_allowed_users(allowed_users) { |row| { topic_id: row[0], user_id: row[1] } }
  end

  def import_private_posts
    puts "", "Importing private posts..."

    posts = mysql_stream <<-SQL
      SELECT pmtextid, title, fromuserid, touserarray, dateline, message
        FROM #{TABLE_PREFIX}pmtext
       WHERE pmtextid > #{@last_imported_private_post_id - PRIVATE_OFFSET}
       ORDER BY pmtextid
    SQL

    create_posts(posts) do |row|
      title = extract_pm_title(row[1])
      user_ids = [row[2], row[3].scan(/i:(\d+)/)].flatten.map(&:to_i).sort
      key = [title, user_ids]

      next unless topic_id = topic_id_from_imported_id(@imported_topics[key])

      {
        imported_id: row[0] + PRIVATE_OFFSET,
        topic_id: topic_id,
        user_id: user_id_from_imported_id(row[2]),
        created_at: Time.zone.at(row[4]),
        raw: normalize_text(row[5]),
      }
    end
  end

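  # Writes vb_map.csv mapping old thread ids to new topic ids. The "XXX" and "YYY"
  # prefixes are placeholders, presumably meant to be swapped for the old and new
  # URL patterns when building the web server's redirect rules.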
  def create_permalink_file
    puts "", "Creating Permalink File...", ""

    total = Topic.listable_topics.count
    start = Time.now

    i = 0
    File.open(File.expand_path("../vb_map.csv", __FILE__), "w") do |f|
      Topic.listable_topics.find_each do |topic|
        i += 1
        pcf = topic.posts.includes(:_custom_fields).where(post_number: 1).first.custom_fields
        if pcf && pcf["import_id"]
          id = pcf["import_id"].split("-").last

          f.print ["XXX#{id} YYY#{topic.id}"].to_csv
          print "\r%7d/%7d - %6d/sec" % [i, total, i.to_f / (Time.now - start)] if i % 5000 == 0
        end
      end
    end
  end

  # find the uploaded file information from the db
  def find_upload(post, attachment_id)
    sql =
      "SELECT a.attachmentid attachment_id, a.userid user_id, a.filename filename
         FROM #{TABLE_PREFIX}attachment a
        WHERE a.attachmentid = #{attachment_id}"
    results = mysql_query(sql)

    unless row = results.first
      puts "Couldn't find attachment record for attachment_id = #{attachment_id} post.id = #{post.id}"
      return
    end

    attachment_id = row[0]
    user_id = row[1]
    db_filename = row[2]

    # vBulletin stores attachments on disk in a tree derived from the owner's user
    # id, one digit per level (e.g. user 123 => 1/2/3/<attachmentid>.attach)
    filename =
      File.join(ATTACHMENT_DIR, user_id.to_s.split("").join("/"), "#{attachment_id}.attach")
    real_filename = db_filename
    # guard against names that start with a dot and would otherwise have no basename
    real_filename.prepend SecureRandom.hex if real_filename[0] == "."

    unless File.exist?(filename)
      puts "Attachment file #{row.inspect} doesn't exist"
      return nil
    end

    upload = create_upload(post.user.id, filename, real_filename)

    if upload.nil? || upload.errors.any?
      puts "Upload not valid :("
      puts upload.errors.inspect if upload
      return
    end

    [upload, real_filename]
  rescue Mysql2::Error => e
    puts "SQL Error"
    puts e.message
    puts sql
  end

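  # Rewrites [ATTACH]<id>[/ATTACH] BBCode in imported posts into Discourse upload
  # markup (html_for_upload), revising each changed post without bumping its topic.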
  def import_attachments
    puts "", "importing attachments..."

    RateLimiter.disable
    current_count = 0
    total_count = Post.count
    success_count = 0
    fail_count = 0

    attachment_regex = %r{\[attach[^\]]*\](\d+)\[/attach\]}i

    Post.find_each do |post|
      current_count += 1
      print_status current_count, total_count

      new_raw = post.raw.dup
      new_raw.gsub!(attachment_regex) do |s|
        matches = attachment_regex.match(s)
        attachment_id = matches[1]

        upload, filename = find_upload(post, attachment_id)
        unless upload
          fail_count += 1
          next
          # should we strip invalid attach tags?
        end

        html_for_upload(upload, filename)
      end

      if new_raw != post.raw
        PostRevisor.new(post).revise!(
          post.user,
          { raw: new_raw },
          bypass_bump: true,
          edit_reason: "Import attachments from vBulletin",
        )
      end

      success_count += 1
    end

    puts "", "imported #{success_count} attachments... failed: #{fail_count}."
    RateLimiter.enable
  end

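  # Custom avatars are picked up from AVATAR_DIR; vBulletin names them
  # "avatar<userid>_<n>.gif", where <n> appears to be a revision counter.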
  def import_avatars
    if AVATAR_DIR && File.exist?(AVATAR_DIR)
      puts "", "importing user avatars"

      RateLimiter.disable
      start = Time.now
      count = 0

      Dir.foreach(AVATAR_DIR) do |item|
        print "\r%7d - %6d/sec" % [count, count.to_f / (Time.now - start)]

        next if item == "." || item == ".." || item == ".DS_Store"
        next unless item =~ /avatar(\d+)_(\d)\.gif/
        scan = item.scan(/avatar(\d+)_(\d)\.gif/)
        next if scan[0][0].blank?
        u = UserCustomField.find_by(name: "import_id", value: scan[0][0]).try(:user)
        next if u.blank?
        # raise "User not found for id #{user_id}" if user.blank?

        photo_real_filename = File.join(AVATAR_DIR, item)
        puts "#{photo_real_filename} not found" unless File.exist?(photo_real_filename)

        upload = create_upload(u.id, photo_real_filename, File.basename(photo_real_filename))
        count += 1
        if upload.persisted?
          u.import_mode = false
          u.create_user_avatar
          u.import_mode = true
          u.user_avatar.update(custom_upload_id: upload.id)
          u.update(uploaded_avatar_id: upload.id)
        else
          puts "Error: Upload did not persist for #{u.username} #{photo_real_filename}!"
        end
      end

      puts "", "imported #{count} avatars..."
      RateLimiter.enable
    end
  end

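  # Signatures come from vBulletin's sigparsed table and are stored as user custom
  # fields (see_signatures, signature_raw, signature_cooked), the convention used by
  # the Discourse signatures plugin.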
  def import_signatures
    puts "Importing user signatures..."

    total_count = mysql_query(<<-SQL).first[0].to_i
      SELECT COUNT(userid) count
        FROM #{TABLE_PREFIX}sigparsed
    SQL
    current_count = 0

    user_signatures = mysql_stream <<-SQL
      SELECT userid, signatureparsed
        FROM #{TABLE_PREFIX}sigparsed
       ORDER BY userid
    SQL

    user_signatures.each do |sig|
      current_count += 1
      print_status current_count, total_count
      user_id = sig[0]
      user_sig = sig[1]
      next if user_id.blank? || user_sig.blank?

      u = UserCustomField.find_by(name: "import_id", value: user_id).try(:user)
      next if u.blank?

      # these custom fields cannot hold duplicates, so clear out any previous values
      UserCustomField.where(
        user_id: u.id,
        name: %w[see_signatures signature_raw signature_cooked],
      ).destroy_all

      user_sig.gsub!(%r{\[/?sigpic\]}i, "")

      UserCustomField.create!(user_id: u.id, name: "see_signatures", value: true)
      UserCustomField.create!(user_id: u.id, name: "signature_raw", value: user_sig)
      UserCustomField.create!(
        user_id: u.id,
        name: "signature_cooked",
        value: PrettyText.cook(user_sig, omit_nofollow: false),
      )
    end
  end

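  # Accounts that shared an email address were imported with fudged addresses (see
  # import_user_emails); every account after the first is merged back into it here.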
  def merge_duplicated_users
    count = 0
    total_count = 0

    duplicated = {}
    @user_ids_by_email
      .select { |e, ids| ids.count > 1 }
      .each_with_index do |(email, ids), i|
        duplicated[email] = [ids, i]
        count += 1
        total_count += ids.count
      end

    puts "", "Merging #{total_count} duplicated users across #{count} distinct emails..."

    start = Time.now

    Parallel.each duplicated do |email, (user_ids, i)|
      # nothing to do about blank emails; those accounts keep their randomized hex address
      next unless email.presence

      # queried one by one to ensure ordering
      first, *rest =
        user_ids.map do |id|
          UserCustomField.includes(:user).find_by!(name: "import_id", value: id).user
        end

      rest.each do |dup|
        UserMerger.new(dup, first).merge!
        first.reload
        printf "."
      end

      print "\n%6d/%6d - %6d/sec" % [i, count, i.to_f / (Time.now - start)] if i % 10 == 0
    end

    puts
  end

  def save_duplicated_users
    File.open("duplicated_users.json", "w+") { |f| f.puts @user_ids_by_email.to_json }
  end

  def read_duplicated_users
    @user_ids_by_email = JSON.parse File.read("duplicated_users.json")
  end

  # strip any leading "Re:" so replies collapse into the same private topic
  def extract_pm_title(title)
    normalize_text(title).scrub.gsub(/^Re\s*:\s*/i, "")
  end

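  # vBulletin stores birthdays as "mm-dd-yyyy" strings; anything that won't parse is
  # dropped, and years before 1904 are clamped to 1904.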
  def parse_birthday(birthday)
    return if birthday.blank?
    date_of_birth =
      begin
        Date.strptime(birthday.gsub(/[^\d-]+/, ""), "%m-%d-%Y")
      rescue StandardError
        nil
      end
    return if date_of_birth.nil?
    if date_of_birth.year < 1904
      Date.new(1904, date_of_birth.month, date_of_birth.day)
    else
      date_of_birth
    end
  end

  def print_status(current, max, start_time = nil)
    if start_time.present?
      elapsed_seconds = Time.now - start_time
      elements_per_minute = "[%.0f items/min] " % [current / elapsed_seconds.to_f * 60]
    else
      elements_per_minute = ""
    end

    print "\r%9d / %d (%5.1f%%) %s" % [current, max, current / max.to_f * 100, elements_per_minute]
  end

  def mysql_stream(sql)
    @client.query(sql, stream: true)
  end

  def mysql_query(sql)
    @client.query(sql)
  end
end

BulkImport::VBulletin.new.run