# frozen_string_literal: true

require "json"
require "digest"
require File.expand_path(File.dirname(__FILE__) + "/base.rb")

# Edit the constants and initialize method for your import data.
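#
# A rough sketch, inferred from the code below, of the JSON shape this script
# expects (extra fields are ignored):
#
#   {
#     "topics": [
#       {
#         "id": 123,
#         "title": "A topic title",
#         "pinned": false,
#         "posts": [
#           { "id": 456, "author": "Some Name", "body": "<p>Post body HTML</p>", "date": "2017-01-17T17:08:45Z" }
#         ]
#       }
#     ]
#   }
#
# Point the JSON_FILE env var at that file and, optionally, CATEGORY_ID at the
# destination category, then run the script (path shown is illustrative):
#
#   JSON_FILE=/path/to/export.json CATEGORY_ID=5 bundle exec ruby script/import_scripts/json_generic.rb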
class ImportScripts::JsonGeneric < ImportScripts::Base
  JSON_FILE_PATH = ENV["JSON_FILE"]
  BATCH_SIZE ||= 1000

  def initialize
    super

    @imported_json = load_json
  end

  def execute
    puts "", "Importing from JSON file..."

    import_users
    import_discussions

    puts "", "Done"
  end

  def load_json
    JSON.parse(File.read(JSON_FILE_PATH))
  end
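
  # Turn a raw author name into the handle used both as the import id and the
  # suggested Discourse username: lowercase it, strip everything outside
  # a-z, 0-9, "-" and "_", and fall back to a short SHA1 digest of the name
  # when nothing usable remains.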
  def username_for(name)
    result = name.downcase.gsub(/[^a-z0-9\-\_]/, "")

    result = Digest::SHA1.hexdigest(name)[0...10] if result.blank?

    result
  end
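
  # Gather every distinct post author from the JSON and create the users up
  # front. Only author names are available in the posts, so the email is a
  # placeholder address on example.com.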
  def import_users
    puts "", "Importing users"

    users = []
    @imported_json["topics"].each { |t| t["posts"].each { |p| users << p["author"].scrub } }
    users.uniq!

    create_users(users) do |u|
      {
        id: username_for(u),
        username: username_for(u),
        name: u,
        email: "#{username_for(u)}@example.com",
        created_at: Time.now,
      }
    end
  end
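
  # Each JSON topic becomes a Discourse topic built from its first post, with
  # the remaining posts imported as replies. Bodies are treated as raw HTML
  # (cook_method raw_html), pinned topics get pinned_at set to the first
  # post's date, and every post records its source id in a "pid:<id>"
  # import_id custom field.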
  def import_discussions
    puts "", "Importing discussions"

    topics = 0
    posts = 0

    @imported_json["topics"].each do |t|
      first_post = t["posts"][0]
      next unless first_post

      topic = {
        id: t["id"],
        user_id: user_id_from_imported_user_id(username_for(first_post["author"])) || -1,
        raw: first_post["body"],
        created_at: Time.zone.parse(first_post["date"]),
        cook_method: Post.cook_methods[:raw_html],
        title: t["title"],
        category: ENV["CATEGORY_ID"],
        custom_fields: {
          import_id: "pid:#{first_post["id"]}",
        },
      }

      topic[:pinned_at] = Time.zone.parse(first_post["date"]) if t["pinned"]
      topics += 1
      parent_post = create_post(topic, topic[:id])

      t["posts"][1..-1].each do |p|
        create_post(
          {
            id: p["id"],
            topic_id: parent_post.topic_id,
            user_id: user_id_from_imported_user_id(username_for(p["author"])) || -1,
            raw: p["body"],
            created_at: Time.zone.parse(p["date"]),
            cook_method: Post.cook_methods[:raw_html],
            custom_fields: {
              import_id: "pid:#{p["id"]}",
            },
          },
          p["id"],
        )
        posts += 1
      end
    end

    puts "", "Imported #{topics} topics with #{topics + posts} posts."
  end
end
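
# `perform` is provided by ImportScripts::Base (base.rb) and is what ends up
# calling the `execute` method defined above.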
ImportScripts::JsonGeneric.new.perform if __FILE__ == $0