require 'sqlite3'
require File.expand_path(File.dirname(__FILE__) + "/base.rb")

# Paste these lines into your shell before running this:
=begin
export MBOX_SUBDIR="messages" # subdirectory with mbox files
export LIST_NAME=LIST_NAME
export DEFAULT_TRUST_LEVEL=1
export DATA_DIR=~/data/import
export SPLIT_AT="^From " # or "^From (.*)"
=end

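# A typical run would then look something like this (a sketch, assuming this file
# lives at script/import_scripts/mbox.rb inside a Discourse checkout, as the
# require of base.rb above suggests; adjust paths to your install):
#
#   cd /path/to/discourse
#   export DATA_DIR=~/data/import
#   bundle exec ruby script/import_scripts/mbox.rb
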
# If you change the functionality of this script, please consider updating this HOWTO:
# https://meta.discourse.org/t/howto-import-mbox-mailing-list-files/51233
class ImportScripts::Mbox < ImportScripts::Base
  include ActiveModel::Validations

  # CHANGE THESE BEFORE RUNNING THE IMPORTER
  MBOX_SUBDIR = ENV['MBOX_SUBDIR'] || "messages" # subdirectory with mbox files
  LIST_NAME = ENV['LIST_NAME'] || "" # Will remove [LIST_NAME] from Subjects
  DEFAULT_TRUST_LEVEL = ENV['DEFAULT_TRUST_LEVEL'] || 1
  DATA_DIR = ENV['DATA_DIR'] || "~/data/import"
  MBOX_DIR = File.expand_path(DATA_DIR) # where index.db will be created

  BATCH_SIZE = 1000

  # Site settings
  SiteSetting.disable_emails = true

  # Comment out if each file contains a single message
  # Use formail to split yourself: http://linuxcommand.org/man_pages/formail1.html
  # SPLIT_AT = /^From (.*) at/ # for Google Groups?
  # Note: a Regexp is always truthy, so fall back explicitly when SPLIT_AT isn't set
  # (otherwise /#{nil}/ would match every line and the fallback could never apply).
  SPLIT_AT = ENV['SPLIT_AT'].present? ? /#{ENV['SPLIT_AT']}/ : /^From / # for standard MBOX files

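  # To pre-split a large mbox into one file per message yourself, something like
  # this should work (a sketch, assuming procmail's formail from the link above):
  #
  #   formail -ds sh -c 'cat > msg.$FILENO' < list.mbox
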
  # Will create a category if it doesn't exist
  # create subdirectories in MBOX_SUBDIR with categories
  CATEGORY_MAPPINGS = {
    "default" => "uncategorized",
    # ex: "jobs-folder" => "jobs"
  }

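  # For example, with "jobs-folder" => "jobs" above, mbox files placed under
  # DATA_DIR/jobs-folder/ are indexed into the "jobs" category, while files under
  # DATA_DIR/MBOX_SUBDIR/ fall back to the "default" mapping (see create_email_indices).
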
  unless File.directory?(MBOX_DIR)
    puts "Cannot find import directory #{MBOX_DIR}. Giving up."
    exit
  end

  validates_format_of :email, :with => /\A([^@\s]+)@((?:[-a-z0-9]+\.)+[a-z]{2,})\Z/i, :on => :create

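  # The import runs in two passes: create_email_indices/create_user_indices first
  # parse every message into a local SQLite index (index.db under MBOX_DIR), then
  # users, topics and replies are created in Discourse from that index.
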
  def execute
    import_categories

    create_email_indices
    create_user_indices
    massage_indices

    import_users
    create_forum_topics
    import_replies

    # replace_email_addresses # uncomment to replace all email addresses with @username
  end

  def import_categories
    mappings = CATEGORY_MAPPINGS.values - ['uncategorized']
    create_categories(mappings) do |c|
      { id: c, name: c }
    end
  end

  def open_db
    SQLite3::Database.new("#{MBOX_DIR}/index.db")
  end

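  # Yields each line of file f, transparently decompressing .gz mbox files.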
  def each_line(f)
    infile = File.open(f, 'r')
    if f.ends_with?('.gz')
      gz = Zlib::GzipReader.new(infile)
      gz.each_line do |line|
        yield line
      end
    else
      infile.each_line do |line|
        yield line
      end
    end
  ensure
    infile.close
  end

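  # Yields each parsed Mail message together with its source filename, splitting
  # files on SPLIT_AT when it is present, otherwise treating each file as a
  # single message.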
  def all_messages
    files = Dir["#{MBOX_DIR}/#{MBOX_SUBDIR}/*"]

    CATEGORY_MAPPINGS.keys.each do |k|
      files << Dir["#{MBOX_DIR}/#{k}/*"]
    end
    files.flatten!
    files.sort!

    files.each_with_index do |f, idx|
      print_warning "\nProcessing: #{f}"
      start_time = Time.now

      if SPLIT_AT.present?
        msg = ""
        message_count = 0
        each_line(f) do |line|
          line = line.scrub
          if line =~ SPLIT_AT
            message_count += 1
            if !msg.empty?
              mail = Mail.read_from_string(msg)
              yield mail, f
              print_status(idx, files.size, start_time)
              msg = ""
            end
          end
          msg << line
        end

        if !msg.empty?
          mail = Mail.read_from_string(msg)
          yield mail, f
          print_status(idx, files.size, start_time)
          msg = ""
        end
      else
        raw = File.read(f)
        mail = Mail.read_from_string(raw)
        yield mail, f
        print_status(idx, files.size, start_time)
      end
    end
  end

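  # Fixes up threading in the index: clears reply_to references to messages we
  # never saw, then treats messages that share a subject line as replies to the
  # earliest message with that title.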
  def massage_indices
    db = open_db
    db.execute "UPDATE emails SET reply_to = null WHERE reply_to = ''"

    rows = db.execute "SELECT msg_id, title, reply_to FROM emails ORDER BY datetime(email_date) ASC"

    msg_ids = {}
    titles = {}
    rows.each do |row|
      msg_ids[row[0]] = true
      if titles[row[1]].nil?
        titles[row[1]] = row[0]
      end
    end

    # First, any replies where the parent doesn't exist should have that field cleared
    not_found = []
    rows.each do |row|
      msg_id, _, reply_to = row
      if reply_to.present?
        not_found << msg_id if msg_ids[reply_to].blank?
      end
    end

    puts "#{not_found.size} records couldn't be associated with parents"
    if not_found.present?
      db.execute "UPDATE emails SET reply_to = NULL WHERE msg_id IN (#{not_found.map { |nf| "'#{nf}'" }.join(',')})"
    end

    dupe_titles = db.execute "SELECT title, COUNT(*) FROM emails GROUP BY title HAVING count(*) > 1"
    puts "#{dupe_titles.size} replies to wire up"
    dupe_titles.each do |t|
      title = t[0]
      first = titles[title]
      db.execute "UPDATE emails SET reply_to = ? WHERE title = ? and msg_id <> ?", [first, title, first]
    end
  ensure
    db.close
  end

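  # Best-effort extraction of [email, display name] from the From: header,
  # undoing the "user at example.com" obfuscation some list archives apply.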
  def extract_name(mail)
    from_name = nil
    from = mail[:from]
    from_email = nil

    if mail.from.present?
      from_email = mail.from.dup
      if from_email.kind_of?(Array)
        if from_email[0].nil?
          print_warning "Cannot find email address (ignoring)!\n#{mail}"
        else
          from_email = from_email.first.dup
          from_email.gsub!(/ at /, '@')
          from_email.gsub!(/ [at] /, '@')
          # strip real names in ()s. Todo: read into name
          from_email.gsub!(/\(.*$/, '')
          from_email.gsub!(/ /, '')
        end
      end
    end

    display_names = from.try(:display_names)
    if display_names.present?
      from_name = display_names.first
    end
    if from_name.blank? && from.to_s =~ /\(([^\)]+)\)/
      from_name = Regexp.last_match[1]
    end
    from_name = from.to_s if from_name.blank?

    [from_email, from_name]
  end

  def print_warning(message)
    $stderr.puts "#{message}"
  end

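  # Pass 1: parse every message and write one row per email into the emails
  # table of the local SQLite index, tagged with its category.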
  def create_email_indices
    db = open_db
    db.execute "DROP TABLE IF EXISTS emails"
    db.execute <<-SQL
      CREATE TABLE emails (
        msg_id VARCHAR(995) PRIMARY KEY,
        from_email VARCHAR(255) NOT NULL,
        from_name VARCHAR(255) NOT NULL,
        title VARCHAR(255) NOT NULL,
        reply_to VARCHAR(955) NULL,
        email_date DATETIME NOT NULL,
        message TEXT NOT NULL,
        category VARCHAR(255) NOT NULL
      );
    SQL
    db.execute "CREATE INDEX by_title ON emails (title)"
    db.execute "CREATE INDEX by_email ON emails (from_email)"

    puts "", "creating indices"

    all_messages do |mail, filename|
      directory = filename.sub("#{MBOX_DIR}/", '').split("/")[0]
      category = CATEGORY_MAPPINGS[directory] || CATEGORY_MAPPINGS['default'] || 'uncategorized'

      msg_id = mail['Message-ID'].to_s
      # Many ways to get a name
      from_email, from_name = extract_name(mail)

      title = clean_title(mail['Subject'].to_s)
      reply_to = mail['In-Reply-To'].to_s
      email_date = mail['date'].to_s
      email_date = DateTime.parse(email_date).to_s unless email_date.blank?

      if from_email.kind_of?(String)
        unless from_email.match(/\A[\w+\-.]+@[a-z\d\-]+(\.[a-z\d\-]+)*\.[a-z]+\z/i)
          print_warning "Ignoring bad email address #{from_email} in #{msg_id}"
        else
          db.execute "INSERT OR IGNORE INTO emails (msg_id,
                                                    from_email,
                                                    from_name,
                                                    title,
                                                    reply_to,
                                                    email_date,
                                                    message,
                                                    category)
                      VALUES (?, ?, ?, ?, ?, ?, ?, ?)",
                     [msg_id, from_email, from_name, title, reply_to, email_date, mail.to_s, category]
        end
      end
    end
  ensure
    db.close
  end

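  # Builds the users table of the index: one row per distinct sender address.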
  def create_user_indices
    db = open_db
    db.execute "DROP TABLE IF EXISTS users"
    db.execute <<-SQL
      CREATE TABLE users (
        email VARCHAR(995) PRIMARY KEY,
        name VARCHAR(255) NOT NULL
      );
    SQL
    db.execute "INSERT OR IGNORE INTO users (email, name) SELECT from_email, from_name FROM emails"
  ensure
    db.close
  end

  def clean_title(title)
    title ||= ""
    # Strip mailing list name from subject
    title = title.gsub(/\[#{Regexp.escape(LIST_NAME)}\]/, '').strip
    original_length = title.length

    # Strip Reply prefix from title (Standard and localized)
    title = title.gsub(/^Re: */i, '')
    title = title.gsub(/^R: */i, '')   # Italian
    title = title.gsub(/^RIF: */i, '') # Italian

    # Strip Forward prefix from title (Standard and localized)
    title = title.gsub(/^Fwd: */i, '')
    title = title.gsub(/^I: */i, '')   # Italian
    title = title.strip

    # In case of mixed localized prefixes there could be many of them if the mail client didn't strip the localized ones
    if original_length > title.length
      clean_title(title)
    else
      title
    end
  end

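  # Scrubs invalid bytes and strips the standard Google Groups unsubscribe
  # footer from a message body.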
  def clean_raw(input)
    raw = input.dup
    raw.scrub!
    raw.gsub!(/-- \nYou received this message because you are subscribed to the Google Groups "[^"]*" group.\nTo unsubscribe from this group and stop receiving emails from it, send an email to [^+@]+\+unsubscribe@googlegroups.com\.\nFor more options, visit https:\/\/groups\.google\.com\/groups\/opt_out\./, '')
    raw
  end

  def import_users
    puts "", "importing users"
    db = open_db

    all_users = db.execute("SELECT name, email FROM users")
    total_count = all_users.size

    batches(BATCH_SIZE) do |offset|
      users = all_users[offset..offset + BATCH_SIZE - 1]
      break if users.nil?
      next if all_records_exist? :users, users.map { |u| u[1] }

      create_users(users, total: total_count, offset: offset) do |u|
        {
          id: u[1],
          email: u[1],
          name: u[0],
          trust_level: DEFAULT_TRUST_LEVEL,
        }
      end
    end
  ensure
    db.close
  end

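  # Optional pass (commented out in execute): rewrites existing posts so that
  # imported users' bare email addresses become @username mentions.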
  def replace_email_addresses
    puts "", "replacing email addresses with @usernames"
    post = Post.new
    total_count = User.real.count
    progress_count = 0
    start_time = Time.now

    # from: https://meta.discourse.org/t/replace-a-string-in-all-posts/48729/17
    # and https://github.com/discourse/discourse/blob/master/lib/tasks/posts.rake#L114-L136
    User.find_each do |u|
      i = 0
      find = u.email.dup
      replace = "@#{u.username}"
      if !replace.include? "@"
        puts "Skipping #{replace}"
        next
      end

      found = Post.where("raw ILIKE ?", "%#{find}%")
      next if found.nil?
      next if found.count < 1

      found.each do |p|
        new_raw = p.raw.dup
        new_raw = new_raw.gsub!(/#{Regexp.escape(find)}/i, replace) || new_raw
        if new_raw != p.raw
          p.revise(Discourse.system_user, { raw: new_raw }, { bypass_bump: true })
          print_warning "\nReplaced #{find} with #{replace} in topic #{p.topic_id}"
        end
      end

      progress_count += 1
      puts ""
      print_status(progress_count, total_count, start_time)
    end
  end

  def parse_email(msg)
    receiver = Email::Receiver.new(msg)
    mail = Mail.read_from_string(msg)
    mail.body

    selected = receiver.select_body
    selected.force_encoding(selected.encoding).encode("UTF-8")
  end

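  # Pass 2a: each indexed email without a reply_to becomes a new topic.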
  def create_forum_topics
    puts "", "creating forum topics"
    db = open_db
    all_topics = db.execute("SELECT msg_id,
                                    from_email,
                                    from_name,
                                    title,
                                    email_date,
                                    message,
                                    category
                             FROM emails
                             WHERE reply_to IS NULL
                             ORDER BY DATE(email_date)")

    topic_count = all_topics.size
    batches(BATCH_SIZE) do |offset|
      topics = all_topics[offset..offset + BATCH_SIZE - 1]
      break if topics.nil?
      next if all_records_exist? :posts, topics.map { |t| t[0] }

      create_posts(topics, total: topic_count, offset: offset) do |t|
        raw_email = t[5]
        receiver = Email::Receiver.new(raw_email)
        mail = Mail.read_from_string(raw_email)
        mail.body

        from_email, _ = extract_name(mail)
        selected = receiver.select_body
        next unless selected
        selected = selected.join('') if selected.kind_of?(Array)

        title = mail.subject
        username = User.find_by_email(from_email).try(:username)
        user_id = user_id_from_imported_user_id(from_email) || Discourse::SYSTEM_USER_ID

        # build the post body before importing attachments so the attachment markdown is not lost
        raw = selected.force_encoding(selected.encoding).encode("UTF-8")
        raw = clean_raw(raw)
        raw = raw.dup.to_s

        # import the attachments
        mail.attachments.each do |attachment|
          tmp = Tempfile.new("discourse-email-attachment")
          begin
            # read attachment
            File.open(tmp.path, "w+b") { |f| f.write attachment.body.decoded }
            # create the upload for the user
            upload = Upload.create_for(user_id, tmp, attachment.filename, tmp.size)
            if upload && upload.errors.empty?
              raw << "\n\n#{receiver.attachment_markdown(upload)}\n\n"
            end
          ensure
            tmp.try(:close!) rescue nil
          end
        end

        # replace the sender's email address (and its "user at example.com" form) with a mention
        if username.present?
          raw.gsub!(/#{from_email}/, "@#{username}")
          cleaned_email = from_email.dup.sub(/@/, ' at ')
          raw.gsub!(/#{cleaned_email}/, "@#{username}")
        end

        { id: t[0],
          title: clean_title(title),
          user_id: user_id,
          created_at: mail.date,
          category: t[6],
          raw: clean_raw(raw),
          cook_method: Post.cook_methods[:email] }
      end
    end
  ensure
    db.close
  end

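  # Pass 2b: each indexed email with a reply_to becomes a reply on the topic
  # created for its parent message.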
  def import_replies
    puts "", "creating topic replies"
    db = open_db
    replies = db.execute("SELECT msg_id,
                                 from_email,
                                 from_name,
                                 title,
                                 email_date,
                                 message,
                                 reply_to
                          FROM emails
                          WHERE reply_to IS NOT NULL
                          ORDER BY DATE(email_date)")

    post_count = replies.size
    puts "Replies: #{post_count}"

    batches(BATCH_SIZE) do |offset|
      posts = replies[offset..offset + BATCH_SIZE - 1]
      break if posts.nil?
      break if posts.count < 1

      next if all_records_exist? :posts, posts.map { |p| p[0] }

      create_posts(posts, total: post_count, offset: offset) do |p|
        parent_id = p[6]
        id = p[0]

        topic = topic_lookup_from_imported_post_id(parent_id)
        topic_id = topic[:topic_id] if topic
        next unless topic_id

        raw_email = p[5]
        receiver = Email::Receiver.new(raw_email)
        mail = Mail.read_from_string(raw_email)
        mail.body

        from_email, _ = extract_name(mail)
        selected = receiver.select_body
        selected = selected.join('') if selected.kind_of?(Array)
        next unless selected

        raw = selected.force_encoding(selected.encoding).encode("UTF-8")
        username = User.find_by_email(from_email).try(:username)
        user_id = user_id_from_imported_user_id(from_email) || Discourse::SYSTEM_USER_ID

        raw = clean_raw(raw).to_s
        # replace the sender's email address (and its "user at example.com" form) with a mention
        if username.present?
          raw.gsub!(/#{from_email}/, "@#{username}")
          cleaned_email = from_email.dup.sub(/@/, ' at ')
          raw.gsub!(/#{cleaned_email}/, "@#{username}")
        end

        # import the attachments
        mail.attachments.each do |attachment|
          tmp = Tempfile.new("discourse-email-attachment")
          begin
            # read attachment
            File.open(tmp.path, "w+b") { |f| f.write attachment.body.decoded }
            # create the upload for the user
            upload = Upload.create_for(user_id, tmp, attachment.filename, tmp.size)
            if upload && upload.errors.empty?
              raw << "\n\n#{receiver.attachment_markdown(upload)}\n\n"
            end
          ensure
            tmp.try(:close!) rescue nil
          end
        end

        { id: id,
          topic_id: topic_id,
          user_id: user_id,
          created_at: mail.date,
          raw: clean_raw(raw),
          cook_method: Post.cook_methods[:email] }
      end
    end
  ensure
    db.close
  end
end

ImportScripts::Mbox.new.perform