few components with rspec3 syntax

parent c96220ca76
commit 0fd98b56d8
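The change throughout is the RSpec 2-to-3 expectation-syntax migration: `should` matchers are replaced by `expect(...).to`. A minimal illustrative sketch of the pattern (placeholder spec, not code from this commit):

  # RSpec 2.x "should" syntax (old form removed below)
  describe "a sum" do
    it "adds" do
      (1 + 1).should == 2
      -> { raise ArgumentError }.should raise_error(ArgumentError)
    end
  end

  # RSpec 3 "expect" syntax (new form added below)
  describe "a sum" do
    it "adds" do
      expect(1 + 1).to eq(2)
      expect { raise ArgumentError }.to raise_error(ArgumentError)
    end
  end
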
@@ -8,11 +8,11 @@ describe Archetype do
context 'default archetype' do

it 'has an Archetype by default' do
- Archetype.list.should be_present
+ expect(Archetype.list).to be_present
end

it 'has an id of default' do
- Archetype.list.first.id.should == Archetype.default
+ expect(Archetype.list.first.id).to eq(Archetype.default)
end

context 'duplicate' do
@@ -23,7 +23,7 @@ describe Archetype do
end

it 'does not add the same archetype twice' do
- Archetype.list.size.should == @old_size
+ expect(Archetype.list.size).to eq(@old_size)
end

end
@@ -35,8 +35,8 @@ describe Archetype do
it 'has one more element' do
@list = Archetype.list.dup
Archetype.register('glados')
- Archetype.list.size.should == @list.size + 1
- Archetype.list.find {|a| a.id == 'glados'}.should be_present
+ expect(Archetype.list.size).to eq(@list.size + 1)
+ expect(Archetype.list.find {|a| a.id == 'glados'}).to be_present
end

end

@@ -18,7 +18,7 @@ describe Auth::DefaultCurrentUserProvider do
it "finds a user for a correct per-user api key" do
user = Fabricate(:user)
ApiKey.create!(key: "hello", user_id: user.id, created_by_id: -1)
- provider("/?api_key=hello").current_user.id.should == user.id
+ expect(provider("/?api_key=hello").current_user.id).to eq(user.id)
end

it "raises for a user pretending" do
@@ -48,29 +48,29 @@ describe Auth::DefaultCurrentUserProvider do
found_user = provider("/?api_key=hello&api_username=#{user.username.downcase}",
"REMOTE_ADDR" => "100.0.0.22").current_user

- found_user.id.should == user.id
+ expect(found_user.id).to eq(user.id)


found_user = provider("/?api_key=hello&api_username=#{user.username.downcase}",
"HTTP_X_FORWARDED_FOR" => "10.1.1.1, 100.0.0.22").current_user
- found_user.id.should == user.id
+ expect(found_user.id).to eq(user.id)

end

it "finds a user for a correct system api key" do
user = Fabricate(:user)
ApiKey.create!(key: "hello", created_by_id: -1)
- provider("/?api_key=hello&api_username=#{user.username.downcase}").current_user.id.should == user.id
+ expect(provider("/?api_key=hello&api_username=#{user.username.downcase}").current_user.id).to eq(user.id)
end

it "should not update last seen for message bus" do
- provider("/message-bus/anything/goes", method: "POST").should_update_last_seen?.should == false
- provider("/message-bus/anything/goes", method: "GET").should_update_last_seen?.should == false
+ expect(provider("/message-bus/anything/goes", method: "POST").should_update_last_seen?).to eq(false)
+ expect(provider("/message-bus/anything/goes", method: "GET").should_update_last_seen?).to eq(false)
end

it "should update last seen for others" do
- provider("/topic/anything/goes", method: "POST").should_update_last_seen?.should == true
- provider("/topic/anything/goes", method: "GET").should_update_last_seen?.should == true
+ expect(provider("/topic/anything/goes", method: "POST").should_update_last_seen?).to eq(true)
+ expect(provider("/topic/anything/goes", method: "GET").should_update_last_seen?).to eq(true)
end
end

@@ -27,7 +27,7 @@ describe Auth::FacebookAuthenticator do

result = authenticator.after_authenticate(hash)

- result.user.id.should == user.id
+ expect(result.user.id).to eq(user.id)
end

it 'can create a proper result for non existing users' do
@@ -49,8 +49,8 @@ describe Auth::FacebookAuthenticator do

result = authenticator.after_authenticate(hash)

- result.user.should == nil
- result.extra_data[:name].should == "bob bob"
+ expect(result.user).to eq(nil)
+ expect(result.extra_data[:name]).to eq("bob bob")
end
end

@@ -28,7 +28,7 @@ describe Auth::GoogleOAuth2Authenticator do

result = authenticator.after_authenticate(hash)

- result.user.id.should == user.id
+ expect(result.user.id).to eq(user.id)
end

it 'can create a proper result for non existing users' do
@@ -50,8 +50,8 @@ describe Auth::GoogleOAuth2Authenticator do
authenticator = described_class.new
result = authenticator.after_authenticate(hash)

- result.user.should == nil
- result.extra_data[:name].should == "Jane Doe"
+ expect(result.user).to eq(nil)
+ expect(result.extra_data[:name]).to eq("Jane Doe")
end
end

@@ -14,12 +14,12 @@ describe Auth::OpenIdAuthenticator do
user = Fabricate(:user)
response = OpenStruct.new(identity_url: 'abc')
result = auth.after_authenticate(info: {email: user.email}, extra: {response: response})
- result.user.should == user
+ expect(result.user).to eq(user)
end

it "raises an exception when email is missing" do
auth = Auth::OpenIdAuthenticator.new("test", "id", trusted: true)
response = OpenStruct.new(identity_url: 'abc')
- -> { auth.after_authenticate(info: {}, extra: { response: response }) }.should raise_error(Discourse::InvalidParameters)
+ expect { auth.after_authenticate(info: {}, extra: { response: response }) }.to raise_error(Discourse::InvalidParameters)
end
end

@@ -12,15 +12,15 @@ describe AvatarLookup do
end

it 'returns nil if user_id does not exists' do
- @avatar_lookup[0].should == nil
+ expect(@avatar_lookup[0]).to eq(nil)
end

it 'returns nil if user_id is nil' do
- @avatar_lookup[nil].should == nil
+ expect(@avatar_lookup[nil]).to eq(nil)
end

it 'returns user if user_id exists' do
- @avatar_lookup[user.id].should eq(user)
+ expect(@avatar_lookup[user.id]).to eq(user)
end
end
end

@@ -16,19 +16,19 @@ describe AvatarUploadService do
let(:avatar_file) { AvatarUploadService.new(file, :image) }

it "should have a filesize" do
- avatar_file.filesize.should > 0
+ expect(avatar_file.filesize).to be > 0
end

it "should have a filename" do
- avatar_file.filename.should == "logo.png"
+ expect(avatar_file.filename).to eq("logo.png")
end

it "should have a file" do
- avatar_file.file.should == file.tempfile
+ expect(avatar_file.file).to eq(file.tempfile)
end

it "should have a source as 'image'" do
- avatar_file.source.should == :image
+ expect(avatar_file.source).to eq(:image)
end
end

@@ -38,19 +38,19 @@ describe AvatarUploadService do
before { FileHelper.stubs(:download).returns(logo) }

it "should have a filesize" do
- avatar_file.filesize.should > 0
+ expect(avatar_file.filesize).to be > 0
end

it "should have a filename" do
- avatar_file.filename.should == "logo.png"
+ expect(avatar_file.filename).to eq("logo.png")
end

it "should have a file" do
- avatar_file.file.should == logo
+ expect(avatar_file.file).to eq(logo)
end

it "should have a source as 'url'" do
- avatar_file.source.should == :url
+ expect(avatar_file.source).to eq(:url)
end
end
end

@@ -9,13 +9,13 @@ describe Cache do

it "supports fixnum" do
cache.write("num", 1)
- cache.read("num").should == 1
+ expect(cache.read("num")).to eq(1)
end

it "supports hash" do
hash = {a: 1, b: [1,2,3]}
cache.write("hash", hash)
- cache.read("hash").should == hash
+ expect(cache.read("hash")).to eq(hash)
end

it "can be cleared" do
@@ -23,7 +23,7 @@ describe Cache do
cache.write("hello1", "world")
cache.clear

- cache.read("hello0").should == nil
+ expect(cache.read("hello0")).to eq(nil)
end

it "can delete by family" do
@@ -32,8 +32,8 @@ describe Cache do

cache.delete_by_family("my_family")

- cache.fetch("key").should == nil
- cache.fetch("key2").should == nil
+ expect(cache.fetch("key")).to eq(nil)
+ expect(cache.fetch("key2")).to eq(nil)

end

@@ -43,7 +43,7 @@ describe Cache do
end

cache.delete("key")
- cache.fetch("key").should == nil
+ expect(cache.fetch("key")).to eq(nil)
end

#TODO yuck on this mock
@@ -64,7 +64,7 @@ describe Cache do
r = cache.fetch "key" do
"bob"
end
- r.should == "bob"
+ expect(r).to eq("bob")
end

it "can fetch existing correctly" do
@@ -73,6 +73,6 @@ describe Cache do
r = cache.fetch "key" do
"bob"
end
- r.should == "bill"
+ expect(r).to eq("bill")
end
end

@@ -15,9 +15,9 @@ describe CategoryList do
cat.save

# uncategorized + this
- CategoryList.new(Guardian.new admin).categories.count.should == 2
- CategoryList.new(Guardian.new user).categories.count.should == 0
- CategoryList.new(Guardian.new nil).categories.count.should == 0
+ expect(CategoryList.new(Guardian.new admin).categories.count).to eq(2)
+ expect(CategoryList.new(Guardian.new user).categories.count).to eq(0)
+ expect(CategoryList.new(Guardian.new nil).categories.count).to eq(0)
end

it "doesn't show topics that you can't view" do
@@ -36,14 +36,14 @@ describe CategoryList do

CategoryFeaturedTopic.feature_topics

- CategoryList.new(Guardian.new(admin)).categories.find { |x| x.name == public_cat.name }.displayable_topics.count.should == 2
- CategoryList.new(Guardian.new(admin)).categories.find { |x| x.name == private_cat.name }.displayable_topics.count.should == 1
+ expect(CategoryList.new(Guardian.new(admin)).categories.find { |x| x.name == public_cat.name }.displayable_topics.count).to eq(2)
+ expect(CategoryList.new(Guardian.new(admin)).categories.find { |x| x.name == private_cat.name }.displayable_topics.count).to eq(1)

- CategoryList.new(Guardian.new(user)).categories.find { |x| x.name == public_cat.name }.displayable_topics.count.should == 1
- CategoryList.new(Guardian.new(user)).categories.find { |x| x.name == private_cat.name }.should == nil
+ expect(CategoryList.new(Guardian.new(user)).categories.find { |x| x.name == public_cat.name }.displayable_topics.count).to eq(1)
+ expect(CategoryList.new(Guardian.new(user)).categories.find { |x| x.name == private_cat.name }).to eq(nil)

- CategoryList.new(Guardian.new(nil)).categories.find { |x| x.name == public_cat.name }.displayable_topics.count.should == 1
- CategoryList.new(Guardian.new(nil)).categories.find { |x| x.name == private_cat.name }.should == nil
+ expect(CategoryList.new(Guardian.new(nil)).categories.find { |x| x.name == public_cat.name }.displayable_topics.count).to eq(1)
+ expect(CategoryList.new(Guardian.new(nil)).categories.find { |x| x.name == private_cat.name }).to eq(nil)
end
end

@@ -54,33 +54,33 @@ describe CategoryList do
context "without a featured topic" do

it "should not return empty categories" do
- category_list.categories.should be_blank
+ expect(category_list.categories).to be_blank
end

it "returns empty categories for those who can create them" do
SiteSetting.stubs(:allow_uncategorized_topics).returns(true)
Guardian.any_instance.expects(:can_create?).with(Category).returns(true)
- category_list.categories.should_not be_blank
+ expect(category_list.categories).not_to be_blank
end

it "returns empty categories with descriptions" do
Fabricate(:category, description: 'The category description.')
Guardian.any_instance.expects(:can_create?).with(Category).returns(false)
- category_list.categories.should_not be_blank
+ expect(category_list.categories).not_to be_blank
end

it 'returns the empty category and a non-empty category for those who can create them' do
SiteSetting.stubs(:allow_uncategorized_topics).returns(true)
Fabricate(:topic, category: Fabricate(:category))
Guardian.any_instance.expects(:can_create?).with(Category).returns(true)
- category_list.categories.size.should == 3
- category_list.categories.should include(topic_category)
+ expect(category_list.categories.size).to eq(3)
+ expect(category_list.categories).to include(topic_category)
end

it "doesn't return empty uncategorized category to admins if allow_uncategorized_topics is false" do
SiteSetting.stubs(:allow_uncategorized_topics).returns(false)
- CategoryList.new(Guardian.new(user)).categories.should be_empty
- CategoryList.new(Guardian.new(admin)).categories.map(&:id).should_not include(SiteSetting.uncategorized_category_id)
+ expect(CategoryList.new(Guardian.new(user)).categories).to be_empty
+ expect(CategoryList.new(Guardian.new(admin)).categories.map(&:id)).not_to include(SiteSetting.uncategorized_category_id)
end

end
@@ -90,15 +90,15 @@ describe CategoryList do
let(:category) { category_list.categories.first }

it "should return the category" do
- category.should be_present
+ expect(category).to be_present
end

it "returns the correct category" do
- category.id.should == topic_category.id
+ expect(category.id).to eq(topic_category.id)
end

it "should contain our topic" do
- category.featured_topics.include?(topic).should == true
+ expect(category.featured_topics.include?(topic)).to eq(true)
end
end

@@ -120,16 +120,16 @@ describe CategoryList do

it "returns categories in specified order" do
cat1, cat2 = Fabricate(:category, position: 1), Fabricate(:category, position: 0)
- category_ids.should == [cat2.id, cat1.id]
+ expect(category_ids).to eq([cat2.id, cat1.id])
end

it "handles duplicate position values" do
cat1, cat2, cat3, cat4 = Fabricate(:category, position: 0), Fabricate(:category, position: 0), Fabricate(:category, position: nil), Fabricate(:category, position: 0)
first_three = category_ids[0,3] # The order is not deterministic
- first_three.should include(cat1.id)
- first_three.should include(cat2.id)
- first_three.should include(cat4.id)
- category_ids[-1].should == cat3.id
+ expect(first_three).to include(cat1.id)
+ expect(first_three).to include(cat2.id)
+ expect(first_three).to include(cat4.id)
+ expect(category_ids[-1]).to eq(cat3.id)
end
end

@@ -141,12 +141,12 @@ describe CategoryList do
it "returns categories in order of activity" do
cat1 = Fabricate(:category, position: 0, posts_week: 1, posts_month: 1, posts_year: 1)
cat2 = Fabricate(:category, position: 1, posts_week: 2, posts_month: 1, posts_year: 1)
- category_ids.should == [cat2.id, cat1.id]
+ expect(category_ids).to eq([cat2.id, cat1.id])
end

it "returns categories in order of id when there's no activity" do
cat1, cat2 = Fabricate(:category, position: 1), Fabricate(:category, position: 0)
- category_ids.should == [cat1.id, cat2.id]
+ expect(category_ids).to eq([cat1.id, cat2.id])
end
end
end

@@ -4,7 +4,7 @@ require_dependency "common_passwords/common_passwords"
describe CommonPasswords do

it "the passwords file should exist" do
- File.exists?(described_class::PASSWORD_FILE).should eq(true)
+ expect(File.exists?(described_class::PASSWORD_FILE)).to eq(true)
end

describe "#common_password?" do
@@ -15,25 +15,25 @@ describe CommonPasswords do
it "returns false if password isn't in the common passwords list" do
described_class.stubs(:password_list).returns(stub_everything(:include? => false))
@password = 'uncommonPassword'
- subject.should eq(false)
+ expect(subject).to eq(false)
end

it "returns false if password is nil" do
described_class.expects(:password_list).never
@password = nil
- subject.should eq(false)
+ expect(subject).to eq(false)
end

it "returns false if password is blank" do
described_class.expects(:password_list).never
@password = ""
- subject.should eq(false)
+ expect(subject).to eq(false)
end

it "returns true if password is in the common passwords list" do
described_class.stubs(:password_list).returns(stub_everything(:include? => true))
@password = "password"
- subject.should eq(true)
+ expect(subject).to eq(true)
end
end

@@ -45,7 +45,7 @@ describe CommonPasswords do
described_class.stubs(:redis).returns(mock_redis)
described_class.expects(:load_passwords).returns(['password'])
list = described_class.password_list
- list.should respond_to(:include?)
+ expect(list).to respond_to(:include?)
end

it "doesn't load the passwords file if redis has it" do
@@ -55,7 +55,7 @@ describe CommonPasswords do
described_class.stubs(:redis).returns(mock_redis)
described_class.expects(:load_passwords).never
list = described_class.password_list
- list.should respond_to(:include?)
+ expect(list).to respond_to(:include?)
end

it "loads the passwords file if redis has an empty list" do
@@ -65,7 +65,7 @@ describe CommonPasswords do
described_class.stubs(:redis).returns(mock_redis)
described_class.expects(:load_passwords).returns(['password'])
list = described_class.password_list
- list.should respond_to(:include?)
+ expect(list).to respond_to(:include?)
end
end

@@ -73,7 +73,7 @@ describe CommonPasswords do
it "tolerates it" do
described_class.stubs(:redis).returns(stub_everything(sismember: false, exists: false, scard: 0))
File.stubs(:readlines).with(described_class::PASSWORD_FILE).raises(Errno::ENOENT)
- described_class.common_password?("password").should eq(false)
+ expect(described_class.common_password?("password")).to eq(false)
end
end
end

@@ -32,12 +32,12 @@ describe ComposerMessagesFinder do

it "returns a message for a user who has not posted any topics" do
user.expects(:created_topic_count).returns(9)
- finder.check_education_message.should be_present
+ expect(finder.check_education_message).to be_present
end

it "returns no message when the user has posted enough topics" do
user.expects(:created_topic_count).returns(10)
- finder.check_education_message.should be_blank
+ expect(finder.check_education_message).to be_blank
end
end

@@ -50,12 +50,12 @@ describe ComposerMessagesFinder do

it "returns a message for a user who has not posted any topics" do
user.expects(:post_count).returns(9)
- finder.check_education_message.should be_present
+ expect(finder.check_education_message).to be_present
end

it "returns no message when the user has posted enough topics" do
user.expects(:post_count).returns(10)
- finder.check_education_message.should be_blank
+ expect(finder.check_education_message).to be_blank
end
end
end
@@ -68,12 +68,12 @@ describe ComposerMessagesFinder do

it "has no message when `posted_too_much_in_topic?` is false" do
user.expects(:posted_too_much_in_topic?).returns(false)
- finder.check_new_user_many_replies.should be_blank
+ expect(finder.check_new_user_many_replies).to be_blank
end

it "has a message when a user has posted too much" do
user.expects(:posted_too_much_in_topic?).returns(true)
- finder.check_new_user_many_replies.should be_present
+ expect(finder.check_new_user_many_replies).to be_present
end
end

@@ -87,27 +87,27 @@ describe ComposerMessagesFinder do
let!(:message) { finder.check_avatar_notification }

it "returns an avatar upgrade message" do
- message.should be_present
+ expect(message).to be_present
end

it "creates a notified_about_avatar log" do
- UserHistory.exists_for_user?(user, :notified_about_avatar).should == true
+ expect(UserHistory.exists_for_user?(user, :notified_about_avatar)).to eq(true)
end
end

it "doesn't return notifications for new users" do
user.trust_level = TrustLevel[0]
- finder.check_avatar_notification.should be_blank
+ expect(finder.check_avatar_notification).to be_blank
end

it "doesn't return notifications for users who have custom avatars" do
user.uploaded_avatar_id = 1
- finder.check_avatar_notification.should be_blank
+ expect(finder.check_avatar_notification).to be_blank
end

it "doesn't notify users who have been notified already" do
UserHistory.create!(action: UserHistory.actions[:notified_about_avatar], target_user_id: user.id )
- finder.check_avatar_notification.should be_blank
+ expect(finder.check_avatar_notification).to be_blank
end
end

@@ -127,11 +127,11 @@ describe ComposerMessagesFinder do

it "does not give a message for new topics" do
finder = ComposerMessagesFinder.new(user, composerAction: 'createTopic')
- finder.check_sequential_replies.should be_blank
+ expect(finder.check_sequential_replies).to be_blank
end

it "does not give a message without a topic id" do
- ComposerMessagesFinder.new(user, composerAction: 'reply').check_sequential_replies.should be_blank
+ expect(ComposerMessagesFinder.new(user, composerAction: 'reply').check_sequential_replies).to be_blank
end

context "reply" do
@@ -140,39 +140,39 @@ describe ComposerMessagesFinder do

it "does not give a message to users who are still in the 'education' phase" do
user.stubs(:post_count).returns(9)
- finder.check_sequential_replies.should be_blank
+ expect(finder.check_sequential_replies).to be_blank
end

it "doesn't notify a user it has already notified about sequential replies" do
UserHistory.create!(action: UserHistory.actions[:notified_about_sequential_replies], target_user_id: user.id, topic_id: topic.id )
- finder.check_sequential_replies.should be_blank
+ expect(finder.check_sequential_replies).to be_blank
end


it "will notify you if it hasn't in the current topic" do
UserHistory.create!(action: UserHistory.actions[:notified_about_sequential_replies], target_user_id: user.id, topic_id: topic.id+1 )
- finder.check_sequential_replies.should be_present
+ expect(finder.check_sequential_replies).to be_present
end

it "doesn't notify a user who has less than the `sequential_replies_threshold` threshold posts" do
SiteSetting.stubs(:sequential_replies_threshold).returns(5)
- finder.check_sequential_replies.should be_blank
+ expect(finder.check_sequential_replies).to be_blank
end

it "doesn't notify a user if another user posted" do
Fabricate(:post, topic: topic, user: Fabricate(:user))
- finder.check_sequential_replies.should be_blank
+ expect(finder.check_sequential_replies).to be_blank
end

context "success" do
let!(:message) { finder.check_sequential_replies }

it "returns a message" do
- message.should be_present
+ expect(message).to be_present
end

it "creates a notified_about_sequential_replies log" do
- UserHistory.exists_for_user?(user, :notified_about_sequential_replies).should == true
+ expect(UserHistory.exists_for_user?(user, :notified_about_sequential_replies)).to eq(true)
end

end
@@ -199,11 +199,11 @@ describe ComposerMessagesFinder do

it "does not give a message for new topics" do
finder = ComposerMessagesFinder.new(user, composerAction: 'createTopic')
- finder.check_dominating_topic.should be_blank
+ expect(finder.check_dominating_topic).to be_blank
end

it "does not give a message without a topic id" do
- ComposerMessagesFinder.new(user, composerAction: 'reply').check_dominating_topic.should be_blank
+ expect(ComposerMessagesFinder.new(user, composerAction: 'reply').check_dominating_topic).to be_blank
end

context "reply" do
@@ -211,53 +211,53 @@ describe ComposerMessagesFinder do

it "does not give a message to users who are still in the 'education' phase" do
user.stubs(:post_count).returns(9)
- finder.check_dominating_topic.should be_blank
+ expect(finder.check_dominating_topic).to be_blank
end

it "does not notify if the `summary_posts_required` has not been reached" do
SiteSetting.stubs(:summary_posts_required).returns(100)
- finder.check_dominating_topic.should be_blank
+ expect(finder.check_dominating_topic).to be_blank
end

it "doesn't notify a user it has already notified in this topic" do
UserHistory.create!(action: UserHistory.actions[:notified_about_dominating_topic], topic_id: topic.id, target_user_id: user.id )
- finder.check_dominating_topic.should be_blank
+ expect(finder.check_dominating_topic).to be_blank
end

it "notifies a user if the topic is different" do
UserHistory.create!(action: UserHistory.actions[:notified_about_dominating_topic], topic_id: topic.id+1, target_user_id: user.id )
- finder.check_dominating_topic.should be_present
+ expect(finder.check_dominating_topic).to be_present
end

it "doesn't notify a user if the topic has less than `summary_posts_required` posts" do
SiteSetting.stubs(:summary_posts_required).returns(5)
- finder.check_dominating_topic.should be_blank
+ expect(finder.check_dominating_topic).to be_blank
end

it "doesn't notify a user if they've posted less than the percentage" do
SiteSetting.stubs(:dominating_topic_minimum_percent).returns(100)
- finder.check_dominating_topic.should be_blank
+ expect(finder.check_dominating_topic).to be_blank
end

it "doesn't notify you if it's your own topic" do
topic.update_column(:user_id, user.id)
- finder.check_dominating_topic.should be_blank
+ expect(finder.check_dominating_topic).to be_blank
end

it "doesn't notify you in a private message" do
topic.update_columns(category_id: nil, archetype: Archetype.private_message)
- finder.check_dominating_topic.should be_blank
+ expect(finder.check_dominating_topic).to be_blank
end

context "success" do
let!(:message) { finder.check_dominating_topic }

it "returns a message" do
- message.should be_present
+ expect(message).to be_present
end

it "creates a notified_about_dominating_topic log" do
- UserHistory.exists_for_user?(user, :notified_about_dominating_topic).should == true
+ expect(UserHistory.exists_for_user?(user, :notified_about_dominating_topic)).to eq(true)
end

end
@@ -270,8 +270,8 @@ describe ComposerMessagesFinder do
let(:topic) { Fabricate(:topic) }

it "does not give a message without a topic id" do
- described_class.new(user, composerAction: 'createTopic').check_reviving_old_topic.should be_blank
- described_class.new(user, composerAction: 'reply').check_reviving_old_topic.should be_blank
+ expect(described_class.new(user, composerAction: 'createTopic').check_reviving_old_topic).to be_blank
+ expect(described_class.new(user, composerAction: 'reply').check_reviving_old_topic).to be_blank
end

context "a reply" do
@@ -282,12 +282,12 @@ describe ComposerMessagesFinder do

it "does not notify if last post is recent" do
topic = Fabricate(:topic, last_posted_at: 1.hour.ago)
- described_class.new(user, composerAction: 'reply', topic_id: topic.id).check_reviving_old_topic.should be_blank
+ expect(described_class.new(user, composerAction: 'reply', topic_id: topic.id).check_reviving_old_topic).to be_blank
end

it "notifies if last post is old" do
topic = Fabricate(:topic, last_posted_at: 181.days.ago)
- described_class.new(user, composerAction: 'reply', topic_id: topic.id).check_reviving_old_topic.should_not be_blank
+ expect(described_class.new(user, composerAction: 'reply', topic_id: topic.id).check_reviving_old_topic).not_to be_blank
end
end

@@ -298,12 +298,12 @@ describe ComposerMessagesFinder do

it "does not notify if last post is new" do
topic = Fabricate(:topic, last_posted_at: 1.hour.ago)
- described_class.new(user, composerAction: 'reply', topic_id: topic.id).check_reviving_old_topic.should be_blank
+ expect(described_class.new(user, composerAction: 'reply', topic_id: topic.id).check_reviving_old_topic).to be_blank
end

it "does not notify if last post is old" do
topic = Fabricate(:topic, last_posted_at: 365.days.ago)
- described_class.new(user, composerAction: 'reply', topic_id: topic.id).check_reviving_old_topic.should be_blank
+ expect(described_class.new(user, composerAction: 'reply', topic_id: topic.id).check_reviving_old_topic).to be_blank
end
end
end

@@ -31,7 +31,7 @@ describe HasCustomFields do
it "simple modification of custom fields" do
test_item = CustomFieldsTestItem.new

- test_item.custom_fields["a"].should == nil
+ expect(test_item.custom_fields["a"]).to eq(nil)

test_item.custom_fields["bob"] = "marley"
test_item.custom_fields["jack"] = "black"
@@ -40,8 +40,8 @@ describe HasCustomFields do

test_item = CustomFieldsTestItem.find(test_item.id)

- test_item.custom_fields["bob"].should == "marley"
- test_item.custom_fields["jack"].should == "black"
+ expect(test_item.custom_fields["bob"]).to eq("marley")
+ expect(test_item.custom_fields["jack"]).to eq("black")

test_item.custom_fields.delete("bob")
test_item.custom_fields["jack"] = "jill"
@@ -49,42 +49,42 @@ describe HasCustomFields do
test_item.save
test_item = CustomFieldsTestItem.find(test_item.id)

- test_item.custom_fields.should == {"jack" => "jill"}
+ expect(test_item.custom_fields).to eq({"jack" => "jill"})
end

it "casts integers to string without error" do
test_item = CustomFieldsTestItem.new
- test_item.custom_fields["a"].should == nil
+ expect(test_item.custom_fields["a"]).to eq(nil)
test_item.custom_fields["a"] = 0

- test_item.custom_fields["a"].should == 0
+ expect(test_item.custom_fields["a"]).to eq(0)
test_item.save

# should be casted right after saving
- test_item.custom_fields["a"].should == "0"
+ expect(test_item.custom_fields["a"]).to eq("0")

test_item = CustomFieldsTestItem.find(test_item.id)
- test_item.custom_fields["a"].should == "0"
+ expect(test_item.custom_fields["a"]).to eq("0")
end

it "reload loads from database" do
test_item = CustomFieldsTestItem.new
test_item.custom_fields["a"] = 0

- test_item.custom_fields["a"].should == 0
+ expect(test_item.custom_fields["a"]).to eq(0)
test_item.save

# should be casted right after saving
- test_item.custom_fields["a"].should == "0"
+ expect(test_item.custom_fields["a"]).to eq("0")

CustomFieldsTestItem.exec_sql("UPDATE custom_fields_test_item_custom_fields SET value='1' WHERE custom_fields_test_item_id=? AND name='a'", test_item.id)

# still the same, did not load
- test_item.custom_fields["a"].should == "0"
+ expect(test_item.custom_fields["a"]).to eq("0")

# refresh loads from database
- test_item.reload.custom_fields["a"].should == "1"
- test_item.custom_fields["a"].should == "1"
+ expect(test_item.reload.custom_fields["a"]).to eq("1")
+ expect(test_item.custom_fields["a"]).to eq("1")

end
it "double save actually saves" do
@@ -97,7 +97,7 @@ describe HasCustomFields do
test_item.save

db_item = CustomFieldsTestItem.find(test_item.id)
- db_item.custom_fields.should == {"a" => "b", "c" => "d"}
+ expect(db_item.custom_fields).to eq({"a" => "b", "c" => "d"})

end

@@ -109,14 +109,14 @@ describe HasCustomFields do
test_item.save

db_item = CustomFieldsTestItem.find(test_item.id)
- db_item.custom_fields.should == {"a" => ["b", "c", "d"]}
+ expect(db_item.custom_fields).to eq({"a" => ["b", "c", "d"]})

db_item.custom_fields.update('a' => ['c', 'd'])
db_item.save
- db_item.custom_fields.should == {"a" => ["c", "d"]}
+ expect(db_item.custom_fields).to eq({"a" => ["c", "d"]})

db_item.custom_fields.delete('a')
- db_item.custom_fields.should == {}
+ expect(db_item.custom_fields).to eq({})

end

@@ -125,10 +125,10 @@ describe HasCustomFields do
test_item = CustomFieldsTestItem.new
test_item.custom_fields = {"a" => ["b", 10, "d"]}
test_item.save
- test_item.custom_fields.should == {"a" => ["b", "10", "d"]}
+ expect(test_item.custom_fields).to eq({"a" => ["b", "10", "d"]})

db_item = CustomFieldsTestItem.find(test_item.id)
- db_item.custom_fields.should == {"a" => ["b", "10", "d"]}
+ expect(db_item.custom_fields).to eq({"a" => ["b", "10", "d"]})

end

@@ -141,13 +141,13 @@ describe HasCustomFields do
test_item.save
test_item.reload

- test_item.custom_fields.should == {"bool" => true, "int" => 1}
+ expect(test_item.custom_fields).to eq({"bool" => true, "int" => 1})
end

it "simple modifications don't interfere" do
test_item = CustomFieldsTestItem.new

- test_item.custom_fields["a"].should == nil
+ expect(test_item.custom_fields["a"]).to eq(nil)

test_item.custom_fields["bob"] = "marley"
test_item.custom_fields["jack"] = "black"
@@ -155,7 +155,7 @@ describe HasCustomFields do

test_item2 = CustomFieldsTestItem.new

- test_item2.custom_fields["x"].should == nil
+ expect(test_item2.custom_fields["x"]).to eq(nil)

test_item2.custom_fields["sixto"] = "rodriguez"
test_item2.custom_fields["de"] = "la playa"
@@ -164,8 +164,8 @@ describe HasCustomFields do
test_item = CustomFieldsTestItem.find(test_item.id)
test_item2 = CustomFieldsTestItem.find(test_item2.id)

- test_item.custom_fields.should == {"jack" => "black", "bob" => "marley"}
- test_item2.custom_fields.should == {"sixto" => "rodriguez", "de" => "la playa"}
+ expect(test_item.custom_fields).to eq({"jack" => "black", "bob" => "marley"})
+ expect(test_item2.custom_fields).to eq({"sixto" => "rodriguez", "de" => "la playa"})
end

it "supports bulk retrieval with a list of ids" do
@@ -178,10 +178,10 @@ describe HasCustomFields do
item2.save

fields = CustomFieldsTestItem.custom_fields_for_ids([item1.id, item2.id], ['a', 'e'])
- fields.should be_present
- fields[item1.id]['a'].should =~ ['b', 'c', 'd']
- fields[item1.id]['not_whitelisted'].should be_blank
- fields[item2.id]['e'].should == 'hallo'
+ expect(fields).to be_present
+ expect(fields[item1.id]['a']).to match_array(['b', 'c', 'd'])
+ expect(fields[item1.id]['not_whitelisted']).to be_blank
+ expect(fields[item2.id]['e']).to eq('hallo')
end
end
end

@@ -28,24 +28,24 @@ describe Positionable do
Topic.exec_sql("insert into test_items(id,position) values(#{i}, #{i})")
end

- positions.should == [0,1,2,3,4]
+ expect(positions).to eq([0,1,2,3,4])
TestItem.find(3).move_to(0)
- positions.should == [3,0,1,2,4]
- TestItem.pluck(:position).sort.should == [0,1,2,3,4]
+ expect(positions).to eq([3,0,1,2,4])
+ expect(TestItem.pluck(:position).sort).to eq([0,1,2,3,4])

TestItem.find(3).move_to(1)
- positions.should == [0,3,1,2,4]
+ expect(positions).to eq([0,3,1,2,4])

# this is somewhat odd, but when there is no such position, not much we can do
TestItem.find(1).move_to(5)
- positions.should == [0,3,2,4,1]
+ expect(positions).to eq([0,3,2,4,1])

- TestItem.pluck(:position).sort.should == [0,1,2,3,4]
+ expect(TestItem.pluck(:position).sort).to eq([0,1,2,3,4])

item = TestItem.new
item.id = 7
item.save
- item.position.should == 5
+ expect(item.position).to eq(5)
end
end
end

@@ -6,24 +6,24 @@ describe ContentBuffer do
it "handles deletion across lines properly" do
c = ContentBuffer.new("a\nbc\nc")
c.apply_transform!(start: {row: 0, col: 0}, finish: {col: 1, row: 1}, operation: :delete)
- c.to_s.should == "c\nc"
+ expect(c.to_s).to eq("c\nc")
end
it "handles deletion inside lines properly" do
c = ContentBuffer.new("hello world")
c.apply_transform!(start: {row: 0, col: 1}, finish: {col: 4, row: 0}, operation: :delete)
- c.to_s.should == "ho world"
+ expect(c.to_s).to eq("ho world")
end

it "handles inserts inside lines properly" do
c = ContentBuffer.new("hello!")
c.apply_transform!(start: {row: 0, col: 5}, operation: :insert, text: " world")
- c.to_s.should == "hello world!"
+ expect(c.to_s).to eq("hello world!")
end

it "handles multiline inserts" do
c = ContentBuffer.new("hello!")
c.apply_transform!(start: {row: 0, col: 5}, operation: :insert, text: "\nworld")
- c.to_s.should == "hello\nworld!"
+ expect(c.to_s).to eq("hello\nworld!")
end

end

@@ -49,10 +49,10 @@ describe CookedPostProcessor do

it "works" do
# adds the width from the image sizes provided when no dimension is provided
- cpp.html.should =~ /src="http:\/\/foo.bar\/image.png" width="111" height="222"/
+ expect(cpp.html).to match(/src="http:\/\/foo.bar\/image.png" width="111" height="222"/)
# adds the width from the image sizes provided
- cpp.html.should =~ /src="http:\/\/domain.com\/picture.jpg" width="50" height="42"/
- cpp.should be_dirty
+ expect(cpp.html).to match(/src="http:\/\/domain.com\/picture.jpg" width="50" height="42"/)
+ expect(cpp).to be_dirty
end

end
@@ -65,8 +65,8 @@ describe CookedPostProcessor do
it "adds the width and height to images that don't have them" do
FastImage.expects(:size).returns([123, 456])
cpp.post_process_images
- cpp.html.should =~ /width="123" height="456"/
- cpp.should be_dirty
+ expect(cpp.html).to match(/width="123" height="456"/)
+ expect(cpp).to be_dirty
end

end
@@ -90,10 +90,10 @@ describe CookedPostProcessor do

it "generates overlay information" do
cpp.post_process_images
- cpp.html.should match_html '<div class="lightbox-wrapper"><a data-download-href="/uploads/default/e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98" href="/uploads/default/1/1234567890123456.jpg" class="lightbox" title="logo.png"><img src="/uploads/default/_optimized/da3/9a3/ee5e6b4b0d_690x1380.png" width="690" height="1380"><div class="meta">
+ expect(cpp.html).to match_html '<div class="lightbox-wrapper"><a data-download-href="/uploads/default/e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98" href="/uploads/default/1/1234567890123456.jpg" class="lightbox" title="logo.png"><img src="/uploads/default/_optimized/da3/9a3/ee5e6b4b0d_690x1380.png" width="690" height="1380"><div class="meta">
<span class="filename">logo.png</span><span class="informations">1000x2000 1.21 KB</span><span class="expand"></span>
</div></a></div>'
- cpp.should be_dirty
+ expect(cpp).to be_dirty
end

end
@@ -117,10 +117,10 @@ describe CookedPostProcessor do

it "generates overlay information" do
cpp.post_process_images
- cpp.html.should match_html '<div class="lightbox-wrapper"><a data-download-href="/uploads/default/e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98" href="/uploads/default/1/1234567890123456.jpg" class="lightbox" title="WAT"><img src="/uploads/default/_optimized/da3/9a3/ee5e6b4b0d_690x1380.png" title="WAT" width="690" height="1380"><div class="meta">
+ expect(cpp.html).to match_html '<div class="lightbox-wrapper"><a data-download-href="/uploads/default/e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98" href="/uploads/default/1/1234567890123456.jpg" class="lightbox" title="WAT"><img src="/uploads/default/_optimized/da3/9a3/ee5e6b4b0d_690x1380.png" title="WAT" width="690" height="1380"><div class="meta">
<span class="filename">WAT</span><span class="informations">1000x2000 1.21 KB</span><span class="expand"></span>
</div></a></div>'
- cpp.should be_dirty
+ expect(cpp).to be_dirty
end

end
@@ -133,10 +133,10 @@ describe CookedPostProcessor do

it "adds a topic image if there's one in the post" do
FastImage.stubs(:size)
- post.topic.image_url.should == nil
+ expect(post.topic.image_url).to eq(nil)
cpp.post_process_images
post.topic.reload
- post.topic.image_url.should be_present
+ expect(post.topic.image_url).to be_present
end

end
@@ -149,7 +149,7 @@ describe CookedPostProcessor do
let(:cpp) { CookedPostProcessor.new(post) }

it "does not extract images inside oneboxes or quotes" do
- cpp.extract_images.length.should == 0
+ expect(cpp.extract_images.length).to eq(0)
end

end
@@ -161,7 +161,7 @@ describe CookedPostProcessor do

it "returns the size" do
image_sizes = { "http://my.discourse.org/image.png" => { "width" => 111, "height" => 222 } }
- cpp.get_size_from_image_sizes("/image.png", image_sizes).should == [111, 222]
+ expect(cpp.get_size_from_image_sizes("/image.png", image_sizes)).to eq([111, 222])
end

end
@@ -185,7 +185,7 @@ describe CookedPostProcessor do
SiteSetting.stubs(:crawl_images?).returns(true)
FastImage.expects(:size).returns([200, 400])
cpp.get_size("http://foo.bar/image3.png")
- cpp.get_size("http://foo.bar/image3.png").should == [200, 400]
+ expect(cpp.get_size("http://foo.bar/image3.png")).to eq([200, 400])
end

context "when crawl_images is disabled" do
@@ -194,7 +194,7 @@ describe CookedPostProcessor do

it "doesn't call FastImage" do
FastImage.expects(:size).never
- cpp.get_size("http://foo.bar/image1.png").should == nil
+ expect(cpp.get_size("http://foo.bar/image1.png")).to eq(nil)
end

it "is always allowed to crawl our own images" do
@@ -202,7 +202,7 @@ describe CookedPostProcessor do
store.expects(:has_been_uploaded?).returns(true)
Discourse.expects(:store).returns(store)
FastImage.expects(:size).returns([100, 200])
- cpp.get_size("http://foo.bar/image2.png").should == [100, 200]
+ expect(cpp.get_size("http://foo.bar/image2.png")).to eq([100, 200])
end

end
@@ -215,19 +215,19 @@ describe CookedPostProcessor do
let(:cpp) { CookedPostProcessor.new(post) }

it "validates HTTP(s) urls" do
- cpp.is_valid_image_url?("http://domain.com").should == true
- cpp.is_valid_image_url?("https://domain.com").should == true
+ expect(cpp.is_valid_image_url?("http://domain.com")).to eq(true)
+ expect(cpp.is_valid_image_url?("https://domain.com")).to eq(true)
end

it "doesn't validate other urls" do
- cpp.is_valid_image_url?("ftp://domain.com").should == false
- cpp.is_valid_image_url?("ftps://domain.com").should == false
- cpp.is_valid_image_url?("/tmp/image.png").should == false
- cpp.is_valid_image_url?("//domain.com").should == false
+ expect(cpp.is_valid_image_url?("ftp://domain.com")).to eq(false)
+ expect(cpp.is_valid_image_url?("ftps://domain.com")).to eq(false)
+ expect(cpp.is_valid_image_url?("/tmp/image.png")).to eq(false)
+ expect(cpp.is_valid_image_url?("//domain.com")).to eq(false)
end

it "doesn't throw an exception with a bad URI" do
- cpp.is_valid_image_url?("http://do<main.com").should == nil
+ expect(cpp.is_valid_image_url?("http://do<main.com")).to eq(nil)
end

end
@@ -238,17 +238,17 @@ describe CookedPostProcessor do
let(:cpp) { CookedPostProcessor.new(post) }

it "returns the filename of the src when there is no upload" do
- cpp.get_filename(nil, "http://domain.com/image.png").should == "image.png"
+ expect(cpp.get_filename(nil, "http://domain.com/image.png")).to eq("image.png")
end

it "returns the original filename of the upload when there is an upload" do
upload = build(:upload, { original_filename: "upload.jpg" })
- cpp.get_filename(upload, "http://domain.com/image.png").should == "upload.jpg"
+ expect(cpp.get_filename(upload, "http://domain.com/image.png")).to eq("upload.jpg")
end

it "returns a generic name for pasted images" do
upload = build(:upload, { original_filename: "blob.png" })
- cpp.get_filename(upload, "http://domain.com/image.png").should == I18n.t('upload.pasted_image_filename')
+ expect(cpp.get_filename(upload, "http://domain.com/image.png")).to eq(I18n.t('upload.pasted_image_filename'))
end

end
@@ -266,11 +266,11 @@ describe CookedPostProcessor do
end

it "is dirty" do
- cpp.should be_dirty
+ expect(cpp).to be_dirty
end

it "inserts the onebox without wrapping p" do
- cpp.html.should match_html "<div>GANGNAM STYLE</div>"
+ expect(cpp.html).to match_html "<div>GANGNAM STYLE</div>"
end

end
@@ -282,7 +282,7 @@ describe CookedPostProcessor do

it "uses schemaless url for uploads" do
cpp.optimize_urls
- cpp.html.should match_html '<a href="//test.localhost/uploads/default/2/2345678901234567.jpg">Link</a>
+ expect(cpp.html).to match_html '<a href="//test.localhost/uploads/default/2/2345678901234567.jpg">Link</a>
<img src="//test.localhost/uploads/default/1/1234567890123456.jpg"><a href="http://www.google.com">Google</a>
<img src="http://foo.bar/image.png">'
end
@@ -292,7 +292,7 @@ describe CookedPostProcessor do
it "uses schemaless CDN url for uploads" do
Rails.configuration.action_controller.stubs(:asset_host).returns("http://my.cdn.com")
cpp.optimize_urls
- cpp.html.should match_html '<a href="//my.cdn.com/uploads/default/2/2345678901234567.jpg">Link</a>
+ expect(cpp.html).to match_html '<a href="//my.cdn.com/uploads/default/2/2345678901234567.jpg">Link</a>
<img src="//my.cdn.com/uploads/default/1/1234567890123456.jpg"><a href="http://www.google.com">Google</a>
<img src="http://foo.bar/image.png">'
end
@@ -365,7 +365,7 @@ describe CookedPostProcessor do
it "does nothing when there's enough disk space" do
SiteSetting.expects(:download_remote_images_threshold).returns(20)
SiteSetting.expects(:download_remote_images_to_local).never
- cpp.disable_if_low_on_disk_space.should == false
+ expect(cpp.disable_if_low_on_disk_space).to eq(false)
end

context "when there's not enough disk space" do
@@ -375,8 +375,8 @@ describe CookedPostProcessor do
it "disables download_remote_images_threshold and send a notification to the admin" do
StaffActionLogger.any_instance.expects(:log_site_setting_change).once
SystemMessage.expects(:create_from_system_user).with(Discourse.site_contact_user, :download_remote_images_disabled).once
- cpp.disable_if_low_on_disk_space.should == true
- SiteSetting.download_remote_images_to_local.should == false
+ expect(cpp.disable_if_low_on_disk_space).to eq(true)
+ expect(SiteSetting.download_remote_images_to_local).to eq(false)
end

end
@@ -391,12 +391,12 @@ describe CookedPostProcessor do

it "is true when the image is inside a link" do
img = doc.css("img#linked_image").first
- cpp.is_a_hyperlink?(img).should == true
+ expect(cpp.is_a_hyperlink?(img)).to eq(true)
end

it "is false when the image is not inside a link" do
img = doc.css("img#standard_image").first
- cpp.is_a_hyperlink?(img).should == false
+ expect(cpp.is_a_hyperlink?(img)).to eq(false)
end

end

@@ -5,29 +5,29 @@ describe CrawlerDetection do
describe "crawler?" do
it "returns true for crawler user agents" do
# https://support.google.com/webmasters/answer/1061943?hl=en
- described_class.crawler?("Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)").should == true
- described_class.crawler?("Googlebot/2.1 (+http://www.google.com/bot.html)").should == true
- described_class.crawler?("Googlebot-News").should == true
- described_class.crawler?("Googlebot-Image/1.0").should == true
- described_class.crawler?("Googlebot-Video/1.0").should == true
- described_class.crawler?("(compatible; Googlebot-Mobile/2.1; +http://www.google.com/bot.html)").should == true
- described_class.crawler?("Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5376e Safari/8536.25 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)").should == true
- described_class.crawler?("(compatible; Mediapartners-Google/2.1; +http://www.google.com/bot.html)").should == true
- described_class.crawler?("Mediapartners-Google").should == true
- described_class.crawler?("AdsBot-Google (+http://www.google.com/adsbot.html)").should == true
- described_class.crawler?("Twitterbot").should == true
- described_class.crawler?("facebookexternalhit/1.1 (+http(s)://www.facebook.com/externalhit_uatext.php)").should == true
- described_class.crawler?("Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)").should == true
- described_class.crawler?("Baiduspider+(+http://www.baidu.com/search/spider.htm)").should == true
+ expect(described_class.crawler?("Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")).to eq(true)
+ expect(described_class.crawler?("Googlebot/2.1 (+http://www.google.com/bot.html)")).to eq(true)
+ expect(described_class.crawler?("Googlebot-News")).to eq(true)
+ expect(described_class.crawler?("Googlebot-Image/1.0")).to eq(true)
+ expect(described_class.crawler?("Googlebot-Video/1.0")).to eq(true)
+ expect(described_class.crawler?("(compatible; Googlebot-Mobile/2.1; +http://www.google.com/bot.html)")).to eq(true)
+ expect(described_class.crawler?("Mozilla/5.0 (iPhone; CPU iPhone OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5376e Safari/8536.25 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")).to eq(true)
+ expect(described_class.crawler?("(compatible; Mediapartners-Google/2.1; +http://www.google.com/bot.html)")).to eq(true)
+ expect(described_class.crawler?("Mediapartners-Google")).to eq(true)
+ expect(described_class.crawler?("AdsBot-Google (+http://www.google.com/adsbot.html)")).to eq(true)
+ expect(described_class.crawler?("Twitterbot")).to eq(true)
+ expect(described_class.crawler?("facebookexternalhit/1.1 (+http(s)://www.facebook.com/externalhit_uatext.php)")).to eq(true)
+ expect(described_class.crawler?("Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)")).to eq(true)
+ expect(described_class.crawler?("Baiduspider+(+http://www.baidu.com/search/spider.htm)")).to eq(true)
end

it "returns false for non-crawler user agents" do
- described_class.crawler?("Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36").should == false
- described_class.crawler?("Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko").should == false
- described_class.crawler?("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Trident/6.0)").should == false
- described_class.crawler?("Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25").should == false
- described_class.crawler?("Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0").should == false
- described_class.crawler?("Mozilla/5.0 (Linux; U; Android 4.0.3; ko-kr; LG-L160L Build/IML74K) AppleWebkit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30").should == false
+ expect(described_class.crawler?("Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/32.0.1667.0 Safari/537.36")).to eq(false)
+ expect(described_class.crawler?("Mozilla/5.0 (Windows NT 6.3; Trident/7.0; rv:11.0) like Gecko")).to eq(false)
+ expect(described_class.crawler?("Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.2; Trident/6.0)")).to eq(false)
+ expect(described_class.crawler?("Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25")).to eq(false)
+ expect(described_class.crawler?("Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0")).to eq(false)
+ expect(described_class.crawler?("Mozilla/5.0 (Linux; U; Android 4.0.3; ko-kr; LG-L160L Build/IML74K) AppleWebkit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30")).to eq(false)
end

end

@@ -7,7 +7,7 @@ describe CurrentUser do
EmailToken.confirm(user.auth_token)

env = Rack::MockRequest.env_for("/test", "HTTP_COOKIE" => "_t=#{user.auth_token};")
- CurrentUser.lookup_from_env(env).should == user
+ expect(CurrentUser.lookup_from_env(env)).to eq(user)
end

end

@@ -19,8 +19,8 @@ describe DirectoryHelper do

describe '#tmp_directory' do
it 'is memoized by prefix' do
- helper.tmp_directory('prefix').should eq(helper.tmp_directory('prefix'))
- helper.tmp_directory('prefix').should_not eq(helper.tmp_directory('other_prefix'))
+ expect(helper.tmp_directory('prefix')).to eq(helper.tmp_directory('prefix'))
+ expect(helper.tmp_directory('prefix')).not_to eq(helper.tmp_directory('other_prefix'))
end
end

@@ -29,7 +29,7 @@ describe DirectoryHelper do
tmp_directory = helper.tmp_directory('prefix')
helper.remove_tmp_directory('prefix')

- Dir[tmp_directory].should_not be_present
+ expect(Dir[tmp_directory]).not_to be_present
end
end
end

@@ -12,41 +12,41 @@ describe DiscourseDiff do
|
|||
cur = "<div>#{CGI::escapeHTML(b)}</div>"
|
||||
|
||||
diff = DiscourseDiff.new(prev,cur)
|
||||
diff.inline_html.should_not =~ /<\/?test>/
|
||||
diff.side_by_side_html.should_not =~ /<\/?test>/
|
||||
expect(diff.inline_html).not_to match(/<\/?test>/)
|
||||
expect(diff.side_by_side_html).not_to match(/<\/?test>/)
|
||||
end
|
||||
|
||||
it "returns an empty div when no content is diffed" do
|
||||
DiscourseDiff.new("", "").inline_html.should == "<div class=\"inline-diff\"></div>"
|
||||
expect(DiscourseDiff.new("", "").inline_html).to eq("<div class=\"inline-diff\"></div>")
|
||||
end
|
||||
|
||||
it "returns the diffed content when there is no difference" do
|
||||
before = after = "<p>this is a paragraph</p>"
|
||||
DiscourseDiff.new(before, after).inline_html.should == "<div class=\"inline-diff\"><p>this is a paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).inline_html).to eq("<div class=\"inline-diff\"><p>this is a paragraph</p></div>")
|
||||
end
|
||||
|
||||
it "adds <ins> tags around added text" do
|
||||
before = "<p>this is a paragraph</p>"
|
||||
after = "<p>this is a great paragraph</p>"
|
||||
DiscourseDiff.new(before, after).inline_html.should == "<div class=\"inline-diff\"><p>this is a <ins>great </ins>paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).inline_html).to eq("<div class=\"inline-diff\"><p>this is a <ins>great </ins>paragraph</p></div>")
|
||||
end
|
||||
|
||||
it "adds <del> tags around removed text" do
|
||||
before = "<p>this is a great paragraph</p>"
|
||||
after = "<p>this is a paragraph</p>"
|
||||
DiscourseDiff.new(before, after).inline_html.should == "<div class=\"inline-diff\"><p>this is a <del>great </del>paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).inline_html).to eq("<div class=\"inline-diff\"><p>this is a <del>great </del>paragraph</p></div>")
|
||||
end
|
||||
|
||||
it "adds .diff-ins class when a paragraph is added" do
|
||||
before = "<p>this is the first paragraph</p>"
|
||||
after = "<p>this is the first paragraph</p><p>this is the second paragraph</p>"
|
||||
DiscourseDiff.new(before, after).inline_html.should == "<div class=\"inline-diff\"><p>this is the first paragraph</p><p class=\"diff-ins\">this is the second paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).inline_html).to eq("<div class=\"inline-diff\"><p>this is the first paragraph</p><p class=\"diff-ins\">this is the second paragraph</p></div>")
|
||||
end
|
||||
|
||||
it "adds .diff-del class when a paragraph is removed" do
|
||||
before = "<p>this is the first paragraph</p><p>this is the second paragraph</p>"
|
||||
after = "<p>this is the second paragraph</p>"
|
||||
DiscourseDiff.new(before, after).inline_html.should == "<div class=\"inline-diff\"><p class=\"diff-del\">this is the first paragraph</p><p>this is the second paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).inline_html).to eq("<div class=\"inline-diff\"><p class=\"diff-del\">this is the first paragraph</p><p>this is the second paragraph</p></div>")
|
||||
end
|
||||
|
||||
end
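
Note: the regular-expression assertions in this file change shape slightly as well; a sketch of the pattern, reusing the diff object built in the hunk above:

# RSpec 2: bare =~ hanging off the value under test
diff.inline_html.should_not =~ /<\/?test>/

# RSpec 3: the explicit match matcher
expect(diff.inline_html).not_to match(/<\/?test>/)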
@@ -54,36 +54,36 @@ describe DiscourseDiff do
|
|||
describe "side_by_side_html" do
|
||||
|
||||
it "returns two empty divs when no content is diffed" do
|
||||
DiscourseDiff.new("", "").side_by_side_html.should == "<div class=\"span8\"></div><div class=\"span8 offset1\"></div>"
|
||||
expect(DiscourseDiff.new("", "").side_by_side_html).to eq("<div class=\"span8\"></div><div class=\"span8 offset1\"></div>")
|
||||
end
|
||||
|
||||
it "returns the diffed content on both sides when there is no difference" do
|
||||
before = after = "<p>this is a paragraph</p>"
|
||||
DiscourseDiff.new(before, after).side_by_side_html.should == "<div class=\"span8\"><p>this is a paragraph</p></div><div class=\"span8 offset1\"><p>this is a paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_html).to eq("<div class=\"span8\"><p>this is a paragraph</p></div><div class=\"span8 offset1\"><p>this is a paragraph</p></div>")
|
||||
end
|
||||
|
||||
it "adds <ins> tags around added text on the right div" do
|
||||
before = "<p>this is a paragraph</p>"
|
||||
after = "<p>this is a great paragraph</p>"
|
||||
DiscourseDiff.new(before, after).side_by_side_html.should == "<div class=\"span8\"><p>this is a paragraph</p></div><div class=\"span8 offset1\"><p>this is a <ins>great </ins>paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_html).to eq("<div class=\"span8\"><p>this is a paragraph</p></div><div class=\"span8 offset1\"><p>this is a <ins>great </ins>paragraph</p></div>")
|
||||
end
|
||||
|
||||
it "adds <del> tags around removed text on the left div" do
|
||||
before = "<p>this is a great paragraph</p>"
|
||||
after = "<p>this is a paragraph</p>"
|
||||
DiscourseDiff.new(before, after).side_by_side_html.should == "<div class=\"span8\"><p>this is a <del>great </del>paragraph</p></div><div class=\"span8 offset1\"><p>this is a paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_html).to eq("<div class=\"span8\"><p>this is a <del>great </del>paragraph</p></div><div class=\"span8 offset1\"><p>this is a paragraph</p></div>")
|
||||
end
|
||||
|
||||
it "adds .diff-ins class when a paragraph is added" do
|
||||
before = "<p>this is the first paragraph</p>"
|
||||
after = "<p>this is the first paragraph</p><p>this is the second paragraph</p>"
|
||||
DiscourseDiff.new(before, after).side_by_side_html.should == "<div class=\"span8\"><p>this is the first paragraph</p></div><div class=\"span8 offset1\"><p>this is the first paragraph</p><p class=\"diff-ins\">this is the second paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_html).to eq("<div class=\"span8\"><p>this is the first paragraph</p></div><div class=\"span8 offset1\"><p>this is the first paragraph</p><p class=\"diff-ins\">this is the second paragraph</p></div>")
|
||||
end
|
||||
|
||||
it "adds .diff-del class when a paragraph is removed" do
|
||||
before = "<p>this is the first paragraph</p><p>this is the second paragraph</p>"
|
||||
after = "<p>this is the second paragraph</p>"
|
||||
DiscourseDiff.new(before, after).side_by_side_html.should == "<div class=\"span8\"><p class=\"diff-del\">this is the first paragraph</p><p>this is the second paragraph</p></div><div class=\"span8 offset1\"><p>this is the second paragraph</p></div>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_html).to eq("<div class=\"span8\"><p class=\"diff-del\">this is the first paragraph</p><p>this is the second paragraph</p></div><div class=\"span8 offset1\"><p>this is the second paragraph</p></div>")
|
||||
end
|
||||
|
||||
end
|
||||
|
@@ -91,42 +91,42 @@ describe DiscourseDiff do
|
|||
describe "side_by_side_markdown" do
|
||||
|
||||
it "returns an empty table when no content is diffed" do
|
||||
DiscourseDiff.new("", "").side_by_side_markdown.should == "<table class=\"markdown\"></table>"
|
||||
expect(DiscourseDiff.new("", "").side_by_side_markdown).to eq("<table class=\"markdown\"></table>")
|
||||
end
|
||||
|
||||
it "properly escape html tags" do
|
||||
before = ""
|
||||
after = "<img src=\"//domain.com/image.png>\""
|
||||
DiscourseDiff.new(before, after).side_by_side_markdown.should == "<table class=\"markdown\"><tr><td></td><td class=\"diff-ins\"><img src="//domain.com/image.png>"</td></tr></table>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_markdown).to eq("<table class=\"markdown\"><tr><td></td><td class=\"diff-ins\"><img src="//domain.com/image.png>"</td></tr></table>")
|
||||
end
|
||||
|
||||
it "returns the diffed content on both columns when there is no difference" do
|
||||
before = after = "this is a paragraph"
|
||||
DiscourseDiff.new(before, after).side_by_side_markdown.should == "<table class=\"markdown\"><tr><td>this is a paragraph</td><td>this is a paragraph</td></tr></table>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_markdown).to eq("<table class=\"markdown\"><tr><td>this is a paragraph</td><td>this is a paragraph</td></tr></table>")
|
||||
end
|
||||
|
||||
it "adds <ins> tags around added text on the second column" do
|
||||
before = "this is a paragraph"
|
||||
after = "this is a great paragraph"
|
||||
DiscourseDiff.new(before, after).side_by_side_markdown.should == "<table class=\"markdown\"><tr><td class=\"diff-del\">this is a paragraph</td><td class=\"diff-ins\">this is a <ins>great </ins>paragraph</td></tr></table>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_markdown).to eq("<table class=\"markdown\"><tr><td class=\"diff-del\">this is a paragraph</td><td class=\"diff-ins\">this is a <ins>great </ins>paragraph</td></tr></table>")
|
||||
end
|
||||
|
||||
it "adds <del> tags around removed text on the first column" do
|
||||
before = "this is a great paragraph"
|
||||
after = "this is a paragraph"
|
||||
DiscourseDiff.new(before, after).side_by_side_markdown.should == "<table class=\"markdown\"><tr><td class=\"diff-del\">this is a <del>great </del>paragraph</td><td class=\"diff-ins\">this is a paragraph</td></tr></table>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_markdown).to eq("<table class=\"markdown\"><tr><td class=\"diff-del\">this is a <del>great </del>paragraph</td><td class=\"diff-ins\">this is a paragraph</td></tr></table>")
|
||||
end
|
||||
|
||||
it "adds .diff-ins class when a paragraph is added" do
|
||||
before = "this is the first paragraph"
|
||||
after = "this is the first paragraph\nthis is the second paragraph"
|
||||
DiscourseDiff.new(before, after).side_by_side_markdown.should == "<table class=\"markdown\"><tr><td class=\"diff-del\">this is the first paragraph</td><td class=\"diff-ins\">this is the first paragraph<ins>\nthis is the second paragraph</ins></td></tr></table>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_markdown).to eq("<table class=\"markdown\"><tr><td class=\"diff-del\">this is the first paragraph</td><td class=\"diff-ins\">this is the first paragraph<ins>\nthis is the second paragraph</ins></td></tr></table>")
|
||||
end
|
||||
|
||||
it "adds .diff-del class when a paragraph is removed" do
|
||||
before = "this is the first paragraph\nthis is the second paragraph"
|
||||
after = "this is the second paragraph"
|
||||
DiscourseDiff.new(before, after).side_by_side_markdown.should == "<table class=\"markdown\"><tr><td class=\"diff-del\">this is the first paragraph\n</td><td></td></tr><tr><td>this is the second paragraph</td><td>this is the second paragraph</td></tr></table>"
|
||||
expect(DiscourseDiff.new(before, after).side_by_side_markdown).to eq("<table class=\"markdown\"><tr><td class=\"diff-del\">this is the first paragraph\n</td><td></td></tr><tr><td>this is the second paragraph</td><td>this is the second paragraph</td></tr></table>")
|
||||
end
|
||||
|
||||
end

@@ -6,12 +6,12 @@ describe DiscourseEvent do
describe "#events" do
it "defaults to {}" do
DiscourseEvent.instance_variable_set(:@events, nil)
DiscourseEvent.events.should == {}
expect(DiscourseEvent.events).to eq({})
end

describe "key value" do
it "defaults to an empty set" do
DiscourseEvent.events["event42"].should == Set.new
expect(DiscourseEvent.events["event42"]).to eq(Set.new)
end
end
end

@@ -20,7 +20,7 @@ describe DiscourseEvent do
it "clears out events" do
DiscourseEvent.events["event42"] << "test event"
DiscourseEvent.clear
DiscourseEvent.events.should be_empty
expect(DiscourseEvent.events).to be_empty
end
end

@@ -55,7 +55,7 @@ describe DiscourseEvent do

it "changes the name" do
DiscourseEvent.trigger(:acid_face, harvey)
harvey.name.should == 'Two Face'
expect(harvey.name).to eq('Two Face')
end

end

@@ -71,8 +71,8 @@ describe DiscourseEvent do
end

it 'triggers both events' do
harvey.job.should == 'Supervillian'
harvey.name.should == 'Two Face'
expect(harvey.job).to eq('Supervillian')
expect(harvey.name).to eq('Two Face')
end

end

@@ -6,7 +6,7 @@ describe DiscourseHub do
it 'should return just return the json that the hub returns' do
hub_response = {'success' => 'OK', 'latest_version' => '0.8.1', 'critical_updates' => false}
RestClient.stubs(:get).returns( hub_response.to_json )
DiscourseHub.discourse_version_check.should == hub_response
expect(DiscourseHub.discourse_version_check).to eq(hub_response)
end
end
end

@@ -11,35 +11,35 @@ describe DiscoursePluginRegistry do
|
|||
context '#stylesheets' do
|
||||
it 'defaults to an empty Set' do
|
||||
registry.stylesheets = nil
|
||||
registry.stylesheets.should == Set.new
|
||||
expect(registry.stylesheets).to eq(Set.new)
|
||||
end
|
||||
end
|
||||
|
||||
context '#mobile_stylesheets' do
|
||||
it 'defaults to an empty Set' do
|
||||
registry.mobile_stylesheets = nil
|
||||
registry.mobile_stylesheets.should == Set.new
|
||||
expect(registry.mobile_stylesheets).to eq(Set.new)
|
||||
end
|
||||
end
|
||||
|
||||
context '#javascripts' do
|
||||
it 'defaults to an empty Set' do
|
||||
registry.javascripts = nil
|
||||
registry.javascripts.should == Set.new
|
||||
expect(registry.javascripts).to eq(Set.new)
|
||||
end
|
||||
end
|
||||
|
||||
context '#server_side_javascripts' do
|
||||
it 'defaults to an empty Set' do
|
||||
registry.server_side_javascripts = nil
|
||||
registry.server_side_javascripts.should == Set.new
|
||||
expect(registry.server_side_javascripts).to eq(Set.new)
|
||||
end
|
||||
end
|
||||
|
||||
context '#admin_javascripts' do
|
||||
it 'defaults to an empty Set' do
|
||||
registry.admin_javascripts = nil
|
||||
registry.admin_javascripts.should == Set.new
|
||||
expect(registry.admin_javascripts).to eq(Set.new)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -49,15 +49,15 @@ describe DiscoursePluginRegistry do
|
|||
end
|
||||
|
||||
it 'is not leaking' do
|
||||
DiscoursePluginRegistry.new.stylesheets.should be_blank
|
||||
expect(DiscoursePluginRegistry.new.stylesheets).to be_blank
|
||||
end
|
||||
|
||||
it 'is returned by DiscoursePluginRegistry.stylesheets' do
|
||||
registry_instance.stylesheets.include?('hello.css').should == true
|
||||
expect(registry_instance.stylesheets.include?('hello.css')).to eq(true)
|
||||
end
|
||||
|
||||
it "won't add the same file twice" do
|
||||
lambda { registry_instance.register_css('hello.css') }.should_not change(registry.stylesheets, :size)
|
||||
expect { registry_instance.register_css('hello.css') }.not_to change(registry.stylesheets, :size)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -67,11 +67,11 @@ describe DiscoursePluginRegistry do
|
|||
end
|
||||
|
||||
it 'is returned by DiscoursePluginRegistry.javascripts' do
|
||||
registry_instance.javascripts.include?('hello.js').should == true
|
||||
expect(registry_instance.javascripts.include?('hello.js')).to eq(true)
|
||||
end
|
||||
|
||||
it "won't add the same file twice" do
|
||||
lambda { registry_instance.register_js('hello.js') }.should_not change(registry.javascripts, :size)
|
||||
expect { registry_instance.register_js('hello.js') }.not_to change(registry.javascripts, :size)
|
||||
end
|
||||
end
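
Note: expectations that exercise a block move from lambda to expect as well; a minimal sketch of the pattern used in this file, with registry_instance, registry and the css file name taken from the surrounding hunk:

# RSpec 2: wrap the action in a lambda and call .should_not on it
lambda { registry_instance.register_css('hello.css') }.should_not change(registry.stylesheets, :size)

# RSpec 3: pass the action directly to expect { } and use not_to
expect { registry_instance.register_css('hello.css') }.not_to change(registry.stylesheets, :size)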
@@ -93,53 +93,53 @@ describe DiscoursePluginRegistry do
|
|||
registry.register_asset("test.css")
|
||||
registry.register_asset("test2.css")
|
||||
|
||||
registry.mobile_stylesheets.count.should == 0
|
||||
registry.stylesheets.count.should == 2
|
||||
expect(registry.mobile_stylesheets.count).to eq(0)
|
||||
expect(registry.stylesheets.count).to eq(2)
|
||||
end
|
||||
|
||||
it "registers desktop css properly" do
|
||||
registry.register_asset("test.css", :desktop)
|
||||
|
||||
registry.mobile_stylesheets.count.should == 0
|
||||
registry.desktop_stylesheets.count.should == 1
|
||||
registry.stylesheets.count.should == 0
|
||||
expect(registry.mobile_stylesheets.count).to eq(0)
|
||||
expect(registry.desktop_stylesheets.count).to eq(1)
|
||||
expect(registry.stylesheets.count).to eq(0)
|
||||
end
|
||||
|
||||
it "registers mobile css properly" do
|
||||
registry.register_asset("test.css", :mobile)
|
||||
|
||||
registry.mobile_stylesheets.count.should == 1
|
||||
registry.stylesheets.count.should == 0
|
||||
expect(registry.mobile_stylesheets.count).to eq(1)
|
||||
expect(registry.stylesheets.count).to eq(0)
|
||||
end
|
||||
|
||||
it "registers desktop css properly" do
|
||||
registry.register_asset("test.css", :desktop)
|
||||
|
||||
registry.desktop_stylesheets.count.should == 1
|
||||
registry.stylesheets.count.should == 0
|
||||
expect(registry.desktop_stylesheets.count).to eq(1)
|
||||
expect(registry.stylesheets.count).to eq(0)
|
||||
end
|
||||
|
||||
it "registers sass variable properly" do
|
||||
registry.register_asset("test.css", :variables)
|
||||
|
||||
registry.sass_variables.count.should == 1
|
||||
registry.stylesheets.count.should == 0
|
||||
expect(registry.sass_variables.count).to eq(1)
|
||||
expect(registry.stylesheets.count).to eq(0)
|
||||
end
|
||||
|
||||
it "registers admin javascript properly" do
|
||||
registry.register_asset("my_admin.js", :admin)
|
||||
|
||||
registry.admin_javascripts.count.should == 1
|
||||
registry.javascripts.count.should == 0
|
||||
registry.server_side_javascripts.count.should == 0
|
||||
expect(registry.admin_javascripts.count).to eq(1)
|
||||
expect(registry.javascripts.count).to eq(0)
|
||||
expect(registry.server_side_javascripts.count).to eq(0)
|
||||
end
|
||||
|
||||
it "registers server side javascript properly" do
|
||||
registry.register_asset("my_admin.js", :server_side)
|
||||
|
||||
registry.server_side_javascripts.count.should == 1
|
||||
registry.javascripts.count.should == 1
|
||||
registry.admin_javascripts.count.should == 0
|
||||
expect(registry.server_side_javascripts.count).to eq(1)
|
||||
expect(registry.javascripts.count).to eq(1)
|
||||
expect(registry.admin_javascripts.count).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@@ -16,7 +16,7 @@ describe DiscoursePlugin do
|
|||
|
||||
describe ".mixins" do
|
||||
it "finds its mixins" do
|
||||
TestPlugin.mixins.should == [TestPlugin::TestMixin]
|
||||
expect(TestPlugin.mixins).to eq([TestPlugin::TestMixin])
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@@ -9,8 +9,8 @@ describe DiscourseSassCompiler do
|
|||
it "compiles scss" do
|
||||
DiscoursePluginRegistry.stubs(:stylesheets).returns(["#{Rails.root}/spec/fixtures/scss/my_plugin.scss"])
|
||||
css = described_class.compile(test_scss, "test")
|
||||
css.should include("color")
|
||||
css.should include('my-plugin-thing')
|
||||
expect(css).to include("color")
|
||||
expect(css).to include('my-plugin-thing')
|
||||
end
|
||||
|
||||
it "raises error for invalid scss" do
|
||||
|
@ -23,7 +23,7 @@ describe DiscourseSassCompiler do
|
|||
ColorScheme.expects(:enabled).never
|
||||
DiscoursePluginRegistry.stubs(:stylesheets).returns(["#{Rails.root}/spec/fixtures/scss/my_plugin.scss"])
|
||||
css = described_class.compile(test_scss, "test", safe: true)
|
||||
css.should_not include('my-plugin-thing')
|
||||
expect(css).not_to include('my-plugin-thing')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@@ -10,7 +10,7 @@ describe Discourse do
|
|||
context 'current_hostname' do
|
||||
|
||||
it 'returns the hostname from the current db connection' do
|
||||
Discourse.current_hostname.should == 'foo.com'
|
||||
expect(Discourse.current_hostname).to eq('foo.com')
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -22,7 +22,7 @@ describe Discourse do
|
|||
end
|
||||
|
||||
it 'has a non https base url' do
|
||||
Discourse.base_url.should == "http://foo.com"
|
||||
expect(Discourse.base_url).to eq("http://foo.com")
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -32,7 +32,7 @@ describe Discourse do
|
|||
end
|
||||
|
||||
it 'has a non-ssl base url' do
|
||||
Discourse.base_url.should == "https://foo.com"
|
||||
expect(Discourse.base_url).to eq("https://foo.com")
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -42,7 +42,7 @@ describe Discourse do
|
|||
end
|
||||
|
||||
it "returns the non standart port in the base url" do
|
||||
Discourse.base_url.should == "http://foo.com:3000"
|
||||
expect(Discourse.base_url).to eq("http://foo.com:3000")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -54,17 +54,17 @@ describe Discourse do
|
|||
|
||||
it 'returns the user specified by the site setting site_contact_username' do
|
||||
SiteSetting.stubs(:site_contact_username).returns(another_admin.username)
|
||||
Discourse.site_contact_user.should == another_admin
|
||||
expect(Discourse.site_contact_user).to eq(another_admin)
|
||||
end
|
||||
|
||||
it 'returns the user specified by the site setting site_contact_username regardless of its case' do
|
||||
SiteSetting.stubs(:site_contact_username).returns(another_admin.username.upcase)
|
||||
Discourse.site_contact_user.should == another_admin
|
||||
expect(Discourse.site_contact_user).to eq(another_admin)
|
||||
end
|
||||
|
||||
it 'returns the first admin user otherwise' do
|
||||
SiteSetting.stubs(:site_contact_username).returns(nil)
|
||||
Discourse.site_contact_user.should == admin
|
||||
expect(Discourse.site_contact_user).to eq(admin)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -72,7 +72,7 @@ describe Discourse do
|
|||
context "#store" do
|
||||
|
||||
it "returns LocalStore by default" do
|
||||
Discourse.store.should be_a(FileStore::LocalStore)
|
||||
expect(Discourse.store).to be_a(FileStore::LocalStore)
|
||||
end
|
||||
|
||||
it "returns S3Store when S3 is enabled" do
|
||||
|
@ -80,7 +80,7 @@ describe Discourse do
|
|||
SiteSetting.stubs(:s3_upload_bucket).returns("s3_bucket")
|
||||
SiteSetting.stubs(:s3_access_key_id).returns("s3_access_key_id")
|
||||
SiteSetting.stubs(:s3_secret_access_key).returns("s3_secret_access_key")
|
||||
Discourse.store.should be_a(FileStore::S3Store)
|
||||
expect(Discourse.store).to be_a(FileStore::S3Store)
|
||||
end
|
||||
|
||||
end
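
Note: matchers that take arguments, such as be_a, keep those arguments unchanged; only the receiver moves inside expect(...). Also, the commit consistently uses eq(true) / eq(false) rather than be_truthy / be_falsey, which keeps the comparison strict. A sketch based on the store examples above:

# RSpec 2
Discourse.store.should be_a(FileStore::LocalStore)

# RSpec 3
expect(Discourse.store).to be_a(FileStore::LocalStore)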
@@ -109,12 +109,12 @@ describe Discourse do
|
|||
|
||||
it "returns true when the key is present in redis" do
|
||||
$redis.expects(:get).with(Discourse.readonly_mode_key).returns("1")
|
||||
Discourse.readonly_mode?.should == true
|
||||
expect(Discourse.readonly_mode?).to eq(true)
|
||||
end
|
||||
|
||||
it "returns false when the key is not present in redis" do
|
||||
$redis.expects(:get).with(Discourse.readonly_mode_key).returns(nil)
|
||||
Discourse.readonly_mode?.should == false
|
||||
expect(Discourse.readonly_mode?).to eq(false)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -140,16 +140,16 @@ describe Discourse do
|
|||
exception = StandardError.new
|
||||
|
||||
Discourse.handle_exception(exception, nil, nil)
|
||||
logger.exception.should == exception
|
||||
logger.context.keys.should == [:current_db, :current_hostname]
|
||||
expect(logger.exception).to eq(exception)
|
||||
expect(logger.context.keys).to eq([:current_db, :current_hostname])
|
||||
end
|
||||
|
||||
it "correctly passes extra context" do
|
||||
exception = StandardError.new
|
||||
|
||||
Discourse.handle_exception(exception, {message: "Doing a test", post_id: 31}, nil)
|
||||
logger.exception.should == exception
|
||||
logger.context.keys.sort.should == [:current_db, :current_hostname, :message, :post_id].sort
|
||||
expect(logger.exception).to eq(exception)
|
||||
expect(logger.context.keys.sort).to eq([:current_db, :current_hostname, :message, :post_id].sort)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@@ -7,14 +7,14 @@ describe DiscourseStylesheets do
|
|||
it "can compile desktop bundle" do
|
||||
DiscoursePluginRegistry.stubs(:stylesheets).returns(["#{Rails.root}/spec/fixtures/scss/my_plugin.scss"])
|
||||
builder = described_class.new(:desktop)
|
||||
builder.compile.should include('my-plugin-thing')
|
||||
expect(builder.compile).to include('my-plugin-thing')
|
||||
FileUtils.rm builder.stylesheet_fullpath
|
||||
end
|
||||
|
||||
it "can compile mobile bundle" do
|
||||
DiscoursePluginRegistry.stubs(:mobile_stylesheets).returns(["#{Rails.root}/spec/fixtures/scss/my_plugin.scss"])
|
||||
builder = described_class.new(:mobile)
|
||||
builder.compile.should include('my-plugin-thing')
|
||||
expect(builder.compile).to include('my-plugin-thing')
|
||||
FileUtils.rm builder.stylesheet_fullpath
|
||||
end
|
||||
|
||||
|
@ -24,7 +24,7 @@ describe DiscourseStylesheets do
|
|||
"#{Rails.root}/spec/fixtures/scss/broken.scss"
|
||||
])
|
||||
builder = described_class.new(:desktop)
|
||||
builder.compile.should_not include('my-plugin-thing')
|
||||
expect(builder.compile).not_to include('my-plugin-thing')
|
||||
FileUtils.rm builder.stylesheet_fullpath
|
||||
end
|
||||
end
|
||||
|
|
|
@@ -26,14 +26,14 @@ describe DiscourseUpdates do
|
|||
before { stub_data(Discourse::VERSION::STRING, 0, false, 12.hours.ago) }
|
||||
|
||||
it 'returns all the version fields' do
|
||||
subject['latest_version'].should == Discourse::VERSION::STRING
|
||||
subject['missing_versions_count'].should == 0
|
||||
subject['critical_updates'].should == false
|
||||
subject['installed_version'].should == Discourse::VERSION::STRING
|
||||
expect(subject['latest_version']).to eq(Discourse::VERSION::STRING)
|
||||
expect(subject['missing_versions_count']).to eq(0)
|
||||
expect(subject['critical_updates']).to eq(false)
|
||||
expect(subject['installed_version']).to eq(Discourse::VERSION::STRING)
|
||||
end
|
||||
|
||||
it 'returns the timestamp of the last version check' do
|
||||
subject['updated_at'].should be_within_one_second_of(12.hours.ago)
|
||||
expect(subject['updated_at']).to be_within_one_second_of(12.hours.ago)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -41,14 +41,14 @@ describe DiscourseUpdates do
|
|||
before { stub_data('0.9.0', 2, false, 12.hours.ago) }
|
||||
|
||||
it 'returns all the version fields' do
|
||||
subject['latest_version'].should == '0.9.0'
|
||||
subject['missing_versions_count'].should == 2
|
||||
subject['critical_updates'].should == false
|
||||
subject['installed_version'].should == Discourse::VERSION::STRING
|
||||
expect(subject['latest_version']).to eq('0.9.0')
|
||||
expect(subject['missing_versions_count']).to eq(2)
|
||||
expect(subject['critical_updates']).to eq(false)
|
||||
expect(subject['installed_version']).to eq(Discourse::VERSION::STRING)
|
||||
end
|
||||
|
||||
it 'returns the timestamp of the last version check' do
|
||||
subject['updated_at'].should be_within_one_second_of(12.hours.ago)
|
||||
expect(subject['updated_at']).to be_within_one_second_of(12.hours.ago)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -57,18 +57,18 @@ describe DiscourseUpdates do
|
|||
before { stub_data(nil, nil, false, nil) }
|
||||
|
||||
it 'returns the installed version' do
|
||||
subject['installed_version'].should == Discourse::VERSION::STRING
|
||||
expect(subject['installed_version']).to eq(Discourse::VERSION::STRING)
|
||||
end
|
||||
|
||||
it 'indicates that version check has not been performed' do
|
||||
subject.should have_key('updated_at')
|
||||
subject['updated_at'].should == nil
|
||||
expect(subject).to have_key('updated_at')
|
||||
expect(subject['updated_at']).to eq(nil)
|
||||
end
|
||||
|
||||
it 'does not return latest version info' do
|
||||
subject.should_not have_key('latest_version')
|
||||
subject.should_not have_key('missing_versions_count')
|
||||
subject.should_not have_key('critical_updates')
|
||||
expect(subject).not_to have_key('latest_version')
|
||||
expect(subject).not_to have_key('missing_versions_count')
|
||||
expect(subject).not_to have_key('critical_updates')
|
||||
end
|
||||
|
||||
it 'queues a version check' do
|
||||
|
@ -87,11 +87,11 @@ describe DiscourseUpdates do
|
|||
end
|
||||
|
||||
it 'reports 0 missing versions' do
|
||||
subject['missing_versions_count'].should == 0
|
||||
expect(subject['missing_versions_count']).to eq(0)
|
||||
end
|
||||
|
||||
it 'reports that a version check will be run soon' do
|
||||
subject['version_check_pending'].should == true
|
||||
expect(subject['version_check_pending']).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -119,11 +119,11 @@ describe DiscourseUpdates do
|
|||
end
|
||||
|
||||
it 'reports 0 missing versions' do
|
||||
subject['missing_versions_count'].should == 0
|
||||
expect(subject['missing_versions_count']).to eq(0)
|
||||
end
|
||||
|
||||
it 'reports that a version check will be run soon' do
|
||||
subject['version_check_pending'].should == true
|
||||
expect(subject['version_check_pending']).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@@ -20,18 +20,18 @@ describe DistributedCache do
|
|||
end
|
||||
|
||||
Thread.pass
|
||||
cache1["hi"].should == nil
|
||||
expect(cache1["hi"]).to eq(nil)
|
||||
|
||||
end
|
||||
|
||||
it 'coerces symbol keys to strings' do
|
||||
cache1[:key] = "test"
|
||||
cache1["key"].should == "test"
|
||||
expect(cache1["key"]).to eq("test")
|
||||
|
||||
wait_for do
|
||||
cache2[:key] == "test"
|
||||
end
|
||||
cache2["key"].should == "test"
|
||||
expect(cache2["key"]).to eq("test")
|
||||
end
|
||||
|
||||
it 'sets other caches' do
|
||||
|
@ -49,7 +49,7 @@ describe DistributedCache do
|
|||
end
|
||||
|
||||
cache1.delete("foo")
|
||||
cache1["foo"].should == nil
|
||||
expect(cache1["foo"]).to eq(nil)
|
||||
|
||||
wait_for do
|
||||
cache2["foo"] == nil
|
||||
|
@ -64,7 +64,7 @@ describe DistributedCache do
|
|||
end
|
||||
|
||||
cache1.clear
|
||||
cache1["foo"].should == nil
|
||||
expect(cache1["foo"]).to eq(nil)
|
||||
wait_for do
|
||||
cache2["boom"] == nil
|
||||
end
|
||||
|
|
|
@@ -17,9 +17,9 @@ describe DistributedMemoizer do
|
|||
end
|
||||
|
||||
it "returns the value of a block" do
|
||||
memoize do
|
||||
expect(memoize do
|
||||
"abc"
|
||||
end.should == "abc"
|
||||
end).to eq("abc")
|
||||
end
|
||||
|
||||
it "return the old value once memoized" do
|
||||
|
@ -28,9 +28,9 @@ describe DistributedMemoizer do
|
|||
"abc"
|
||||
end
|
||||
|
||||
memoize do
|
||||
expect(memoize do
|
||||
"world"
|
||||
end.should == "abc"
|
||||
end).to eq("abc")
|
||||
end
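
Note: where the old value under test was itself a do...end block, the conversion has to wrap the whole block in expect(...), which is why these hunks look more invasive than a one-line swap; a sketch using the memoize helper from this spec:

# RSpec 2: .should hangs off the block's return value
memoize do
  "abc"
end.should == "abc"

# RSpec 3: the entire block expression becomes the argument to expect
expect(memoize do
  "abc"
end).to eq("abc")
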
it "memoizes correctly when used concurrently" do
|
||||
|
@ -47,8 +47,8 @@ describe DistributedMemoizer do
|
|||
end
|
||||
|
||||
threads.each(&:join)
|
||||
results.uniq.length.should == 1
|
||||
results.count.should == 5
|
||||
expect(results.uniq.length).to eq(1)
|
||||
expect(results.count).to eq(5)
|
||||
|
||||
end
|
||||
|
||||
|
|
|
@@ -18,7 +18,7 @@ describe DistributedMutex do
|
|||
end
|
||||
end.map(&:join)
|
||||
|
||||
x.should == 10
|
||||
expect(x).to eq(10)
|
||||
end
|
||||
|
||||
it "handles auto cleanup correctly" do
|
||||
|
@ -33,17 +33,17 @@ describe DistributedMutex do
|
|||
end
|
||||
|
||||
# no longer than a second
|
||||
Time.now.to_i.should <= start + 1
|
||||
expect(Time.now.to_i).to be <= start + 1
|
||||
end
|
||||
|
||||
it "maintains mutex semantics" do
|
||||
m = DistributedMutex.new("test_mutex_key")
|
||||
|
||||
lambda {
|
||||
expect {
|
||||
m.synchronize do
|
||||
m.synchronize{}
|
||||
end
|
||||
}.should raise_error(ThreadError)
|
||||
}.to raise_error(ThreadError)
|
||||
end
|
||||
|
||||
end
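
Note: two more variants of the same transform appear in this file; a sketch combining them, where m is the DistributedMutex built in the hunk above and start is the timestamp captured earlier in the auto-cleanup example:

# RSpec 2
lambda {
  m.synchronize { m.synchronize {} }
}.should raise_error(ThreadError)
Time.now.to_i.should <= start + 1

# RSpec 3
expect {
  m.synchronize { m.synchronize {} }
}.to raise_error(ThreadError)
expect(Time.now.to_i).to be <= start + 1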

@@ -6,19 +6,19 @@ describe Email do
describe "is_valid?" do

it 'treats a good email as valid' do
Email.is_valid?('sam@sam.com').should == true
expect(Email.is_valid?('sam@sam.com')).to eq(true)
end

it 'treats a bad email as invalid' do
Email.is_valid?('sam@sam').should == false
expect(Email.is_valid?('sam@sam')).to eq(false)
end

it 'allows museum tld' do
Email.is_valid?('sam@nic.museum').should == true
expect(Email.is_valid?('sam@nic.museum')).to eq(true)
end

it 'does not think a word is an email' do
Email.is_valid?('sam').should == false
expect(Email.is_valid?('sam')).to eq(false)
end

end

@@ -26,14 +26,14 @@ describe Email do
describe "downcase" do

it 'downcases local and host part' do
Email.downcase('SAM@GMAIL.COM').should == 'sam@gmail.com'
Email.downcase('sam@GMAIL.COM').should == 'sam@gmail.com'
expect(Email.downcase('SAM@GMAIL.COM')).to eq('sam@gmail.com')
expect(Email.downcase('sam@GMAIL.COM')).to eq('sam@gmail.com')
end

it 'leaves invalid emails untouched' do
Email.downcase('SAM@GMAILCOM').should == 'SAM@GMAILCOM'
Email.downcase('samGMAIL.COM').should == 'samGMAIL.COM'
Email.downcase('sam@GM@AIL.COM').should == 'sam@GM@AIL.COM'
expect(Email.downcase('SAM@GMAILCOM')).to eq('SAM@GMAILCOM')
expect(Email.downcase('samGMAIL.COM')).to eq('samGMAIL.COM')
expect(Email.downcase('sam@GM@AIL.COM')).to eq('sam@GM@AIL.COM')
end

end

@@ -254,7 +254,7 @@ describe Email::MessageBuilder do
|
|||
|
||||
it "cleans up aliases in the from_alias arg" do
|
||||
builder = Email::MessageBuilder.new(to_address, from_alias: "Finn: the Dog, <3", from: finn_email)
|
||||
builder.build_args[:from].should == "Finn the Dog 3 <#{finn_email}>"
|
||||
expect(builder.build_args[:from]).to eq("Finn the Dog 3 <#{finn_email}>")
|
||||
end
|
||||
|
||||
it "cleans up the email_site_title" do
|
||||
|
|
|
@@ -33,45 +33,45 @@ describe Email::Receiver do
|
|||
end
|
||||
|
||||
it "can parse the html section" do
|
||||
test_parse_body(fixture_file("emails/html_only.eml")).should == "The EC2 instance - I've seen that there tends to be odd and " +
|
||||
"unrecommended settings on the Bitnami installs that I've checked out."
|
||||
expect(test_parse_body(fixture_file("emails/html_only.eml"))).to eq("The EC2 instance - I've seen that there tends to be odd and " +
|
||||
"unrecommended settings on the Bitnami installs that I've checked out.")
|
||||
end
|
||||
|
||||
it "supports a Dutch reply" do
|
||||
test_parse_body(fixture_file("emails/dutch.eml")).should == "Dit is een antwoord in het Nederlands."
|
||||
expect(test_parse_body(fixture_file("emails/dutch.eml"))).to eq("Dit is een antwoord in het Nederlands.")
|
||||
end
|
||||
|
||||
it "supports a Hebrew reply" do
|
||||
I18n.expects(:t).with('user_notifications.previous_discussion').returns('כלטוב')
|
||||
|
||||
# The force_encoding call is only needed for the test - it is passed on fine to the cooked post
|
||||
test_parse_body(fixture_file("emails/hebrew.eml")).should == "שלום"
|
||||
expect(test_parse_body(fixture_file("emails/hebrew.eml"))).to eq("שלום")
|
||||
end
|
||||
|
||||
it "supports a BIG5-encoded reply" do
|
||||
I18n.expects(:t).with('user_notifications.previous_discussion').returns('媽!我上電視了!')
|
||||
|
||||
# The force_encoding call is only needed for the test - it is passed on fine to the cooked post
|
||||
test_parse_body(fixture_file("emails/big5.eml")).should == "媽!我上電視了!"
|
||||
expect(test_parse_body(fixture_file("emails/big5.eml"))).to eq("媽!我上電視了!")
|
||||
end
|
||||
|
||||
it "removes 'via' lines if they match the site title" do
|
||||
SiteSetting.title = "Discourse"
|
||||
|
||||
test_parse_body(fixture_file("emails/via_line.eml")).should == "Hello this email has content!"
|
||||
expect(test_parse_body(fixture_file("emails/via_line.eml"))).to eq("Hello this email has content!")
|
||||
end
|
||||
|
||||
it "removes an 'on date wrote' quoting line" do
|
||||
test_parse_body(fixture_file("emails/on_wrote.eml")).should == "Sure, all you need to do is frobnicate the foobar and you'll be all set!"
|
||||
expect(test_parse_body(fixture_file("emails/on_wrote.eml"))).to eq("Sure, all you need to do is frobnicate the foobar and you'll be all set!")
|
||||
end
|
||||
|
||||
it "removes the 'Previous Discussion' marker" do
|
||||
test_parse_body(fixture_file("emails/previous.eml")).should == "This will not include the previous discussion that is present in this email."
|
||||
expect(test_parse_body(fixture_file("emails/previous.eml"))).to eq("This will not include the previous discussion that is present in this email.")
|
||||
end
|
||||
|
||||
it "handles multiple paragraphs" do
|
||||
test_parse_body(fixture_file("emails/paragraphs.eml")).
|
||||
should == (
|
||||
expect(test_parse_body(fixture_file("emails/paragraphs.eml"))).
|
||||
to eq(
|
||||
"Is there any reason the *old* candy can't be be kept in silos while the new candy
|
||||
is imported into *new* silos?
|
||||
|
||||
|
@ -83,8 +83,8 @@ Thanks for listening."
|
|||
end
|
||||
|
||||
it "handles multiple paragraphs when parsing html" do
|
||||
test_parse_body(fixture_file("emails/html_paragraphs.eml")).
|
||||
should == (
|
||||
expect(test_parse_body(fixture_file("emails/html_paragraphs.eml"))).
|
||||
to eq(
|
||||
"Awesome!
|
||||
|
||||
Pleasure to have you here!
|
||||
|
@ -94,8 +94,8 @@ Pleasure to have you here!
|
|||
end
|
||||
|
||||
it "handles newlines" do
|
||||
test_parse_body(fixture_file("emails/newlines.eml")).
|
||||
should == (
|
||||
expect(test_parse_body(fixture_file("emails/newlines.eml"))).
|
||||
to eq(
|
||||
"This is my reply.
|
||||
It is my best reply.
|
||||
It will also be my *only* reply."
|
||||
|
@ -103,8 +103,8 @@ It will also be my *only* reply."
|
|||
end
|
||||
|
||||
it "handles inline reply" do
|
||||
test_parse_body(fixture_file("emails/inline_reply.eml")).
|
||||
should == (
|
||||
expect(test_parse_body(fixture_file("emails/inline_reply.eml"))).
|
||||
to eq(
|
||||
"On Wed, Oct 8, 2014 at 11:12 AM, techAPJ <info@unconfigured.discourse.org> wrote:
|
||||
|
||||
> techAPJ <https://meta.discourse.org/users/techapj>
|
||||
|
@ -150,16 +150,16 @@ the lazy dog. The quick brown fox jumps over the lazy dog."
|
|||
end
|
||||
|
||||
it "should not include previous replies" do
|
||||
test_parse_body(fixture_file("emails/previous_replies.eml")).should_not match /Previous Replies/
|
||||
expect(test_parse_body(fixture_file("emails/previous_replies.eml"))).not_to match /Previous Replies/
|
||||
end
|
||||
|
||||
it "strips iPhone signature" do
|
||||
test_parse_body(fixture_file("emails/iphone_signature.eml")).should_not match /Sent from my iPhone/
|
||||
expect(test_parse_body(fixture_file("emails/iphone_signature.eml"))).not_to match /Sent from my iPhone/
|
||||
end
|
||||
|
||||
it "properly renders email reply from gmail web client" do
|
||||
test_parse_body(fixture_file("emails/gmail_web.eml")).
|
||||
should == (
|
||||
expect(test_parse_body(fixture_file("emails/gmail_web.eml"))).
|
||||
to eq(
|
||||
"### This is a reply from standard GMail in Google Chrome.
|
||||
|
||||
The quick brown fox jumps over the lazy dog. The quick brown fox jumps over
|
||||
|
@ -175,8 +175,8 @@ Here's a link http://example.com"
|
|||
end
|
||||
|
||||
it "properly renders email reply from iOS default mail client" do
|
||||
test_parse_body(fixture_file("emails/ios_default.eml")).
|
||||
should == (
|
||||
expect(test_parse_body(fixture_file("emails/ios_default.eml"))).
|
||||
to eq(
|
||||
"### this is a reply from iOS default mail
|
||||
|
||||
The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog.
|
||||
|
@ -188,8 +188,8 @@ Here's a link http://example.com"
|
|||
end
|
||||
|
||||
it "properly renders email reply from Android 5 gmail client" do
|
||||
test_parse_body(fixture_file("emails/android_gmail.eml")).
|
||||
should == (
|
||||
expect(test_parse_body(fixture_file("emails/android_gmail.eml"))).
|
||||
to eq(
|
||||
"### this is a reply from Android 5 gmail
|
||||
|
||||
The quick brown fox jumps over the lazy dog. The quick brown fox jumps over
|
||||
|
@ -204,8 +204,8 @@ This is a link to http://example.com"
|
|||
end
|
||||
|
||||
it "properly renders email reply from Windows 8.1 Metro default mail client" do
|
||||
test_parse_body(fixture_file("emails/windows_8_metro.eml")).
|
||||
should == (
|
||||
expect(test_parse_body(fixture_file("emails/windows_8_metro.eml"))).
|
||||
to eq(
|
||||
"### reply from default mail client in Windows 8.1 Metro
|
||||
|
||||
|
||||
|
@ -220,12 +220,12 @@ This is a link http://example.com"
|
|||
end
|
||||
|
||||
it "properly renders email reply from MS Outlook client" do
|
||||
test_parse_body(fixture_file("emails/outlook.eml")).should == "Microsoft Outlook 2010"
|
||||
expect(test_parse_body(fixture_file("emails/outlook.eml"))).to eq("Microsoft Outlook 2010")
|
||||
end
|
||||
|
||||
it "converts back to UTF-8 at the end" do
|
||||
result = test_parse_body(fixture_file("emails/big5.eml"))
|
||||
result.encoding.should == Encoding::UTF_8
|
||||
expect(result.encoding).to eq(Encoding::UTF_8)
|
||||
|
||||
# should not throw
|
||||
TextCleaner.normalize_whitespaces(
|
||||
|
@ -269,11 +269,11 @@ This is a link http://example.com"
|
|||
|
||||
receiver.process
|
||||
|
||||
topic.posts.count.should == (start_count + 1)
|
||||
expect(topic.posts.count).to eq(start_count + 1)
|
||||
created_post = topic.posts.last
|
||||
created_post.via_email.should == true
|
||||
created_post.raw_email.should == fixture_file("emails/valid_reply.eml")
|
||||
created_post.cooked.strip.should == fixture_file("emails/valid_reply.cooked").strip
|
||||
expect(created_post.via_email).to eq(true)
|
||||
expect(created_post.raw_email).to eq(fixture_file("emails/valid_reply.eml"))
|
||||
expect(created_post.cooked.strip).to eq(fixture_file("emails/valid_reply.cooked").strip)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -286,9 +286,9 @@ This is a link http://example.com"
|
|||
|
||||
receiver.process
|
||||
|
||||
topic.posts.count.should == (start_count + 1)
|
||||
topic.posts.last.cooked.strip.should == fixture_file("emails/paragraphs.cooked").strip
|
||||
topic.posts.last.cooked.should_not match /<br/
|
||||
expect(topic.posts.count).to eq(start_count + 1)
|
||||
expect(topic.posts.last.cooked.strip).to eq(fixture_file("emails/paragraphs.cooked").strip)
|
||||
expect(topic.posts.last.cooked).not_to match /<br/
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -308,9 +308,9 @@ This is a link http://example.com"
|
|||
|
||||
receiver.process
|
||||
|
||||
topic.posts.count.should == (start_count + 1)
|
||||
topic.posts.last.cooked.should match /<img src=['"](\/uploads\/default\/\d+\/\w{16}\.png)['"] width=['"]289['"] height=['"]126['"]>/
|
||||
Upload.find_by(sha1: upload_sha).should_not == nil
|
||||
expect(topic.posts.count).to eq(start_count + 1)
|
||||
expect(topic.posts.last.cooked).to match /<img src=['"](\/uploads\/default\/\d+\/\w{16}\.png)['"] width=['"]289['"] height=['"]126['"]>/
|
||||
expect(Upload.find_by(sha1: upload_sha)).not_to eq(nil)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -448,7 +448,7 @@ This is a link http://example.com"
|
|||
|
||||
expect { receiver.process }.to raise_error(Email::Receiver::InvalidPost)
|
||||
|
||||
Topic.count.should == before_topic_count
|
||||
expect(Topic.count).to eq(before_topic_count)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -549,7 +549,7 @@ greatest show ever created. Everyone should watch it.
|
|||
user.save
|
||||
|
||||
process_email(from: user.email, to: to)
|
||||
user.posts.count.should == 1
|
||||
expect(user.posts.count).to eq(1)
|
||||
|
||||
# email too short
|
||||
message = nil
|
||||
|
@ -559,7 +559,7 @@ greatest show ever created. Everyone should watch it.
|
|||
message = e.message
|
||||
end
|
||||
|
||||
e.message.should include("too short")
|
||||
expect(e.message).to include("too short")
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -581,7 +581,7 @@ greatest show ever created. Everyone should watch it.
|
|||
process_email(from: "test@test.com", to: "bob@bob.com")
|
||||
|
||||
# This is the current implementation but it is wrong, it should register an account
|
||||
Discourse.system_user.posts.order("id desc").limit(1).pluck(:raw).first.should include("Hey folks")
|
||||
expect(Discourse.system_user.posts.order("id desc").limit(1).pluck(:raw).first).to include("Hey folks")
|
||||
|
||||
end
|
||||
|
||||
|
|
|
@@ -20,7 +20,7 @@ describe Email::Renderer do

it "escapes HTML entities from text" do
renderer = Email::Renderer.new(message)
renderer.text.should == "Key & Peele"
expect(renderer.text).to eq("Key & Peele")
end

end

@@ -29,19 +29,19 @@ describe Email::Sender do
|
|||
|
||||
context "host_for" do
|
||||
it "defaults to localhost" do
|
||||
Email::Sender.host_for(nil).should == "localhost"
|
||||
expect(Email::Sender.host_for(nil)).to eq("localhost")
|
||||
end
|
||||
|
||||
it "returns localhost for a weird host" do
|
||||
Email::Sender.host_for("this is not a real host").should == "localhost"
|
||||
expect(Email::Sender.host_for("this is not a real host")).to eq("localhost")
|
||||
end
|
||||
|
||||
it "parses hosts from urls" do
|
||||
Email::Sender.host_for("http://meta.discourse.org").should == "meta.discourse.org"
|
||||
expect(Email::Sender.host_for("http://meta.discourse.org")).to eq("meta.discourse.org")
|
||||
end
|
||||
|
||||
it "downcases hosts" do
|
||||
Email::Sender.host_for("http://ForumSite.com").should == "forumsite.com"
|
||||
expect(Email::Sender.host_for("http://ForumSite.com")).to eq("forumsite.com")
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -144,7 +144,7 @@ describe Email::Sender do
|
|||
end
|
||||
|
||||
it 'should have the current user_id' do
|
||||
@email_log.user_id.should == user.id
|
||||
expect(@email_log.user_id).to eq(user.id)
|
||||
end
|
||||
|
||||
|
||||
|
|
|
@@ -100,7 +100,7 @@ describe Email::Styles do
|
|||
context "rewriting protocol relative URLs to the forum" do
|
||||
it "doesn't rewrite a url to another site" do
|
||||
frag = html_fragment('<a href="//youtube.com/discourse">hello</a>')
|
||||
frag.at('a')['href'].should == "//youtube.com/discourse"
|
||||
expect(frag.at('a')['href']).to eq("//youtube.com/discourse")
|
||||
end
|
||||
|
||||
context "without https" do
|
||||
|
@ -110,17 +110,17 @@ describe Email::Styles do
|
|||
|
||||
it "rewrites the href to have http" do
|
||||
frag = html_fragment('<a href="//test.localhost/discourse">hello</a>')
|
||||
frag.at('a')['href'].should == "http://test.localhost/discourse"
|
||||
expect(frag.at('a')['href']).to eq("http://test.localhost/discourse")
|
||||
end
|
||||
|
||||
it "rewrites the href for attachment files to have http" do
|
||||
frag = html_fragment('<a class="attachment" href="//try-discourse.global.ssl.fastly.net/uploads/default/368/40b610b0aa90cfcf.txt">attachment_file.txt</a>')
|
||||
frag.at('a')['href'].should == "http://try-discourse.global.ssl.fastly.net/uploads/default/368/40b610b0aa90cfcf.txt"
|
||||
expect(frag.at('a')['href']).to eq("http://try-discourse.global.ssl.fastly.net/uploads/default/368/40b610b0aa90cfcf.txt")
|
||||
end
|
||||
|
||||
it "rewrites the src to have http" do
|
||||
frag = html_fragment('<img src="//test.localhost/blah.jpg">')
|
||||
frag.at('img')['src'].should == "http://test.localhost/blah.jpg"
|
||||
expect(frag.at('img')['src']).to eq("http://test.localhost/blah.jpg")
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -131,17 +131,17 @@ describe Email::Styles do
|
|||
|
||||
it "rewrites the forum URL to have https" do
|
||||
frag = html_fragment('<a href="//test.localhost/discourse">hello</a>')
|
||||
frag.at('a')['href'].should == "https://test.localhost/discourse"
|
||||
expect(frag.at('a')['href']).to eq("https://test.localhost/discourse")
|
||||
end
|
||||
|
||||
it "rewrites the href for attachment files to have https" do
|
||||
frag = html_fragment('<a class="attachment" href="//try-discourse.global.ssl.fastly.net/uploads/default/368/40b610b0aa90cfcf.txt">attachment_file.txt</a>')
|
||||
frag.at('a')['href'].should == "https://try-discourse.global.ssl.fastly.net/uploads/default/368/40b610b0aa90cfcf.txt"
|
||||
expect(frag.at('a')['href']).to eq("https://try-discourse.global.ssl.fastly.net/uploads/default/368/40b610b0aa90cfcf.txt")
|
||||
end
|
||||
|
||||
it "rewrites the src to have https" do
|
||||
frag = html_fragment('<img src="//test.localhost/blah.jpg">')
|
||||
frag.at('img')['src'].should == "https://test.localhost/blah.jpg"
|
||||
expect(frag.at('img')['src']).to eq("https://test.localhost/blah.jpg")
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@@ -6,33 +6,33 @@ describe Enum do

describe ".[]" do
it "looks up a number by symbol" do
enum[:princess_bubblegum].should == 3
expect(enum[:princess_bubblegum]).to eq(3)
end

it "looks up a symbol by number" do
enum[2].should == :finn
expect(enum[2]).to eq(:finn)
end
end

describe ".valid?" do
it "returns true if a key exists" do
enum.valid?(:finn).should == true
expect(enum.valid?(:finn)).to eq(true)
end

it "returns false if a key does not exist" do
enum.valid?(:obama).should == false
expect(enum.valid?(:obama)).to eq(false)
end
end

describe ".only" do
it "returns only the values we ask for" do
enum.only(:jake, :princess_bubblegum).should == { jake: 1, princess_bubblegum: 3 }
expect(enum.only(:jake, :princess_bubblegum)).to eq({ jake: 1, princess_bubblegum: 3 })
end
end

describe ".except" do
it "returns everything but the values we ask to delete" do
enum.except(:jake, :princess_bubblegum).should == { finn: 2, peppermint_butler: 4 }
expect(enum.except(:jake, :princess_bubblegum)).to eq({ finn: 2, peppermint_butler: 4 })
end
end
end

@@ -18,7 +18,7 @@ describe FileStore::LocalStore do
|
|||
Time.stubs(:now).returns(Time.utc(2013, 2, 17, 12, 0, 0, 0))
|
||||
upload.stubs(:id).returns(42)
|
||||
store.expects(:copy_file)
|
||||
store.store_upload(uploaded_file, upload).should == "/uploads/default/42/253dc8edf9d4ada1.png"
|
||||
expect(store.store_upload(uploaded_file, upload)).to eq("/uploads/default/42/253dc8edf9d4ada1.png")
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -27,7 +27,7 @@ describe FileStore::LocalStore do
|
|||
|
||||
it "returns a relative url" do
|
||||
store.expects(:copy_file)
|
||||
store.store_optimized_image({}, optimized_image).should == "/uploads/default/_optimized/86f/7e4/37faa5a7fc_100x200.png"
|
||||
expect(store.store_optimized_image({}, optimized_image)).to eq("/uploads/default/_optimized/86f/7e4/37faa5a7fc_100x200.png")
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -66,24 +66,24 @@ describe FileStore::LocalStore do
|
|||
describe ".has_been_uploaded?" do
|
||||
|
||||
it "identifies relatives urls" do
|
||||
store.has_been_uploaded?("/uploads/default/42/0123456789ABCDEF.jpg").should == true
|
||||
expect(store.has_been_uploaded?("/uploads/default/42/0123456789ABCDEF.jpg")).to eq(true)
|
||||
end
|
||||
|
||||
it "identifies local urls" do
|
||||
Discourse.stubs(:base_url_no_prefix).returns("http://discuss.site.com")
|
||||
store.has_been_uploaded?("http://discuss.site.com/uploads/default/42/0123456789ABCDEF.jpg").should == true
|
||||
store.has_been_uploaded?("//discuss.site.com/uploads/default/42/0123456789ABCDEF.jpg").should == true
|
||||
expect(store.has_been_uploaded?("http://discuss.site.com/uploads/default/42/0123456789ABCDEF.jpg")).to eq(true)
|
||||
expect(store.has_been_uploaded?("//discuss.site.com/uploads/default/42/0123456789ABCDEF.jpg")).to eq(true)
|
||||
end
|
||||
|
||||
it "identifies local urls when using a CDN" do
|
||||
Rails.configuration.action_controller.stubs(:asset_host).returns("http://my.cdn.com")
|
||||
store.has_been_uploaded?("http://my.cdn.com/uploads/default/42/0123456789ABCDEF.jpg").should == true
|
||||
store.has_been_uploaded?("//my.cdn.com/uploads/default/42/0123456789ABCDEF.jpg").should == true
|
||||
expect(store.has_been_uploaded?("http://my.cdn.com/uploads/default/42/0123456789ABCDEF.jpg")).to eq(true)
|
||||
expect(store.has_been_uploaded?("//my.cdn.com/uploads/default/42/0123456789ABCDEF.jpg")).to eq(true)
|
||||
end
|
||||
|
||||
it "does not match dummy urls" do
|
||||
store.has_been_uploaded?("http://domain.com/uploads/default/42/0123456789ABCDEF.jpg").should == false
|
||||
store.has_been_uploaded?("//domain.com/uploads/default/42/0123456789ABCDEF.jpg").should == false
|
||||
expect(store.has_been_uploaded?("http://domain.com/uploads/default/42/0123456789ABCDEF.jpg")).to eq(false)
|
||||
expect(store.has_been_uploaded?("//domain.com/uploads/default/42/0123456789ABCDEF.jpg")).to eq(false)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -91,7 +91,7 @@ describe FileStore::LocalStore do
|
|||
describe ".absolute_base_url" do
|
||||
|
||||
it "is present" do
|
||||
store.absolute_base_url.should == "http://test.localhost/uploads/default"
|
||||
expect(store.absolute_base_url).to eq("http://test.localhost/uploads/default")
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -99,20 +99,20 @@ describe FileStore::LocalStore do
|
|||
describe ".relative_base_url" do
|
||||
|
||||
it "is present" do
|
||||
store.relative_base_url.should == "/uploads/default"
|
||||
expect(store.relative_base_url).to eq("/uploads/default")
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
it "is internal" do
|
||||
store.internal?.should == true
|
||||
store.external?.should == false
|
||||
expect(store.internal?).to eq(true)
|
||||
expect(store.external?).to eq(false)
|
||||
end
|
||||
|
||||
describe ".avatar_template" do
|
||||
|
||||
it "is present" do
|
||||
store.avatar_template(avatar).should == "/uploads/default/avatars/e9d/71f/5ee7c92d6d/{size}.png"
|
||||
expect(store.avatar_template(avatar)).to eq("/uploads/default/avatars/e9d/71f/5ee7c92d6d/{size}.png")
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@@ -26,7 +26,7 @@ describe FileStore::S3Store do
|
|||
upload.stubs(:id).returns(42)
|
||||
upload.stubs(:extension).returns(".png")
|
||||
s3_helper.expects(:upload)
|
||||
store.store_upload(uploaded_file, upload).should == "//s3_upload_bucket.s3.amazonaws.com/42e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98.png"
|
||||
expect(store.store_upload(uploaded_file, upload)).to eq("//s3_upload_bucket.s3.amazonaws.com/42e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98.png")
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -36,7 +36,7 @@ describe FileStore::S3Store do
|
|||
it "returns an absolute schemaless url" do
|
||||
optimized_image.stubs(:id).returns(42)
|
||||
s3_helper.expects(:upload)
|
||||
store.store_optimized_image(optimized_image_file, optimized_image).should == "//s3_upload_bucket.s3.amazonaws.com/4286f7e437faa5a7fce15d1ddcb9eaeaea377667b8_100x200.png"
|
||||
expect(store.store_optimized_image(optimized_image_file, optimized_image)).to eq("//s3_upload_bucket.s3.amazonaws.com/4286f7e437faa5a7fce15d1ddcb9eaeaea377667b8_100x200.png")
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -62,12 +62,12 @@ describe FileStore::S3Store do
|
|||
describe ".has_been_uploaded?" do
|
||||
|
||||
it "identifies S3 uploads" do
|
||||
store.has_been_uploaded?("//s3_upload_bucket.s3.amazonaws.com/1337.png").should == true
|
||||
expect(store.has_been_uploaded?("//s3_upload_bucket.s3.amazonaws.com/1337.png")).to eq(true)
|
||||
end
|
||||
|
||||
it "does not match other s3 urls" do
|
||||
store.has_been_uploaded?("//s3.amazonaws.com/s3_upload_bucket/1337.png").should == false
|
||||
store.has_been_uploaded?("//s4_upload_bucket.s3.amazonaws.com/1337.png").should == false
|
||||
expect(store.has_been_uploaded?("//s3.amazonaws.com/s3_upload_bucket/1337.png")).to eq(false)
|
||||
expect(store.has_been_uploaded?("//s4_upload_bucket.s3.amazonaws.com/1337.png")).to eq(false)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -75,14 +75,14 @@ describe FileStore::S3Store do
|
|||
describe ".absolute_base_url" do
|
||||
|
||||
it "returns a lowercase schemaless absolute url" do
|
||||
store.absolute_base_url.should == "//s3_upload_bucket.s3.amazonaws.com"
|
||||
expect(store.absolute_base_url).to eq("//s3_upload_bucket.s3.amazonaws.com")
|
||||
end
|
||||
|
||||
end
|
||||
|
||||
it "is external" do
|
||||
store.external?.should == true
|
||||
store.internal?.should == false
|
||||
expect(store.external?).to eq(true)
|
||||
expect(store.internal?).to eq(false)
|
||||
end
|
||||
|
||||
describe ".download" do
|
||||
|
@ -105,7 +105,7 @@ describe FileStore::S3Store do
|
|||
describe ".avatar_template" do
|
||||
|
||||
it "is present" do
|
||||
store.avatar_template(avatar).should == "//s3_upload_bucket.s3.amazonaws.com/avatars/e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98/{size}.png"
|
||||
expect(store.avatar_template(avatar)).to eq("//s3_upload_bucket.s3.amazonaws.com/avatars/e9d71f5ee7c92d6dc9e92ffdad17b8bd49418f98/{size}.png")
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@@ -21,12 +21,12 @@ describe FilterBestPosts do
|
|||
|
||||
filtered_posts = TopicView.new(topic.id, coding_horror, best: 2).filtered_posts
|
||||
best2 = FilterBestPosts.new(topic, filtered_posts, 2)
|
||||
best2.posts.count.should == 2
|
||||
best2.posts[0].id.should == p2.id
|
||||
best2.posts[1].id.should == p3.id
|
||||
expect(best2.posts.count).to eq(2)
|
||||
expect(best2.posts[0].id).to eq(p2.id)
|
||||
expect(best2.posts[1].id).to eq(p3.id)
|
||||
|
||||
topic.update_status('closed', true, Fabricate(:admin))
|
||||
topic.posts.count.should == 4
|
||||
expect(topic.posts.count).to eq(4)
|
||||
end
|
||||
|
||||
describe "processing options" do
|
||||
|
@ -35,57 +35,57 @@ describe FilterBestPosts do
|
|||
it "should not get the status post" do
|
||||
|
||||
best = FilterBestPosts.new(topic, @filtered_posts, 99)
|
||||
best.filtered_posts.size.should == 3
|
||||
best.posts.map(&:id).should =~ [p2.id, p3.id]
|
||||
expect(best.filtered_posts.size).to eq(3)
|
||||
expect(best.posts.map(&:id)).to match_array([p2.id, p3.id])
|
||||
|
||||
end
|
||||
|
||||
it "should get no results for trust level too low" do
|
||||
|
||||
best = FilterBestPosts.new(topic, @filtered_posts, 99, min_trust_level: coding_horror.trust_level + 1)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
end
|
||||
|
||||
|
||||
it "should filter out the posts with a score that is too low" do
|
||||
|
||||
best = FilterBestPosts.new(topic, @filtered_posts, 99, min_score: 99)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
end
|
||||
|
||||
|
||||
it "should filter out everything if min replies not met" do
|
||||
best = FilterBestPosts.new(topic, @filtered_posts, 99, min_replies: 99)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
end
|
||||
|
||||
it "should punch through posts if the score is high enough" do
|
||||
p2.update_column(:score, 100)
|
||||
|
||||
best = FilterBestPosts.new(topic, @filtered_posts, 99, bypass_trust_level_score: 100, min_trust_level: coding_horror.trust_level + 1)
|
||||
best.posts.count.should == 1
|
||||
expect(best.posts.count).to eq(1)
|
||||
end
|
||||
|
||||
it "should bypass trust level score" do
|
||||
best = FilterBestPosts.new(topic, @filtered_posts, 99, bypass_trust_level_score: 0, min_trust_level: coding_horror.trust_level + 1)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
end
|
||||
|
||||
it "should return none if restricted to posts a moderator liked" do
|
||||
best = FilterBestPosts.new(topic, @filtered_posts, 99, only_moderator_liked: true)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
end
|
||||
|
||||
it "doesn't count likes from admins" do
|
||||
PostAction.act(admin, p3, PostActionType.types[:like])
|
||||
best = FilterBestPosts.new(topic, @filtered_posts, 99, only_moderator_liked: true)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
end
|
||||
|
||||
it "should find the post liked by the moderator" do
|
||||
PostAction.act(moderator, p2, PostActionType.types[:like])
|
||||
best = FilterBestPosts.new(topic, @filtered_posts, 99, only_moderator_liked: true)
|
||||
best.posts.count.should == 1
|
||||
expect(best.posts.count).to eq(1)
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@ -24,21 +24,21 @@ describe FlagQuery do
|
|||
PostAction.act(user2, post2, PostActionType.types[:spam])
|
||||
|
||||
posts, topics, users = FlagQuery.flagged_posts_report(admin, "")
|
||||
posts.count.should == 2
|
||||
expect(posts.count).to eq(2)
|
||||
first = posts.first
|
||||
|
||||
users.count.should == 5
|
||||
first[:post_actions].count.should == 2
|
||||
expect(users.count).to eq(5)
|
||||
expect(first[:post_actions].count).to eq(2)
|
||||
|
||||
topics.count.should == 2
|
||||
expect(topics.count).to eq(2)
|
||||
|
||||
second = posts[1]
|
||||
|
||||
second[:post_actions].count.should == 3
|
||||
second[:post_actions].first[:permalink].should == mod_message.related_post.topic.relative_url
|
||||
expect(second[:post_actions].count).to eq(3)
|
||||
expect(second[:post_actions].first[:permalink]).to eq(mod_message.related_post.topic.relative_url)
|
||||
|
||||
posts, users = FlagQuery.flagged_posts_report(admin, "", 1)
|
||||
posts.count.should == 1
|
||||
expect(posts.count).to eq(1)
|
||||
|
||||
# chuck post in category a mod can not see and make sure its missing
|
||||
category = Fabricate(:category)
|
||||
|
@ -49,7 +49,7 @@ describe FlagQuery do
|
|||
|
||||
posts, users = FlagQuery.flagged_posts_report(moderator, "")
|
||||
|
||||
posts.count.should == 1
|
||||
expect(posts.count).to eq(1)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -8,22 +8,22 @@ describe 'pool drainer' do
|
|||
it 'can correctly drain the connection pool' do
|
||||
pool.drain
|
||||
old = pool.connections.length
|
||||
old.should == 1
|
||||
expect(old).to eq(1)
|
||||
|
||||
Thread.new do
|
||||
conn = pool.checkout
|
||||
pool.checkin conn
|
||||
end.join
|
||||
|
||||
pool.connections.length.should == (old+1)
|
||||
expect(pool.connections.length).to eq(old+1)
|
||||
pool.drain
|
||||
pool.connections.length.should == old
|
||||
expect(pool.connections.length).to eq(old)
|
||||
end
|
||||
|
||||
it 'can drain with idle time setting' do
|
||||
pool.drain
|
||||
old = pool.connections.length
|
||||
old.should == 1
|
||||
expect(old).to eq(1)
|
||||
|
||||
|
||||
Thread.new do
|
||||
|
@ -31,9 +31,9 @@ describe 'pool drainer' do
|
|||
pool.checkin conn
|
||||
end.join
|
||||
|
||||
pool.connections.length.should == (old+1)
|
||||
expect(pool.connections.length).to eq(old+1)
|
||||
pool.drain(1.minute)
|
||||
pool.connections.length.should == (old+1)
|
||||
expect(pool.connections.length).to eq(old+1)
|
||||
|
||||
# make sure we don't corrupt internal state
|
||||
20.times do
|
||||
|
|
|
@ -10,6 +10,6 @@ describe ActiveSupport::SafeBuffer do
|
|||
buffer << "hello#{254.chr}".force_encoding("ASCII-8BIT").freeze
|
||||
|
||||
# we pay a cost for force encoding, the h gets dropped
|
||||
buffer.should =~ /ello.*hello/
|
||||
expect(buffer).to match(/ello.*hello/)
|
||||
end
|
||||
end
|
||||
|
|
|
@ -5,24 +5,24 @@ describe Gaps do
|
|||
|
||||
|
||||
it 'returns no gaps for empty data' do
|
||||
Gaps.new(nil, nil).should be_blank
|
||||
expect(Gaps.new(nil, nil)).to be_blank
|
||||
end
|
||||
|
||||
it 'returns no gaps with one element' do
|
||||
Gaps.new([1], [1]).should be_blank
|
||||
expect(Gaps.new([1], [1])).to be_blank
|
||||
end
|
||||
|
||||
it 'returns no gaps when all elements are present' do
|
||||
Gaps.new([1,2,3], [1,2,3]).should be_blank
|
||||
expect(Gaps.new([1,2,3], [1,2,3])).to be_blank
|
||||
end
|
||||
|
||||
context "single element gap" do
|
||||
let(:gap) { Gaps.new([1,3], [1,2,3]) }
|
||||
|
||||
it 'has a gap for post 3' do
|
||||
gap.should_not be_blank
|
||||
gap.before[3].should == [2]
|
||||
gap.after.should be_blank
|
||||
expect(gap).not_to be_blank
|
||||
expect(gap.before[3]).to eq([2])
|
||||
expect(gap.after).to be_blank
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -30,9 +30,9 @@ describe Gaps do
|
|||
let(:gap) { Gaps.new([1,2,3,6,7], [1,2,3,4,5,6,7]) }
|
||||
|
||||
it 'has a gap for post 6' do
|
||||
gap.should_not be_blank
|
||||
gap.before[6].should == [4,5]
|
||||
gap.after.should be_blank
|
||||
expect(gap).not_to be_blank
|
||||
expect(gap.before[6]).to eq([4,5])
|
||||
expect(gap.after).to be_blank
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -40,10 +40,10 @@ describe Gaps do
|
|||
let(:gap) { Gaps.new([1,5,6,7,10], [1,2,3,4,5,6,7,8,9,10]) }
|
||||
|
||||
it 'has both gaps' do
|
||||
gap.should_not be_blank
|
||||
gap.before[5].should == [2,3,4]
|
||||
gap.before[10].should == [8,9]
|
||||
gap.after.should be_blank
|
||||
expect(gap).not_to be_blank
|
||||
expect(gap.before[5]).to eq([2,3,4])
|
||||
expect(gap.before[10]).to eq([8,9])
|
||||
expect(gap.after).to be_blank
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -51,9 +51,9 @@ describe Gaps do
|
|||
let(:gap) { Gaps.new([2,3,4], [1,2,3,4]) }
|
||||
|
||||
it 'has the gap' do
|
||||
gap.should_not be_blank
|
||||
gap.before[2].should == [1]
|
||||
gap.after.should be_blank
|
||||
expect(gap).not_to be_blank
|
||||
expect(gap.before[2]).to eq([1])
|
||||
expect(gap.after).to be_blank
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -61,9 +61,9 @@ describe Gaps do
|
|||
let(:gap) { Gaps.new([1,2,3], [1,2,3,4]) }
|
||||
|
||||
it 'has the gap' do
|
||||
gap.should_not be_blank
|
||||
gap.before.should be_blank
|
||||
gap.after[3].should == [4]
|
||||
expect(gap).not_to be_blank
|
||||
expect(gap.before).to be_blank
|
||||
expect(gap.after[3]).to eq([4])
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -71,9 +71,9 @@ describe Gaps do
|
|||
let(:gap) { Gaps.new([1,2,3], [1,2,3,4,5,6]) }
|
||||
|
||||
it 'has the gap' do
|
||||
gap.should_not be_blank
|
||||
gap.before.should be_blank
|
||||
gap.after[3].should == [4,5,6]
|
||||
expect(gap).not_to be_blank
|
||||
expect(gap.before).to be_blank
|
||||
expect(gap.after[3]).to eq([4,5,6])
|
||||
end
|
||||
end
|
||||
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -9,19 +9,19 @@ describe ImageSizer do
|
|||
end
|
||||
|
||||
it 'returns the same dimensions when smaller than the maximums' do
|
||||
ImageSizer.resize(400, 200).should == [400, 200]
|
||||
expect(ImageSizer.resize(400, 200)).to eq([400, 200])
|
||||
end
|
||||
|
||||
it 'returns nil if the width is nil' do
|
||||
ImageSizer.resize(nil, 100).should == nil
|
||||
expect(ImageSizer.resize(nil, 100)).to eq(nil)
|
||||
end
|
||||
|
||||
it 'returns nil if the height is nil' do
|
||||
ImageSizer.resize(100, nil).should == nil
|
||||
expect(ImageSizer.resize(100, nil)).to eq(nil)
|
||||
end
|
||||
|
||||
it 'works with string parameters' do
|
||||
ImageSizer.resize('100', '101').should == [100, 101]
|
||||
expect(ImageSizer.resize('100', '101')).to eq([100, 101])
|
||||
end
|
||||
|
||||
describe 'when larger than the maximum width' do
|
||||
|
@ -31,11 +31,11 @@ describe ImageSizer do
|
|||
end
|
||||
|
||||
it 'returns the maxmimum width if larger than the maximum' do
|
||||
@w.should == 500
|
||||
expect(@w).to eq(500)
|
||||
end
|
||||
|
||||
it 'resizes the height retaining the aspect ratio' do
|
||||
@h.should == 102
|
||||
expect(@h).to eq(102)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -47,11 +47,11 @@ describe ImageSizer do
|
|||
end
|
||||
|
||||
it 'returns the maxmimum height if larger than the maximum' do
|
||||
@h.should == 500
|
||||
expect(@h).to eq(500)
|
||||
end
|
||||
|
||||
it 'resizes the width retaining the aspect ratio' do
|
||||
@w.should == 102
|
||||
expect(@w).to eq(102)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -63,8 +63,8 @@ describe ImageSizer do
|
|||
end
|
||||
|
||||
it 'resizes both dimensions retaining the aspect ratio' do
|
||||
@h.should == 500
|
||||
@w.should == 333
|
||||
expect(@h).to eq(500)
|
||||
expect(@w).to eq(333)
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@ -14,7 +14,7 @@ describe Import::Normalize do
|
|||
<pre><code>this is a ""</code></pre>
|
||||
MD
|
||||
expected = " \n \n```\n I am a te \"\n \n```\n\n test \n \n```\nthis is a \"\"\n```\n\n"
|
||||
Import::Normalize.normalize_code_blocks(markdown).should == expected
|
||||
expect(Import::Normalize.normalize_code_blocks(markdown)).to eq(expected)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -3,7 +3,7 @@ require_dependency 'js_locale_helper'
|
|||
|
||||
describe JsLocaleHelper do
|
||||
it 'should be able to generate translations' do
|
||||
JsLocaleHelper.output_locale('en').length.should > 0
|
||||
expect(JsLocaleHelper.output_locale('en').length).to be > 0
|
||||
end
|
||||
|
||||
def setup_message_format(format)
|
||||
|
@ -23,8 +23,8 @@ describe JsLocaleHelper do
|
|||
one {1 result}
|
||||
other {# results}
|
||||
}')
|
||||
localize(NUM_RESULTS: 1).should == '1 result'
|
||||
localize(NUM_RESULTS: 2).should == '2 results'
|
||||
expect(localize(NUM_RESULTS: 1)).to eq('1 result')
|
||||
expect(localize(NUM_RESULTS: 2)).to eq('2 results')
|
||||
end
|
||||
|
||||
it 'handles double plurals' do
|
||||
|
@ -37,21 +37,21 @@ describe JsLocaleHelper do
|
|||
}')
|
||||
|
||||
|
||||
localize(NUM_RESULTS: 1, NUM_APPLES: 2).should == '1 result and 2 apples'
|
||||
localize(NUM_RESULTS: 2, NUM_APPLES: 1).should == '2 results and 1 apple'
|
||||
expect(localize(NUM_RESULTS: 1, NUM_APPLES: 2)).to eq('1 result and 2 apples')
|
||||
expect(localize(NUM_RESULTS: 2, NUM_APPLES: 1)).to eq('2 results and 1 apple')
|
||||
end
|
||||
|
||||
it 'handles select' do
|
||||
setup_message_format('{GENDER, select, male {He} female {She} other {They}} read a book')
|
||||
localize(GENDER: 'male').should == 'He read a book'
|
||||
localize(GENDER: 'female').should == 'She read a book'
|
||||
localize(GENDER: 'none').should == 'They read a book'
|
||||
expect(localize(GENDER: 'male')).to eq('He read a book')
|
||||
expect(localize(GENDER: 'female')).to eq('She read a book')
|
||||
expect(localize(GENDER: 'none')).to eq('They read a book')
|
||||
end
|
||||
|
||||
it 'can strip out message formats' do
|
||||
hash = {"a" => "b", "c" => { "d" => {"f_MF" => "bob"} }}
|
||||
JsLocaleHelper.strip_out_message_formats!(hash).should == {"c.d.f_MF" => "bob"}
|
||||
hash["c"]["d"].should == {}
|
||||
expect(JsLocaleHelper.strip_out_message_formats!(hash)).to eq({"c.d.f_MF" => "bob"})
|
||||
expect(hash["c"]["d"]).to eq({})
|
||||
end
|
||||
|
||||
it 'handles message format special keys' do
|
||||
|
@ -70,18 +70,18 @@ describe JsLocaleHelper do
|
|||
}
|
||||
}))
|
||||
|
||||
ctx.eval('I18n.translations')["en"]["js"]["hello"].should == "world"
|
||||
ctx.eval('I18n.translations')["en"]["js"]["test_MF"].should == nil
|
||||
expect(ctx.eval('I18n.translations')["en"]["js"]["hello"]).to eq("world")
|
||||
expect(ctx.eval('I18n.translations')["en"]["js"]["test_MF"]).to eq(nil)
|
||||
|
||||
ctx.eval('I18n.messageFormat("test_MF", { HELLO: "hi", COUNT: 3 })').should == "hi 3 ducks"
|
||||
ctx.eval('I18n.messageFormat("error_MF", { HELLO: "hi", COUNT: 3 })').should =~ /Invalid Format/
|
||||
ctx.eval('I18n.messageFormat("missing", {})').should =~ /missing/
|
||||
ctx.eval('I18n.messageFormat("simple_MF", {})').should =~ /COUNT/ # error
|
||||
expect(ctx.eval('I18n.messageFormat("test_MF", { HELLO: "hi", COUNT: 3 })')).to eq("hi 3 ducks")
|
||||
expect(ctx.eval('I18n.messageFormat("error_MF", { HELLO: "hi", COUNT: 3 })')).to match(/Invalid Format/)
|
||||
expect(ctx.eval('I18n.messageFormat("missing", {})')).to match(/missing/)
|
||||
expect(ctx.eval('I18n.messageFormat("simple_MF", {})')).to match(/COUNT/) # error
|
||||
end
|
||||
|
||||
it 'load pluralizations rules before precompile' do
|
||||
message = JsLocaleHelper.compile_message_format('ru', 'format')
|
||||
message.should_not match 'Plural Function not found'
|
||||
expect(message).not_to match 'Plural Function not found'
|
||||
end
|
||||
|
||||
LocaleSiteSetting.values.each do |locale|
|
||||
|
|
|
@ -5,11 +5,11 @@ shared_examples "a generic error" do
|
|||
let(:result) { creator.create_errors_json(obj) }
|
||||
|
||||
it "should have a result object" do
|
||||
result.should be_present
|
||||
expect(result).to be_present
|
||||
end
|
||||
|
||||
it "has a generic error message" do
|
||||
result[:errors].should == [I18n.t('js.generic_error')]
|
||||
expect(result[:errors]).to eq([I18n.t('js.generic_error')])
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -37,17 +37,17 @@ describe JsonError do
|
|||
|
||||
describe "with a string" do
|
||||
it "returns the string in the error format" do
|
||||
creator.create_errors_json("test error").should == {errors: ["test error"]}
|
||||
expect(creator.create_errors_json("test error")).to eq({errors: ["test error"]})
|
||||
end
|
||||
end
|
||||
|
||||
describe "an activerecord objec with errors" do
|
||||
let(:invalid_user) { User.new }
|
||||
it "returns the errors correctly" do
|
||||
invalid_user.should_not be_valid
|
||||
expect(invalid_user).not_to be_valid
|
||||
result = creator.create_errors_json(invalid_user)
|
||||
result.should be_present
|
||||
result[:errors].should_not be_blank
|
||||
expect(result).to be_present
|
||||
expect(result[:errors]).not_to be_blank
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -13,15 +13,15 @@ describe Middleware::AnonymousCache::Helper do
|
|||
|
||||
context "cachable?" do
|
||||
it "true by default" do
|
||||
new_helper.cacheable?.should == true
|
||||
expect(new_helper.cacheable?).to eq(true)
|
||||
end
|
||||
|
||||
it "is false for non GET" do
|
||||
new_helper("ANON_CACHE_DURATION" => 10, "REQUEST_METHOD" => "POST").cacheable?.should == false
|
||||
expect(new_helper("ANON_CACHE_DURATION" => 10, "REQUEST_METHOD" => "POST").cacheable?).to eq(false)
|
||||
end
|
||||
|
||||
it "is false if it has an auth cookie" do
|
||||
new_helper("HTTP_COOKIE" => "jack=1; _t=#{"1"*32}; jill=2").cacheable?.should == false
|
||||
expect(new_helper("HTTP_COOKIE" => "jack=1; _t=#{"1"*32}; jill=2").cacheable?).to eq(false)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -41,16 +41,16 @@ describe Middleware::AnonymousCache::Helper do
|
|||
|
||||
it "returns cached data for cached requests" do
|
||||
helper.is_mobile = true
|
||||
helper.cached.should == nil
|
||||
expect(helper.cached).to eq(nil)
|
||||
helper.cache([200, {"HELLO" => "WORLD"}, ["hello ", "my world"]])
|
||||
|
||||
helper = new_helper("ANON_CACHE_DURATION" => 10)
|
||||
helper.is_mobile = true
|
||||
helper.cached.should == [200, {"HELLO" => "WORLD"}, ["hello my world"]]
|
||||
expect(helper.cached).to eq([200, {"HELLO" => "WORLD"}, ["hello my world"]])
|
||||
|
||||
crawler.cached.should == nil
|
||||
expect(crawler.cached).to eq(nil)
|
||||
crawler.cache([200, {"HELLO" => "WORLD"}, ["hello ", "world"]])
|
||||
crawler.cached.should == [200, {"HELLO" => "WORLD"}, ["hello world"]]
|
||||
expect(crawler.cached).to eq([200, {"HELLO" => "WORLD"}, ["hello world"]])
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -3,14 +3,14 @@ require 'spec_helper'
|
|||
describe Onebox::Engine::DiscourseLocalOnebox do
|
||||
it "matches for a topic url" do
|
||||
url = "#{Discourse.base_url}/t/hot-topic"
|
||||
Onebox.has_matcher?(url).should == true
|
||||
Onebox::Matcher.new(url).oneboxed.should == described_class
|
||||
expect(Onebox.has_matcher?(url)).to eq(true)
|
||||
expect(Onebox::Matcher.new(url).oneboxed).to eq(described_class)
|
||||
end
|
||||
|
||||
it "matches for a post url" do
|
||||
url = "#{Discourse.base_url}/t/hot-topic/23/2"
|
||||
Onebox.has_matcher?(url).should == true
|
||||
Onebox::Matcher.new(url).oneboxed.should == described_class
|
||||
expect(Onebox.has_matcher?(url)).to eq(true)
|
||||
expect(Onebox::Matcher.new(url).oneboxed).to eq(described_class)
|
||||
end
|
||||
|
||||
context "for a link to a post" do
|
||||
|
@ -19,27 +19,27 @@ describe Onebox::Engine::DiscourseLocalOnebox do
|
|||
|
||||
it "returns a link if post isn't found" do
|
||||
url = "#{Discourse.base_url}/t/not-exist/3/2"
|
||||
Onebox.preview(url).to_s.should == "<a href='#{url}'>#{url}</a>"
|
||||
expect(Onebox.preview(url).to_s).to eq("<a href='#{url}'>#{url}</a>")
|
||||
end
|
||||
|
||||
it "returns a link if not allowed to see the post" do
|
||||
url = "#{Discourse.base_url}#{post2.url}"
|
||||
Guardian.any_instance.stubs(:can_see?).returns(false)
|
||||
Onebox.preview(url).to_s.should == "<a href='#{url}'>#{url}</a>"
|
||||
expect(Onebox.preview(url).to_s).to eq("<a href='#{url}'>#{url}</a>")
|
||||
end
|
||||
|
||||
it "returns a link if post is hidden" do
|
||||
hidden_post = Fabricate(:post, topic: post.topic, post_number: 2, hidden: true, hidden_reason_id: Post.hidden_reasons[:flag_threshold_reached])
|
||||
url = "#{Discourse.base_url}#{hidden_post.url}"
|
||||
Onebox.preview(url).to_s.should == "<a href='#{url}'>#{url}</a>"
|
||||
expect(Onebox.preview(url).to_s).to eq("<a href='#{url}'>#{url}</a>")
|
||||
end
|
||||
|
||||
it "returns some onebox goodness if post exists and can be seen" do
|
||||
url = "#{Discourse.base_url}#{post2.url}"
|
||||
Guardian.any_instance.stubs(:can_see?).returns(true)
|
||||
html = Onebox.preview(url).to_s
|
||||
html.should include(post2.user.username)
|
||||
html.should include(post2.excerpt)
|
||||
expect(html).to include(post2.user.username)
|
||||
expect(html).to include(post2.excerpt)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -51,21 +51,21 @@ describe Onebox::Engine::DiscourseLocalOnebox do
|
|||
|
||||
it "returns a link if topic isn't found" do
|
||||
url = "#{Discourse.base_url}/t/not-found/123"
|
||||
Onebox.preview(url).to_s.should == "<a href='#{url}'>#{url}</a>"
|
||||
expect(Onebox.preview(url).to_s).to eq("<a href='#{url}'>#{url}</a>")
|
||||
end
|
||||
|
||||
it "returns a link if not allowed to see the post" do
|
||||
url = "#{topic.url}"
|
||||
Guardian.any_instance.stubs(:can_see?).returns(false)
|
||||
Onebox.preview(url).to_s.should == "<a href='#{url}'>#{url}</a>"
|
||||
expect(Onebox.preview(url).to_s).to eq("<a href='#{url}'>#{url}</a>")
|
||||
end
|
||||
|
||||
it "returns some onebox goodness if post exists and can be seen" do
|
||||
url = "#{topic.url}"
|
||||
Guardian.any_instance.stubs(:can_see?).returns(true)
|
||||
html = Onebox.preview(url).to_s
|
||||
html.should include(topic.posts.first.user.username)
|
||||
html.should include("topic-info")
|
||||
expect(html).to include(topic.posts.first.user.username)
|
||||
expect(html).to include("topic-info")
|
||||
end
|
||||
end
|
||||
end
|
||||
|
|
|
@ -3,8 +3,8 @@ require_dependency 'oneboxer'
|
|||
|
||||
describe Oneboxer do
|
||||
it "returns blank string for an invalid onebox" do
|
||||
Oneboxer.preview("http://boom.com").should == ""
|
||||
Oneboxer.onebox("http://boom.com").should == ""
|
||||
expect(Oneboxer.preview("http://boom.com")).to eq("")
|
||||
expect(Oneboxer.onebox("http://boom.com")).to eq("")
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -6,11 +6,11 @@ describe ONPDiff do
|
|||
describe "diff" do
|
||||
|
||||
it "returns an empty array when there is no content to diff" do
|
||||
ONPDiff.new("", "").diff.should == []
|
||||
expect(ONPDiff.new("", "").diff).to eq([])
|
||||
end
|
||||
|
||||
it "returns an array with the operation code for each element" do
|
||||
ONPDiff.new("abcd", "abef").diff.should == [["a", :common], ["b", :common], ["e", :add], ["f", :add], ["c", :delete], ["d", :delete]]
|
||||
expect(ONPDiff.new("abcd", "abef").diff).to eq([["a", :common], ["b", :common], ["e", :add], ["f", :add], ["c", :delete], ["d", :delete]])
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -18,15 +18,15 @@ describe ONPDiff do
|
|||
describe "short_diff" do
|
||||
|
||||
it "returns an empty array when there is no content to diff" do
|
||||
ONPDiff.new("", "").short_diff.should == []
|
||||
expect(ONPDiff.new("", "").short_diff).to eq([])
|
||||
end
|
||||
|
||||
it "returns an array with the operation code for each element" do
|
||||
ONPDiff.new("abc", "acd").short_diff.should == [["a", :common], ["b", :delete], ["c", :common], ["d", :add]]
|
||||
expect(ONPDiff.new("abc", "acd").short_diff).to eq([["a", :common], ["b", :delete], ["c", :common], ["d", :add]])
|
||||
end
|
||||
|
||||
it "returns an array with sequencially similar operations merged" do
|
||||
ONPDiff.new("abcd", "abef").short_diff.should == [["ab", :common], ["ef", :add], ["cd", :delete]]
|
||||
expect(ONPDiff.new("abcd", "abef").short_diff).to eq([["ab", :common], ["ef", :add], ["cd", :delete]])
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@ -3,7 +3,7 @@ require 'pbkdf2'
|
|||
describe Pbkdf2 do
|
||||
# trivial test to ensure this does not regress during extraction
|
||||
it "hashes stuff correctly" do
|
||||
Pbkdf2.hash_password('test', 'abcd', 100).should == "0313a6aca54dd4c5d82a699a8a0f0ffb0191b4ef62414b8d9dbc11c0c5ac04da"
|
||||
Pbkdf2.hash_password('test', 'abcd', 101).should == "c7a7b2891bf8e6f82d08cf8d83824edcf6c7c6bacb6a741f38e21fc7977bd20f"
|
||||
expect(Pbkdf2.hash_password('test', 'abcd', 100)).to eq("0313a6aca54dd4c5d82a699a8a0f0ffb0191b4ef62414b8d9dbc11c0c5ac04da")
|
||||
expect(Pbkdf2.hash_password('test', 'abcd', 101)).to eq("c7a7b2891bf8e6f82d08cf8d83824edcf6c7c6bacb6a741f38e21fc7977bd20f")
|
||||
end
|
||||
end
|
||||
|
|
|
@ -10,11 +10,11 @@ describe PinnedCheck do
|
|||
context "without a topic_user record (either anonymous or never been in the topic)" do
|
||||
|
||||
it "returns false if the topic is not pinned" do
|
||||
PinnedCheck.pinned?(unpinned_topic).should == false
|
||||
expect(PinnedCheck.pinned?(unpinned_topic)).to eq(false)
|
||||
end
|
||||
|
||||
it "returns true if the topic is pinned" do
|
||||
PinnedCheck.pinned?(unpinned_topic).should == false
|
||||
expect(PinnedCheck.pinned?(unpinned_topic)).to eq(false)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -27,7 +27,7 @@ describe PinnedCheck do
|
|||
let(:topic_user) { TopicUser.new(topic: unpinned_topic, user: user) }
|
||||
|
||||
it "returns false" do
|
||||
PinnedCheck.pinned?(unpinned_topic, topic_user).should == false
|
||||
expect(PinnedCheck.pinned?(unpinned_topic, topic_user)).to eq(false)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -36,17 +36,17 @@ describe PinnedCheck do
|
|||
let(:topic_user) { TopicUser.new(topic: pinned_topic, user: user) }
|
||||
|
||||
it "is pinned if the topic_user's cleared_pinned_at is blank" do
|
||||
PinnedCheck.pinned?(pinned_topic, topic_user).should == true
|
||||
expect(PinnedCheck.pinned?(pinned_topic, topic_user)).to eq(true)
|
||||
end
|
||||
|
||||
it "is not pinned if the topic_user's cleared_pinned_at is later than when it was pinned_at" do
|
||||
topic_user.cleared_pinned_at = (pinned_at + 1.hour)
|
||||
PinnedCheck.pinned?(pinned_topic, topic_user).should == false
|
||||
expect(PinnedCheck.pinned?(pinned_topic, topic_user)).to eq(false)
|
||||
end
|
||||
|
||||
it "is pinned if the topic_user's cleared_pinned_at is earlier than when it was pinned_at" do
|
||||
topic_user.cleared_pinned_at = (pinned_at - 3.hours)
|
||||
PinnedCheck.pinned?(pinned_topic, topic_user).should == true
|
||||
expect(PinnedCheck.pinned?(pinned_topic, topic_user)).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -13,23 +13,23 @@ describe Plugin::FilterManager do
|
|||
context + result + 2
|
||||
end
|
||||
|
||||
instance.apply(:added_numbers, 1, 0).should == 5
|
||||
expect(instance.apply(:added_numbers, 1, 0)).to eq(5)
|
||||
end
|
||||
|
||||
it "should raise an exception if wrong arity is passed in" do
|
||||
lambda do
|
||||
expect do
|
||||
instance.register(:test) do
|
||||
end
|
||||
end.should raise_exception
|
||||
end.to raise_exception
|
||||
end
|
||||
|
||||
it "should return the original if no filters exist" do
|
||||
instance.apply(:foo, nil, 42).should == 42
|
||||
expect(instance.apply(:foo, nil, 42)).to eq(42)
|
||||
end
|
||||
|
||||
it "should raise an exception if no block is passed in" do
|
||||
lambda do
|
||||
expect do
|
||||
instance.register(:test)
|
||||
end.should raise_exception
|
||||
end.to raise_exception
|
||||
end
|
||||
end
|
||||
|
|
|
@ -10,16 +10,16 @@ describe Plugin::Instance do
|
|||
context "find_all" do
|
||||
it "can find plugins correctly" do
|
||||
plugins = Plugin::Instance.find_all("#{Rails.root}/spec/fixtures/plugins")
|
||||
plugins.count.should == 1
|
||||
expect(plugins.count).to eq(1)
|
||||
plugin = plugins[0]
|
||||
|
||||
plugin.name.should == "plugin-name"
|
||||
plugin.path.should == "#{Rails.root}/spec/fixtures/plugins/my_plugin/plugin.rb"
|
||||
expect(plugin.name).to eq("plugin-name")
|
||||
expect(plugin.path).to eq("#{Rails.root}/spec/fixtures/plugins/my_plugin/plugin.rb")
|
||||
end
|
||||
|
||||
it "does not blow up on missing directory" do
|
||||
plugins = Plugin::Instance.find_all("#{Rails.root}/frank_zappa")
|
||||
plugins.count.should == 0
|
||||
expect(plugins.count).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -31,8 +31,8 @@ describe Plugin::Instance do
|
|||
|
||||
plugin.send :register_assets!
|
||||
|
||||
DiscoursePluginRegistry.mobile_stylesheets.count.should == 0
|
||||
DiscoursePluginRegistry.stylesheets.count.should == 2
|
||||
expect(DiscoursePluginRegistry.mobile_stylesheets.count).to eq(0)
|
||||
expect(DiscoursePluginRegistry.stylesheets.count).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -46,18 +46,18 @@ describe Plugin::Instance do
|
|||
File.open("#{plugin.auto_generated_path}/junk", "w") {|f| f.write("junk")}
|
||||
plugin.activate!
|
||||
|
||||
plugin.auth_providers.count.should == 1
|
||||
expect(plugin.auth_providers.count).to eq(1)
|
||||
auth_provider = plugin.auth_providers[0]
|
||||
auth_provider.authenticator.name.should == 'ubuntu'
|
||||
expect(auth_provider.authenticator.name).to eq('ubuntu')
|
||||
|
||||
# calls ensure_assets! make sure they are there
|
||||
plugin.assets.count.should == 1
|
||||
expect(plugin.assets.count).to eq(1)
|
||||
plugin.assets.each do |a, opts|
|
||||
File.exists?(a).should == true
|
||||
expect(File.exists?(a)).to eq(true)
|
||||
end
|
||||
|
||||
# ensure it cleans up all crap in autogenerated directory
|
||||
File.exists?(junk_file).should == false
|
||||
expect(File.exists?(junk_file)).to eq(false)
|
||||
end
|
||||
|
||||
it "finds all the custom assets" do
|
||||
|
@ -82,13 +82,13 @@ describe Plugin::Instance do
|
|||
|
||||
plugin.activate!
|
||||
|
||||
DiscoursePluginRegistry.javascripts.count.should == 3
|
||||
DiscoursePluginRegistry.admin_javascripts.count.should == 2
|
||||
DiscoursePluginRegistry.server_side_javascripts.count.should == 1
|
||||
DiscoursePluginRegistry.desktop_stylesheets.count.should == 2
|
||||
DiscoursePluginRegistry.sass_variables.count.should == 2
|
||||
DiscoursePluginRegistry.stylesheets.count.should == 2
|
||||
DiscoursePluginRegistry.mobile_stylesheets.count.should == 1
|
||||
expect(DiscoursePluginRegistry.javascripts.count).to eq(3)
|
||||
expect(DiscoursePluginRegistry.admin_javascripts.count).to eq(2)
|
||||
expect(DiscoursePluginRegistry.server_side_javascripts.count).to eq(1)
|
||||
expect(DiscoursePluginRegistry.desktop_stylesheets.count).to eq(2)
|
||||
expect(DiscoursePluginRegistry.sass_variables.count).to eq(2)
|
||||
expect(DiscoursePluginRegistry.stylesheets.count).to eq(2)
|
||||
expect(DiscoursePluginRegistry.mobile_stylesheets.count).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -100,7 +100,7 @@ describe Plugin::Instance do
|
|||
user.save!
|
||||
|
||||
payload = JSON.parse(CurrentUserSerializer.new(user, scope: Guardian.new(user)).to_json)
|
||||
payload["current_user"]["custom_fields"]["has_car"].should == "true"
|
||||
expect(payload["current_user"]["custom_fields"]["has_car"]).to eq("true")
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -111,7 +111,7 @@ describe Plugin::Instance do
|
|||
plugin.register_color_scheme("Purple", {primary: 'EEE0E5'})
|
||||
plugin.notify_after_initialize
|
||||
}.to change { ColorScheme.count }.by(1)
|
||||
ColorScheme.where(name: "Purple").should be_present
|
||||
expect(ColorScheme.where(name: "Purple")).to be_present
|
||||
end
|
||||
|
||||
it "doesn't add the same color scheme twice" do
|
||||
|
|
|
@ -13,10 +13,10 @@ describe Plugin::Metadata do
|
|||
some_ruby
|
||||
TEXT
|
||||
|
||||
metadata.name.should == "plugin-name"
|
||||
metadata.about.should == "about: my plugin"
|
||||
metadata.version.should == "0.1"
|
||||
metadata.authors.should == "Frank Zappa"
|
||||
expect(metadata.name).to eq("plugin-name")
|
||||
expect(metadata.about).to eq("about: my plugin")
|
||||
expect(metadata.version).to eq("0.1")
|
||||
expect(metadata.authors).to eq("Frank Zappa")
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@ -24,12 +24,12 @@ describe PostCreator do
|
|||
it "can be created with auto tracking disabled" do
|
||||
p = PostCreator.create(user, basic_topic_params.merge(auto_track: false))
|
||||
# must be 0 otherwise it will think we read the topic which is clearly untrue
|
||||
TopicUser.where(user_id: p.user_id, topic_id: p.topic_id).count.should == 0
|
||||
expect(TopicUser.where(user_id: p.user_id, topic_id: p.topic_id).count).to eq(0)
|
||||
end
|
||||
|
||||
it "ensures the user can create the topic" do
|
||||
Guardian.any_instance.expects(:can_create?).with(Topic,nil).returns(false)
|
||||
lambda { creator.create }.should raise_error(Discourse::InvalidAccess)
|
||||
expect { creator.create }.to raise_error(Discourse::InvalidAccess)
|
||||
end
|
||||
|
||||
|
||||
|
@ -59,7 +59,7 @@ describe PostCreator do
|
|||
|
||||
it "doesn't return true for spam" do
|
||||
creator.create
|
||||
creator.spam?.should == false
|
||||
expect(creator.spam?).to eq(false)
|
||||
end
|
||||
|
||||
it "does not notify on system messages" do
|
||||
|
@ -70,8 +70,8 @@ describe PostCreator do
|
|||
end
|
||||
# don't notify on system messages they introduce too much noise
|
||||
channels = messages.map(&:channel)
|
||||
channels.find{|s| s =~ /unread/}.should == nil
|
||||
channels.find{|s| s =~ /new/}.should == nil
|
||||
expect(channels.find{|s| s =~ /unread/}).to eq(nil)
|
||||
expect(channels.find{|s| s =~ /new/}).to eq(nil)
|
||||
end
|
||||
|
||||
it "generates the correct messages for a secure topic" do
|
||||
|
@ -92,7 +92,7 @@ describe PostCreator do
|
|||
|
||||
|
||||
# 2 for topic, one to notify of new topic another for tracking state
|
||||
messages.map{|m| m.channel}.sort.should == [ "/new",
|
||||
expect(messages.map{|m| m.channel}.sort).to eq([ "/new",
|
||||
"/users/#{admin.username}",
|
||||
"/users/#{admin.username}",
|
||||
"/unread/#{admin.id}",
|
||||
|
@ -101,10 +101,10 @@ describe PostCreator do
|
|||
"/latest",
|
||||
"/topic/#{created_post.topic_id}",
|
||||
"/topic/#{created_post.topic_id}"
|
||||
].sort
|
||||
].sort)
|
||||
admin_ids = [Group[:admins].id]
|
||||
|
||||
messages.any?{|m| m.group_ids != admin_ids && m.user_ids != [admin.id]}.should == false
|
||||
expect(messages.any?{|m| m.group_ids != admin_ids && m.user_ids != [admin.id]}).to eq(false)
|
||||
end
|
||||
|
||||
it 'generates the correct messages for a normal topic' do
|
||||
|
@ -115,18 +115,18 @@ describe PostCreator do
|
|||
end
|
||||
|
||||
latest = messages.find{|m| m.channel == "/latest"}
|
||||
latest.should_not == nil
|
||||
expect(latest).not_to eq(nil)
|
||||
|
||||
latest = messages.find{|m| m.channel == "/new"}
|
||||
latest.should_not == nil
|
||||
expect(latest).not_to eq(nil)
|
||||
|
||||
read = messages.find{|m| m.channel == "/unread/#{p.user_id}"}
|
||||
read.should_not == nil
|
||||
expect(read).not_to eq(nil)
|
||||
|
||||
user_action = messages.find{|m| m.channel == "/users/#{p.user.username}"}
|
||||
user_action.should_not == nil
|
||||
expect(user_action).not_to eq(nil)
|
||||
|
||||
messages.length.should == 5
|
||||
expect(messages.length).to eq(5)
|
||||
end
|
||||
|
||||
it 'extracts links from the post' do
|
||||
|
@ -158,11 +158,11 @@ describe PostCreator do
|
|||
end
|
||||
|
||||
it 'assigns a category when supplied' do
|
||||
creator_with_category.create.topic.category.should == category
|
||||
expect(creator_with_category.create.topic.category).to eq(category)
|
||||
end
|
||||
|
||||
it 'adds meta data from the post' do
|
||||
creator_with_meta_data.create.topic.meta_data['hello'].should == 'world'
|
||||
expect(creator_with_meta_data.create.topic.meta_data['hello']).to eq('world')
|
||||
end
|
||||
|
||||
it 'passes the image sizes through' do
|
||||
|
@ -175,27 +175,27 @@ describe PostCreator do
|
|||
|
||||
# ensure topic user is correct
|
||||
topic_user = first_post.user.topic_users.find_by(topic_id: first_post.topic_id)
|
||||
topic_user.should be_present
|
||||
topic_user.should be_posted
|
||||
topic_user.last_read_post_number.should == first_post.post_number
|
||||
topic_user.highest_seen_post_number.should == first_post.post_number
|
||||
expect(topic_user).to be_present
|
||||
expect(topic_user).to be_posted
|
||||
expect(topic_user.last_read_post_number).to eq(first_post.post_number)
|
||||
expect(topic_user.highest_seen_post_number).to eq(first_post.post_number)
|
||||
|
||||
user2 = Fabricate(:coding_horror)
|
||||
user2.user_stat.topic_reply_count.should == 0
|
||||
expect(user2.user_stat.topic_reply_count).to eq(0)
|
||||
|
||||
first_post.user.user_stat.reload.topic_reply_count.should == 0
|
||||
expect(first_post.user.user_stat.reload.topic_reply_count).to eq(0)
|
||||
|
||||
PostCreator.new(user2, topic_id: first_post.topic_id, raw: "this is my test post 123").create
|
||||
|
||||
first_post.user.user_stat.reload.topic_reply_count.should == 0
|
||||
expect(first_post.user.user_stat.reload.topic_reply_count).to eq(0)
|
||||
|
||||
user2.user_stat.reload.topic_reply_count.should == 1
|
||||
expect(user2.user_stat.reload.topic_reply_count).to eq(1)
|
||||
end
|
||||
|
||||
it 'sets topic excerpt if first post, but not second post' do
|
||||
first_post = creator.create
|
||||
topic = first_post.topic.reload
|
||||
topic.excerpt.should be_present
|
||||
expect(topic.excerpt).to be_present
|
||||
expect {
|
||||
PostCreator.new(first_post.user, topic_id: first_post.topic_id, raw: "this is the second post").create
|
||||
topic.reload
|
||||
|
@ -211,7 +211,7 @@ describe PostCreator do
|
|||
PostCreator.new(topic.user, topic_id: topic.id, raw: "this is a second post").create
|
||||
topic.reload
|
||||
|
||||
topic.auto_close_at.should be_within(1.second).of(auto_close_time)
|
||||
expect(topic.auto_close_at).to be_within(1.second).of(auto_close_time)
|
||||
end
|
||||
|
||||
it "updates topic's auto close date when it's based on last post" do
|
||||
|
@ -221,7 +221,7 @@ describe PostCreator do
|
|||
PostCreator.new(topic.user, topic_id: topic.id, raw: "this is a second post").create
|
||||
topic.reload
|
||||
|
||||
topic.auto_close_at.should_not be_within(1.second).of(auto_close_time)
|
||||
expect(topic.auto_close_at).not_to be_within(1.second).of(auto_close_time)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -232,7 +232,7 @@ describe PostCreator do
|
|||
it 'ensures the user can auto-close the topic, but ignores auto-close param silently' do
|
||||
Guardian.any_instance.stubs(:can_moderate?).returns(false)
|
||||
post = PostCreator.new(user, basic_topic_params.merge(auto_close_time: 2)).create
|
||||
post.topic.auto_close_at.should == nil
|
||||
expect(post.topic.auto_close_at).to eq(nil)
|
||||
end
|
||||
end
|
||||
end
|
||||
|
@ -251,7 +251,7 @@ describe PostCreator do
|
|||
|
||||
it "returns true for another post with the same content" do
|
||||
new_creator = PostCreator.new(user, basic_topic_params)
|
||||
new_creator.create.should be_present
|
||||
expect(new_creator.create).to be_present
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -271,28 +271,28 @@ describe PostCreator do
|
|||
response_1 = create_post(raw: dupe, user: first.user, topic_id: first.topic_id)
|
||||
response_2 = create_post(raw: dupe, user: first.user, topic_id: second.topic_id)
|
||||
|
||||
response_1.errors.count.should == 0
|
||||
response_2.errors.count.should == 1
|
||||
expect(response_1.errors.count).to eq(0)
|
||||
expect(response_2.errors.count).to eq(1)
|
||||
end
|
||||
|
||||
it "returns blank for another post with the same content" do
|
||||
creator.create
|
||||
new_post_creator.create
|
||||
new_post_creator.errors.should be_present
|
||||
expect(new_post_creator.errors).to be_present
|
||||
end
|
||||
|
||||
it "returns a post for admins" do
|
||||
creator.create
|
||||
user.admin = true
|
||||
new_post_creator.create
|
||||
new_post_creator.errors.should be_blank
|
||||
expect(new_post_creator.errors).to be_blank
|
||||
end
|
||||
|
||||
it "returns a post for moderators" do
|
||||
creator.create
|
||||
user.moderator = true
|
||||
new_post_creator.create
|
||||
new_post_creator.errors.should be_blank
|
||||
expect(new_post_creator.errors).to be_blank
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -312,8 +312,8 @@ describe PostCreator do
|
|||
it "does not create the post" do
|
||||
GroupMessage.stubs(:create)
|
||||
creator.create
|
||||
creator.errors.should be_present
|
||||
creator.spam?.should == true
|
||||
expect(creator.errors).to be_present
|
||||
expect(creator.spam?).to eq(true)
|
||||
end
|
||||
|
||||
it "sends a message to moderators" do
|
||||
|
@ -332,15 +332,15 @@ describe PostCreator do
|
|||
|
||||
it 'ensures the user can create the post' do
|
||||
Guardian.any_instance.expects(:can_create?).with(Post, topic).returns(false)
|
||||
lambda { creator.create }.should raise_error(Discourse::InvalidAccess)
|
||||
expect { creator.create }.to raise_error(Discourse::InvalidAccess)
|
||||
end
|
||||
|
||||
context 'success' do
|
||||
it 'create correctly' do
|
||||
post = creator.create
|
||||
Post.count.should == 1
|
||||
Topic.count.should == 1
|
||||
post.reply_to_post_number.should == 4
|
||||
expect(Post.count).to eq(1)
|
||||
expect(Topic.count).to eq(1)
|
||||
expect(post.reply_to_post_number).to eq(4)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -376,18 +376,18 @@ describe PostCreator do
|
|||
|
||||
it 'acts correctly' do
|
||||
# It's not a warning
|
||||
post.topic.warning.should be_blank
|
||||
expect(post.topic.warning).to be_blank
|
||||
|
||||
post.topic.archetype.should == Archetype.private_message
|
||||
post.topic.subtype.should == TopicSubtype.user_to_user
|
||||
post.topic.topic_allowed_users.count.should == 3
|
||||
expect(post.topic.archetype).to eq(Archetype.private_message)
|
||||
expect(post.topic.subtype).to eq(TopicSubtype.user_to_user)
|
||||
expect(post.topic.topic_allowed_users.count).to eq(3)
|
||||
|
||||
# PMs can't have a category
|
||||
post.topic.category.should == nil
|
||||
expect(post.topic.category).to eq(nil)
|
||||
|
||||
# does not notify an unrelated user
|
||||
unrelated.notifications.count.should == 0
|
||||
post.topic.subtype.should == TopicSubtype.user_to_user
|
||||
expect(unrelated.notifications.count).to eq(0)
|
||||
expect(post.topic.subtype).to eq(TopicSubtype.user_to_user)
|
||||
|
||||
# if an admin replies they should be added to the allowed user list
|
||||
admin = Fabricate(:admin)
|
||||
|
@ -395,7 +395,7 @@ describe PostCreator do
|
|||
topic_id: post.topic_id)
|
||||
|
||||
post.topic.reload
|
||||
post.topic.topic_allowed_users.where(user_id: admin.id).count.should == 1
|
||||
expect(post.topic.topic_allowed_users.where(user_id: admin.id).count).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -414,26 +414,26 @@ describe PostCreator do
|
|||
# Invalid archetype
|
||||
creator = PostCreator.new(user, base_args)
|
||||
creator.create
|
||||
creator.errors.should be_present
|
||||
expect(creator.errors).to be_present
|
||||
|
||||
# Too many users
|
||||
creator = PostCreator.new(user, base_args.merge(archetype: Archetype.private_message,
|
||||
target_usernames: [target_user1.username, target_user2.username].join(',')))
|
||||
creator.create
|
||||
creator.errors.should be_present
|
||||
expect(creator.errors).to be_present
|
||||
|
||||
# Success
|
||||
creator = PostCreator.new(user, base_args.merge(archetype: Archetype.private_message))
|
||||
post = creator.create
|
||||
creator.errors.should be_blank
|
||||
expect(creator.errors).to be_blank
|
||||
|
||||
topic = post.topic
|
||||
topic.should be_present
|
||||
topic.warning.should be_present
|
||||
topic.subtype.should == TopicSubtype.moderator_warning
|
||||
topic.warning.user.should == target_user1
|
||||
topic.warning.created_by.should == user
|
||||
target_user1.warnings.count.should == 1
|
||||
expect(topic).to be_present
|
||||
expect(topic.warning).to be_present
|
||||
expect(topic.subtype).to eq(TopicSubtype.moderator_warning)
|
||||
expect(topic.warning.user).to eq(target_user1)
|
||||
expect(topic.warning.created_by).to eq(user)
|
||||
expect(target_user1.warnings.count).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -456,15 +456,15 @@ describe PostCreator do
|
|||
end
|
||||
|
||||
it 'acts correctly' do
|
||||
post.topic.archetype.should == Archetype.private_message
|
||||
post.topic.topic_allowed_users.count.should == 1
|
||||
post.topic.topic_allowed_groups.count.should == 1
|
||||
expect(post.topic.archetype).to eq(Archetype.private_message)
|
||||
expect(post.topic.topic_allowed_users.count).to eq(1)
|
||||
expect(post.topic.topic_allowed_groups.count).to eq(1)
|
||||
|
||||
# does not notify an unrelated user
|
||||
unrelated.notifications.count.should == 0
|
||||
post.topic.subtype.should == TopicSubtype.user_to_user
|
||||
target_user1.notifications.count.should == 1
|
||||
target_user2.notifications.count.should == 1
|
||||
expect(unrelated.notifications.count).to eq(0)
|
||||
expect(post.topic.subtype).to eq(TopicSubtype.user_to_user)
|
||||
expect(target_user1.notifications.count).to eq(1)
|
||||
expect(target_user2.notifications.count).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -485,8 +485,8 @@ describe PostCreator do
|
|||
end
|
||||
|
||||
it 'acts correctly' do
|
||||
topic.created_at.should be_within(10.seconds).of(created_at)
|
||||
post.created_at.should be_within(10.seconds).of(created_at)
|
||||
expect(topic.created_at).to be_within(10.seconds).of(created_at)
|
||||
expect(post.created_at).to be_within(10.seconds).of(created_at)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -494,7 +494,7 @@ describe PostCreator do
|
|||
it 'can save a post' do
|
||||
creator = PostCreator.new(user, raw: 'q', title: 'q', skip_validations: true)
|
||||
creator.create
|
||||
creator.errors.should == nil
|
||||
expect(creator.errors).to eq(nil)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -502,10 +502,10 @@ describe PostCreator do
|
|||
it "has a word count" do
|
||||
creator = PostCreator.new(user, title: 'some inspired poetry for a rainy day', raw: 'mary had a little lamb, little lamb, little lamb. mary had a little lamb')
|
||||
post = creator.create
|
||||
post.word_count.should == 14
|
||||
expect(post.word_count).to eq(14)
|
||||
|
||||
post.topic.reload
|
||||
post.topic.word_count.should == 14
|
||||
expect(post.topic.word_count).to eq(14)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -519,19 +519,19 @@ describe PostCreator do
|
|||
title: 'Reviews of Science Ovens',
|
||||
raw: 'Did you know that you can use microwaves to cook your dinner? Science!')
|
||||
creator.create
|
||||
TopicEmbed.where(embed_url: embed_url).exists?.should == true
|
||||
expect(TopicEmbed.where(embed_url: embed_url).exists?).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
describe "read credit for creator" do
|
||||
it "should give credit to creator" do
|
||||
post = create_post
|
||||
PostTiming.find_by(topic_id: post.topic_id,
|
||||
expect(PostTiming.find_by(topic_id: post.topic_id,
|
||||
post_number: post.post_number,
|
||||
user_id: post.user_id).msecs.should be > 0
|
||||
user_id: post.user_id).msecs).to be > 0
|
||||
|
||||
TopicUser.find_by(topic_id: post.topic_id,
|
||||
user_id: post.user_id).last_read_post_number.should == 1
|
||||
expect(TopicUser.find_by(topic_id: post.topic_id,
|
||||
user_id: post.user_id).last_read_post_number).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -541,13 +541,13 @@ describe PostCreator do
|
|||
|
||||
creator = PostCreator.new(user, {title: "my test title 123", raw: "I should not be allowed to post"} )
|
||||
creator.create
|
||||
creator.errors.count.should be > 0
|
||||
expect(creator.errors.count).to be > 0
|
||||
end
|
||||
end
|
||||
|
||||
it "doesn't strip starting whitespaces" do
|
||||
post = PostCreator.new(user, { title: "testing whitespace stripping", raw: " <-- whitespaces --> " }).create
|
||||
post.raw.should == " <-- whitespaces -->"
|
||||
expect(post.raw).to eq(" <-- whitespaces -->")
|
||||
end
|
||||
|
||||
end
|
||||
|
|
|
@ -41,10 +41,10 @@ describe PostDestroyer do
|
|||
reply3.reload
|
||||
reply4.reload
|
||||
|
||||
reply1.deleted_at.should == nil
|
||||
reply2.deleted_at.should_not == nil
|
||||
reply3.deleted_at.should == nil
|
||||
reply4.deleted_at.should == nil
|
||||
expect(reply1.deleted_at).to eq(nil)
|
||||
expect(reply2.deleted_at).not_to eq(nil)
|
||||
expect(reply3.deleted_at).to eq(nil)
|
||||
expect(reply4.deleted_at).to eq(nil)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -69,9 +69,9 @@ describe PostDestroyer do
|
|||
reply2.reload
|
||||
reply3.reload
|
||||
|
||||
reply1.deleted_at.should == nil
|
||||
reply2.deleted_at.should_not == nil
|
||||
reply3.deleted_at.should == nil
|
||||
expect(reply1.deleted_at).to eq(nil)
|
||||
expect(reply2.deleted_at).not_to eq(nil)
|
||||
expect(reply3.deleted_at).to eq(nil)
|
||||
|
||||
# if topic is deleted we should still be able to destroy stubs
|
||||
|
||||
|
@ -80,7 +80,7 @@ describe PostDestroyer do
|
|||
PostDestroyer.destroy_stubs
|
||||
|
||||
reply1.reload
|
||||
reply1.deleted_at.should == nil
|
||||
expect(reply1.deleted_at).to eq(nil)
|
||||
|
||||
# flag the post, it should not nuke the stub anymore
|
||||
topic.recover!
|
||||
|
@ -89,7 +89,7 @@ describe PostDestroyer do
|
|||
PostDestroyer.destroy_stubs
|
||||
|
||||
reply1.reload
|
||||
reply1.deleted_at.should == nil
|
||||
expect(reply1.deleted_at).to eq(nil)
|
||||
|
||||
end
|
||||
|
||||
|
@ -111,8 +111,8 @@ describe PostDestroyer do
|
|||
reply1.reload
|
||||
reply2.reload
|
||||
|
||||
reply1.deleted_at.should == nil
|
||||
reply2.deleted_at.should_not == nil
|
||||
expect(reply1.deleted_at).to eq(nil)
|
||||
expect(reply2.deleted_at).not_to eq(nil)
|
||||
|
||||
SiteSetting.stubs(:delete_removed_posts_after).returns(72)
|
||||
|
||||
|
@ -120,13 +120,13 @@ describe PostDestroyer do
|
|||
|
||||
PostDestroyer.destroy_stubs
|
||||
|
||||
reply1.reload.deleted_at.should == nil
|
||||
expect(reply1.reload.deleted_at).to eq(nil)
|
||||
|
||||
SiteSetting.stubs(:delete_removed_posts_after).returns(47)
|
||||
|
||||
PostDestroyer.destroy_stubs
|
||||
|
||||
reply1.reload.deleted_at.should_not == nil
|
||||
expect(reply1.reload.deleted_at).not_to eq(nil)
|
||||
end
|
||||
|
||||
it "deletes posts immediately if delete_removed_posts_after is 0" do
|
||||
|
@ -138,7 +138,7 @@ describe PostDestroyer do
|
|||
|
||||
PostDestroyer.new(reply1.user, reply1).destroy
|
||||
|
||||
reply1.reload.deleted_at.should_not == nil
|
||||
expect(reply1.reload.deleted_at).not_to eq(nil)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -154,25 +154,25 @@ describe PostDestroyer do
|
|||
PostDestroyer.new(post2.user, post2).destroy
|
||||
post2.reload
|
||||
|
||||
post2.deleted_at.should be_blank
|
||||
post2.deleted_by.should be_blank
|
||||
post2.user_deleted.should == true
|
||||
post2.raw.should == I18n.t('js.post.deleted_by_author', {count: 24})
|
||||
post2.version.should == 2
|
||||
expect(post2.deleted_at).to be_blank
|
||||
expect(post2.deleted_by).to be_blank
|
||||
expect(post2.user_deleted).to eq(true)
|
||||
expect(post2.raw).to eq(I18n.t('js.post.deleted_by_author', {count: 24}))
|
||||
expect(post2.version).to eq(2)
|
||||
|
||||
# lets try to recover
|
||||
PostDestroyer.new(post2.user, post2).recover
|
||||
post2.reload
|
||||
post2.version.should == 3
|
||||
post2.user_deleted.should == false
|
||||
post2.cooked.should == @orig
|
||||
expect(post2.version).to eq(3)
|
||||
expect(post2.user_deleted).to eq(false)
|
||||
expect(post2.cooked).to eq(@orig)
|
||||
end
|
||||
|
||||
context "as a moderator" do
|
||||
it "deletes the post" do
|
||||
PostDestroyer.new(moderator, post).destroy
|
||||
post.deleted_at.should be_present
|
||||
post.deleted_by.should == moderator
|
||||
expect(post.deleted_at).to be_present
|
||||
expect(post.deleted_by).to eq(moderator)
|
||||
end
|
||||
|
||||
it "updates the user's post_count" do
|
||||
|
@ -193,8 +193,8 @@ describe PostDestroyer do
|
|||
context "as an admin" do
|
||||
it "deletes the post" do
|
||||
PostDestroyer.new(admin, post).destroy
|
||||
post.deleted_at.should be_present
|
||||
post.deleted_by.should == admin
|
||||
expect(post.deleted_at).to be_present
|
||||
expect(post.deleted_by).to eq(admin)
|
||||
end
|
||||
|
||||
it "updates the user's post_count" do
|
||||
|
@ -221,11 +221,11 @@ describe PostDestroyer do
|
|||
end
|
||||
|
||||
it 'resets the last_poster_id back to the OP' do
|
||||
topic.last_post_user_id.should == user.id
|
||||
expect(topic.last_post_user_id).to eq(user.id)
|
||||
end
|
||||
|
||||
it 'resets the last_posted_at back to the OP' do
|
||||
topic.last_posted_at.to_i.should == post.created_at.to_i
|
||||
expect(topic.last_posted_at.to_i).to eq(post.created_at.to_i)
|
||||
end
|
||||
|
||||
context 'topic_user' do
|
||||
|
@ -233,15 +233,15 @@ describe PostDestroyer do
|
|||
let(:topic_user) { second_user.topic_users.find_by(topic_id: topic.id) }
|
||||
|
||||
it 'clears the posted flag for the second user' do
|
||||
topic_user.posted?.should == false
|
||||
expect(topic_user.posted?).to eq(false)
|
||||
end
|
||||
|
||||
it "sets the second user's last_read_post_number back to 1" do
|
||||
topic_user.last_read_post_number.should == 1
|
||||
expect(topic_user.last_read_post_number).to eq(1)
|
||||
end
|
||||
|
||||
it "sets the second user's last_read_post_number back to 1" do
|
||||
topic_user.highest_seen_post_number.should == 1
|
||||
expect(topic_user.highest_seen_post_number).to eq(1)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -262,8 +262,8 @@ describe PostDestroyer do
|
|||
end
|
||||
|
||||
it "deletes the post" do
|
||||
post.deleted_at.should be_present
|
||||
post.deleted_by.should == moderator
|
||||
expect(post.deleted_at).to be_present
|
||||
expect(post.deleted_by).to eq(moderator)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -273,8 +273,8 @@ describe PostDestroyer do
|
|||
end
|
||||
|
||||
it "deletes the post" do
|
||||
post.deleted_at.should be_present
|
||||
post.deleted_by.should == admin
|
||||
expect(post.deleted_at).to be_present
|
||||
expect(post.deleted_by).to eq(admin)
|
||||
end
|
||||
|
||||
it "creates a new user history entry" do
|
||||
|
@ -302,22 +302,22 @@ describe PostDestroyer do
|
|||
|
||||
it 'changes the post count of the topic' do
|
||||
post.reload
|
||||
lambda {
|
||||
expect {
|
||||
PostDestroyer.new(moderator, reply).destroy
|
||||
post.topic.reload
|
||||
}.should change(post.topic, :posts_count).by(-1)
|
||||
}.to change(post.topic, :posts_count).by(-1)
|
||||
end
|
||||
|
||||
it 'lowers the reply_count when the reply is deleted' do
|
||||
lambda {
|
||||
expect {
|
||||
PostDestroyer.new(moderator, reply).destroy
|
||||
}.should change(post.post_replies, :count).by(-1)
|
||||
}.to change(post.post_replies, :count).by(-1)
|
||||
end
|
||||
|
||||
it 'should increase the post_number when there are deletion gaps' do
|
||||
PostDestroyer.new(moderator, reply).destroy
|
||||
p = Fabricate(:post, user: post.user, topic: post.topic)
|
||||
p.post_number.should == 3
|
||||
expect(p.post_number).to eq(3)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -328,9 +328,9 @@ describe PostDestroyer do
|
|||
it 'removes notifications when deleted' do
|
||||
user = Fabricate(:evil_trout)
|
||||
post = create_post(raw: 'Hello @eviltrout')
|
||||
lambda {
|
||||
expect {
|
||||
PostDestroyer.new(Fabricate(:moderator), post).destroy
|
||||
}.should change(user.notifications, :count).by(-1)
|
||||
}.to change(user.notifications, :count).by(-1)
|
||||
end
|
||||
end
|
||||
|
||||
|
@ -344,15 +344,15 @@ describe PostDestroyer do
|
|||
|
||||
PostDestroyer.new(moderator, second_post).destroy
|
||||
|
||||
PostAction.find_by(id: bookmark.id).should == nil
|
||||
expect(PostAction.find_by(id: bookmark.id)).to eq(nil)
|
||||
|
||||
off_topic = PostAction.find_by(id: flag.id)
|
||||
off_topic.should_not == nil
|
||||
off_topic.agreed_at.should_not == nil
|
||||
expect(off_topic).not_to eq(nil)
|
||||
expect(off_topic.agreed_at).not_to eq(nil)
|
||||
|
||||
second_post.reload
|
||||
second_post.bookmark_count.should == 0
|
||||
second_post.off_topic_count.should == 1
|
||||
expect(second_post.bookmark_count).to eq(0)
|
||||
expect(second_post.off_topic_count).to eq(1)
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
@@ -15,10 +15,10 @@ describe PostRevisor do
describe 'with the same body' do
it "doesn't change version" do
lambda {
subject.revise!(post.user, { raw: post.raw }).should == false
expect {
expect(subject.revise!(post.user, { raw: post.raw })).to eq(false)
post.reload
}.should_not change(post, :version)
}.not_to change(post, :version)
end
end

@@ -28,11 +28,11 @@ describe PostRevisor do
subject.revise!(post.user, { raw: 'updated body' }, revised_at: post.updated_at + 10.seconds)
post.reload

post.version.should == 1
post.public_version.should == 1
post.revisions.size.should == 0
post.last_version_at.should == first_version_at
subject.category_changed.should be_blank
expect(post.version).to eq(1)
expect(post.public_version).to eq(1)
expect(post.revisions.size).to eq(0)
expect(post.last_version_at).to eq(first_version_at)
expect(subject.category_changed).to be_blank
end
end

@@ -47,20 +47,20 @@ describe PostRevisor do
end

it "doesn't update a category" do
subject.category_changed.should be_blank
expect(subject.category_changed).to be_blank
end

it 'updates the versions' do
post.version.should == 2
post.public_version.should == 2
expect(post.version).to eq(2)
expect(post.public_version).to eq(2)
end

it 'creates a new revision' do
post.revisions.size.should == 1
expect(post.revisions.size).to eq(1)
end

it "updates the last_version_at" do
post.last_version_at.to_i.should == revised_at.to_i
expect(post.last_version_at.to_i).to eq(revised_at.to_i)
end

describe "new edit window" do

@@ -71,16 +71,16 @@ describe PostRevisor do
end

it "doesn't create a new version if you do another" do
post.version.should == 2
post.public_version.should == 2
expect(post.version).to eq(2)
expect(post.public_version).to eq(2)
end

it "doesn't change last_version_at" do
post.last_version_at.to_i.should == revised_at.to_i
expect(post.last_version_at.to_i).to eq(revised_at.to_i)
end

it "doesn't update a category" do
subject.category_changed.should be_blank
expect(subject.category_changed).to be_blank
end

context "after second window" do

@@ -93,12 +93,12 @@ describe PostRevisor do
end

it "does create a new version after the edit window" do
post.version.should == 3
post.public_version.should == 3
expect(post.version).to eq(3)
expect(post.public_version).to eq(3)
end

it "does create a new version after the edit window" do
post.last_version_at.to_i.should == new_revised_at.to_i
expect(post.last_version_at.to_i).to eq(new_revised_at.to_i)
end
end
end

@@ -115,7 +115,7 @@ describe PostRevisor do
let(:new_description) { "this is my new description." }

it "should have no description by default" do
category.description.should be_blank
expect(category.description).to be_blank
end

context "one paragraph description" do

@@ -125,11 +125,11 @@ describe PostRevisor do
end

it "returns the changed category info" do
subject.category_changed.should == category
expect(subject.category_changed).to eq(category)
end

it "updates the description of the category" do
category.description.should == new_description
expect(category.description).to eq(new_description)
end
end

@@ -140,11 +140,11 @@ describe PostRevisor do
end

it "returns the changed category info" do
subject.category_changed.should == category
expect(subject.category_changed).to eq(category)
end

it "updates the description of the category" do
category.description.should == new_description
expect(category.description).to eq(new_description)
end
end

@@ -156,11 +156,11 @@ describe PostRevisor do
end

it "puts the description back to nothing" do
category.description.should be_blank
expect(category.description).to be_blank
end

it "returns the changed category info" do
subject.category_changed.should == category
expect(subject.category_changed).to eq(category)
end
end

@@ -186,11 +186,11 @@ describe PostRevisor do
end

it "allows an admin to insert images into a new user's post" do
post.errors.should be_blank
expect(post.errors).to be_blank
end

it "marks the admin as the last updater" do
post.last_editor_id.should == changed_by.id
expect(post.last_editor_id).to eq(changed_by.id)
end

end

@@ -204,7 +204,7 @@ describe PostRevisor do
end

it "doesn't allow images to be inserted" do
post.errors.should be_present
expect(post.errors).to be_present
end

end

@@ -215,34 +215,34 @@ describe PostRevisor do
let!(:result) { subject.revise!(changed_by, { raw: "lets update the body" }) }

it 'returns true' do
result.should == true
expect(result).to eq(true)
end

it 'updates the body' do
post.raw.should == "lets update the body"
expect(post.raw).to eq("lets update the body")
end

it 'sets the invalidate oneboxes attribute' do
post.invalidate_oneboxes.should == true
expect(post.invalidate_oneboxes).to eq(true)
end

it 'increased the versions' do
post.version.should == 2
post.public_version.should == 2
expect(post.version).to eq(2)
expect(post.public_version).to eq(2)
end

it 'has the new revision' do
post.revisions.size.should == 1
expect(post.revisions.size).to eq(1)
end

it "saved the user who made the change in the revisions" do
post.revisions.first.user_id.should == changed_by.id
expect(post.revisions.first.user_id).to eq(changed_by.id)
end

it "updates the word count" do
post.word_count.should == 4
expect(post.word_count).to eq(4)
post.topic.reload
post.topic.word_count.should == 4
expect(post.topic.word_count).to eq(4)
end

context 'second poster posts again quickly' do

@@ -253,9 +253,9 @@ describe PostRevisor do
end

it 'is a ninja edit, because the second poster posted again quickly' do
post.version.should == 2
post.public_version.should == 2
post.revisions.size.should == 1
expect(post.version).to eq(2)
expect(post.public_version).to eq(2)
expect(post.revisions.size).to eq(1)
end
end
end

@@ -279,7 +279,7 @@ describe PostRevisor do
it "doesn't strip starting whitespaces" do
subject.revise!(post.user, { raw: " <-- whitespaces --> " })
post.reload
post.raw.should == " <-- whitespaces -->"
expect(post.raw).to eq(" <-- whitespaces -->")
end

end
@@ -17,35 +17,36 @@ describe PrettyText do
end

it "produces a quote even with new lines in it" do
PrettyText.cook("[quote=\"EvilTrout, post:123, topic:456, full:true\"]ddd\n[/quote]").should match_html "<aside class=\"quote\" data-post=\"123\" data-topic=\"456\" data-full=\"true\"><div class=\"title\">\n<div class=\"quote-controls\"></div>\n<img width=\"20\" height=\"20\" src=\"http://test.localhost/uploads/default/avatars/42d/57c/46ce7ee487/40.png\" class=\"avatar\">EvilTrout:</div>\n<blockquote><p>ddd</p></blockquote></aside>"
expect(PrettyText.cook("[quote=\"EvilTrout, post:123, topic:456, full:true\"]ddd\n[/quote]")).to match_html "<aside class=\"quote\" data-post=\"123\" data-topic=\"456\" data-full=\"true\"><div class=\"title\">\n<div class=\"quote-controls\"></div>\n<img width=\"20\" height=\"20\" src=\"http://test.localhost/uploads/default/avatars/42d/57c/46ce7ee487/40.png\" class=\"avatar\">EvilTrout:</div>\n<blockquote><p>ddd</p></blockquote></aside>"
end

it "should produce a quote" do
PrettyText.cook("[quote=\"EvilTrout, post:123, topic:456, full:true\"]ddd[/quote]").should match_html "<aside class=\"quote\" data-post=\"123\" data-topic=\"456\" data-full=\"true\"><div class=\"title\">\n<div class=\"quote-controls\"></div>\n<img width=\"20\" height=\"20\" src=\"http://test.localhost/uploads/default/avatars/42d/57c/46ce7ee487/40.png\" class=\"avatar\">EvilTrout:</div>\n<blockquote><p>ddd</p></blockquote></aside>"
expect(PrettyText.cook("[quote=\"EvilTrout, post:123, topic:456, full:true\"]ddd[/quote]")).to match_html "<aside class=\"quote\" data-post=\"123\" data-topic=\"456\" data-full=\"true\"><div class=\"title\">\n<div class=\"quote-controls\"></div>\n<img width=\"20\" height=\"20\" src=\"http://test.localhost/uploads/default/avatars/42d/57c/46ce7ee487/40.png\" class=\"avatar\">EvilTrout:</div>\n<blockquote><p>ddd</p></blockquote></aside>"
end

it "trims spaces on quote params" do
PrettyText.cook("[quote=\"EvilTrout, post:555, topic: 666\"]ddd[/quote]").should match_html "<aside class=\"quote\" data-post=\"555\" data-topic=\"666\"><div class=\"title\">\n<div class=\"quote-controls\"></div>\n<img width=\"20\" height=\"20\" src=\"http://test.localhost/uploads/default/avatars/42d/57c/46ce7ee487/40.png\" class=\"avatar\">EvilTrout:</div>\n<blockquote><p>ddd</p></blockquote></aside>"
expect(PrettyText.cook("[quote=\"EvilTrout, post:555, topic: 666\"]ddd[/quote]")).to match_html "<aside class=\"quote\" data-post=\"555\" data-topic=\"666\"><div class=\"title\">\n<div class=\"quote-controls\"></div>\n<img width=\"20\" height=\"20\" src=\"http://test.localhost/uploads/default/avatars/42d/57c/46ce7ee487/40.png\" class=\"avatar\">EvilTrout:</div>\n<blockquote><p>ddd</p></blockquote></aside>"
end

end

it "should handle 3 mentions in a row" do
PrettyText.cook('@hello @hello @hello').should match_html "<p><span class=\"mention\">@hello</span> <span class=\"mention\">@hello</span> <span class=\"mention\">@hello</span></p>"
expect(PrettyText.cook('@hello @hello @hello')).to match_html "<p><span class=\"mention\">@hello</span> <span class=\"mention\">@hello</span> <span class=\"mention\">@hello</span></p>"
end

it "should sanitize the html" do
PrettyText.cook("<script>alert(42)</script>").should match_html "<p></p>"
expect(PrettyText.cook("<script>alert(42)</script>")).to match_html "<p></p>"
end

it 'should allow for @mentions to have punctuation' do
PrettyText.cook("hello @bob's @bob,@bob; @bob\"").should
match_html "<p>hello <span class=\"mention\">@bob</span>'s <span class=\"mention\">@bob</span>,<span class=\"mention\">@bob</span>; <span class=\"mention\">@bob</span>\"</p>"
expect(PrettyText.cook("hello @bob's @bob,@bob; @bob\"")).to match_html(
"<p>hello <span class=\"mention\">@bob</span>'s <span class=\"mention\">@bob</span>,<span class=\"mention\">@bob</span>; <span class=\"mention\">@bob</span>\"</p>"
)
end

# see: https://github.com/sparklemotion/nokogiri/issues/1173
skip 'allows html entities correctly' do
PrettyText.cook("ℵ£¢").should == "<p>ℵ£¢</p>"
expect(PrettyText.cook("ℵ£¢")).to eq("<p>ℵ£¢</p>")
end

end

@@ -57,27 +58,27 @@ describe PrettyText do
end

it "should inject nofollow in all user provided links" do
PrettyText.cook('<a href="http://cnn.com">cnn</a>').should =~ /nofollow/
expect(PrettyText.cook('<a href="http://cnn.com">cnn</a>')).to match(/nofollow/)
end

it "should not inject nofollow in all local links" do
(PrettyText.cook("<a href='#{Discourse.base_url}/test.html'>cnn</a>") !~ /nofollow/).should == true
expect(PrettyText.cook("<a href='#{Discourse.base_url}/test.html'>cnn</a>") !~ /nofollow/).to eq(true)
end

it "should not inject nofollow in all subdomain links" do
(PrettyText.cook("<a href='#{Discourse.base_url.sub('http://', 'http://bla.')}/test.html'>cnn</a>") !~ /nofollow/).should == true
expect(PrettyText.cook("<a href='#{Discourse.base_url.sub('http://', 'http://bla.')}/test.html'>cnn</a>") !~ /nofollow/).to eq(true)
end

it "should not inject nofollow for foo.com" do
(PrettyText.cook("<a href='http://foo.com/test.html'>cnn</a>") !~ /nofollow/).should == true
expect(PrettyText.cook("<a href='http://foo.com/test.html'>cnn</a>") !~ /nofollow/).to eq(true)
end

it "should not inject nofollow for bar.foo.com" do
(PrettyText.cook("<a href='http://bar.foo.com/test.html'>cnn</a>") !~ /nofollow/).should == true
expect(PrettyText.cook("<a href='http://bar.foo.com/test.html'>cnn</a>") !~ /nofollow/).to eq(true)
end

it "should not inject nofollow if omit_nofollow option is given" do
(PrettyText.cook('<a href="http://cnn.com">cnn</a>', omit_nofollow: true) !~ /nofollow/).should == true
expect(PrettyText.cook('<a href="http://cnn.com">cnn</a>', omit_nofollow: true) !~ /nofollow/).to eq(true)
end
end

@@ -85,79 +86,79 @@ describe PrettyText do
it "sanitizes attempts to inject invalid attributes" do
spinner = "<a href=\"http://thedailywtf.com/\" data-bbcode=\"' class='fa fa-spin\">WTF</a>"
PrettyText.excerpt(spinner, 20).should match_html spinner
expect(PrettyText.excerpt(spinner, 20)).to match_html spinner

spinner = %q{<a href="http://thedailywtf.com/" title="' class="fa fa-spin"><img src='http://thedailywtf.com/Resources/Images/Primary/logo.gif"></a>}
PrettyText.excerpt(spinner, 20).should match_html spinner
expect(PrettyText.excerpt(spinner, 20)).to match_html spinner
end

context "images" do

it "should dump images" do
PrettyText.excerpt("<img src='http://cnn.com/a.gif'>",100).should == "[image]"
expect(PrettyText.excerpt("<img src='http://cnn.com/a.gif'>",100)).to eq("[image]")
end

it "should keep alt tags" do
PrettyText.excerpt("<img src='http://cnn.com/a.gif' alt='car' title='my big car'>",100).should == "[car]"
expect(PrettyText.excerpt("<img src='http://cnn.com/a.gif' alt='car' title='my big car'>",100)).to eq("[car]")
end

it "should keep title tags" do
PrettyText.excerpt("<img src='http://cnn.com/a.gif' title='car'>",100).should == "[car]"
expect(PrettyText.excerpt("<img src='http://cnn.com/a.gif' title='car'>",100)).to eq("[car]")
end

it "should convert images to markdown if the option is set" do
PrettyText.excerpt("<img src='http://cnn.com/a.gif' title='car'>", 100, markdown_images: true).should == "![car](http://cnn.com/a.gif)"
expect(PrettyText.excerpt("<img src='http://cnn.com/a.gif' title='car'>", 100, markdown_images: true)).to eq("![car](http://cnn.com/a.gif)")
end

it "should keep spoilers" do
PrettyText.excerpt("<div class='spoiler'><img src='http://cnn.com/a.gif'></div>", 100).should match_html "<span class='spoiler'>[image]</span>"
PrettyText.excerpt("<span class='spoiler'>spoiler</div>", 100).should match_html "<span class='spoiler'>spoiler</span>"
expect(PrettyText.excerpt("<div class='spoiler'><img src='http://cnn.com/a.gif'></div>", 100)).to match_html "<span class='spoiler'>[image]</span>"
expect(PrettyText.excerpt("<span class='spoiler'>spoiler</div>", 100)).to match_html "<span class='spoiler'>spoiler</span>"
end

it "should remove meta informations" do
PrettyText.excerpt(wrapped_image, 100).should match_html "<a href='//localhost:3000/uploads/default/4399/33691397e78b4d75.png' class='lightbox' title='Screen Shot 2014-04-14 at 9.47.10 PM.png'>[image]</a>"
expect(PrettyText.excerpt(wrapped_image, 100)).to match_html "<a href='//localhost:3000/uploads/default/4399/33691397e78b4d75.png' class='lightbox' title='Screen Shot 2014-04-14 at 9.47.10 PM.png'>[image]</a>"
end
end

it "should have an option to strip links" do
PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>",100, strip_links: true).should == "cnn"
expect(PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>",100, strip_links: true)).to eq("cnn")
end

it "should preserve links" do
PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>",100).should match_html "<a href='http://cnn.com'>cnn</a>"
expect(PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>",100)).to match_html "<a href='http://cnn.com'>cnn</a>"
end

it "should deal with special keys properly" do
PrettyText.excerpt("<pre><b></pre>",100).should == ""
expect(PrettyText.excerpt("<pre><b></pre>",100)).to eq("")
end

it "should truncate stuff properly" do
PrettyText.excerpt("hello world",5).should == "hello…"
PrettyText.excerpt("<p>hello</p><p>world</p>",6).should == "hello w…"
expect(PrettyText.excerpt("hello world",5)).to eq("hello…")
expect(PrettyText.excerpt("<p>hello</p><p>world</p>",6)).to eq("hello w…")
end

it "should insert a space between to Ps" do
PrettyText.excerpt("<p>a</p><p>b</p>",5).should == "a b"
expect(PrettyText.excerpt("<p>a</p><p>b</p>",5)).to eq("a b")
end

it "should strip quotes" do
PrettyText.excerpt("<aside class='quote'><p>a</p><p>b</p></aside>boom",5).should == "boom"
expect(PrettyText.excerpt("<aside class='quote'><p>a</p><p>b</p></aside>boom",5)).to eq("boom")
end

it "should not count the surrounds of a link" do
PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>",3).should match_html "<a href='http://cnn.com'>cnn</a>"
expect(PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>",3)).to match_html "<a href='http://cnn.com'>cnn</a>"
end

it "uses an ellipsis instead of html entities if provided with the option" do
PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>", 2, text_entities: true).should match_html "<a href='http://cnn.com'>cn...</a>"
expect(PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>", 2, text_entities: true)).to match_html "<a href='http://cnn.com'>cn...</a>"
end

it "should truncate links" do
PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>",2).should match_html "<a href='http://cnn.com'>cn…</a>"
expect(PrettyText.excerpt("<a href='http://cnn.com'>cnn</a>",2)).to match_html "<a href='http://cnn.com'>cn…</a>"
end

it "doesn't extract empty quotes as links" do
PrettyText.extract_links("<aside class='quote'>not a linked quote</aside>\n").to_a.should be_empty
expect(PrettyText.extract_links("<aside class='quote'>not a linked quote</aside>\n").to_a).to be_empty
end

def extract_urls(text)

@@ -165,15 +166,15 @@ describe PrettyText do
end

it "should be able to extract links" do
extract_urls("<a href='http://cnn.com'>http://bla.com</a>").should == ["http://cnn.com"]
expect(extract_urls("<a href='http://cnn.com'>http://bla.com</a>")).to eq(["http://cnn.com"])
end

it "should extract links to topics" do
extract_urls("<aside class=\"quote\" data-topic=\"321\">aside</aside>").should == ["/t/topic/321"]
expect(extract_urls("<aside class=\"quote\" data-topic=\"321\">aside</aside>")).to eq(["/t/topic/321"])
end

it "should extract links to posts" do
extract_urls("<aside class=\"quote\" data-topic=\"1234\" data-post=\"4567\">aside</aside>").should == ["/t/topic/1234/4567"]
expect(extract_urls("<aside class=\"quote\" data-topic=\"1234\" data-post=\"4567\">aside</aside>")).to eq(["/t/topic/1234/4567"])
end

it "should not extract links inside quotes" do

@@ -186,37 +187,38 @@ describe PrettyText do
<a href='http://body_and_quote.com'>http://useless2.com</a>
")

links.map{|l| [l.url, l.is_quote]}.to_a.sort.should ==
expect(links.map{|l| [l.url, l.is_quote]}.to_a.sort).to eq(
[["http://body_only.com",false],
["http://body_and_quote.com", false],
["/t/topic/1234",true]
].sort
)
end

it "should not preserve tags in code blocks" do
PrettyText.excerpt("<pre><code class='handlebars'><h3>Hours</h3></code></pre>",100).should == "<h3>Hours</h3>"
expect(PrettyText.excerpt("<pre><code class='handlebars'><h3>Hours</h3></code></pre>",100)).to eq("<h3>Hours</h3>")
end

it "should handle nil" do
PrettyText.excerpt(nil,100).should == ''
expect(PrettyText.excerpt(nil,100)).to eq('')
end

it "handles span excerpt at the beginning of a post" do
PrettyText.excerpt("<span class='excerpt'>hi</span> test",100).should == 'hi'
expect(PrettyText.excerpt("<span class='excerpt'>hi</span> test",100)).to eq('hi')
post = Fabricate(:post, raw: "<span class='excerpt'>hi</span> test")
post.excerpt.should == "hi"
expect(post.excerpt).to eq("hi")
end

it "ignores max excerpt length if a span excerpt is specified" do
two_hundred = "123456789 " * 20 + "."
text = two_hundred + "<span class='excerpt'>#{two_hundred}</span>" + two_hundred
PrettyText.excerpt(text, 100).should == two_hundred
expect(PrettyText.excerpt(text, 100)).to eq(two_hundred)
post = Fabricate(:post, raw: text)
post.excerpt.should == two_hundred
expect(post.excerpt).to eq(two_hundred)
end

it "unescapes html entities when we want text entities" do
PrettyText.excerpt("'", 500, text_entities: true).should == "'"
expect(PrettyText.excerpt("'", 500, text_entities: true)).to eq("'")
end

end

@@ -255,17 +257,17 @@ describe PrettyText do
it "adds base url to relative links" do
html = "<p><a class=\"mention\" href=\"/users/wiseguy\">@wiseguy</a>, <a class=\"mention\" href=\"/users/trollol\">@trollol</a> what do you guys think? </p>"
output = make_abs_string(html)
output.should == "<p><a class=\"mention\" href=\"#{base_url}/users/wiseguy\">@wiseguy</a>, <a class=\"mention\" href=\"#{base_url}/users/trollol\">@trollol</a> what do you guys think? </p>"
expect(output).to eq("<p><a class=\"mention\" href=\"#{base_url}/users/wiseguy\">@wiseguy</a>, <a class=\"mention\" href=\"#{base_url}/users/trollol\">@trollol</a> what do you guys think? </p>")
end

it "doesn't change external absolute links" do
html = "<p>Check out <a href=\"http://mywebsite.com/users/boss\">this guy</a>.</p>"
make_abs_string(html).should == html
expect(make_abs_string(html)).to eq(html)
end

it "doesn't change internal absolute links" do
html = "<p>Check out <a href=\"#{base_url}/users/boss\">this guy</a>.</p>"
make_abs_string(html).should == html
expect(make_abs_string(html)).to eq(html)
end

it "can tolerate invalid URLs" do

@@ -283,11 +285,11 @@ describe PrettyText do
it "doesn't change HTML when there's no wrapped image" do
html = "<img src=\"wat.png\">"
strip_image_wrapping(html).should == html
expect(strip_image_wrapping(html)).to eq(html)
end

it "strips the metadata" do
strip_image_wrapping(wrapped_image).should match_html "<div class=\"lightbox-wrapper\"><a href=\"//localhost:3000/uploads/default/4399/33691397e78b4d75.png\" class=\"lightbox\" title=\"Screen Shot 2014-04-14 at 9.47.10 PM.png\"><img src=\"//localhost:3000/uploads/default/_optimized/bd9/b20/bbbcd6a0c0_655x500.png\" width=\"655\" height=\"500\"></a></div>"
expect(strip_image_wrapping(wrapped_image)).to match_html "<div class=\"lightbox-wrapper\"><a href=\"//localhost:3000/uploads/default/4399/33691397e78b4d75.png\" class=\"lightbox\" title=\"Screen Shot 2014-04-14 at 9.47.10 PM.png\"><img src=\"//localhost:3000/uploads/default/_optimized/bd9/b20/bbbcd6a0c0_655x500.png\" width=\"655\" height=\"500\"></a></div>"
end
end

@@ -299,8 +301,8 @@ describe PrettyText do
it 'can escape *' do
PrettyText.cook("***a***a").should match_html("<p><strong><em>a</em></strong>a</p>")
PrettyText.cook("***\\****a").should match_html("<p><strong><em>*</em></strong>a</p>")
expect(PrettyText.cook("***a***a")).to match_html("<p><strong><em>a</em></strong>a</p>")
expect(PrettyText.cook("***\\****a")).to match_html("<p><strong><em>*</em></strong>a</p>")
end

end
@@ -19,18 +19,18 @@ describe Promotion do
let(:promotion) { Promotion.new(user) }

it "doesn't raise an error with a nil user" do
-> { Promotion.new(nil).review }.should_not raise_error
expect { Promotion.new(nil).review }.not_to raise_error
end

context 'that has done nothing' do
let!(:result) { promotion.review }

it "returns false" do
result.should == false
expect(result).to eq(false)
end

it "has not changed the user's trust level" do
user.trust_level.should == TrustLevel[0]
expect(user.trust_level).to eq(TrustLevel[0])
end
end

@@ -45,11 +45,11 @@ describe Promotion do
end

it "returns true" do
@result.should == true
expect(@result).to eq(true)
end

it "has upgraded the user to basic" do
user.trust_level.should == TrustLevel[1]
expect(user.trust_level).to eq(TrustLevel[1])
end
end

@@ -64,11 +64,11 @@ describe Promotion do
let!(:result) { promotion.review }

it "returns false" do
result.should == false
expect(result).to eq(false)
end

it "has not changed the user's trust level" do
user.trust_level.should == TrustLevel[1]
expect(user.trust_level).to eq(TrustLevel[1])
end
end

@@ -88,11 +88,11 @@ describe Promotion do
end

it "returns true" do
@result.should == true
expect(@result).to eq(true)
end

it "has upgraded the user to regular" do
user.trust_level.should == TrustLevel[2]
expect(user.trust_level).to eq(TrustLevel[2])
end
end

@@ -109,7 +109,7 @@ describe Promotion do
it "review_tl2 returns false" do
expect {
promotion.review_tl2.should == false
expect(promotion.review_tl2).to eq(false)
}.to_not change { user.reload.trust_level }
end

@@ -132,12 +132,12 @@ describe Promotion do
end

it "review_tl2 returns true" do
promotion.review_tl2.should == true
expect(promotion.review_tl2).to eq(true)
end

it "promotes to tl3" do
promotion.review_tl2.should == true
user.reload.trust_level.should == TrustLevel[3]
expect(promotion.review_tl2).to eq(true)
expect(user.reload.trust_level).to eq(TrustLevel[3])
end

it "logs a trust level change" do
@@ -14,11 +14,11 @@ describe RateLimiter do
end

it "returns true for can_perform?" do
rate_limiter.can_perform?.should == true
expect(rate_limiter.can_perform?).to eq(true)
end

it "doesn't raise an error on performed!" do
lambda { rate_limiter.performed! }.should_not raise_error
expect { rate_limiter.performed! }.not_to raise_error
end

end

@@ -31,11 +31,11 @@ describe RateLimiter do
context 'never done' do
it "should perform right away" do
rate_limiter.can_perform?.should == true
expect(rate_limiter.can_perform?).to eq(true)
end

it "performs without an error" do
lambda { rate_limiter.performed! }.should_not raise_error
expect { rate_limiter.performed! }.not_to raise_error
end
end

@@ -46,33 +46,33 @@ describe RateLimiter do
end

it "returns false for can_perform when the limit has been hit" do
rate_limiter.can_perform?.should == false
expect(rate_limiter.can_perform?).to eq(false)
end

it "raises an error the third time called" do
lambda { rate_limiter.performed! }.should raise_error(RateLimiter::LimitExceeded)
expect { rate_limiter.performed! }.to raise_error(RateLimiter::LimitExceeded)
end

context "as an admin/moderator" do

it "returns true for can_perform if the user is an admin" do
user.admin = true
rate_limiter.can_perform?.should == true
expect(rate_limiter.can_perform?).to eq(true)
end

it "doesn't raise an error when an admin performs the task" do
user.admin = true
lambda { rate_limiter.performed! }.should_not raise_error
expect { rate_limiter.performed! }.not_to raise_error
end

it "returns true for can_perform if the user is a mod" do
user.moderator = true
rate_limiter.can_perform?.should == true
expect(rate_limiter.can_perform?).to eq(true)
end

it "doesn't raise an error when a moderator performs the task" do
user.moderator = true
lambda { rate_limiter.performed! }.should_not raise_error
expect { rate_limiter.performed! }.not_to raise_error
end

@@ -84,11 +84,11 @@ describe RateLimiter do
end

it "returns true for can_perform since there is now room" do
rate_limiter.can_perform?.should == true
expect(rate_limiter.can_perform?).to eq(true)
end

it "raises no error now that there is room" do
lambda { rate_limiter.performed! }.should_not raise_error
expect { rate_limiter.performed! }.not_to raise_error
end

end
@@ -23,7 +23,7 @@ describe "Redis Store" do
r = store.read "key"

r.should == "key in store"
expect(r).to eq("key in store")
end

it "doesn't collide with our Cache" do

@@ -38,7 +38,7 @@ describe "Redis Store" do
r = store.read "key"

r.should == "key in store"
expect(r).to eq("key in store")
end

it "can be cleared without clearing our cache" do

@@ -51,8 +51,8 @@ describe "Redis Store" do
end

store.clear
store.read("key").should == nil
cache.fetch("key").should == "key in cache"
expect(store.read("key")).to eq(nil)
expect(cache.fetch("key")).to eq("key in cache")

end
@@ -25,7 +25,7 @@ describe "S3Helper" do
let(:file) { file_from_fixtures(filename) }

it "ensures the bucket name isn't blank" do
-> { S3Helper.new("") }.should raise_error(Discourse::InvalidParameters)
expect { S3Helper.new("") }.to raise_error(Discourse::InvalidParameters)
end

describe ".upload" do
@@ -40,7 +40,7 @@ describe Scheduler::Defer do
s == "good"
end

s.should == "good"
expect(s).to eq("good")
end

it "can queue jobs properly" do

@@ -54,7 +54,7 @@ describe Scheduler::Defer do
s == "good"
end

s.should == "good"
expect(s).to eq("good")
end

end
@@ -53,7 +53,7 @@ describe Scheduler::Manager do
describe '#sync' do

it 'increases' do
Scheduler::Manager.seq.should == Scheduler::Manager.seq - 1
expect(Scheduler::Manager.seq).to eq(Scheduler::Manager.seq - 1)
end
end

@@ -62,7 +62,7 @@ describe Scheduler::Manager do
it 'should nuke missing jobs' do
$redis.zadd Scheduler::Manager.queue_key, Time.now.to_i - 1000, "BLABLA"
manager.tick
$redis.zcard(Scheduler::Manager.queue_key).should == 0
expect($redis.zcard(Scheduler::Manager.queue_key)).to eq(0)

end

@@ -81,7 +81,7 @@ describe Scheduler::Manager do
manager.reschedule_orphans!

info = manager.schedule_info(Testing::SuperLongJob)
info.next_run.should <= Time.now.to_i
expect(info.next_run).to be <= Time.now.to_i
end

it 'should only run pending job once' do

@@ -100,19 +100,19 @@ describe Scheduler::Manager do
end
end.map(&:join)

Testing::RandomJob.runs.should == 1
expect(Testing::RandomJob.runs).to eq(1)

info = manager.schedule_info(Testing::RandomJob)
info.prev_run.should be <= Time.now.to_i
info.prev_duration.should be > 0
info.prev_result.should == "OK"
expect(info.prev_run).to be <= Time.now.to_i
expect(info.prev_duration).to be > 0
expect(info.prev_result).to eq("OK")
end

end

describe '#discover_schedules' do
it 'Discovers Testing::RandomJob' do
Scheduler::Manager.discover_schedules.should include(Testing::RandomJob)
expect(Scheduler::Manager.discover_schedules).to include(Testing::RandomJob)
end
end

@@ -122,8 +122,8 @@ describe Scheduler::Manager do
manager.remove(Testing::RandomJob)
manager.ensure_schedule!(Testing::RandomJob)

manager.next_run(Testing::RandomJob)
.should be_within(5.minutes.to_i).of(Time.now.to_i + 5.minutes)
expect(manager.next_run(Testing::RandomJob))
.to be_within(5.minutes.to_i).of(Time.now.to_i + 5.minutes)
end
end
end
@@ -28,29 +28,29 @@ describe Scheduler::ScheduleInfo do
end

it "is a scheduled job" do
RandomJob.should be_scheduled
expect(RandomJob).to be_scheduled
end

it 'starts off invalid' do
@info.valid?.should == false
expect(@info.valid?).to eq(false)
end

it 'will have a due date in the next 5 minutes if it was blank' do
@info.schedule!
@info.valid?.should == true
@info.next_run.should be_within(5.minutes).of(Time.now.to_i)
expect(@info.valid?).to eq(true)
expect(@info.next_run).to be_within(5.minutes).of(Time.now.to_i)
end

it 'will have a due date within the next hour if it just ran' do
@info.prev_run = Time.now.to_i
@info.schedule!
@info.valid?.should == true
@info.next_run.should be_within(1.hour * manager.random_ratio).of(Time.now.to_i + 1.hour)
expect(@info.valid?).to eq(true)
expect(@info.next_run).to be_within(1.hour * manager.random_ratio).of(Time.now.to_i + 1.hour)
end

it 'is invalid if way in the future' do
@info.next_run = Time.now.to_i + 1.year
@info.valid?.should == false
expect(@info.valid?).to eq(false)
end
end

@@ -75,22 +75,22 @@ describe Scheduler::ScheduleInfo do
end

it "is a scheduled job" do
DailyJob.should be_scheduled
expect(DailyJob).to be_scheduled
end

it "starts off invalid" do
@info.valid?.should == false
expect(@info.valid?).to eq(false)
end

skip "will have a due date at the appropriate time if blank" do
@info.next_run.should == nil
expect(@info.next_run).to eq(nil)
@info.schedule!
@info.valid?.should == true
expect(@info.valid?).to eq(true)
end

it 'is invalid if way in the future' do
@info.next_run = Time.now.to_i + 1.year
@info.valid?.should == false
expect(@info.valid?).to eq(false)
end
end
@@ -15,21 +15,21 @@ describe ScoreCalculator do
end

it 'takes the supplied weightings into effect' do
post.score.should == 333
another_post.score.should == 666
expect(post.score).to eq(333)
expect(another_post.score).to eq(666)
end

it "creates the percent_ranks" do
another_post.percent_rank.should == 0.0
post.percent_rank.should == 1.0
expect(another_post.percent_rank).to eq(0.0)
expect(post.percent_rank).to eq(1.0)
end

it "gives the topic a score" do
topic.score.should be_present
expect(topic.score).to be_present
end

it "gives the topic a percent_rank" do
topic.percent_rank.should_not == 1.0
expect(topic.percent_rank).not_to eq(1.0)
end

end

@@ -39,14 +39,14 @@ describe ScoreCalculator do
it "won't update the site settings when the site settings don't match" do
ScoreCalculator.new(reads: 3).calculate
topic.reload
topic.has_summary.should == false
expect(topic.has_summary).to eq(false)
end

it "removes the summary flag if the topic no longer qualifies" do
topic.update_column(:has_summary, true)
ScoreCalculator.new(reads: 3).calculate
topic.reload
topic.has_summary.should == false
expect(topic.has_summary).to eq(false)
end

it "won't update the site settings when the site settings don't match" do

@@ -56,7 +56,7 @@ describe ScoreCalculator do
ScoreCalculator.new(reads: 3).calculate
topic.reload
topic.has_summary.should == true
expect(topic.has_summary).to eq(true)
end

end
@@ -4,8 +4,8 @@ require_dependency 'sidekiq/pausable'
describe Sidekiq do
it "can pause and unpause" do
Sidekiq.pause!
Sidekiq.paused?.should == true
expect(Sidekiq.paused?).to eq(true)
Sidekiq.unpause!
Sidekiq.paused?.should == false
expect(Sidekiq.paused?).to eq(false)
end
end
@@ -53,7 +53,7 @@ describe SiteSettings::YamlLoader do
it "maintains order of categories" do
receiver.load_yaml(simple)
receiver.categories.should == ['category1', 'category2', 'category3']
expect(receiver.categories).to eq(['category1', 'category2', 'category3'])
end

it "can load client settings" do
@@ -6,58 +6,58 @@ require 'slug'
describe Slug do

it 'replaces spaces with hyphens' do
Slug.for("hello world").should == 'hello-world'
expect(Slug.for("hello world")).to eq('hello-world')
end

it 'changes accented characters' do
Slug.for('àllo').should == 'allo'
expect(Slug.for('àllo')).to eq('allo')
end

it 'replaces symbols' do
Slug.for('evil#trout').should == 'evil-trout'
expect(Slug.for('evil#trout')).to eq('evil-trout')
end

it 'handles a.b.c properly' do
Slug.for("a.b.c").should == "a-b-c"
expect(Slug.for("a.b.c")).to eq("a-b-c")
end

it 'handles double dots right' do
Slug.for("a....b.....c").should == "a-b-c"
expect(Slug.for("a....b.....c")).to eq("a-b-c")
end

it 'strips trailing punctuation' do
Slug.for("hello...").should == "hello"
expect(Slug.for("hello...")).to eq("hello")
end

it 'strips leading punctuation' do
Slug.for("...hello").should == "hello"
expect(Slug.for("...hello")).to eq("hello")
end

it 'handles our initial transliteration' do
from = "àáäâčďèéëěêìíïîľĺňòóöôŕřšťůùúüûýžñç"
to = "aaaacdeeeeeiiiillnoooorrstuuuuuyznc"
Slug.for(from).should == to
expect(Slug.for(from)).to eq(to)
end

it 'replaces underscores' do
Slug.for("o_o_o").should == "o-o-o"
expect(Slug.for("o_o_o")).to eq("o-o-o")
end

it "doesn't generate slugs that are just numbers" do
Slug.for('123').should be_blank
expect(Slug.for('123')).to be_blank
end

it "doesn't generate slugs that are just numbers" do
Slug.for('2').should be_blank
expect(Slug.for('2')).to be_blank
end

it "doesn't keep single quotes within word" do
Slug.for("Jeff hate's this").should == "jeff-hates-this"
expect(Slug.for("Jeff hate's this")).to eq("jeff-hates-this")
end

it "translate the chineses" do
SiteSetting.default_locale = 'zh_CN'
Slug.for("习近平:中企承建港口电站等助斯里兰卡发展").should == "xi-jin-ping-zhong-qi-cheng-jian-gang-kou-dian-zhan-deng-zhu-si-li-lan-qia-fa-zhan"
expect(Slug.for("习近平:中企承建港口电站等助斯里兰卡发展")).to eq("xi-jin-ping-zhong-qi-cheng-jian-gang-kou-dian-zhan-deng-zhu-si-li-lan-qia-fa-zhan")
end

end
@@ -19,7 +19,7 @@ describe SpamHandler do
Fabricate(:user, ip_address: "42.42.42.42", trust_level: TrustLevel[0])

Fabricate(:user, ip_address: "42.42.42.42", trust_level: TrustLevel[1])
-> { Fabricate(:user, ip_address: "42.42.42.42", trust_level: TrustLevel[0]) }.should raise_error(ActiveRecord::RecordInvalid)
expect { Fabricate(:user, ip_address: "42.42.42.42", trust_level: TrustLevel[0]) }.to raise_error(ActiveRecord::RecordInvalid)
end

it "doesn't limit registrations since there is a TL2+ user with that IP" do
@@ -13,8 +13,8 @@ describe SqlBuilder do
p = Fabricate(:post)
@builder.where('id = :id and topic_id = :topic_id', id: p.id, topic_id: p.topic_id)
p2 = @builder.exec.first
p2.id.should == p.id
p2.should == p
expect(p2.id).to eq(p.id)
expect(p2).to eq(p)
end
end

@@ -32,13 +32,13 @@ describe SqlBuilder do
true AS bool")
.map_exec(SqlBuilder::TestClass)

rows.count.should == 1
expect(rows.count).to eq(1)
row = rows[0]
row.int.should == 1
row.string.should == "string"
row.text.should == "text"
row.bool.should == true
row.date.should be_within(10.seconds).of(DateTime.now)
expect(row.int).to eq(1)
expect(row.string).to eq("string")
expect(row.text).to eq("text")
expect(row.bool).to eq(true)
expect(row.date).to be_within(10.seconds).of(DateTime.now)
end
end

@@ -48,27 +48,27 @@ describe SqlBuilder do
end

it "should allow for 1 param exec" do
@builder.exec(a: 1, b: 2).values[0][0].should == '1'
expect(@builder.exec(a: 1, b: 2).values[0][0]).to eq('1')
end

it "should allow for a single where" do
@builder.where(":a = 1")
@builder.exec(a: 1, b: 2).values[0][0].should == '1'
expect(@builder.exec(a: 1, b: 2).values[0][0]).to eq('1')
end

it "should allow where chaining" do
@builder.where(":a = 1")
@builder.where("2 = 1")
@builder.exec(a: 1, b: 2).to_a.length.should == 0
expect(@builder.exec(a: 1, b: 2).to_a.length).to eq(0)
end

it "should allow order by" do
@builder.order_by("A desc").limit(1)
.exec(a:1, b:2).values[0][0].should == "2"
expect(@builder.order_by("A desc").limit(1)
.exec(a:1, b:2).values[0][0]).to eq("2")
end
it "should allow offset" do
@builder.order_by("A desc").offset(1)
.exec(a:1, b:2).values[0][0].should == "1"
expect(@builder.order_by("A desc").offset(1)
.exec(a:1, b:2).values[0][0]).to eq("1")
end
end
@@ -25,44 +25,44 @@ describe SuggestedTopicsBuilder do
builder.splice_results([fake_topic(3,1)], :high)
builder.splice_results([fake_topic(4,1)], :high)

builder.results.map(&:id).should == [3,4,2]
expect(builder.results.map(&:id)).to eq([3,4,2])

# we have 2 items in category 1
builder.category_results_left.should == 3
expect(builder.category_results_left).to eq(3)
end

it "inserts using default approach for non high priority" do
builder.splice_results([fake_topic(2,2)], :high)
builder.splice_results([fake_topic(3,1)], :low)

builder.results.map(&:id).should == [2,3]
expect(builder.results.map(&:id)).to eq([2,3])
end

it "inserts multiple results and puts topics in the correct order" do
builder.splice_results([fake_topic(2,1), fake_topic(3,2), fake_topic(4,1)], :high)
builder.results.map(&:id).should == [2,4,3]
expect(builder.results.map(&:id)).to eq([2,4,3])
end
end

it "has the correct defaults" do
builder.excluded_topic_ids.include?(topic.id).should == true
builder.results_left.should == 5
builder.size.should == 0
builder.should_not be_full
expect(builder.excluded_topic_ids.include?(topic.id)).to eq(true)
expect(builder.results_left).to eq(5)
expect(builder.size).to eq(0)
expect(builder).not_to be_full
end

it "returns full correctly" do
builder.stubs(:results_left).returns(0)
builder.should be_full
expect(builder).to be_full
end

context "adding results" do

it "adds nothing with nil results" do
builder.add_results(nil)
builder.results_left.should == 5
builder.size.should == 0
builder.should_not be_full
expect(builder.results_left).to eq(5)
expect(builder.size).to eq(0)
expect(builder).not_to be_full
end

context "adding topics" do

@@ -74,11 +74,11 @@ describe SuggestedTopicsBuilder do
end

it "added the result correctly" do
builder.size.should == 1
builder.results_left.should == 4
builder.should_not be_full
builder.excluded_topic_ids.include?(topic.id).should == true
builder.excluded_topic_ids.include?(other_topic.id).should == true
expect(builder.size).to eq(1)
expect(builder.results_left).to eq(4)
expect(builder).not_to be_full
expect(builder.excluded_topic_ids.include?(topic.id)).to eq(true)
expect(builder.excluded_topic_ids.include?(other_topic.id)).to eq(true)
end

end

@@ -90,8 +90,8 @@ describe SuggestedTopicsBuilder do
it "adds archived and closed, but not invisible topics" do
builder.add_results(Topic)
builder.size.should == 2
builder.should_not be_full
expect(builder.size).to eq(2)
expect(builder).not_to be_full
end
end

@@ -99,10 +99,10 @@ describe SuggestedTopicsBuilder do
let!(:category) { Fabricate(:category) }

it "doesn't add a category definition topic" do
category.topic_id.should be_present
expect(category.topic_id).to be_present
builder.add_results(Topic)
builder.size.should == 0
builder.should_not be_full
expect(builder.size).to eq(0)
expect(builder).not_to be_full
end
end
@@ -14,12 +14,12 @@ describe SystemMessage do
let(:topic) { post.topic }

it 'should create a post correctly' do
post.should be_present
post.should be_valid
topic.should be_private_message
topic.should be_valid
topic.subtype.should == TopicSubtype.system_message
topic.allowed_users.include?(user).should == true
expect(post).to be_present
expect(post).to be_valid
expect(topic).to be_private_message
expect(topic).to be_valid
expect(topic.subtype).to eq(TopicSubtype.system_message)
expect(topic.allowed_users.include?(user)).to eq(true)
end
end
@ -9,11 +9,11 @@ describe TextCleaner do
|
|||
let(:deduplicated_string) { "my precious!" }
|
||||
|
||||
it "ignores multiple ! by default" do
|
||||
TextCleaner.clean(duplicated_string).should == duplicated_string
|
||||
expect(TextCleaner.clean(duplicated_string)).to eq(duplicated_string)
|
||||
end
|
||||
|
||||
it "deduplicates ! when enabled" do
|
||||
TextCleaner.clean(duplicated_string, deduplicate_exclamation_marks: true).should == deduplicated_string
|
||||
expect(TextCleaner.clean(duplicated_string, deduplicate_exclamation_marks: true)).to eq(deduplicated_string)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -24,11 +24,11 @@ describe TextCleaner do
|
|||
let(:deduplicated_string) { "please help me?" }
|
||||
|
||||
it "ignores multiple ? by default" do
|
||||
TextCleaner.clean(duplicated_string).should == duplicated_string
|
||||
expect(TextCleaner.clean(duplicated_string)).to eq(duplicated_string)
|
||||
end
|
||||
|
||||
it "deduplicates ? when enabled" do
|
||||
TextCleaner.clean(duplicated_string, deduplicate_question_marks: true).should == deduplicated_string
|
||||
expect(TextCleaner.clean(duplicated_string, deduplicate_question_marks: true)).to eq(deduplicated_string)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -40,15 +40,15 @@ describe TextCleaner do
|
|||
let(:regular_case) { "entire text is all caps" }
|
||||
|
||||
it "ignores all upper case text by default" do
|
||||
TextCleaner.clean(all_caps).should == all_caps
|
||||
expect(TextCleaner.clean(all_caps)).to eq(all_caps)
|
||||
end
|
||||
|
||||
it "replaces all upper case text with regular case letters when enabled" do
|
||||
TextCleaner.clean(all_caps, replace_all_upper_case: true).should == regular_case
|
||||
expect(TextCleaner.clean(all_caps, replace_all_upper_case: true)).to eq(regular_case)
|
||||
end
|
||||
|
||||
it "ignores almost all upper case text when enabled" do
|
||||
TextCleaner.clean(almost_all_caps, replace_all_upper_case: true).should == almost_all_caps
|
||||
expect(TextCleaner.clean(almost_all_caps, replace_all_upper_case: true)).to eq(almost_all_caps)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -60,15 +60,15 @@ describe TextCleaner do
|
|||
let(:iletter) { "iLetter" }
|
||||
|
||||
it "ignores first letter case by default" do
|
||||
TextCleaner.clean(lowercased).should == lowercased
|
||||
TextCleaner.clean(capitalized).should == capitalized
|
||||
TextCleaner.clean(iletter).should == iletter
|
||||
expect(TextCleaner.clean(lowercased)).to eq(lowercased)
|
||||
expect(TextCleaner.clean(capitalized)).to eq(capitalized)
|
||||
expect(TextCleaner.clean(iletter)).to eq(iletter)
|
||||
end
|
||||
|
||||
it "capitalizes first letter when enabled" do
|
||||
TextCleaner.clean(lowercased, capitalize_first_letter: true).should == capitalized
|
||||
TextCleaner.clean(capitalized, capitalize_first_letter: true).should == capitalized
|
||||
TextCleaner.clean(iletter, capitalize_first_letter: true).should == iletter
|
||||
expect(TextCleaner.clean(lowercased, capitalize_first_letter: true)).to eq(capitalized)
|
||||
expect(TextCleaner.clean(capitalized, capitalize_first_letter: true)).to eq(capitalized)
|
||||
expect(TextCleaner.clean(iletter, capitalize_first_letter: true)).to eq(iletter)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -80,17 +80,17 @@ describe TextCleaner do
|
|||
let(:without_period) { "oops" }
|
||||
|
||||
it "ignores unnecessary periods at the end by default" do
|
||||
TextCleaner.clean(with_one_period).should == with_one_period
|
||||
TextCleaner.clean(with_several_periods).should == with_several_periods
|
||||
expect(TextCleaner.clean(with_one_period)).to eq(with_one_period)
|
||||
expect(TextCleaner.clean(with_several_periods)).to eq(with_several_periods)
|
||||
end
|
||||
|
||||
it "removes unnecessary periods at the end when enabled" do
|
||||
TextCleaner.clean(with_one_period, remove_all_periods_from_the_end: true).should == without_period
|
||||
TextCleaner.clean(with_several_periods, remove_all_periods_from_the_end: true).should == without_period
|
||||
expect(TextCleaner.clean(with_one_period, remove_all_periods_from_the_end: true)).to eq(without_period)
|
||||
expect(TextCleaner.clean(with_several_periods, remove_all_periods_from_the_end: true)).to eq(without_period)
|
||||
end
|
||||
|
||||
it "keeps trailing whitespaces when enabled" do
|
||||
TextCleaner.clean(with_several_periods + " ", remove_all_periods_from_the_end: true).should == without_period + " "
|
||||
expect(TextCleaner.clean(with_several_periods + " ", remove_all_periods_from_the_end: true)).to eq(without_period + " ")
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -103,18 +103,18 @@ describe TextCleaner do
|
|||
let(:without_space_question) { "oops?" }
|
||||
|
||||
it "ignores extraneous space before the end punctuation by default" do
|
||||
TextCleaner.clean(with_space_exclamation).should == with_space_exclamation
|
||||
TextCleaner.clean(with_space_question).should == with_space_question
|
||||
expect(TextCleaner.clean(with_space_exclamation)).to eq(with_space_exclamation)
|
||||
expect(TextCleaner.clean(with_space_question)).to eq(with_space_question)
|
||||
end
|
||||
|
||||
it "removes extraneous space before the end punctuation when enabled" do
|
||||
TextCleaner.clean(with_space_exclamation, remove_extraneous_space: true).should == without_space_exclamation
|
||||
TextCleaner.clean(with_space_question, remove_extraneous_space: true).should == without_space_question
|
||||
expect(TextCleaner.clean(with_space_exclamation, remove_extraneous_space: true)).to eq(without_space_exclamation)
|
||||
expect(TextCleaner.clean(with_space_question, remove_extraneous_space: true)).to eq(without_space_question)
|
||||
end
|
||||
|
||||
it "keep trailing whitespaces when enabled" do
|
||||
TextCleaner.clean(with_space_exclamation + " ", remove_extraneous_space: true).should == without_space_exclamation + " "
|
||||
TextCleaner.clean(with_space_question + " ", remove_extraneous_space: true).should == without_space_question + " "
|
||||
expect(TextCleaner.clean(with_space_exclamation + " ", remove_extraneous_space: true)).to eq(without_space_exclamation + " ")
|
||||
expect(TextCleaner.clean(with_space_question + " ", remove_extraneous_space: true)).to eq(without_space_question + " ")
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -125,11 +125,11 @@ describe TextCleaner do
|
|||
let(:unspacey_string) { "hello there's weird spaces here." }
|
||||
|
||||
it "ignores interior spaces by default" do
|
||||
TextCleaner.clean(spacey_string).should == spacey_string
|
||||
expect(TextCleaner.clean(spacey_string)).to eq(spacey_string)
|
||||
end
|
||||
|
||||
it "fixes interior spaces when enabled" do
|
||||
TextCleaner.clean(spacey_string, fixes_interior_spaces: true).should == unspacey_string
|
||||
expect(TextCleaner.clean(spacey_string, fixes_interior_spaces: true)).to eq(unspacey_string)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -140,11 +140,11 @@ describe TextCleaner do
|
|||
let(:unspacey_string) { "test" }
|
||||
|
||||
it "ignores leading and trailing whitespaces by default" do
|
||||
TextCleaner.clean(spacey_string).should == spacey_string
|
||||
expect(TextCleaner.clean(spacey_string)).to eq(spacey_string)
|
||||
end
|
||||
|
||||
it "strips leading and trailing whitespaces when enabled" do
|
||||
TextCleaner.clean(spacey_string, strip_whitespaces: true).should == unspacey_string
|
||||
expect(TextCleaner.clean(spacey_string, strip_whitespaces: true)).to eq(unspacey_string)
|
||||
end
|
||||
|
||||
end
|
||||
|
@ -152,11 +152,11 @@ describe TextCleaner do
|
|||
context "title" do
|
||||
|
||||
it "fixes interior spaces" do
|
||||
TextCleaner.clean_title("Hello there").should == "Hello there"
|
||||
expect(TextCleaner.clean_title("Hello there")).to eq("Hello there")
|
||||
end
|
||||
|
||||
it "strips leading and trailing whitespaces" do
|
||||
TextCleaner.clean_title(" \t Hello there \n ").should == "Hello there"
|
||||
expect(TextCleaner.clean_title(" \t Hello there \n ")).to eq("Hello there")
|
||||
end
|
||||
|
||||
context "title_prettify site setting is enabled" do
|
||||
|
@ -164,27 +164,27 @@ describe TextCleaner do
|
|||
before { SiteSetting.title_prettify = true }
|
||||
|
||||
it "deduplicates !" do
|
||||
TextCleaner.clean_title("Hello there!!!!").should == "Hello there!"
|
||||
expect(TextCleaner.clean_title("Hello there!!!!")).to eq("Hello there!")
|
||||
end
|
||||
|
||||
it "deduplicates ?" do
|
||||
TextCleaner.clean_title("Hello there????").should == "Hello there?"
|
||||
expect(TextCleaner.clean_title("Hello there????")).to eq("Hello there?")
|
||||
end
|
||||
|
||||
it "replaces all upper case text with regular case letters" do
|
||||
TextCleaner.clean_title("HELLO THERE").should == "Hello there"
|
||||
expect(TextCleaner.clean_title("HELLO THERE")).to eq("Hello there")
|
||||
end
|
||||
|
||||
it "capitalizes first letter" do
|
||||
TextCleaner.clean_title("hello there").should == "Hello there"
|
||||
expect(TextCleaner.clean_title("hello there")).to eq("Hello there")
|
||||
end
|
||||
|
||||
it "removes unnecessary period at the end" do
|
||||
TextCleaner.clean_title("Hello there.").should == "Hello there"
|
||||
expect(TextCleaner.clean_title("Hello there.")).to eq("Hello there")
|
||||
end
|
||||
|
||||
it "removes extraneous space before the end punctuation" do
|
||||
TextCleaner.clean_title("Hello there ?").should == "Hello there?"
|
||||
expect(TextCleaner.clean_title("Hello there ?")).to eq("Hello there?")
|
||||
end
|
||||
|
||||
end
|
||||
@@ -194,8 +194,8 @@ describe TextCleaner do
describe "#normalize_whitespaces" do
|
||||
it "normalize whitespaces" do
|
||||
whitespaces = "\u0020\u00A0\u1680\u180E\u2000\u2001\u2002\u2003\u2004\u2005\u2006\u2007\u2008\u2009\u200A\u200B\u2028\u2029\u202F\u205F\u3000\uFEFF"
|
||||
whitespaces.strip.should_not == ""
|
||||
TextCleaner.normalize_whitespaces(whitespaces).strip.should == ""
|
||||
expect(whitespaces.strip).not_to eq("")
|
||||
expect(TextCleaner.normalize_whitespaces(whitespaces).strip).to eq("")
|
||||
end
|
||||
end
@@ -6,41 +6,41 @@ require 'text_sentinel'
describe TextSentinel do
|
||||
|
||||
it "allows utf-8 chars" do
|
||||
TextSentinel.new("йȝîûηыეமிᚉ⠛").text.should == "йȝîûηыეமிᚉ⠛"
|
||||
expect(TextSentinel.new("йȝîûηыეமிᚉ⠛").text).to eq("йȝîûηыეமிᚉ⠛")
|
||||
end
|
||||
|
||||
context "entropy" do
|
||||
|
||||
it "returns 0 for an empty string" do
|
||||
TextSentinel.new("").entropy.should == 0
|
||||
expect(TextSentinel.new("").entropy).to eq(0)
|
||||
end
|
||||
|
||||
it "returns 0 for a nil string" do
|
||||
TextSentinel.new(nil).entropy.should == 0
|
||||
expect(TextSentinel.new(nil).entropy).to eq(0)
|
||||
end
|
||||
|
||||
it "returns 1 for a string with many leading spaces" do
|
||||
TextSentinel.new((" " * 10) + "x").entropy.should == 1
|
||||
expect(TextSentinel.new((" " * 10) + "x").entropy).to eq(1)
|
||||
end
|
||||
|
||||
it "returns 1 for one char, even repeated" do
|
||||
TextSentinel.new("a" * 10).entropy.should == 1
|
||||
expect(TextSentinel.new("a" * 10).entropy).to eq(1)
|
||||
end
|
||||
|
||||
it "returns an accurate count of many chars" do
|
||||
TextSentinel.new("evil trout is evil").entropy.should == 10
|
||||
expect(TextSentinel.new("evil trout is evil").entropy).to eq(10)
|
||||
end
|
||||
|
||||
it "Works on foreign characters" do
|
||||
TextSentinel.new("去年十社會警告").entropy.should == 19
|
||||
expect(TextSentinel.new("去年十社會警告").entropy).to eq(19)
|
||||
end
|
||||
|
||||
it "generates enough entropy for short foreign strings" do
|
||||
TextSentinel.new("又一个测").entropy.should == 11
|
||||
expect(TextSentinel.new("又一个测").entropy).to eq(11)
|
||||
end
|
||||
|
||||
it "handles repeated foreign characters" do
|
||||
TextSentinel.new("又一个测试话题" * 3).entropy.should == 18
|
||||
expect(TextSentinel.new("又一个测试话题" * 3).entropy).to eq(18)
|
||||
end
|
||||
|
||||
end
|
||||
@@ -67,41 +67,41 @@ describe TextSentinel do
let(:valid_string) { "This is a cool topic about Discourse" }
|
||||
|
||||
it "allows a valid string" do
|
||||
TextSentinel.new(valid_string).should be_valid
|
||||
expect(TextSentinel.new(valid_string)).to be_valid
|
||||
end
|
||||
|
||||
it "doesn't allow all caps topics" do
|
||||
TextSentinel.new(valid_string.upcase).should_not be_valid
|
||||
expect(TextSentinel.new(valid_string.upcase)).not_to be_valid
|
||||
end
|
||||
|
||||
it "enforces the minimum entropy" do
|
||||
TextSentinel.new(valid_string, min_entropy: 16).should be_valid
|
||||
expect(TextSentinel.new(valid_string, min_entropy: 16)).to be_valid
|
||||
end
|
||||
|
||||
it "enforces the minimum entropy" do
|
||||
TextSentinel.new(valid_string, min_entropy: 17).should_not be_valid
|
||||
expect(TextSentinel.new(valid_string, min_entropy: 17)).not_to be_valid
|
||||
end
|
||||
|
||||
it "allows all foreign characters" do
|
||||
TextSentinel.new("去年十二月,北韓不顧國際社會警告").should be_valid
|
||||
expect(TextSentinel.new("去年十二月,北韓不顧國際社會警告")).to be_valid
|
||||
end
|
||||
|
||||
it "doesn't allow a long alphanumeric string with no spaces" do
|
||||
TextSentinel.new("jfewjfoejwfojeojfoejofjeo3" * 5, max_word_length: 30).should_not be_valid
|
||||
expect(TextSentinel.new("jfewjfoejwfojeojfoejofjeo3" * 5, max_word_length: 30)).not_to be_valid
|
||||
end
|
||||
|
||||
it "doesn't accept junk symbols as a string" do
|
||||
TextSentinel.new("[[[").should_not be_valid
|
||||
TextSentinel.new("<<<").should_not be_valid
|
||||
TextSentinel.new("{{$!").should_not be_valid
|
||||
expect(TextSentinel.new("[[[")).not_to be_valid
|
||||
expect(TextSentinel.new("<<<")).not_to be_valid
|
||||
expect(TextSentinel.new("{{$!")).not_to be_valid
|
||||
end
|
||||
|
||||
it "does allow a long alphanumeric string joined with slashes" do
|
||||
TextSentinel.new("gdfgdfgdfg/fgdfgdfgdg/dfgdfgdfgd/dfgdfgdfgf", max_word_length: 30).should be_valid
|
||||
expect(TextSentinel.new("gdfgdfgdfg/fgdfgdfgdg/dfgdfgdfgd/dfgdfgdfgf", max_word_length: 30)).to be_valid
|
||||
end
|
||||
|
||||
it "does allow a long alphanumeric string joined with dashes" do
|
||||
TextSentinel.new("gdfgdfgdfg-fgdfgdfgdg-dfgdfgdfgd-dfgdfgdfgf", max_word_length: 30).should be_valid
|
||||
expect(TextSentinel.new("gdfgdfgdfg-fgdfgdfgdg-dfgdfgdfgd-dfgdfgdfgf", max_word_length: 30)).to be_valid
|
||||
end
|
||||
|
||||
end
|
||||
@@ -111,7 +111,7 @@ describe TextSentinel do
it "uses a sensible min entropy value when min title length is less than title_min_entropy" do
|
||||
SiteSetting.stubs(:min_topic_title_length).returns(3)
|
||||
SiteSetting.stubs(:title_min_entropy).returns(10)
|
||||
TextSentinel.title_sentinel('Hey').should be_valid
|
||||
expect(TextSentinel.title_sentinel('Hey')).to be_valid
|
||||
end
|
||||
|
||||
end
|
||||
@@ -121,14 +121,14 @@ describe TextSentinel do
it "uses a sensible min entropy value when min body length is less than min entropy" do
|
||||
SiteSetting.stubs(:min_post_length).returns(3)
|
||||
SiteSetting.stubs(:body_min_entropy).returns(7)
|
||||
TextSentinel.body_sentinel('Yup').should be_valid
|
||||
expect(TextSentinel.body_sentinel('Yup')).to be_valid
|
||||
end
|
||||
|
||||
it "uses a sensible min entropy value when min pm body length is less than min entropy" do
|
||||
SiteSetting.stubs(:min_post_length).returns(5)
|
||||
SiteSetting.stubs(:min_private_message_post_length).returns(3)
|
||||
SiteSetting.stubs(:body_min_entropy).returns(7)
|
||||
TextSentinel.body_sentinel('Lol', private_message: true).should be_valid
|
||||
expect(TextSentinel.body_sentinel('Lol', private_message: true)).to be_valid
|
||||
end
|
||||
end
@@ -17,35 +17,35 @@ describe TopicCreator do
end
|
||||
|
||||
it "should be possible for an admin to create a topic" do
|
||||
TopicCreator.create(admin, Guardian.new(admin), valid_attrs).should be_valid
|
||||
expect(TopicCreator.create(admin, Guardian.new(admin), valid_attrs)).to be_valid
|
||||
end
|
||||
|
||||
it "should be possible for a moderator to create a topic" do
|
||||
TopicCreator.create(moderator, Guardian.new(moderator), valid_attrs).should be_valid
|
||||
expect(TopicCreator.create(moderator, Guardian.new(moderator), valid_attrs)).to be_valid
|
||||
end
|
||||
|
||||
context 'regular user' do
|
||||
before { SiteSetting.stubs(:min_trust_to_create_topic).returns(TrustLevel[0]) }
|
||||
|
||||
it "should be possible for a regular user to create a topic" do
|
||||
TopicCreator.create(user, Guardian.new(user), valid_attrs).should be_valid
|
||||
expect(TopicCreator.create(user, Guardian.new(user), valid_attrs)).to be_valid
|
||||
end
|
||||
|
||||
it "should be possible for a regular user to create a topic with blank auto_close_time" do
|
||||
TopicCreator.create(user, Guardian.new(user), valid_attrs.merge(auto_close_time: '')).should be_valid
|
||||
expect(TopicCreator.create(user, Guardian.new(user), valid_attrs.merge(auto_close_time: ''))).to be_valid
|
||||
end
|
||||
|
||||
it "ignores auto_close_time without raising an error" do
|
||||
topic = TopicCreator.create(user, Guardian.new(user), valid_attrs.merge(auto_close_time: '24'))
|
||||
topic.should be_valid
|
||||
topic.auto_close_at.should == nil
|
||||
expect(topic).to be_valid
|
||||
expect(topic.auto_close_at).to eq(nil)
|
||||
end
|
||||
|
||||
it "category name is case insensitive" do
|
||||
category = Fabricate(:category, name: "Neil's Blog")
|
||||
topic = TopicCreator.create(user, Guardian.new(user), valid_attrs.merge(category: "neil's blog"))
|
||||
topic.should be_valid
|
||||
topic.category.should == category
|
||||
expect(topic).to be_valid
|
||||
expect(topic.category).to eq(category)
|
||||
end
|
||||
end
|
||||
end
@@ -20,21 +20,21 @@ describe TopicQuery do
topic = Fabricate(:topic, category: category)
|
||||
topic = Fabricate(:topic, visible: false)
|
||||
|
||||
TopicQuery.new(nil).list_latest.topics.count.should == 0
|
||||
TopicQuery.new(user).list_latest.topics.count.should == 0
|
||||
expect(TopicQuery.new(nil).list_latest.topics.count).to eq(0)
|
||||
expect(TopicQuery.new(user).list_latest.topics.count).to eq(0)
|
||||
|
||||
Topic.top_viewed(10).count.should == 0
|
||||
Topic.recent(10).count.should == 0
|
||||
expect(Topic.top_viewed(10).count).to eq(0)
|
||||
expect(Topic.recent(10).count).to eq(0)
|
||||
|
||||
# mods can see hidden topics
|
||||
TopicQuery.new(moderator).list_latest.topics.count.should == 1
|
||||
expect(TopicQuery.new(moderator).list_latest.topics.count).to eq(1)
|
||||
# admins can see all the topics
|
||||
TopicQuery.new(admin).list_latest.topics.count.should == 3
|
||||
expect(TopicQuery.new(admin).list_latest.topics.count).to eq(3)
|
||||
|
||||
group.add(user)
|
||||
group.save
|
||||
|
||||
TopicQuery.new(user).list_latest.topics.count.should == 2
|
||||
expect(TopicQuery.new(user).list_latest.topics.count).to eq(2)
|
||||
|
||||
end
|
||||
|
||||
@@ -54,8 +54,8 @@ describe TopicQuery do
|
||||
query = TopicQuery.new(user, filter: 'bookmarked').list_latest
|
||||
|
||||
query.topics.length.should == 1
|
||||
query.topics.first.user_data.post_action_data.should == {PostActionType.types[:bookmark] => [1,2]}
|
||||
expect(query.topics.length).to eq(1)
|
||||
expect(query.topics.first.user_data.post_action_data).to eq({PostActionType.types[:bookmark] => [1,2]})
|
||||
end
|
||||
end
|
||||
|
||||
@@ -63,10 +63,10 @@ describe TopicQuery do
it "filters deleted topics correctly" do
|
||||
_topic = Fabricate(:topic, deleted_at: 1.year.ago)
|
||||
|
||||
TopicQuery.new(admin, status: 'deleted').list_latest.topics.size.should == 1
|
||||
TopicQuery.new(moderator, status: 'deleted').list_latest.topics.size.should == 1
|
||||
TopicQuery.new(user, status: 'deleted').list_latest.topics.size.should == 0
|
||||
TopicQuery.new(nil, status: 'deleted').list_latest.topics.size.should == 0
|
||||
expect(TopicQuery.new(admin, status: 'deleted').list_latest.topics.size).to eq(1)
|
||||
expect(TopicQuery.new(moderator, status: 'deleted').list_latest.topics.size).to eq(1)
|
||||
expect(TopicQuery.new(user, status: 'deleted').list_latest.topics.size).to eq(0)
|
||||
expect(TopicQuery.new(nil, status: 'deleted').list_latest.topics.size).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -76,27 +76,27 @@ describe TopicQuery do
let(:diff_category) { Fabricate(:diff_category) }
|
||||
|
||||
it "returns topics in the category when we filter to it" do
|
||||
TopicQuery.new(moderator).list_latest.topics.size.should == 0
|
||||
expect(TopicQuery.new(moderator).list_latest.topics.size).to eq(0)
|
||||
|
||||
# Filter by slug
|
||||
TopicQuery.new(moderator, category: category.slug).list_latest.topics.size.should == 1
|
||||
TopicQuery.new(moderator, category: "#{category.id}-category").list_latest.topics.size.should == 1
|
||||
expect(TopicQuery.new(moderator, category: category.slug).list_latest.topics.size).to eq(1)
|
||||
expect(TopicQuery.new(moderator, category: "#{category.id}-category").list_latest.topics.size).to eq(1)
|
||||
|
||||
list = TopicQuery.new(moderator, category: diff_category.slug).list_latest
|
||||
list.topics.size.should == 1
|
||||
list.preload_key.should == "topic_list_c/different-category/l/latest"
|
||||
expect(list.topics.size).to eq(1)
|
||||
expect(list.preload_key).to eq("topic_list_c/different-category/l/latest")
|
||||
|
||||
# Defaults to no category filter when slug does not exist
|
||||
TopicQuery.new(moderator, category: 'made up slug').list_latest.topics.size.should == 2
|
||||
expect(TopicQuery.new(moderator, category: 'made up slug').list_latest.topics.size).to eq(2)
|
||||
end
|
||||
|
||||
context 'subcategories' do
|
||||
let!(:subcategory) { Fabricate(:category, parent_category_id: category.id)}
|
||||
|
||||
it "works with subcategories" do
|
||||
TopicQuery.new(moderator, category: category.id).list_latest.topics.size.should == 1
|
||||
TopicQuery.new(moderator, category: subcategory.id).list_latest.topics.size.should == 1
|
||||
TopicQuery.new(moderator, category: category.id, no_subcategories: true).list_latest.topics.size.should == 1
|
||||
expect(TopicQuery.new(moderator, category: category.id).list_latest.topics.size).to eq(1)
|
||||
expect(TopicQuery.new(moderator, category: subcategory.id).list_latest.topics.size).to eq(1)
|
||||
expect(TopicQuery.new(moderator, category: category.id, no_subcategories: true).list_latest.topics.size).to eq(1)
|
||||
end
|
||||
|
||||
end
|
||||
@@ -111,8 +111,8 @@ describe TopicQuery do
CategoryUser.create!(user_id: user.id,
|
||||
category_id: category.id,
|
||||
notification_level: CategoryUser.notification_levels[:muted])
|
||||
topic_query.list_new.topics.map(&:id).should_not include(topic.id)
|
||||
topic_query.list_latest.topics.map(&:id).should_not include(topic.id)
|
||||
expect(topic_query.list_new.topics.map(&:id)).not_to include(topic.id)
|
||||
expect(topic_query.list_latest.topics.map(&:id)).not_to include(topic.id)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -181,13 +181,13 @@ describe TopicQuery do
|
||||
context 'list_latest' do
|
||||
it "returns the topics in the correct order" do
|
||||
topics.map(&:id).should == [pinned_topic, future_topic, closed_topic, archived_topic, regular_topic].map(&:id)
|
||||
expect(topics.map(&:id)).to eq([pinned_topic, future_topic, closed_topic, archived_topic, regular_topic].map(&:id))
|
||||
|
||||
# includes the invisible topic if you're a moderator
|
||||
TopicQuery.new(moderator).list_latest.topics.include?(invisible_topic).should == true
|
||||
expect(TopicQuery.new(moderator).list_latest.topics.include?(invisible_topic)).to eq(true)
|
||||
|
||||
# includes the invisible topic if you're an admin" do
|
||||
TopicQuery.new(admin).list_latest.topics.include?(invisible_topic).should == true
|
||||
expect(TopicQuery.new(admin).list_latest.topics.include?(invisible_topic)).to eq(true)
|
||||
end
|
||||
|
||||
context 'sort_order' do
|
||||
@@ -198,28 +198,28 @@ describe TopicQuery do
|
||||
it "returns the topics in correct order" do
|
||||
# returns the topics in likes order if requested
|
||||
ids_in_order('posts').should == [future_topic, pinned_topic, archived_topic, regular_topic, invisible_topic, closed_topic].map(&:id)
|
||||
expect(ids_in_order('posts')).to eq([future_topic, pinned_topic, archived_topic, regular_topic, invisible_topic, closed_topic].map(&:id))
|
||||
|
||||
# returns the topics in reverse likes order if requested
|
||||
ids_in_order('posts', false).should == [closed_topic, invisible_topic, regular_topic, archived_topic, pinned_topic, future_topic].map(&:id)
|
||||
expect(ids_in_order('posts', false)).to eq([closed_topic, invisible_topic, regular_topic, archived_topic, pinned_topic, future_topic].map(&:id))
|
||||
|
||||
# returns the topics in likes order if requested
|
||||
ids_in_order('likes').should == [pinned_topic, regular_topic, archived_topic, future_topic, invisible_topic, closed_topic].map(&:id)
|
||||
expect(ids_in_order('likes')).to eq([pinned_topic, regular_topic, archived_topic, future_topic, invisible_topic, closed_topic].map(&:id))
|
||||
|
||||
# returns the topics in reverse likes order if requested
|
||||
ids_in_order('likes', false).should == [closed_topic, invisible_topic, future_topic, archived_topic, regular_topic, pinned_topic].map(&:id)
|
||||
expect(ids_in_order('likes', false)).to eq([closed_topic, invisible_topic, future_topic, archived_topic, regular_topic, pinned_topic].map(&:id))
|
||||
|
||||
# returns the topics in views order if requested
|
||||
ids_in_order('views').should == [regular_topic, archived_topic, future_topic, pinned_topic, closed_topic, invisible_topic].map(&:id)
|
||||
expect(ids_in_order('views')).to eq([regular_topic, archived_topic, future_topic, pinned_topic, closed_topic, invisible_topic].map(&:id))
|
||||
|
||||
# returns the topics in reverse views order if requested" do
|
||||
ids_in_order('views', false).should == [invisible_topic, closed_topic, pinned_topic, future_topic, archived_topic, regular_topic].map(&:id)
|
||||
expect(ids_in_order('views', false)).to eq([invisible_topic, closed_topic, pinned_topic, future_topic, archived_topic, regular_topic].map(&:id))
|
||||
|
||||
# returns the topics in posters order if requested" do
|
||||
ids_in_order('posters').should == [pinned_topic, regular_topic, future_topic, invisible_topic, closed_topic, archived_topic].map(&:id)
|
||||
expect(ids_in_order('posters')).to eq([pinned_topic, regular_topic, future_topic, invisible_topic, closed_topic, archived_topic].map(&:id))
|
||||
|
||||
# returns the topics in reverse posters order if requested" do
|
||||
ids_in_order('posters', false).should == [archived_topic, closed_topic, invisible_topic, future_topic, regular_topic, pinned_topic].map(&:id)
|
||||
expect(ids_in_order('posters', false)).to eq([archived_topic, closed_topic, invisible_topic, future_topic, regular_topic, pinned_topic].map(&:id))
|
||||
end
|
||||
|
||||
end
|
||||
@@ -233,7 +233,7 @@ describe TopicQuery do
end
|
||||
|
||||
it "no longer shows the pinned topic at the top" do
|
||||
topics.should == [future_topic, closed_topic, archived_topic, pinned_topic, regular_topic]
|
||||
expect(topics).to eq([future_topic, closed_topic, archived_topic, pinned_topic, regular_topic])
|
||||
end
|
||||
end
|
||||
|
||||
@@ -246,17 +246,17 @@ describe TopicQuery do
let!(:topic_in_cat) { Fabricate(:topic, category: category) }
|
||||
|
||||
it "returns the topic with a category when filtering by category" do
|
||||
topic_query.list_category(category).topics.should == [topic_category, topic_in_cat]
|
||||
expect(topic_query.list_category(category).topics).to eq([topic_category, topic_in_cat])
|
||||
end
|
||||
|
||||
it "returns only the topic category when filtering by another category" do
|
||||
another_category = Fabricate(:category, name: 'new cat')
|
||||
topic_query.list_category(another_category).topics.should == [another_category.topic]
|
||||
expect(topic_query.list_category(another_category).topics).to eq([another_category.topic])
|
||||
end
|
||||
|
||||
describe '#list_new_in_category' do
|
||||
it 'returns the topic category and the categorized topic' do
|
||||
topic_query.list_new_in_category(category).topics.should == [topic_in_cat, topic_category]
|
||||
expect(topic_query.list_new_in_category(category).topics).to eq([topic_in_cat, topic_category])
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -265,7 +265,7 @@ describe TopicQuery do
|
||||
context 'with no data' do
|
||||
it "has no unread topics" do
|
||||
topic_query.list_unread.topics.should be_blank
|
||||
expect(topic_query.list_unread.topics).to be_blank
|
||||
end
|
||||
end
|
||||
|
@@ -280,7 +280,7 @@ describe TopicQuery do
|||
|
||||
context 'list_unread' do
|
||||
it 'contains no topics' do
|
||||
topic_query.list_unread.topics.should == []
|
||||
expect(topic_query.list_unread.topics).to eq([])
|
||||
end
|
||||
end
|
||||
|
@@ -291,13 +291,13 @@ describe TopicQuery do
|||
end
|
||||
|
||||
it 'only contains the partially read topic' do
|
||||
topic_query.list_unread.topics.should == [partially_read]
|
||||
expect(topic_query.list_unread.topics).to eq([partially_read])
|
||||
end
|
||||
end
|
||||
|
||||
context 'list_read' do
|
||||
it 'contain both topics ' do
|
||||
topic_query.list_read.topics.should =~ [fully_read, partially_read]
|
||||
expect(topic_query.list_read.topics).to match_array([fully_read, partially_read])
|
||||
end
|
||||
end
|
||||
end
|
@@ -308,7 +308,7 @@ describe TopicQuery do
|||
|
||||
context 'without a new topic' do
|
||||
it "has no new topics" do
|
||||
topic_query.list_new.topics.should be_blank
|
||||
expect(topic_query.list_new.topics).to be_blank
|
||||
end
|
||||
end
|
||||
|
||||
@@ -318,7 +318,7 @@ describe TopicQuery do
|
||||
|
||||
it "contains the new topic" do
|
||||
topics.should == [new_topic]
|
||||
expect(topics).to eq([new_topic])
|
||||
end
|
||||
|
||||
it "contains no new topics for a user that has missed the window" do
|
||||
@@ -326,7 +326,7 @@ describe TopicQuery do
user.save
|
||||
new_topic.created_at = 10.minutes.ago
|
||||
new_topic.save
|
||||
topics.should == []
|
||||
expect(topics).to eq([])
|
||||
end
|
||||
|
||||
context "muted topics" do
|
||||
@@ -335,7 +335,7 @@ describe TopicQuery do
end
|
||||
|
||||
it "returns an empty set" do
|
||||
topics.should be_blank
|
||||
expect(topics).to be_blank
|
||||
end
|
||||
|
||||
context 'un-muted' do
|
||||
@@ -344,7 +344,7 @@ describe TopicQuery do
end
|
||||
|
||||
it "returns the topic again" do
|
||||
topics.should == [new_topic]
|
||||
expect(topics).to eq([new_topic])
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -356,14 +356,14 @@ describe TopicQuery do
let(:topics) { topic_query.list_posted.topics }
|
||||
|
||||
it "returns blank when there are no posted topics" do
|
||||
topics.should be_blank
|
||||
expect(topics).to be_blank
|
||||
end
|
||||
|
||||
context 'created topics' do
|
||||
let!(:created_topic) { create_post(user: user).topic }
|
||||
|
||||
it "includes the created topic" do
|
||||
topics.include?(created_topic).should == true
|
||||
expect(topics.include?(created_topic)).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -372,7 +372,7 @@ describe TopicQuery do
let!(:your_post) { create_post(user: user, topic: other_users_topic )}
|
||||
|
||||
it "includes the posted topic" do
|
||||
topics.include?(other_users_topic).should == true
|
||||
expect(topics.include?(other_users_topic)).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -380,7 +380,7 @@ describe TopicQuery do
let(:other_users_topic) { create_post(user: creator).topic }
|
||||
|
||||
it "does not include the topic" do
|
||||
topics.should be_blank
|
||||
expect(topics).to be_blank
|
||||
end
|
||||
|
||||
context "but interacted with" do
|
||||
@@ -388,19 +388,19 @@ describe TopicQuery do
it "is not included if read" do
|
||||
TopicUser.update_last_read(user, other_users_topic.id, 0, 0)
|
||||
|
||||
topics.should be_blank
|
||||
expect(topics).to be_blank
|
||||
end
|
||||
|
||||
it "is not included if muted" do
|
||||
other_users_topic.notify_muted!(user)
|
||||
|
||||
topics.should be_blank
|
||||
expect(topics).to be_blank
|
||||
end
|
||||
|
||||
it "is not included if tracking" do
|
||||
other_users_topic.notify_tracking!(user)
|
||||
|
||||
topics.should be_blank
|
||||
expect(topics).to be_blank
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -413,7 +413,7 @@ describe TopicQuery do
let!(:new_topic) { Fabricate(:post, user: creator).topic }
|
||||
|
||||
it "should return the new topic" do
|
||||
TopicQuery.new.list_suggested_for(topic).topics.should == [new_topic]
|
||||
expect(TopicQuery.new.list_suggested_for(topic).topics).to eq([new_topic])
|
||||
end
|
||||
end
|
||||
|
||||
@@ -425,7 +425,7 @@ describe TopicQuery do
let!(:invisible_topic) { Fabricate(:topic, user: creator, visible: false) }
|
||||
|
||||
it "should omit the closed/archived/invisbiel topics from suggested" do
|
||||
TopicQuery.new.list_suggested_for(topic).topics.should == [regular_topic]
|
||||
expect(TopicQuery.new.list_suggested_for(topic).topics).to eq([regular_topic])
|
||||
end
|
||||
end
|
||||
|
||||
@@ -435,7 +435,7 @@ describe TopicQuery do
let(:suggested_topics) { topic_query.list_suggested_for(topic).topics.map{|t| t.id} }
|
||||
|
||||
it "should return empty results when there is nothing to find" do
|
||||
suggested_topics.should be_blank
|
||||
expect(suggested_topics).to be_blank
|
||||
end
|
||||
|
||||
context 'with some existing topics' do
|
||||
@@ -463,24 +463,24 @@ describe TopicQuery do
|
||||
it "won't return new or fully read if there are enough partially read topics" do
|
||||
SiteSetting.stubs(:suggested_topics).returns(1)
|
||||
suggested_topics.should == [partially_read.id]
|
||||
expect(suggested_topics).to eq([partially_read.id])
|
||||
end
|
||||
|
||||
it "won't return fully read if there are enough partially read topics and new topics" do
|
||||
SiteSetting.stubs(:suggested_topics).returns(4)
|
||||
suggested_topics[0].should == partially_read.id
|
||||
suggested_topics[1,3].should include(new_topic.id)
|
||||
suggested_topics[1,3].should include(closed_topic.id)
|
||||
suggested_topics[1,3].should include(archived_topic.id)
|
||||
expect(suggested_topics[0]).to eq(partially_read.id)
|
||||
expect(suggested_topics[1,3]).to include(new_topic.id)
|
||||
expect(suggested_topics[1,3]).to include(closed_topic.id)
|
||||
expect(suggested_topics[1,3]).to include(archived_topic.id)
|
||||
end
|
||||
|
||||
it "returns unread, then new, then random" do
|
||||
SiteSetting.stubs(:suggested_topics).returns(7)
|
||||
suggested_topics[0].should == partially_read.id
|
||||
suggested_topics[1,3].should include(new_topic.id)
|
||||
suggested_topics[1,3].should include(closed_topic.id)
|
||||
suggested_topics[1,3].should include(archived_topic.id)
|
||||
suggested_topics[4].should == fully_read.id
|
||||
expect(suggested_topics[0]).to eq(partially_read.id)
|
||||
expect(suggested_topics[1,3]).to include(new_topic.id)
|
||||
expect(suggested_topics[1,3]).to include(closed_topic.id)
|
||||
expect(suggested_topics[1,3]).to include(archived_topic.id)
|
||||
expect(suggested_topics[4]).to eq(fully_read.id)
|
||||
# random doesn't include closed and archived
|
||||
end
@@ -10,29 +10,29 @@ describe TopicView do
let(:topic_view) { TopicView.new(topic.id, coding_horror) }
|
||||
|
||||
it "raises a not found error if the topic doesn't exist" do
|
||||
lambda { TopicView.new(1231232, coding_horror) }.should raise_error(Discourse::NotFound)
|
||||
expect { TopicView.new(1231232, coding_horror) }.to raise_error(Discourse::NotFound)
|
||||
end
|
||||
|
||||
it "raises an error if the user can't see the topic" do
|
||||
Guardian.any_instance.expects(:can_see?).with(topic).returns(false)
|
||||
lambda { topic_view }.should raise_error(Discourse::InvalidAccess)
|
||||
expect { topic_view }.to raise_error(Discourse::InvalidAccess)
|
||||
end
|
||||
|
||||
it "handles deleted topics" do
|
||||
admin = Fabricate(:admin)
|
||||
topic.trash!(admin)
|
||||
lambda { TopicView.new(topic.id, Fabricate(:user)) }.should raise_error(Discourse::NotFound)
|
||||
lambda { TopicView.new(topic.id, admin) }.should_not raise_error
|
||||
expect { TopicView.new(topic.id, Fabricate(:user)) }.to raise_error(Discourse::NotFound)
|
||||
expect { TopicView.new(topic.id, admin) }.not_to raise_error
|
||||
end
|
||||
|
||||
context "chunk_size" do
|
||||
it "returns `chunk_size` by default" do
|
||||
TopicView.new(topic.id, coding_horror).chunk_size.should == TopicView.chunk_size
|
||||
expect(TopicView.new(topic.id, coding_horror).chunk_size).to eq(TopicView.chunk_size)
|
||||
end
|
||||
|
||||
it "returns `slow_chunk_size` when slow_platform is true" do
|
||||
tv = TopicView.new(topic.id, coding_horror, slow_platform: true)
|
||||
tv.chunk_size.should == TopicView.slow_chunk_size
|
||||
expect(tv.chunk_size).to eq(TopicView.slow_chunk_size)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -47,69 +47,69 @@ describe TopicView do
it "it can find the best responses" do
|
||||
|
||||
best2 = TopicView.new(topic.id, coding_horror, best: 2)
|
||||
best2.posts.count.should == 2
|
||||
best2.posts[0].id.should == p2.id
|
||||
best2.posts[1].id.should == p3.id
|
||||
expect(best2.posts.count).to eq(2)
|
||||
expect(best2.posts[0].id).to eq(p2.id)
|
||||
expect(best2.posts[1].id).to eq(p3.id)
|
||||
|
||||
topic.update_status('closed', true, Fabricate(:admin))
|
||||
topic.posts.count.should == 4
|
||||
expect(topic.posts.count).to eq(4)
|
||||
|
||||
# should not get the status post
|
||||
best = TopicView.new(topic.id, nil, best: 99)
|
||||
best.posts.count.should == 2
|
||||
best.filtered_post_ids.size.should == 3
|
||||
best.current_post_ids.should =~ [p2.id, p3.id]
|
||||
expect(best.posts.count).to eq(2)
|
||||
expect(best.filtered_post_ids.size).to eq(3)
|
||||
expect(best.current_post_ids).to match_array([p2.id, p3.id])
|
||||
|
||||
# should get no results for trust level too low
|
||||
best = TopicView.new(topic.id, nil, best: 99, min_trust_level: coding_horror.trust_level + 1)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
|
||||
|
||||
# should filter out the posts with a score that is too low
|
||||
best = TopicView.new(topic.id, nil, best: 99, min_score: 99)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
|
||||
# should filter out everything if min replies not met
|
||||
best = TopicView.new(topic.id, nil, best: 99, min_replies: 99)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
|
||||
# should punch through posts if the score is high enough
|
||||
p2.update_column(:score, 100)
|
||||
|
||||
best = TopicView.new(topic.id, nil, best: 99, bypass_trust_level_score: 100, min_trust_level: coding_horror.trust_level + 1)
|
||||
best.posts.count.should == 1
|
||||
expect(best.posts.count).to eq(1)
|
||||
|
||||
# 0 means ignore
|
||||
best = TopicView.new(topic.id, nil, best: 99, bypass_trust_level_score: 0, min_trust_level: coding_horror.trust_level + 1)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
|
||||
# If we restrict to posts a moderator liked, return none
|
||||
best = TopicView.new(topic.id, nil, best: 99, only_moderator_liked: true)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
|
||||
# It doesn't count likes from admins
|
||||
PostAction.act(admin, p3, PostActionType.types[:like])
|
||||
best = TopicView.new(topic.id, nil, best: 99, only_moderator_liked: true)
|
||||
best.posts.count.should == 0
|
||||
expect(best.posts.count).to eq(0)
|
||||
|
||||
# It should find the post liked by the moderator
|
||||
PostAction.act(moderator, p2, PostActionType.types[:like])
|
||||
best = TopicView.new(topic.id, nil, best: 99, only_moderator_liked: true)
|
||||
best.posts.count.should == 1
|
||||
expect(best.posts.count).to eq(1)
|
||||
|
||||
end
|
||||
|
||||
it "raises NotLoggedIn if the user isn't logged in and is trying to view a private message" do
|
||||
Topic.any_instance.expects(:private_message?).returns(true)
|
||||
lambda { TopicView.new(topic.id, nil) }.should raise_error(Discourse::NotLoggedIn)
|
||||
expect { TopicView.new(topic.id, nil) }.to raise_error(Discourse::NotLoggedIn)
|
||||
end
|
||||
|
||||
it "provides an absolute url" do
|
||||
topic_view.absolute_url.should be_present
|
||||
expect(topic_view.absolute_url).to be_present
|
||||
end
|
||||
|
||||
it "provides a summary of the first post" do
|
||||
topic_view.summary.should be_present
|
||||
expect(topic_view.summary).to be_present
|
||||
end
|
||||
|
||||
describe "#get_canonical_path" do
|
||||
@@ -123,13 +123,13 @@ describe TopicView do
end
|
||||
|
||||
it "generates canonical path correctly" do
|
||||
TopicView.new(1234, user).canonical_path.should eql(path)
|
||||
TopicView.new(1234, user, page: 5).canonical_path.should eql("/1234?page=5")
|
||||
expect(TopicView.new(1234, user).canonical_path).to eql(path)
|
||||
expect(TopicView.new(1234, user, page: 5).canonical_path).to eql("/1234?page=5")
|
||||
end
|
||||
|
||||
it "generates a canonical correctly for paged results" do
|
||||
TopicView.new(1234, user, post_number: 10 * TopicView.chunk_size )
|
||||
.canonical_path.should eql("/1234?page=10")
|
||||
expect(TopicView.new(1234, user, post_number: 10 * TopicView.chunk_size )
|
||||
.canonical_path).to eql("/1234?page=10")
|
||||
end
|
||||
end
|
||||
|
||||
@@ -150,43 +150,43 @@ describe TopicView do
end
|
||||
|
||||
it "should return the next page" do
|
||||
TopicView.new(1234, user).next_page.should eql(2)
|
||||
expect(TopicView.new(1234, user).next_page).to eql(2)
|
||||
end
|
||||
end
|
||||
|
||||
context '.post_counts_by_user' do
|
||||
it 'returns the two posters with their counts' do
|
||||
topic_view.post_counts_by_user.to_a.should =~ [[first_poster.id, 2], [coding_horror.id, 1]]
|
||||
expect(topic_view.post_counts_by_user.to_a).to match_array([[first_poster.id, 2], [coding_horror.id, 1]])
|
||||
end
|
||||
end
|
||||
|
||||
context '.participants' do
|
||||
it 'returns the two participants hashed by id' do
|
||||
topic_view.participants.to_a.should =~ [[first_poster.id, first_poster], [coding_horror.id, coding_horror]]
|
||||
expect(topic_view.participants.to_a).to match_array([[first_poster.id, first_poster], [coding_horror.id, coding_horror]])
|
||||
end
|
||||
end
|
||||
|
||||
context '.all_post_actions' do
|
||||
it 'is blank at first' do
|
||||
topic_view.all_post_actions.should be_blank
|
||||
expect(topic_view.all_post_actions).to be_blank
|
||||
end
|
||||
|
||||
it 'returns the like' do
|
||||
PostAction.act(coding_horror, p1, PostActionType.types[:like])
|
||||
topic_view.all_post_actions[p1.id][PostActionType.types[:like]].should be_present
|
||||
expect(topic_view.all_post_actions[p1.id][PostActionType.types[:like]]).to be_present
|
||||
end
|
||||
end
|
||||
|
||||
context '.all_active_flags' do
|
||||
it 'is blank at first' do
|
||||
topic_view.all_active_flags.should be_blank
|
||||
expect(topic_view.all_active_flags).to be_blank
|
||||
end
|
||||
|
||||
it 'returns the active flags' do
|
||||
PostAction.act(moderator, p1, PostActionType.types[:off_topic])
|
||||
PostAction.act(coding_horror, p1, PostActionType.types[:off_topic])
|
||||
|
||||
topic_view.all_active_flags[p1.id][PostActionType.types[:off_topic]].count.should == 2
|
||||
expect(topic_view.all_active_flags[p1.id][PostActionType.types[:off_topic]].count).to eq(2)
|
||||
end
|
||||
|
||||
it 'returns only the active flags' do
|
||||
@@ -195,7 +195,7 @@ describe TopicView do
|
||||
PostAction.defer_flags!(p1, moderator)
|
||||
|
||||
topic_view.all_active_flags[p1.id].should == nil
|
||||
expect(topic_view.all_active_flags[p1.id]).to eq(nil)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -203,21 +203,21 @@ describe TopicView do
context '.read?' do
|
||||
it 'tracks correctly' do
|
||||
# anon is assumed to have read everything
|
||||
TopicView.new(topic.id).read?(1).should == true
|
||||
expect(TopicView.new(topic.id).read?(1)).to eq(true)
|
||||
|
||||
# random user has nothing
|
||||
topic_view.read?(1).should == false
|
||||
expect(topic_view.read?(1)).to eq(false)
|
||||
|
||||
# a real user that just read it should have it marked
|
||||
PostTiming.process_timings(coding_horror, topic.id, 1, [[1,1000]])
|
||||
TopicView.new(topic.id, coding_horror).read?(1).should == true
|
||||
TopicView.new(topic.id, coding_horror).topic_user.should be_present
|
||||
expect(TopicView.new(topic.id, coding_horror).read?(1)).to eq(true)
|
||||
expect(TopicView.new(topic.id, coding_horror).topic_user).to be_present
|
||||
end
|
||||
end
|
||||
|
||||
context '.topic_user' do
|
||||
it 'returns nil when there is no user' do
|
||||
TopicView.new(topic.id, nil).topic_user.should be_blank
|
||||
expect(TopicView.new(topic.id, nil).topic_user).to be_blank
|
||||
end
|
||||
end
|
||||
|
||||
@@ -232,12 +232,12 @@ describe TopicView do
recent_posts = topic_view.recent_posts
|
||||
|
||||
# count
|
||||
recent_posts.count.should == 25
|
||||
expect(recent_posts.count).to eq(25)
|
||||
|
||||
# ordering
|
||||
recent_posts.include?(p1).should == false
|
||||
recent_posts.include?(p3).should == true
|
||||
recent_posts.first.created_at.should > recent_posts.last.created_at
|
||||
expect(recent_posts.include?(p1)).to eq(false)
|
||||
expect(recent_posts.include?(p3)).to eq(true)
|
||||
expect(recent_posts.first.created_at).to be > recent_posts.last.created_at
|
||||
end
|
||||
end
|
||||
|
||||
@@ -269,13 +269,13 @@ describe TopicView do
describe "contains_gaps?" do
|
||||
it "works" do
|
||||
# does not contain contains_gaps with default filtering
|
||||
topic_view.contains_gaps?.should == false
|
||||
expect(topic_view.contains_gaps?).to eq(false)
|
||||
# contains contains_gaps when filtered by username" do
|
||||
TopicView.new(topic.id, coding_horror, username_filters: ['eviltrout']).contains_gaps?.should == true
|
||||
expect(TopicView.new(topic.id, coding_horror, username_filters: ['eviltrout']).contains_gaps?).to eq(true)
|
||||
# contains contains_gaps when filtered by summary
|
||||
TopicView.new(topic.id, coding_horror, filter: 'summary').contains_gaps?.should == true
|
||||
expect(TopicView.new(topic.id, coding_horror, filter: 'summary').contains_gaps?).to eq(true)
|
||||
# contains contains_gaps when filtered by best
|
||||
TopicView.new(topic.id, coding_horror, best: 5).contains_gaps?.should == true
|
||||
expect(TopicView.new(topic.id, coding_horror, best: 5).contains_gaps?).to eq(true)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -293,7 +293,7 @@ describe TopicView do
TopicView.new(topic.id, coding_horror).posts.count
|
||||
}.to raise_error(Discourse::InvalidAccess)
|
||||
|
||||
TopicView.new(t2.id, coding_horror, post_ids: [p1.id,p2.id]).posts.count.should == 0
|
||||
expect(TopicView.new(t2.id, coding_horror, post_ids: [p1.id,p2.id]).posts.count).to eq(0)
|
||||
|
||||
end
|
||||
|
||||
@@ -302,10 +302,10 @@ describe TopicView do
before { TopicView.stubs(:chunk_size).returns(2) }
|
||||
|
||||
it 'returns correct posts for all pages' do
|
||||
topic_view.filter_posts_paged(1).should == [p1, p2]
|
||||
topic_view.filter_posts_paged(2).should == [p3, p5]
|
||||
topic_view.filter_posts_paged(3).should == []
|
||||
topic_view.filter_posts_paged(100).should == []
|
||||
expect(topic_view.filter_posts_paged(1)).to eq([p1, p2])
|
||||
expect(topic_view.filter_posts_paged(2)).to eq([p3, p5])
|
||||
expect(topic_view.filter_posts_paged(3)).to eq([])
|
||||
expect(topic_view.filter_posts_paged(100)).to eq([])
|
||||
end
|
||||
end
|
||||
|
||||
@@ -317,58 +317,58 @@ describe TopicView do
|
||||
it "snaps to the lower boundary" do
|
||||
near_view = topic_view_near(p1)
|
||||
near_view.desired_post.should == p1
|
||||
near_view.posts.should == [p1, p2, p3]
|
||||
near_view.contains_gaps?.should == false
|
||||
expect(near_view.desired_post).to eq(p1)
|
||||
expect(near_view.posts).to eq([p1, p2, p3])
|
||||
expect(near_view.contains_gaps?).to eq(false)
|
||||
end
|
||||
|
||||
it "snaps to the upper boundary" do
|
||||
near_view = topic_view_near(p5)
|
||||
near_view.desired_post.should == p5
|
||||
near_view.posts.should == [p2, p3, p5]
|
||||
near_view.contains_gaps?.should == false
|
||||
expect(near_view.desired_post).to eq(p5)
|
||||
expect(near_view.posts).to eq([p2, p3, p5])
|
||||
expect(near_view.contains_gaps?).to eq(false)
|
||||
end
|
||||
|
||||
it "returns the posts in the middle" do
|
||||
near_view = topic_view_near(p2)
|
||||
near_view.desired_post.should == p2
|
||||
near_view.posts.should == [p1, p2, p3]
|
||||
near_view.contains_gaps?.should == false
|
||||
expect(near_view.desired_post).to eq(p2)
|
||||
expect(near_view.posts).to eq([p1, p2, p3])
|
||||
expect(near_view.contains_gaps?).to eq(false)
|
||||
end
|
||||
|
||||
it "gaps deleted posts to an admin" do
|
||||
coding_horror.admin = true
|
||||
near_view = topic_view_near(p3)
|
||||
near_view.desired_post.should == p3
|
||||
near_view.posts.should == [p2, p3, p5]
|
||||
near_view.gaps.before.should == {p5.id => [p4.id]}
|
||||
near_view.gaps.after.should == {p5.id => [p6.id, p7.id]}
|
||||
expect(near_view.desired_post).to eq(p3)
|
||||
expect(near_view.posts).to eq([p2, p3, p5])
|
||||
expect(near_view.gaps.before).to eq({p5.id => [p4.id]})
|
||||
expect(near_view.gaps.after).to eq({p5.id => [p6.id, p7.id]})
|
||||
end
|
||||
|
||||
it "returns deleted posts to an admin with show_deleted" do
|
||||
coding_horror.admin = true
|
||||
near_view = topic_view_near(p3, true)
|
||||
near_view.desired_post.should == p3
|
||||
near_view.posts.should == [p2, p3, p4]
|
||||
near_view.contains_gaps?.should == false
|
||||
expect(near_view.desired_post).to eq(p3)
|
||||
expect(near_view.posts).to eq([p2, p3, p4])
|
||||
expect(near_view.contains_gaps?).to eq(false)
|
||||
end
|
||||
|
||||
it "gaps deleted posts by nuked users to an admin" do
|
||||
coding_horror.admin = true
|
||||
near_view = topic_view_near(p5)
|
||||
near_view.desired_post.should == p5
|
||||
expect(near_view.desired_post).to eq(p5)
|
||||
# note: both p4 and p6 get skipped
|
||||
near_view.posts.should == [p2, p3, p5]
|
||||
near_view.gaps.before.should == {p5.id => [p4.id]}
|
||||
near_view.gaps.after.should == {p5.id => [p6.id, p7.id]}
|
||||
expect(near_view.posts).to eq([p2, p3, p5])
|
||||
expect(near_view.gaps.before).to eq({p5.id => [p4.id]})
|
||||
expect(near_view.gaps.after).to eq({p5.id => [p6.id, p7.id]})
|
||||
end
|
||||
|
||||
it "returns deleted posts by nuked users to an admin with show_deleted" do
|
||||
coding_horror.admin = true
|
||||
near_view = topic_view_near(p5, true)
|
||||
near_view.desired_post.should == p5
|
||||
near_view.posts.should == [p4, p5, p6]
|
||||
near_view.contains_gaps?.should == false
|
||||
expect(near_view.desired_post).to eq(p5)
|
||||
expect(near_view.posts).to eq([p4, p5, p6])
|
||||
expect(near_view.contains_gaps?).to eq(false)
|
||||
end
|
||||
|
||||
context "when 'posts per page' exceeds the number of posts" do
|
||||
@@ -376,23 +376,23 @@ describe TopicView do
|
||||
it 'returns all the posts' do
|
||||
near_view = topic_view_near(p5)
|
||||
near_view.posts.should == [p1, p2, p3, p5]
|
||||
near_view.contains_gaps?.should == false
|
||||
expect(near_view.posts).to eq([p1, p2, p3, p5])
|
||||
expect(near_view.contains_gaps?).to eq(false)
|
||||
end
|
||||
|
||||
it 'gaps deleted posts to admins' do
|
||||
coding_horror.admin = true
|
||||
near_view = topic_view_near(p5)
|
||||
near_view.posts.should == [p1, p2, p3, p5]
|
||||
near_view.gaps.before.should == {p5.id => [p4.id]}
|
||||
near_view.gaps.after.should == {p5.id => [p6.id, p7.id]}
|
||||
expect(near_view.posts).to eq([p1, p2, p3, p5])
|
||||
expect(near_view.gaps.before).to eq({p5.id => [p4.id]})
|
||||
expect(near_view.gaps.after).to eq({p5.id => [p6.id, p7.id]})
|
||||
end
|
||||
|
||||
it 'returns deleted posts to admins' do
|
||||
coding_horror.admin = true
|
||||
near_view = topic_view_near(p5, true)
|
||||
near_view.posts.should == [p1, p2, p3, p4, p5, p6, p7]
|
||||
near_view.contains_gaps?.should == false
|
||||
expect(near_view.posts).to eq([p1, p2, p3, p4, p5, p6, p7])
|
||||
expect(near_view.contains_gaps?).to eq(false)
|
||||
end
|
||||
end
|
||||
end
@@ -15,8 +15,8 @@ describe TopicsBulkAction do
|
||||
tu = TopicUser.find_by(user_id: post1.user_id, topic_id: post1.topic_id)
|
||||
|
||||
tu.last_read_post_number.should == 3
|
||||
tu.highest_seen_post_number.should == 3
|
||||
expect(tu.last_read_post_number).to eq(3)
|
||||
expect(tu.highest_seen_post_number).to eq(3)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -25,7 +25,7 @@ describe TopicsBulkAction do
|
||||
it "raises an error with an invalid operation" do
|
||||
tba = TopicsBulkAction.new(user, [1], type: 'rm_root')
|
||||
-> { tba.perform! }.should raise_error(Discourse::InvalidParameters)
|
||||
expect { tba.perform! }.to raise_error(Discourse::InvalidParameters)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -37,9 +37,9 @@ describe TopicsBulkAction do
it "changes the category and returns the topic_id" do
|
||||
tba = TopicsBulkAction.new(topic.user, [topic.id], type: 'change_category', category_id: category.id)
|
||||
topic_ids = tba.perform!
|
||||
topic_ids.should == [topic.id]
|
||||
expect(topic_ids).to eq([topic.id])
|
||||
topic.reload
|
||||
topic.category.should == category
|
||||
expect(topic.category).to eq(category)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -48,9 +48,9 @@ describe TopicsBulkAction do
Guardian.any_instance.expects(:can_edit?).returns(false)
|
||||
tba = TopicsBulkAction.new(topic.user, [topic.id], type: 'change_category', category_id: category.id)
|
||||
topic_ids = tba.perform!
|
||||
topic_ids.should == []
|
||||
expect(topic_ids).to eq([])
|
||||
topic.reload
|
||||
topic.category.should_not == category
|
||||
expect(topic.category).not_to eq(category)
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -73,7 +73,7 @@ describe TopicsBulkAction do
tba = TopicsBulkAction.new(moderator, [topic.id], type: 'delete')
|
||||
tba.perform!
|
||||
topic.reload
|
||||
topic.should be_trashed
|
||||
expect(topic).to be_trashed
|
||||
end
|
||||
end
|
||||
|
||||
@@ -84,8 +84,8 @@ describe TopicsBulkAction do
it "updates the notification level" do
|
||||
tba = TopicsBulkAction.new(topic.user, [topic.id], type: 'change_notification_level', notification_level_id: 2)
|
||||
topic_ids = tba.perform!
|
||||
topic_ids.should == [topic.id]
|
||||
TopicUser.get(topic, topic.user).notification_level.should == 2
|
||||
expect(topic_ids).to eq([topic.id])
|
||||
expect(TopicUser.get(topic, topic.user).notification_level).to eq(2)
|
||||
end
|
||||
end
|
||||
|
||||
@@ -94,8 +94,8 @@ describe TopicsBulkAction do
Guardian.any_instance.expects(:can_see?).returns(false)
|
||||
tba = TopicsBulkAction.new(topic.user, [topic.id], type: 'change_notification_level', notification_level_id: 2)
|
||||
topic_ids = tba.perform!
|
||||
topic_ids.should == []
|
||||
TopicUser.get(topic, topic.user).should be_blank
|
||||
expect(topic_ids).to eq([])
|
||||
expect(TopicUser.get(topic, topic.user)).to be_blank
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -109,9 +109,9 @@ describe TopicsBulkAction do
Guardian.any_instance.expects(:can_create?).returns(true)
|
||||
tba = TopicsBulkAction.new(topic.user, [topic.id], type: 'close')
|
||||
topic_ids = tba.perform!
|
||||
topic_ids.should == [topic.id]
|
||||
expect(topic_ids).to eq([topic.id])
|
||||
topic.reload
|
||||
topic.should be_closed
|
||||
expect(topic).to be_closed
|
||||
end
|
||||
end
|
||||
|
||||
@@ -120,9 +120,9 @@ describe TopicsBulkAction do
Guardian.any_instance.expects(:can_moderate?).returns(false)
|
||||
tba = TopicsBulkAction.new(topic.user, [topic.id], type: 'close')
|
||||
topic_ids = tba.perform!
|
||||
topic_ids.should be_blank
|
||||
expect(topic_ids).to be_blank
|
||||
topic.reload
|
||||
topic.should_not be_closed
|
||||
expect(topic).not_to be_closed
|
||||
end
|
||||
end
|
||||
end
|
||||
@@ -136,9 +136,9 @@ describe TopicsBulkAction do
Guardian.any_instance.expects(:can_create?).returns(true)
|
||||
tba = TopicsBulkAction.new(topic.user, [topic.id], type: 'archive')
|
||||
topic_ids = tba.perform!
|
||||
topic_ids.should == [topic.id]
|
||||
expect(topic_ids).to eq([topic.id])
|
||||
topic.reload
|
||||
topic.should be_archived
|
||||
expect(topic).to be_archived
|
||||
end
|
||||
end
|
||||
|
||||
@@ -147,9 +147,9 @@ describe TopicsBulkAction do
Guardian.any_instance.expects(:can_moderate?).returns(false)
|
||||
tba = TopicsBulkAction.new(topic.user, [topic.id], type: 'archive')
|
||||
topic_ids = tba.perform!
|
||||
topic_ids.should be_blank
|
||||
expect(topic_ids).to be_blank
|
||||
topic.reload
|
||||
topic.should_not be_archived
|
||||
expect(topic).not_to be_archived
|
||||
end
|
||||
end
|
||||
end
@@ -8,7 +8,7 @@ describe Trashable do
p2 = Fabricate(:post)
|
||||
|
||||
expect { p1.trash! }.to change{Post.count}.by(-1)
|
||||
Post.with_deleted.count.should == Post.count + 1
|
||||
expect(Post.with_deleted.count).to eq(Post.count + 1)
|
||||
end
|
||||
end
@@ -17,47 +17,47 @@ describe Unread do
it 'should have 0 unread posts if the user has seen all posts' do
|
||||
@topic_user.stubs(:last_read_post_number).returns(13)
|
||||
@topic_user.stubs(:highest_seen_post_number).returns(13)
|
||||
@unread.unread_posts.should == 0
|
||||
expect(@unread.unread_posts).to eq(0)
|
||||
end
|
||||
|
||||
it 'should have 6 unread posts if the user has seen all but 6 posts' do
|
||||
@topic_user.stubs(:last_read_post_number).returns(5)
|
||||
@topic_user.stubs(:highest_seen_post_number).returns(11)
|
||||
@unread.unread_posts.should == 6
|
||||
expect(@unread.unread_posts).to eq(6)
|
||||
end
|
||||
|
||||
it 'should have 0 unread posts if the user has seen more posts than exist (deleted)' do
|
||||
@topic_user.stubs(:last_read_post_number).returns(100)
|
||||
@topic_user.stubs(:highest_seen_post_number).returns(13)
|
||||
@unread.unread_posts.should == 0
|
||||
expect(@unread.unread_posts).to eq(0)
|
||||
end
|
||||
end
|
||||
|
||||
describe 'new_posts' do
|
||||
it 'should have 0 new posts if the user has read all posts' do
|
||||
@topic_user.stubs(:last_read_post_number).returns(13)
|
||||
@unread.new_posts.should == 0
|
||||
expect(@unread.new_posts).to eq(0)
|
||||
end
|
||||
|
||||
it 'returns 0 when the topic is the same length as when you last saw it' do
|
||||
@topic_user.stubs(:highest_seen_post_number).returns(13)
|
||||
@unread.new_posts.should == 0
|
||||
expect(@unread.new_posts).to eq(0)
|
||||
end
|
||||
|
||||
it 'has 3 new posts if the user has read 10 posts' do
|
||||
@topic_user.stubs(:highest_seen_post_number).returns(10)
|
||||
@unread.new_posts.should == 3
|
||||
expect(@unread.new_posts).to eq(3)
|
||||
end
|
||||
|
||||
it 'has 0 new posts if the user has read 10 posts but is not tracking' do
|
||||
@topic_user.stubs(:highest_seen_post_number).returns(10)
|
||||
@topic_user.stubs(:notification_level).returns(TopicUser.notification_levels[:regular])
|
||||
@unread.new_posts.should == 0
|
||||
expect(@unread.new_posts).to eq(0)
|
||||
end
|
||||
|
||||
it 'has 0 new posts if the user read more posts than exist (deleted)' do
|
||||
@topic_user.stubs(:highest_seen_post_number).returns(16)
|
||||
@unread.new_posts.should == 0
|
||||
expect(@unread.new_posts).to eq(0)
|
||||
end
|
||||
|
||||
end
@@ -15,21 +15,21 @@ describe UrlHelper do
store = stub
|
||||
store.expects(:has_been_uploaded?).returns(true)
|
||||
Discourse.stubs(:store).returns(store)
|
||||
helper.is_local("http://discuss.site.com/path/to/file.png").should == true
|
||||
expect(helper.is_local("http://discuss.site.com/path/to/file.png")).to eq(true)
|
||||
end
|
||||
|
||||
it "is true for relative assets" do
|
||||
store = stub
|
||||
store.expects(:has_been_uploaded?).returns(false)
|
||||
Discourse.stubs(:store).returns(store)
|
||||
helper.is_local("/assets/javascripts/all.js").should == true
|
||||
expect(helper.is_local("/assets/javascripts/all.js")).to eq(true)
|
||||
end
|
||||
|
||||
it "is true for plugin assets" do
|
||||
store = stub
|
||||
store.expects(:has_been_uploaded?).returns(false)
|
||||
Discourse.stubs(:store).returns(store)
|
||||
helper.is_local("/plugins/all.js").should == true
|
||||
expect(helper.is_local("/plugins/all.js")).to eq(true)
|
||||
end
|
||||
|
||||
end
|
||||
@@ -37,16 +37,16 @@ describe UrlHelper do
describe "#absolute" do
|
||||
|
||||
it "does not change non-relative url" do
|
||||
helper.absolute("http://www.discourse.org").should == "http://www.discourse.org"
|
||||
expect(helper.absolute("http://www.discourse.org")).to eq("http://www.discourse.org")
|
||||
end
|
||||
|
||||
it "changes a relative url to an absolute one using base url by default" do
|
||||
helper.absolute("/path/to/file").should == "http://test.localhost/path/to/file"
|
||||
expect(helper.absolute("/path/to/file")).to eq("http://test.localhost/path/to/file")
|
||||
end
|
||||
|
||||
it "changes a relative url to an absolute one using the cdn when enabled" do
|
||||
Rails.configuration.action_controller.stubs(:asset_host).returns("http://my.cdn.com")
|
||||
helper.absolute("/path/to/file").should == "http://my.cdn.com/path/to/file"
|
||||
expect(helper.absolute("/path/to/file")).to eq("http://my.cdn.com/path/to/file")
|
||||
end
|
||||
|
||||
end
|
||||
@@ -55,7 +55,7 @@ describe UrlHelper do
|
||||
it "changes a relative url to an absolute one using base url even when cdn is enabled" do
|
||||
Rails.configuration.action_controller.stubs(:asset_host).returns("http://my.cdn.com")
|
||||
helper.absolute_without_cdn("/path/to/file").should == "http://test.localhost/path/to/file"
|
||||
expect(helper.absolute_without_cdn("/path/to/file")).to eq("http://test.localhost/path/to/file")
|
||||
end
|
||||
|
||||
end
|
||||
@@ -63,9 +63,9 @@ describe UrlHelper do
describe "#schemaless" do
|
||||
|
||||
it "removes http or https schemas only" do
|
||||
helper.schemaless("http://www.discourse.org").should == "//www.discourse.org"
|
||||
helper.schemaless("https://secure.discourse.org").should == "//secure.discourse.org"
|
||||
helper.schemaless("ftp://ftp.discourse.org").should == "ftp://ftp.discourse.org"
|
||||
expect(helper.schemaless("http://www.discourse.org")).to eq("//www.discourse.org")
|
||||
expect(helper.schemaless("https://secure.discourse.org")).to eq("//secure.discourse.org")
|
||||
expect(helper.schemaless("ftp://ftp.discourse.org")).to eq("ftp://ftp.discourse.org")
|
||||
end
|
||||
|
||||
end
@@ -5,7 +5,7 @@ describe UserNameSuggester do
|
||||
describe 'name heuristics' do
|
||||
it 'is able to guess a decent username from an email' do
|
||||
UserNameSuggester.suggest('bob@bob.com').should == 'bob'
|
||||
expect(UserNameSuggester.suggest('bob@bob.com')).to eq('bob')
|
||||
end
|
||||
end
|
||||
|
||||
@@ -15,62 +15,62 @@ describe UserNameSuggester do
end
|
||||
|
||||
it "doesn't raise an error on nil username" do
|
||||
UserNameSuggester.suggest(nil).should == nil
|
||||
expect(UserNameSuggester.suggest(nil)).to eq(nil)
|
||||
end
|
||||
|
||||
it 'corrects weird characters' do
|
||||
UserNameSuggester.suggest("Darth%^Vader").should == 'Darth_Vader'
|
||||
expect(UserNameSuggester.suggest("Darth%^Vader")).to eq('Darth_Vader')
|
||||
end
|
||||
|
||||
it "transliterates some characters" do
|
||||
UserNameSuggester.suggest("Jørn").should == 'Jorn'
|
||||
expect(UserNameSuggester.suggest("Jørn")).to eq('Jorn')
|
||||
end
|
||||
|
||||
it 'adds 1 to an existing username' do
|
||||
user = Fabricate(:user)
|
||||
UserNameSuggester.suggest(user.username).should == "#{user.username}1"
|
||||
expect(UserNameSuggester.suggest(user.username)).to eq("#{user.username}1")
|
||||
end
|
||||
|
||||
it "adds numbers if it's too short" do
|
||||
UserNameSuggester.suggest('a').should == 'a11'
|
||||
expect(UserNameSuggester.suggest('a')).to eq('a11')
|
||||
end
|
||||
|
||||
it "has a special case for me and i emails" do
|
||||
UserNameSuggester.suggest('me@eviltrout.com').should == 'eviltrout'
|
||||
UserNameSuggester.suggest('i@eviltrout.com').should == 'eviltrout'
|
||||
expect(UserNameSuggester.suggest('me@eviltrout.com')).to eq('eviltrout')
|
||||
expect(UserNameSuggester.suggest('i@eviltrout.com')).to eq('eviltrout')
|
||||
end
|
||||
|
||||
it "shortens very long suggestions" do
|
||||
UserNameSuggester.suggest("myreallylongnameisrobinwardesquire").should == 'myreallylongnam'
|
||||
expect(UserNameSuggester.suggest("myreallylongnameisrobinwardesquire")).to eq('myreallylongnam')
|
||||
end
|
||||
|
||||
it "makes room for the digit added if the username is too long" do
|
||||
User.create(username: 'myreallylongnam', email: 'fake@discourse.org')
|
||||
UserNameSuggester.suggest("myreallylongnam").should == 'myreallylongna1'
|
||||
expect(UserNameSuggester.suggest("myreallylongnam")).to eq('myreallylongna1')
|
||||
end
|
||||
|
||||
it "removes leading character if it is not alphanumeric" do
|
||||
UserNameSuggester.suggest("_myname").should == 'myname'
|
||||
expect(UserNameSuggester.suggest("_myname")).to eq('myname')
|
||||
end
|
||||
|
||||
it "removes trailing characters if they are invalid" do
|
||||
UserNameSuggester.suggest("myname!^$=").should == 'myname'
|
||||
expect(UserNameSuggester.suggest("myname!^$=")).to eq('myname')
|
||||
end
|
||||
|
||||
it "replace dots" do
|
||||
UserNameSuggester.suggest("my.name").should == 'my_name'
|
||||
expect(UserNameSuggester.suggest("my.name")).to eq('my_name')
|
||||
end
|
||||
|
||||
it "remove leading dots" do
|
||||
UserNameSuggester.suggest(".myname").should == 'myname'
|
||||
expect(UserNameSuggester.suggest(".myname")).to eq('myname')
|
||||
end
|
||||
|
||||
it "remove trailing dots" do
|
||||
UserNameSuggester.suggest("myname.").should == 'myname'
|
||||
expect(UserNameSuggester.suggest("myname.")).to eq('myname')
|
||||
end
|
||||
|
||||
it 'should handle typical facebook usernames' do
|
||||
UserNameSuggester.suggest('roger.nelson.3344913').should == 'roger_nelson_33'
|
||||
expect(UserNameSuggester.suggest('roger.nelson.3344913')).to eq('roger_nelson_33')
|
||||
end
|
||||
end
|
||||
|
||||
|
|
|
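Taken together, the examples above pin the suggestion heuristic down fairly tightly: use the mailbox name of an email (falling back to the domain for uninformative addresses like me@ and i@), transliterate and sanitise the characters, keep the result between 3 and 15 characters, and append a digit when the name is already taken. The Ruby sketch below satisfies these examples; the module name, the taken parameter and the small transliteration table are invented for illustration and are not Discourse's actual implementation.

# Illustrative sketch only -- not the code under test in the spec above.
module UserNameSuggesterSketch
  MIN_LENGTH = 3
  MAX_LENGTH = 15
  TRANSLITERATIONS = { 'ø' => 'o', 'æ' => 'ae', 'ß' => 'ss' }.freeze

  def self.suggest(input, taken = [])
    return nil if input.nil?
    name = input.dup

    # For emails use the mailbox name; "me@" / "i@" say nothing useful,
    # so fall back to the first label of the domain instead.
    if name.include?('@')
      local, domain = name.split('@', 2)
      name = %w[i me].include?(local.downcase) ? domain.split('.').first.to_s : local
    end

    # Transliterate a few known characters, then drop any remaining non-ASCII.
    name = name.gsub(/[øæß]/) { |c| TRANSLITERATIONS[c] }
               .unicode_normalize(:nfkd)
               .encode('ASCII', invalid: :replace, undef: :replace, replace: '')

    # Disallowed characters and dots become underscores, runs are squeezed,
    # and leading/trailing separators are trimmed off.
    name = name.gsub(/[^A-Za-z0-9._-]+/, '_')
               .tr('.', '_')
               .squeeze('_')
               .gsub(/\A[^A-Za-z0-9]+|[^A-Za-z0-9]+\z/, '')

    # Enforce the length bounds.
    name = name[0, MAX_LENGTH]
    name += '1' * (MIN_LENGTH - name.length) if name.length < MIN_LENGTH

    # Append a numeric suffix, making room for it, until the name is free.
    candidate = name
    suffix = 1
    while taken.include?(candidate)
      candidate = "#{name[0, MAX_LENGTH - suffix.to_s.length]}#{suffix}"
      suffix += 1
    end
    candidate
  end
end

UserNameSuggesterSketch.suggest('me@eviltrout.com')                      #=> "eviltrout"
UserNameSuggesterSketch.suggest('roger.nelson.3344913')                  #=> "roger_nelson_33"
UserNameSuggesterSketch.suggest('myreallylongnam', ['myreallylongnam'])  #=> "myreallylongna1"
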
@ -10,7 +10,7 @@ describe AllowedIpAddressValidator
  it 'should add an error' do
    ScreenedIpAddress.stubs(:should_block?).returns(true)
    validate
    record.errors[:ip_address].should be_present
    expect(record.errors[:ip_address]).to be_present
  end
end

@ -18,7 +18,7 @@ describe AllowedIpAddressValidator
  it 'should add an error' do
    SpamHandler.stubs(:should_prevent_registration_from_ip?).returns(true)
    validate
    record.errors[:ip_address].should be_present
    expect(record.errors[:ip_address]).to be_present
  end
end

@ -26,7 +26,7 @@ describe AllowedIpAddressValidator
  it "shouldn't add an error" do
    ScreenedIpAddress.stubs(:should_block?).returns(false)
    validate
    record.errors[:ip_address].should_not be_present
    expect(record.errors[:ip_address]).not_to be_present
  end
end

@ -35,7 +35,7 @@ describe AllowedIpAddressValidator
    ScreenedIpAddress.expects(:should_block?).never
    record.ip_address = nil
    validate
    record.errors[:ip_address].should_not be_present
    expect(record.errors[:ip_address]).not_to be_present
  end
end

@ -5,16 +5,16 @@ describe EmailSettingValidator
    subject(:validator) { described_class.new }

    it "returns true for blank values" do
      validator.valid_value?('').should == true
      validator.valid_value?(nil).should == true
      expect(validator.valid_value?('')).to eq(true)
      expect(validator.valid_value?(nil)).to eq(true)
    end

    it "returns true if value is a valid email address" do
      validator.valid_value?('vader@example.com').should == true
      expect(validator.valid_value?('vader@example.com')).to eq(true)
    end

    it "returns false if value is not a valid email address" do
      validator.valid_value?('my house').should == false
      expect(validator.valid_value?('my house')).to eq(false)
    end
  end
end

@ -10,13 +10,13 @@ describe EmailValidator
  it "doesn't add an error when email doesn't match a blocked email" do
    ScreenedEmail.stubs(:should_block?).with(record.email).returns(false)
    validate
    record.errors[:email].should_not be_present
    expect(record.errors[:email]).not_to be_present
  end

  it "adds an error when email matches a blocked email" do
    ScreenedEmail.stubs(:should_block?).with(record.email).returns(true)
    validate
    record.errors[:email].should be_present
    expect(record.errors[:email]).to be_present
  end
end

@ -5,12 +5,12 @@ describe IntegerSettingValidator
    shared_examples "for all IntegerSettingValidator opts" do
      it "returns false for blank values" do
        validator.valid_value?('').should == false
        validator.valid_value?(nil).should == false
        expect(validator.valid_value?('')).to eq(false)
        expect(validator.valid_value?(nil)).to eq(false)
      end

      it "returns false if value is not a valid integer" do
        validator.valid_value?('two').should == false
        expect(validator.valid_value?('two')).to eq(false)
      end
    end

@ -20,10 +20,10 @@ describe IntegerSettingValidator
    include_examples "for all IntegerSettingValidator opts"

    it "returns true if value is a valid integer" do
      validator.valid_value?(1).should == true
      validator.valid_value?(-1).should == true
      validator.valid_value?('1').should == true
      validator.valid_value?('-1').should == true
      expect(validator.valid_value?(1)).to eq(true)
      expect(validator.valid_value?(-1)).to eq(true)
      expect(validator.valid_value?('1')).to eq(true)
      expect(validator.valid_value?('-1')).to eq(true)
    end
  end

@ -33,18 +33,18 @@ describe IntegerSettingValidator
    include_examples "for all IntegerSettingValidator opts"

    it "returns true if value is equal to min" do
      validator.valid_value?(2).should == true
      validator.valid_value?('2').should == true
      expect(validator.valid_value?(2)).to eq(true)
      expect(validator.valid_value?('2')).to eq(true)
    end

    it "returns true if value is greater than min" do
      validator.valid_value?(3).should == true
      validator.valid_value?('3').should == true
      expect(validator.valid_value?(3)).to eq(true)
      expect(validator.valid_value?('3')).to eq(true)
    end

    it "returns false if value is less than min" do
      validator.valid_value?(1).should == false
      validator.valid_value?('1').should == false
      expect(validator.valid_value?(1)).to eq(false)
      expect(validator.valid_value?('1')).to eq(false)
    end
  end

@ -54,18 +54,18 @@ describe IntegerSettingValidator
    include_examples "for all IntegerSettingValidator opts"

    it "returns true if value is equal to max" do
      validator.valid_value?(3).should == true
      validator.valid_value?('3').should == true
      expect(validator.valid_value?(3)).to eq(true)
      expect(validator.valid_value?('3')).to eq(true)
    end

    it "returns true if value is less than max" do
      validator.valid_value?(2).should == true
      validator.valid_value?('2').should == true
      expect(validator.valid_value?(2)).to eq(true)
      expect(validator.valid_value?('2')).to eq(true)
    end

    it "returns false if value is greater than min" do
      validator.valid_value?(4).should == false
      validator.valid_value?('4').should == false
      expect(validator.valid_value?(4)).to eq(false)
      expect(validator.valid_value?('4')).to eq(false)
    end
  end

@ -75,14 +75,14 @@ describe IntegerSettingValidator
    include_examples "for all IntegerSettingValidator opts"

    it "returns true if value is in range" do
      validator.valid_value?(-1).should == true
      validator.valid_value?(0).should == true
      validator.valid_value?(3).should == true
      expect(validator.valid_value?(-1)).to eq(true)
      expect(validator.valid_value?(0)).to eq(true)
      expect(validator.valid_value?(3)).to eq(true)
    end

    it "returns false if value is out of range" do
      validator.valid_value?(4).should == false
      validator.valid_value?(-2).should == false
      expect(validator.valid_value?(4)).to eq(false)
      expect(validator.valid_value?(-2)).to eq(false)
    end
  end
end

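Read together, the five IntegerSettingValidator hunks above spell out the contract for valid_value?: blank values and non-numeric strings are rejected, integers (or integer-looking strings) are accepted, and the optional min and max bounds are inclusive. A minimal validator satisfying exactly these examples could look like the sketch below; the class name and option handling are assumptions for illustration, not the real Discourse class.

# Minimal sketch consistent with the examples above (not Discourse's class).
class IntegerSettingValidatorSketch
  def initialize(opts = {})
    @min = opts[:min]
    @max = opts[:max]
  end

  def valid_value?(val)
    return false if val.nil? || val.to_s.strip.empty?  # blank values are invalid
    return false unless val.to_s =~ /\A-?\d+\z/         # must look like an integer
    int = val.to_i
    return false if @min && int < @min                  # min bound is inclusive
    return false if @max && int > @max                  # max bound is inclusive
    true
  end
end

v = IntegerSettingValidatorSketch.new(min: 2)
v.valid_value?('2')   #=> true
v.valid_value?(1)     #=> false
v.valid_value?('two') #=> false
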
@ -10,19 +10,19 @@ describe IpAddressFormatValidator
    it "should not add an error for #{arg}" do
      record.ip_address = arg
      validate
      record.errors[:ip_address].should_not be_present
      expect(record.errors[:ip_address]).not_to be_present
    end
  end

  it 'should add an error for nil IP address' do
    record.ip_address = nil
    validate
    record.errors[:ip_address].should be_present
    expect(record.errors[:ip_address]).to be_present
  end

  it 'should add an error for invalid IP address' do
    record.ip_address = '99.99.99'
    validate
    record.errors[:ip_address].should be_present
    expect(record.errors[:ip_address]).to be_present
  end
end

@ -20,25 +20,25 @@ describe PasswordValidator
    it "doesn't add an error when password is good" do
      @password = "weron235alsfn234"
      validate
      record.errors[:password].should_not be_present
      expect(record.errors[:password]).not_to be_present
    end

    it "adds an error when password is too short" do
      @password = "p"
      validate
      record.errors[:password].should be_present
      expect(record.errors[:password]).to be_present
    end

    it "adds an error when password is blank" do
      @password = ''
      validate
      record.errors[:password].should be_present
      expect(record.errors[:password]).to be_present
    end

    it "adds an error when password is nil" do
      @password = nil
      validate
      record.errors[:password].should be_present
      expect(record.errors[:password]).to be_present
    end
  end

@ -48,7 +48,7 @@ describe PasswordValidator
    it "adds an error when password length is 11" do
      @password = "gt38sdt92bv"
      validate
      record.errors[:password].should be_present
      expect(record.errors[:password]).to be_present
    end
  end
end

@ -62,14 +62,14 @@ describe PasswordValidator
      SiteSetting.stubs(:block_common_passwords).returns(true)
      @password = "password"
      validate
      record.errors[:password].should be_present
      expect(record.errors[:password]).to be_present
    end

    it "doesn't add an error when block_common_passwords is disabled" do
      SiteSetting.stubs(:block_common_passwords).returns(false)
      @password = "password"
      validate
      record.errors[:password].should_not be_present
      expect(record.errors[:password]).not_to be_present
    end
  end
end

@ -80,7 +80,7 @@ describe PasswordValidator
    it "doesn't add an error if password is not required" do
      @password = nil
      validate
      record.errors[:password].should_not be_present
      expect(record.errors[:password]).not_to be_present
    end
  end

@ -64,7 +64,7 @@ describe Validators::PostValidator
    it "should not add an error" do
      validator.unique_post_validator(post)
      post.errors.count.should == 0
      expect(post.errors.count).to eq(0)
    end
  end

@ -81,7 +81,7 @@ describe Validators::PostValidator
    it "should not add an error if post.skip_unique_check is true" do
      post.skip_unique_check = true
      validator.unique_post_validator(post)
      post.errors.count.should == 0
      expect(post.errors.count).to eq(0)
    end
  end
end

@ -25,45 +25,45 @@ describe "A record validated with QualityTitleValidator" do
|
|||
|
||||
it "allows a regular title with a few ascii characters" do
|
||||
topic.title = valid_title
|
||||
topic.should be_valid
|
||||
expect(topic).to be_valid
|
||||
end
|
||||
|
||||
it "allows non ascii" do
|
||||
topic.title = "Iñtërnâtiônàlizætiøn"
|
||||
topic.should be_valid
|
||||
expect(topic).to be_valid
|
||||
end
|
||||
|
||||
it 'allows Chinese characters' do
|
||||
topic.title = '现在发现使用中文标题没法发帖子了'
|
||||
topic.should be_valid
|
||||
expect(topic).to be_valid
|
||||
end
|
||||
|
||||
it "allows anything in a private message" do
|
||||
topic.stubs(:private_message? => true)
|
||||
[short_title, long_title, xxxxx_title].each do |bad_title|
|
||||
topic.title = bad_title
|
||||
topic.should be_valid
|
||||
expect(topic).to be_valid
|
||||
end
|
||||
end
|
||||
|
||||
it "strips a title when identifying length" do
|
||||
topic.title = short_title.center(SiteSetting.min_topic_title_length + 1, ' ')
|
||||
topic.should_not be_valid
|
||||
expect(topic).not_to be_valid
|
||||
end
|
||||
|
||||
it "doesn't allow a long title" do
|
||||
topic.title = long_title
|
||||
topic.should_not be_valid
|
||||
expect(topic).not_to be_valid
|
||||
end
|
||||
|
||||
it "doesn't allow a short title" do
|
||||
topic.title = short_title
|
||||
topic.should_not be_valid
|
||||
expect(topic).not_to be_valid
|
||||
end
|
||||
|
||||
it "doesn't allow a title of one repeated character" do
|
||||
topic.title = xxxxx_title
|
||||
topic.should_not be_valid
|
||||
expect(topic).not_to be_valid
|
||||
end
|
||||
|
||||
# describe "with a name" do
|
||||
|
|
|
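In plain terms, the title checks exercised above are: strip the title before measuring it, enforce the configured minimum and maximum lengths, reject a title that is a single repeated character, and skip every check for private messages. A compact sketch of that logic follows; the class name, keyword arguments and the 15/255 defaults are assumptions, not the validator's real interface.

# Rough sketch of the checks the examples above describe.
class QualityTitleCheckSketch
  def initialize(min: 15, max: 255)
    @min = min
    @max = max
  end

  # True when the title would pass; private messages skip the checks entirely.
  def acceptable?(title, private_message: false)
    return true if private_message
    stripped = title.strip                        # length is measured after stripping
    return false if stripped.length < @min
    return false if stripped.length > @max
    return false if stripped.squeeze.length == 1  # e.g. "xxxxxxxxxxxxxxxx"
    true
  end
end

QualityTitleCheckSketch.new.acceptable?('x' * 20)                        #=> false
QualityTitleCheckSketch.new.acceptable?('现在发现使用中文标题没法发帖子了')  #=> true
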
@ -5,8 +5,8 @@ describe StringSettingValidator
  describe '#valid_value?' do
    shared_examples "for all StringSettingValidator opts" do
      it "returns true for blank values" do
        validator.valid_value?('').should == true
        validator.valid_value?(nil).should == true
        expect(validator.valid_value?('')).to eq(true)
        expect(validator.valid_value?(nil)).to eq(true)
      end
    end

@ -16,21 +16,21 @@ describe StringSettingValidator
    include_examples "for all StringSettingValidator opts"

    it "returns true if value matches the regex" do
      validator.valid_value?('The bacon is delicious').should == true
      expect(validator.valid_value?('The bacon is delicious')).to eq(true)
    end

    it "returns false if the value doesn't match the regex" do
      validator.valid_value?('The vegetables are delicious').should == false
      expect(validator.valid_value?('The vegetables are delicious')).to eq(false)
    end

    it "test some other regexes" do
      v = described_class.new(regex: '^(chocolate|banana)$')
      v.valid_value?('chocolate').should == true
      v.valid_value?('chocolates').should == false
      expect(v.valid_value?('chocolate')).to eq(true)
      expect(v.valid_value?('chocolates')).to eq(false)

      v = described_class.new(regex: '^[\w]+$')
      v.valid_value?('the_file').should == true
      v.valid_value?('the_file.bat').should == false
      expect(v.valid_value?('the_file')).to eq(true)
      expect(v.valid_value?('the_file.bat')).to eq(false)
    end
  end

@ -40,12 +40,12 @@ describe StringSettingValidator
    include_examples "for all StringSettingValidator opts"

    it "returns true if length is ok" do
      validator.valid_value?('ok').should == true
      validator.valid_value?('yep long enough').should == true
      expect(validator.valid_value?('ok')).to eq(true)
      expect(validator.valid_value?('yep long enough')).to eq(true)
    end

    it "returns false if too short" do
      validator.valid_value?('x').should == false
      expect(validator.valid_value?('x')).to eq(false)
    end
  end

@ -55,43 +55,43 @@ describe StringSettingValidator
    include_examples "for all StringSettingValidator opts"

    it "returns true if length is ok" do
      validator.valid_value?('Z').should == true
      validator.valid_value?('abcde').should == true
      expect(validator.valid_value?('Z')).to eq(true)
      expect(validator.valid_value?('abcde')).to eq(true)
    end

    it "returns false if too long" do
      validator.valid_value?('banana').should == false
      expect(validator.valid_value?('banana')).to eq(false)
    end
  end

  context 'combinations of options' do
    it "min and regex" do
      v = described_class.new(regex: '^[\w]+$', min: 3)
      v.valid_value?('chocolate').should == true
      v.valid_value?('hi').should == false
      v.valid_value?('game.exe').should == false
      expect(v.valid_value?('chocolate')).to eq(true)
      expect(v.valid_value?('hi')).to eq(false)
      expect(v.valid_value?('game.exe')).to eq(false)
    end

    it "max and regex" do
      v = described_class.new(regex: '^[\w]+$', max: 5)
      v.valid_value?('chocolate').should == false
      v.valid_value?('a_b_c').should == true
      v.valid_value?('a b c').should == false
      expect(v.valid_value?('chocolate')).to eq(false)
      expect(v.valid_value?('a_b_c')).to eq(true)
      expect(v.valid_value?('a b c')).to eq(false)
    end

    it "min and max" do
      v = described_class.new(min: 3, max: 5)
      v.valid_value?('chocolate').should == false
      v.valid_value?('a').should == false
      v.valid_value?('a b c').should == true
      v.valid_value?('a b').should == true
      expect(v.valid_value?('chocolate')).to eq(false)
      expect(v.valid_value?('a')).to eq(false)
      expect(v.valid_value?('a b c')).to eq(true)
      expect(v.valid_value?('a b')).to eq(true)
    end

    it "min, max, and regex" do
      v = described_class.new(min: 3, max: 12, regex: 'bacon')
      v.valid_value?('go bacon!').should == true
      v.valid_value?('sprinkle bacon on your cereal').should == false
      v.valid_value?('ba').should == false
      expect(v.valid_value?('go bacon!')).to eq(true)
      expect(v.valid_value?('sprinkle bacon on your cereal')).to eq(false)
      expect(v.valid_value?('ba')).to eq(false)
    end
  end

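As with the integer validator, these hunks fully describe valid_value? for string settings: blank values always pass, and the regex, min and max options are checked independently, which is why the 'combinations of options' context behaves as it does. A sketch that satisfies the examples is shown below; the class name and option plumbing are illustrative assumptions, not Discourse's code.

# Illustrative sketch consistent with the examples above (not Discourse's class).
class StringSettingValidatorSketch
  def initialize(opts = {})
    @regex = opts[:regex] && Regexp.new(opts[:regex])
    @min   = opts[:min]
    @max   = opts[:max]
  end

  def valid_value?(val)
    return true if val.nil? || val == ''       # blank values are always allowed
    return false if @regex && val !~ @regex    # must match the configured pattern
    return false if @min && val.length < @min  # not too short
    return false if @max && val.length > @max  # not too long
    true
  end
end

v = StringSettingValidatorSketch.new(regex: '^[\w]+$', max: 5)
v.valid_value?('a_b_c')     #=> true
v.valid_value?('chocolate') #=> false
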
@ -5,17 +5,17 @@ describe UsernameSettingValidator
    subject(:validator) { described_class.new }

    it "returns true for blank values" do
      validator.valid_value?('').should == true
      validator.valid_value?(nil).should == true
      expect(validator.valid_value?('')).to eq(true)
      expect(validator.valid_value?(nil)).to eq(true)
    end

    it "returns true if value matches an existing user's username" do
      Fabricate(:user, username: 'vader')
      validator.valid_value?('vader').should == true
      expect(validator.valid_value?('vader')).to eq(true)
    end

    it "returns false if value does not match a user's username" do
      validator.valid_value?('no way').should == false
      expect(validator.valid_value?('no way')).to eq(false)
    end
  end
end

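Every hunk in this commit applies the same mechanical rewrite from RSpec 2's should-based expectations to RSpec 3's expect syntax: value.should == x becomes expect(value).to eq(x), value.should matcher becomes expect(value).to matcher, and should_not becomes not_to. A small self-contained spec showing the pattern (the example values are arbitrary and generic RSpec, not part of Discourse):

# example_syntax_spec.rb -- run with `rspec example_syntax_spec.rb`
RSpec.describe 'RSpec 3 expect syntax' do
  it 'compares values with eq' do
    # RSpec 2 style, now deprecated:  [1, 2, 3].length.should == 3
    expect([1, 2, 3].length).to eq(3)
  end

  it 'negates matchers with not_to' do
    # RSpec 2 style, now deprecated:  [].should_not include(4)
    expect([]).not_to include(4)
  end
end

Rewrites like this are often automated with the transpec gem, though nothing in the diff indicates whether this commit was produced that way or converted by hand.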