FEATURE: control which web crawlers can access the site using a whitelist or blacklist
@@ -28,6 +28,12 @@ describe CrawlerDetection do
      expect(described_class.crawler?("Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)")).to eq(true)
      expect(described_class.crawler?("Baiduspider+(+http://www.baidu.com/search/spider.htm)")).to eq(true)
      expect(described_class.crawler?("Mozilla/5.0 (compatible; YandexBot/3.0; +http://yandex.com/bots)")).to eq(true)

      expect(described_class.crawler?("DiscourseAPI Ruby Gem 0.19.0")).to eq(true)
      expect(described_class.crawler?("Pingdom.com_bot_version_1.4_(http://www.pingdom.com/)")).to eq(true)
      expect(described_class.crawler?("LogicMonitor SiteMonitor/1.0")).to eq(true)
      expect(described_class.crawler?("Java/1.8.0_151")).to eq(true)
      expect(described_class.crawler?("Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)")).to eq(true)
    end

    it "returns false for non-crawler user agents" do
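The hunk above broadens what counts as a crawler: non-browser agents such as API clients (DiscourseAPI Ruby Gem), uptime monitors (Pingdom, LogicMonitor), and bare HTTP libraries (Java/1.8.0_151) must now return true. As a rough illustration, a substring-pattern check consistent with these expectations could look like the following; the pattern list and method body are illustrative only, not Discourse's actual source.

# Illustrative sketch: match the user agent against known crawler substrings.
module CrawlerDetection
  CRAWLER_PATTERNS = %w[
    Googlebot bingbot Baiduspider YandexBot Slurp
    DiscourseAPI Pingdom LogicMonitor Java
  ].freeze

  def self.crawler?(user_agent)
    return false if user_agent.nil?
    Regexp.union(CRAWLER_PATTERNS).match?(user_agent)
  end
end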
@@ -37,13 +43,106 @@ describe CrawlerDetection do
      expect(described_class.crawler?("Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25")).to eq(false)
      expect(described_class.crawler?("Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:25.0) Gecko/20100101 Firefox/25.0")).to eq(false)
      expect(described_class.crawler?("Mozilla/5.0 (Linux; U; Android 4.0.3; ko-kr; LG-L160L Build/IML74K) AppleWebkit/534.30 (KHTML, like Gecko) Version/4.0 Mobile Safari/534.30")).to eq(false)

      expect(described_class.crawler?("DiscourseAPI Ruby Gem 0.19.0")).to eq(true)
      expect(described_class.crawler?("Pingdom.com_bot_version_1.4_(http://www.pingdom.com/)")).to eq(true)
      expect(described_class.crawler?("LogicMonitor SiteMonitor/1.0")).to eq(true)
      expect(described_class.crawler?("Java/1.8.0_151")).to eq(true)
      expect(described_class.crawler?("Mozilla/5.0 (compatible; Yahoo! Slurp; http://help.yahoo.com/help/us/ysearch/slurp)")).to eq(true)
    end

  end

  describe 'allow_crawler?' do
    it 'returns true if whitelist and blacklist are blank' do
      expect(CrawlerDetection.allow_crawler?('Googlebot/2.1 (+http://www.google.com/bot.html)')).to eq(true)
    end

    context 'whitelist is set' do
      before do
        SiteSetting.whitelisted_crawler_user_agents = 'Googlebot|Twitterbot'
      end

      it 'returns true for matching user agents' do
        expect(CrawlerDetection.allow_crawler?('Googlebot/2.1 (+http://www.google.com/bot.html)')).to eq(true)
        expect(CrawlerDetection.allow_crawler?('Googlebot-Image/1.0')).to eq(true)
        expect(CrawlerDetection.allow_crawler?('Twitterbot')).to eq(true)
      end

      it 'returns false for user agents that do not match' do
        expect(CrawlerDetection.allow_crawler?('facebookexternalhit/1.1 (+http(s)://www.facebook.com/externalhit_uatext.php)')).to eq(false)
        expect(CrawlerDetection.allow_crawler?('Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)')).to eq(false)
        expect(CrawlerDetection.allow_crawler?('')).to eq(false)
      end

      context 'and blacklist is set' do
        before do
          SiteSetting.blacklisted_crawler_user_agents = 'Googlebot-Image'
        end

        it 'ignores the blacklist' do
          expect(CrawlerDetection.allow_crawler?('Googlebot-Image/1.0')).to eq(true)
        end
      end
    end

    context 'blacklist is set' do
      before do
        SiteSetting.blacklisted_crawler_user_agents = 'Googlebot|Twitterbot'
      end

      it 'returns true for crawlers that do not match' do
        expect(CrawlerDetection.allow_crawler?('Mediapartners-Google')).to eq(true)
        expect(CrawlerDetection.allow_crawler?('facebookexternalhit/1.1 (+http(s)://www.facebook.com/externalhit_uatext.php)')).to eq(true)
        expect(CrawlerDetection.allow_crawler?('')).to eq(true)
      end

      it 'returns false for user agents that match' do
        expect(CrawlerDetection.allow_crawler?('Googlebot/2.1 (+http://www.google.com/bot.html)')).to eq(false)
        expect(CrawlerDetection.allow_crawler?('Googlebot-Image/1.0')).to eq(false)
        expect(CrawlerDetection.allow_crawler?('Twitterbot')).to eq(false)
      end
    end
  end

  describe 'is_blocked_crawler?' do
    it 'is false if user agent is a crawler and no whitelist or blacklist is defined' do
      expect(CrawlerDetection.is_blocked_crawler?('Twitterbot')).to eq(false)
    end

    it 'is false if user agent is not a crawler and no whitelist or blacklist is defined' do
      expect(CrawlerDetection.is_blocked_crawler?('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')).to eq(false)
    end

    it 'is true if user agent is a crawler and is not whitelisted' do
      SiteSetting.whitelisted_crawler_user_agents = 'Googlebot'
      expect(CrawlerDetection.is_blocked_crawler?('Twitterbot')).to eq(true)
    end

    it 'is false if user agent is not a crawler and there is a whitelist' do
      SiteSetting.whitelisted_crawler_user_agents = 'Googlebot'
      expect(CrawlerDetection.is_blocked_crawler?('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')).to eq(false)
    end

    it 'is true if user agent is a crawler and is blacklisted' do
      SiteSetting.blacklisted_crawler_user_agents = 'Twitterbot'
      expect(CrawlerDetection.is_blocked_crawler?('Twitterbot')).to eq(true)
    end

    it 'is false if user agent is a crawler and is not blacklisted' do
      SiteSetting.blacklisted_crawler_user_agents = 'Twitterbot'
      expect(CrawlerDetection.is_blocked_crawler?('Googlebot')).to eq(false)
    end

    it 'is false if user agent is not a crawler and blacklist is defined' do
      SiteSetting.blacklisted_crawler_user_agents = 'Mozilla'
      expect(CrawlerDetection.is_blocked_crawler?('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')).to eq(false)
    end

    it 'is true if user agent is missing and whitelist is defined' do
      SiteSetting.whitelisted_crawler_user_agents = 'Googlebot'
      expect(CrawlerDetection.is_blocked_crawler?('')).to eq(true)
      expect(CrawlerDetection.is_blocked_crawler?(nil)).to eq(true)
    end

    it 'is false if user agent is missing and blacklist is defined' do
      SiteSetting.blacklisted_crawler_user_agents = 'Googlebot'
      expect(CrawlerDetection.is_blocked_crawler?('')).to eq(false)
      expect(CrawlerDetection.is_blocked_crawler?(nil)).to eq(false)
    end
  end
end
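Taken together, these specs pin down the precedence rules: a whitelist, when set, wins and any blacklist alongside it is ignored; a blank user agent passes a blacklist but fails a whitelist; and only requests classified as crawlers can be blocked at all. A minimal sketch consistent with that behavior follows, assuming pipe-delimited settings and a hypothetical to_matcher helper; this is not Discourse's actual implementation.

# Illustrative sketch; settings are pipe-delimited, e.g. "Googlebot|Twitterbot".
def self.to_matcher(setting)
  Regexp.new(setting.split('|').map { |s| Regexp.escape(s) }.join('|'))
end

def self.allow_crawler?(user_agent)
  return true if SiteSetting.whitelisted_crawler_user_agents.blank? &&
                 SiteSetting.blacklisted_crawler_user_agents.blank?

  if SiteSetting.whitelisted_crawler_user_agents.present?
    # Whitelist takes precedence; a blacklist set alongside it is ignored.
    user_agent.present? &&
      user_agent.match?(to_matcher(SiteSetting.whitelisted_crawler_user_agents))
  else
    # Blacklist mode: everything is allowed except matching agents.
    user_agent.blank? ||
      !user_agent.match?(to_matcher(SiteSetting.blacklisted_crawler_user_agents))
  end
end

def self.is_blocked_crawler?(user_agent)
  if user_agent.blank?
    # A missing user agent can never match a whitelist, so it is blocked
    # whenever a whitelist is configured.
    return SiteSetting.whitelisted_crawler_user_agents.present?
  end

  crawler?(user_agent) && !allow_crawler?(user_agent)
end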
@@ -273,4 +273,50 @@ describe Middleware::RequestTracker do
      expect(timing[:redis][:calls]).to eq 2
    end
  end

  context "crawler blocking" do
    let :middleware do
      app = lambda do |env|
        [200, {}, ['OK']]
      end

      Middleware::RequestTracker.new(app)
    end

    def expect_success_response(status, _, response)
      expect(status).to eq(200)
      expect(response).to eq(['OK'])
    end

    def expect_blocked_response(status, _, response)
      expect(status).to eq(403)
      expect(response).to be_blank
    end

    it "applies whitelisted_crawler_user_agents correctly" do
      SiteSetting.whitelisted_crawler_user_agents = 'Googlebot'
      expect_success_response(*middleware.call(env))
      expect_blocked_response(*middleware.call(env('HTTP_USER_AGENT' => 'Twitterbot')))
      expect_success_response(*middleware.call(env('HTTP_USER_AGENT' => 'Googlebot/2.1 (+http://www.google.com/bot.html)')))
      expect_blocked_response(*middleware.call(env('HTTP_USER_AGENT' => 'DiscourseAPI Ruby Gem 0.19.0')))
    end

    it "applies blacklisted_crawler_user_agents correctly" do
      SiteSetting.blacklisted_crawler_user_agents = 'Googlebot'
      expect_success_response(*middleware.call(env))
      expect_blocked_response(*middleware.call(env('HTTP_USER_AGENT' => 'Googlebot/2.1 (+http://www.google.com/bot.html)')))
      expect_success_response(*middleware.call(env('HTTP_USER_AGENT' => 'Twitterbot')))
      expect_success_response(*middleware.call(env('HTTP_USER_AGENT' => 'DiscourseAPI Ruby Gem 0.19.0')))
    end

    it "blocked crawlers shouldn't log page views" do
      ApplicationRequest.clear_cache!
      SiteSetting.blacklisted_crawler_user_agents = 'Googlebot'
      expect {
        middleware.call(env('HTTP_USER_AGENT' => 'Googlebot/2.1 (+http://www.google.com/bot.html)'))
        ApplicationRequest.write_cache!
      }.to_not change { ApplicationRequest.count }
    end
  end

end
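These middleware specs imply that blocking happens before the wrapped app runs: a blocked crawler receives a 403 with an empty body, and because the request never reaches page-view accounting, nothing is written to ApplicationRequest. A hypothetical sketch of that call path is below; log_request is a stand-in name for the tracker's real logging, and the actual RequestTracker#call does considerably more.

# Hypothetical shape of the blocking path in Middleware::RequestTracker#call.
def call(env)
  if CrawlerDetection.is_blocked_crawler?(env['HTTP_USER_AGENT'])
    # Short-circuit with an empty 403; the wrapped app is never called,
    # so no page view is recorded for the blocked crawler.
    return [403, { 'Content-Type' => 'text/plain' }, []]
  end

  result = @app.call(env)
  log_request(env, result) # page-view accounting only for allowed requests
  result
end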