FEATURE: flexible crawler detection
You can use the crawler user agents site setting to amend which user agents are considered crawlers, based on a string match in the user agent. Also improves performance of crawler detection slightly.
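As a rough illustration (not the actual Discourse implementation), the setting can be read as a pipe-delimited list of substrings compiled into a single case-insensitive regex: each entry is escaped so characters like "+" and "*" match literally, and one combined pattern keeps the per-request check cheap. The module name CrawlerDetectionSketch and the explicit setting_value parameter below are assumptions for illustration only.

# Hypothetical sketch of pipe-delimited, substring-based crawler matching.
# CrawlerDetectionSketch and the explicit setting_value argument are assumptions;
# the real code reads the site setting itself.
module CrawlerDetectionSketch
  # Build one case-insensitive regex from the pipe-delimited setting.
  # Regexp.escape makes entries like "Kaboodle+*" match as literal substrings.
  def self.to_matcher(setting_value)
    escaped = setting_value.split('|').map { |agent| Regexp.escape(agent) }.join('|')
    Regexp.new(escaped, Regexp::IGNORECASE)
  end

  # True when the user agent contains any of the configured substrings.
  def self.crawler?(user_agent, setting_value)
    to_matcher(setting_value).match?(user_agent)
  end
end

# Example mirroring the spec below: "Kaboodle+*" matches literally inside the UA.
CrawlerDetectionSketch.crawler?(
  "Mozilla/5.0 (compatible; Kaboodle+*/2.1; +http://www.google.com/bot.html)",
  'Mooble|Kaboodle+*'
) # => true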
@@ -3,6 +3,14 @@ require_dependency 'crawler_detection'
 
 describe CrawlerDetection do
   describe "crawler?" do
+
+    it "can be amended via site settings" do
+      SiteSetting.crawler_user_agents = 'Mooble|Kaboodle+*'
+      expect(CrawlerDetection.crawler?("Mozilla/5.0 (compatible; Kaboodle+*/2.1; +http://www.google.com/bot.html)")).to eq(true)
+      expect(CrawlerDetection.crawler?("Mozilla/5.0 (compatible; Mooble+*/2.1; +http://www.google.com/bot.html)")).to eq(true)
+      expect(CrawlerDetection.crawler?("Mozilla/5.0 (compatible; Gooble+*/2.1; +http://www.google.com/bot.html)")).to eq(false)
+    end
+
     it "returns true for crawler user agents" do
       # https://support.google.com/webmasters/answer/1061943?hl=en
       expect(described_class.crawler?("Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)")).to eq(true)