From 3ac6054bf9eeb86231ea0a6c390b8ac50e97cbcc Mon Sep 17 00:00:00 2001 From: eric sabelhaus Date: Wed, 18 Jan 2017 07:21:16 -0500 Subject: correct User-Agent placement in robots.txt --- public/robots.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) (limited to 'public/robots.txt') diff --git a/public/robots.txt b/public/robots.txt index 7d69fad59d1..123272a9834 100644 --- a/public/robots.txt +++ b/public/robots.txt @@ -4,13 +4,12 @@ # User-Agent: * # Disallow: / -User-Agent: * - # Add a 1 second delay between successive requests to the same server, limits resources used by crawler # Only some crawlers respect this setting, e.g. Googlebot does not # Crawl-delay: 1 # Based on details in https://gitlab.com/gitlab-org/gitlab-ce/blob/master/config/routes.rb, https://gitlab.com/gitlab-org/gitlab-ce/blob/master/spec/routing, and using application +User-Agent: * Disallow: /autocomplete/users Disallow: /search Disallow: /api @@ -23,12 +22,14 @@ Disallow: /groups/*/edit Disallow: /users # Global snippets +User-Agent: * Disallow: /s/ Disallow: /snippets/new Disallow: /snippets/*/edit Disallow: /snippets/*/raw # Project details +User-Agent: * Disallow: /*/*.git Disallow: /*/*/fork/new Disallow: /*/*/repository/archive* -- cgit v1.2.3