diff --git a/ghost/core/core/frontend/public/robots.txt b/ghost/core/core/frontend/public/robots.txt
index 4654e58f39..8ad5b3f97b 100644
--- a/ghost/core/core/frontend/public/robots.txt
+++ b/ghost/core/core/frontend/public/robots.txt
@@ -1,7 +1,6 @@
 User-agent: *
 Sitemap: {{blog-url}}/sitemap.xml
 Disallow: /ghost/
-Disallow: /p/
 Disallow: /email/
 Disallow: /r/
 Disallow: /webmentions/receive/
diff --git a/ghost/core/test/e2e-frontend/default_routes.test.js b/ghost/core/test/e2e-frontend/default_routes.test.js
index d4ee04009c..567cf13494 100644
--- a/ghost/core/test/e2e-frontend/default_routes.test.js
+++ b/ghost/core/test/e2e-frontend/default_routes.test.js
@@ -319,12 +319,9 @@ describe('Default Frontend routing', function () {
                 .expect(200)
                 .expect(assertCorrectFrontendHeaders);
 
-            // The response here is a publicly documented format users rely on
-            // In case it's changed remember to update the docs at https://ghost.org/help/modifying-robots-txt/
             res.text.should.equal(
                 'User-agent: *\n' +
                 'Sitemap: http://127.0.0.1:2369/sitemap.xml\nDisallow: /ghost/\n' +
-                'Disallow: /p/\n' +
                 'Disallow: /email/\n' +
                 'Disallow: /r/\n' +
                 'Disallow: /webmentions/receive/\n'