Add per-tenant sitemap to robots.txt file

While we ping some search engines (currently, only Google) when
generating the sitemap files, we weren't telling search engines
accessing through the `robots.txt` file where to find the sitemap. Now
we're doing so, using the right sitemap file for the right tenant.
This commit is contained in:
Javi Martín
2022-09-30 16:04:56 +02:00
parent 5100884110
commit 468761253b
4 changed files with 33 additions and 0 deletions

View File

@@ -0,0 +1,7 @@
# Serves the dynamic, tenant-aware robots.txt rendered by
# app/views/robots/index.text.erb.
class RobotsController < ApplicationController
  # robots.txt must be readable by anonymous crawlers, so skip the
  # CanCanCan authorization check that ApplicationController enforces.
  skip_authorization_check

  # GET /robots.txt
  # Only responds to the plain-text format; other formats get 406.
  def index
    respond_to :text
  end
end

View File

@@ -13,3 +13,9 @@ Disallow: /*?*search
Disallow: /*?*locale-switcher
Disallow: /*?*filter
Disallow: user_id
<%# Point crawlers at the sitemap of the current tenant. Sitemap files for
    non-default tenants are generated under tenants/<schema>/, while the
    default tenant keeps the root sitemap.xml path. The trailing -%> trims
    this comment's newline so the rendered robots.txt output is unchanged. -%>
<% if Tenant.default? %>
Sitemap: <%= "#{root_url}sitemap.xml" %>
<% else %>
Sitemap: <%= "#{root_url}tenants/#{Tenant.current_schema}/sitemap.xml" %>
<% end %>

View File

@@ -30,6 +30,7 @@ Rails.application.routes.draw do
root "welcome#index"
get "/welcome", to: "welcome#welcome"
get "/consul.json", to: "installation#details"
get "robots.txt", to: "robots#index"
resources :stats, only: [:index]
resources :images, only: [:destroy]

View File

@@ -0,0 +1,19 @@
require "rails_helper"

# Feature spec for the dynamic robots.txt: each tenant must advertise
# its own sitemap file to crawlers.
describe "robots.txt" do
  scenario "uses the default sitemap for the default tenant" do
    expected_directive = "Sitemap: #{app_host}/sitemap.xml"

    visit "/robots.txt"

    expect(page).to have_content(expected_directive)
  end

  scenario "uses a different sitemap for other tenants" do
    create(:tenant, schema: "cyborgs")
    expected_directive = "Sitemap: http://cyborgs.lvh.me:#{app_port}/tenants/cyborgs/sitemap.xml"

    with_subdomain("cyborgs") do
      visit "/robots.txt"

      expect(page).to have_content(expected_directive)
    end
  end
end