From 752214fed571dc7cf63bf9e250eea5b70d5e0b69 Mon Sep 17 00:00:00 2001
From: Ben Sima
Date: Thu, 28 Sep 2023 19:57:27 -0400
Subject: Disallow search engines from indexing my code
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

I don't really want my code to be discoverable; I only want people looking
at it if I have explicitly told them about it.

I tested it like so:

    ϟ curl -v https://simatime.com/git/ 2>&1 | rg x-robots
    < x-robots-tag: noindex, follow
---
 Biz/Cloud/Cgit.nix | 15 +++++++++++++++
 1 file changed, 15 insertions(+)
(limited to 'Biz/Cloud/Cgit.nix')

diff --git a/Biz/Cloud/Cgit.nix b/Biz/Cloud/Cgit.nix
index 78eda7b..0b3a71b 100644
--- a/Biz/Cloud/Cgit.nix
+++ b/Biz/Cloud/Cgit.nix
@@ -36,6 +36,14 @@ in
         Location to serve cgit on.
       '';
     };
+
+    allowCrawlers = mkOption {
+      default = true;
+      type = types.bool;
+      description = ''
+        Allow search engines to crawl and index this site.
+      '';
+    };
   };
 
   # Remove the global options for serialization into cgitrc
@@ -108,6 +116,13 @@ in
             fastcgi_split_path_info ^(${location}/)(/?.+)$;
             fastcgi_param PATH_INFO $fastcgi_path_info;
           ''
+          ) + (
+            if !cfg.allowCrawlers
+            then
+              ''
+                add_header X-Robots-Tag "noindex, follow" always;
+              ''
+            else ""
           );
         };
       };
--
cgit v1.2.3
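
Usage note: a minimal sketch of how the new allowCrawlers option might be
flipped from a host configuration that imports this module. Only the
allowCrawlers option itself comes from the patch; the "services.cgit"
attribute path and the relative import path are assumptions for
illustration, since the module's option namespace is not shown in these
hunks.

    # Hypothetical NixOS host configuration (assumed option path).
    { ... }:
    {
      # Path to the module being patched; how it is imported in the real
      # tree is an assumption.
      imports = [ ./Biz/Cloud/Cgit.nix ];

      # With the option off, the nginx location serving cgit adds
      # "X-Robots-Tag: noindex, follow", asking crawlers not to index the
      # pages while still following links -- matching the curl output in
      # the commit message above.
      services.cgit.allowCrawlers = false;
    }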