From 81c7ddba5de38de9c163837467c013f357f4d5ce Mon Sep 17 00:00:00 2001
From: Pk11
Date: Sun, 4 Jul 2021 07:38:40 -0500
Subject: [PATCH] Make robots.txt block duplicate pages

---
 _config.yml |  6 +-----
 robots.txt  | 15 +++++++++++++++
 2 files changed, 16 insertions(+), 5 deletions(-)
 create mode 100644 robots.txt

diff --git a/_config.yml b/_config.yml
index 77c0e60a..4ff8880a 100644
--- a/_config.yml
+++ b/_config.yml
@@ -11,13 +11,9 @@ sass:
 
 defaults:
   - scope:
-      path: ""
+      path: "*.md"
     values:
       layout: wiki
-  - scope:
-      path: assets/js
-    values:
-      layout: null
 
 collections:
   en-US:

diff --git a/robots.txt b/robots.txt
new file mode 100644
index 00000000..e263eaf0
--- /dev/null
+++ b/robots.txt
@@ -0,0 +1,15 @@
+---
+---
+
+User-agent: *
+Disallow: /ic-IC/
+{% for collection in site.collections %}
+  {%- unless collection.label == "ic-IC" -%}
+    {%- for item in collection.docs -%}
+
+{% unless item.lang == collection.label %}Disallow: {{ item.url }}
+{% endunless %}
+
+    {%- endfor -%}
+  {%- endunless -%}
+{%- endfor -%}