Add robots.txt to block duplicate pages

Pk11 2021-07-04 07:38:40 -05:00
parent fed8dcf6d4
commit 81c7ddba5d
2 changed files with 16 additions and 5 deletions

_config.yml

@@ -11,13 +11,9 @@ sass:
 defaults:
   - scope:
-      path: ""
+      path: "*.md"
     values:
       layout: wiki
-  - scope:
-      path: assets/js
-    values:
-      layout: null
 collections:
   en-US:
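
With this change, only Markdown pages get the wiki layout by default, so the separate assets/js scope that forced layout: null is presumably no longer needed and is dropped. Assuming the surrounding keys are unchanged, the defaults block of the Jekyll config would read roughly like this after the commit:

defaults:
  - scope:
      path: "*.md"
    values:
      layout: wiki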

robots.txt (new file, 15 additions)

@@ -0,0 +1,15 @@
---
---
User-agent: *
Disallow: /ic-IC/
{% for collection in site.collections %}
{%- unless collection.label == "ic-IC" -%}
{%- for item in collection.docs -%}
{% unless item.lang == collection.label %}Disallow: {{ item.url }}
{% endunless %}
{%- endfor -%}
{%- endunless -%}
{%- endfor -%}
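
The empty front matter block at the top makes Jekyll run the file through Liquid at build time. The template always blocks the /ic-IC/ locale, then walks every other collection and disallows any page whose lang front-matter value does not match the collection it sits in, i.e. presumably the duplicate, untranslated fallback copies the commit message refers to. As a purely hypothetical example, assuming a de-DE collection containing a page at /de-DE/example/ that is still marked lang: en-US, the generated robots.txt would come out roughly as:

User-agent: *
Disallow: /ic-IC/
Disallow: /de-DE/example/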