Skip to content
Snippets Groups Projects
Commit a8853cdc authored by John Xina's avatar John Xina
Browse files

add robots.txt support, default to strict policy

parent cd933fff
No related branches found
Tags v0.1
No related merge requests found
......@@ -17,7 +17,7 @@ import requests
from urllib.parse import urlparse
import asyncio
from aioflask import request, make_response, redirect, url_for
from aioflask import request, make_response, redirect, url_for, send_from_directory
import subprocess
from bs4 import BeautifulSoup
......@@ -90,6 +90,15 @@ async def test_view():
resp.set_cookie('theme', 'default')
return resp
@app.route('/robots.txt')
def robots_txt():
    """Serve the robots.txt file matching the configured crawler policy.

    Reads ``robots_policy`` from the ``[site]`` config section, falling
    back to 'strict' when the key is unset or empty.  The special value
    'PLEASE_INDEX_EVERYTHING' deliberately serves no robots.txt (404),
    which lets crawlers index the whole site.
    """
    policy = appconf['site'].get('robots_policy') or 'strict'
    if policy == 'PLEASE_INDEX_EVERYTHING':
        # No robots.txt at all -> crawlers treat everything as indexable.
        return '', 404
    # NOTE(review): policy comes from trusted config, but an unknown value
    # here simply 404s on a missing file under static/rules — confirm the
    # file names there match the documented policy values.
    return send_from_directory('static/rules', f'robots_{policy}.txt')
##########################################
# Error handling
##########################################
......
......@@ -12,6 +12,14 @@ site_allow_download = true
# This will allow (potentially unsafe) error responses to be shown directly on the site.
site_show_unsafe_error_response = false
# This controls whether the search engines will index your site or not.
# Available options:
# - 'strict', indexing is disallowed.
# - 'relexed', search engines can index articles and search page.
# - 'PLEASE_INDEX_EVERYTHING', a VERY DANGEROUS option, may give you a lawsuit.
# Defaults to 'strict'.
robots_policy = 'strict'
[flask]
host = '0.0.0.0'
......
User-agent: *
Disallow: /vv/
Disallow: /space/
Disallow: /author/
Disallow: /video_listen/
Disallow: /video/
User-agent: *
Disallow: /
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment