Reference article: https://blog.csdn.net/fanghua_vip/article/details/79535639
robots.txt checker tool: http://tool.chinaz.com/robots/
User-agent: *
Disallow:
Crawl-delay: 5
Disallow: /bin/
Disallow: /photos
Sitemap: http://xieboke.net/sitemap.xml
def robots(request):
    """Serve the site's robots.txt wrapped in <pre> tags as an HTML response.

    Reads ``joyoo/robots.txt`` (path is relative to the process's current
    working directory, i.e. the Django project root) and returns its full
    contents inside a ``<pre>`` block so it renders verbatim in a browser.

    :param request: the incoming Django HttpRequest (unused beyond routing)
    :return: HttpResponse containing the file contents wrapped in <pre>
    :raises FileNotFoundError: if joyoo/robots.txt does not exist
    """
    # Read the whole file in one call instead of concatenating line by
    # line with += (the original readlines() loop built the same string).
    with open("joyoo/robots.txt", "r") as f:
        content = "<pre>" + f.read() + "</pre>"
    # NOTE(review): real crawlers expect robots.txt served as text/plain;
    # the <pre> wrapper makes this an HTML page. Confirm this is only
    # meant for human viewing, otherwise use
    # HttpResponse(f.read(), content_type="text/plain").
    return HttpResponse(content)
# URL routing: expose the robots view.
# NOTE(review): the pattern r'^robots' is unanchored at the end, so it
# matches /robots, /robots.txt, but also /robotsanything -- confirm the
# intended match is r'^robots\.txt$'.
# NOTE(review): django.conf.urls.url() is deprecated (removed in Django 4);
# re_path() is the modern equivalent -- verify the project's Django version.
urlpatterns = [
url(r'^robots', robots),
]