docs: add robots.txt and error.html

- robots.txt blocks crawling of everything except /stable (it disallows /archive/ and /master/)
- error.html has a simple meta refresh to the latest stable docs. Since crawlers treat a zero-delay meta refresh much like a redirect, this should also help them find the right path.
Aldo Cortesi, 2018-03-05 10:57:50 +13:00
parent a68c7ffb27
commit ee6937f948
4 changed files with 19 additions and 1 deletion

bucketassets/error.html (new file)

@@ -0,0 +1,9 @@
<html>
<head>
<title>Not found</title>
<meta http-equiv="refresh" content="0;URL='/stable'" />
</head>
<body>
Not found - redirecting you to <a href="/stable">latest stable docs</a>.
</body>
</html>
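
The content attribute packs a delay and a target URL into one string; a delay of 0 navigates immediately. Once deployed, a quick spot-check that missing paths actually serve this page (a sketch, assuming the error document is wired up as in docs/setup below and the site is served at docs.mitmproxy.org):

    # Hypothetical check, not part of this commit: a missing path
    # should come back with this page as the response body.
    curl -s https://docs.mitmproxy.org/no-such-page | grep -i refresh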

bucketassets/robots.txt (new file)

@@ -0,0 +1,3 @@
User-agent: *
Disallow: /archive/
Disallow: /master/
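
Robots rules are deny-listed by path prefix: anything not matched by a Disallow line, including /stable, stays crawlable. The deployed file can be sanity-checked with a plain fetch (assuming the site is served at docs.mitmproxy.org, as the bucket name in docs/setup suggests):

    # Fetch the live robots.txt and eyeball the rules
    curl -s https://docs.mitmproxy.org/robots.txt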

docs/setup (new executable file)

@@ -0,0 +1,7 @@
#!/bin/sh
aws configure set preview.cloudfront true
aws --profile mitmproxy \
s3 cp --acl public-read ./bucketassets/error.html s3://docs.mitmproxy.org/error.html
aws --profile mitmproxy \
s3 cp --acl public-read ./bucketassets/robots.txt s3://docs.mitmproxy.org/robots.txt
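
The preview.cloudfront setting enables the AWS CLI's then-preview CloudFront commands, and the two s3 cp calls upload the assets with a public-read ACL. For error.html to actually be served on 404s, the bucket (or a CloudFront distribution in front of it) must reference it as its error document; that is configured outside this commit. A minimal sketch of the one-time setup, assuming plain S3 static website hosting:

    # Hypothetical one-time bucket configuration (not part of this commit):
    # serve the bucket as a static website and use error.html for 404s.
    aws --profile mitmproxy s3 website s3://docs.mitmproxy.org/ \
        --index-document index.html --error-document error.html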

docs stylesheet (SCSS)

@@ -27,7 +27,6 @@ $family-sans-serif: BlinkMacSystemFont, -apple-system, "Segoe UI", "Roboto", "Ox
   margin-bottom: 1em;
 }
 .sidebar {
-  background-color: #F1F1F1;
   .version {