Mirror of https://github.com/superseriousbusiness/gotosocial, synced 2025-06-05 21:59:39 +02:00
[feature] Add meta robots tag; allow robots to index profile card if user is Discoverable (#842)

* rework robots.txt response
* don't let robots snippet from statuses/threads
* allow robots to index if user is Discoverable
* add license text
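The meta-tag half of this change lives in internal/web rather than in the hunks below. As a rough sketch only (the helper name and the exact directive strings are illustrative assumptions, not the commit's code), the idea is to pick the robots directive per profile based on the account's Discoverable flag:

// Illustrative only: not the code added in internal/web by this commit.
package main

import "fmt"

// robotsMeta picks the content for a <meta name="robots"> tag on a profile page.
// Exact directive strings are assumptions; the real values may differ.
func robotsMeta(discoverable bool) string {
	if discoverable {
		// indexing allowed, but still discourage snippets/archiving
		return "noarchive, max-snippet:0"
	}
	// non-discoverable accounts stay out of search indexes entirely
	return "noindex, nofollow"
}

func main() {
	fmt.Println(robotsMeta(true))
	fmt.Println(robotsMeta(false))
}

Per the commit message, statuses and threads keep snippeting disabled regardless of discoverability; only the profile card becomes indexable.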
@@ -1,3 +1,21 @@
+/*
+   GoToSocial
+   Copyright (C) 2021-2022 GoToSocial Authors admin@gotosocial.org
+
+   This program is free software: you can redistribute it and/or modify
+   it under the terms of the GNU Affero General Public License as published by
+   the Free Software Foundation, either version 3 of the License, or
+   (at your option) any later version.
+
+   This program is distributed in the hope that it will be useful,
+   but WITHOUT ANY WARRANTY; without even the implied warranty of
+   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+   GNU Affero General Public License for more details.
+
+   You should have received a copy of the GNU Affero General Public License
+   along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
 package security
 
 import (
@@ -7,11 +25,33 @@ import (
 )
 
 const robotsString = `User-agent: *
-Disallow: /
+Crawl-delay: 500
+# api stuff
+Disallow: /api/
+# auth/login stuff
+Disallow: /auth/
+Disallow: /oauth/
+Disallow: /check_your_email
+Disallow: /wait_for_approval
+Disallow: /account_disabled
+# well known stuff
+Disallow: /.well-known/
+# files
+Disallow: /fileserver/
+# s2s AP stuff
+Disallow: /users/
+Disallow: /emoji/
+# panels
+Disallow: /admin
+Disallow: /user
+Disallow: /settings/
 `
 
-// RobotsGETHandler returns the most restrictive possible robots.txt file in response to a call to /robots.txt.
-// The response instructs bots with *any* user agent not to index the instance at all.
+// RobotsGETHandler returns a decent robots.txt that prevents crawling
+// the api, auth pages, settings pages, etc.
+//
+// More granular robots meta tags are then applied for web pages
+// depending on user preferences (see internal/web).
 func (m *Module) RobotsGETHandler(c *gin.Context) {
 	c.String(http.StatusOK, robotsString)
 }