Mirror of https://github.com/superseriousbusiness/gotosocial, synced 2025-06-05 21:59:39 +02:00
[chore] The Big Middleware and API Refactor (tm) (#1250)
* interim commit: start refactoring middlewares into package under router
* another interim commit, this is becoming a big job
* another fucking massive interim commit
* refactor bookmarks to new style
* ambassador, wiz zeze commits you are spoiling uz
* she compiles, we're getting there
* we're just normal men; we're just innocent men
* apiutil
* whoopsie
* i'm glad noone reads commit msgs haha :blob_sweat:
* use that weirdo go-bytesize library for maxMultipartMemory
* fix media module paths
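Of the bullets above, the go-bytesize change is the easiest to illustrate. Below is a minimal sketch of parsing a human-readable size and applying it as gin's multipart memory limit; it assumes the github.com/inhies/go-bytesize package and an illustrative "8MB" value, not the PR's actual configuration:

package main

import (
	"github.com/gin-gonic/gin"
	bytesize "github.com/inhies/go-bytesize"
)

func main() {
	engine := gin.Default()

	// Parse a human-readable size string (e.g. from config)
	// instead of hand-multiplying byte counts.
	maxSize, err := bytesize.Parse("8MB")
	if err != nil {
		panic(err)
	}

	// gin stores the multipart form memory limit as an int64
	// byte count on the engine.
	engine.MaxMultipartMemory = int64(maxSize)

	_ = engine.Run(":8080")
}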
internal/web/robots.go
@@ -18,7 +18,45 @@
 package web
 
-// https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag#robotsmeta
-const (
-	robotsAllowSome = "nofollow, noarchive, nositelinkssearchbox, max-image-preview:standard"
-)
+import (
+	"net/http"
+
+	"github.com/gin-gonic/gin"
+)
+
+const (
+	robotsPath          = "/robots.txt"
+	robotsMetaAllowSome = "nofollow, noarchive, nositelinkssearchbox, max-image-preview:standard" // https://developers.google.com/search/docs/crawling-indexing/robots-meta-tag#robotsmeta
+
+	robotsTxt = `# GoToSocial robots.txt -- to edit, see internal/web/robots.go
+# more info @ https://developers.google.com/search/docs/crawling-indexing/robots/intro
+User-agent: *
+Crawl-delay: 500
+# api stuff
+Disallow: /api/
+# auth/login stuff
+Disallow: /auth/
+Disallow: /oauth/
+Disallow: /check_your_email
+Disallow: /wait_for_approval
+Disallow: /account_disabled
+# well known stuff
+Disallow: /.well-known/
+# files
+Disallow: /fileserver/
+# s2s AP stuff
+Disallow: /users/
+Disallow: /emoji/
+# panels
+Disallow: /admin
+Disallow: /user
+Disallow: /settings/`
+)
+
+// robotsGETHandler returns a decent robots.txt that prevents crawling
+// the api, auth pages, settings pages, etc.
+//
+// More granular robots meta tags are then applied for web pages
+// depending on user preferences (see internal/web).
+func (m *Module) robotsGETHandler(c *gin.Context) {
+	c.String(http.StatusOK, robotsTxt)
+}
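For context, the handler above still needs to be mounted on a route. A minimal sketch of that wiring, reusing robotsPath from the diff; the Module stub and Route method here are hypothetical stand-ins for the PR's real module plumbing:

package web

import "github.com/gin-gonic/gin"

// Module is a hypothetical stand-in for the real web module,
// which carries config, processor, and template dependencies.
type Module struct{}

// Route attaches this module's handlers to a gin engine.
func (m *Module) Route(engine *gin.Engine) {
	// Serve the static robots.txt body at /robots.txt.
	engine.GET(robotsPath, m.robotsGETHandler)
}

A GET on /robots.txt then hits robotsGETHandler, whose c.String call writes the constant body with status 200 and a text/plain content type.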
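The doc comment on robotsGETHandler distinguishes the blanket robots.txt from per-page robots meta tags. Continuing the sketch package above (with net/http also imported), here is one hedged way robotsMetaAllowSome could feed such a tag; the discoverability check, template name, and context key are all hypothetical:

// accountIsDiscoverable is a hypothetical stand-in for the real
// per-user preference lookup.
func accountIsDiscoverable(c *gin.Context) bool {
	return c.Query("discoverable") == "true" // placeholder logic
}

// profileGETHandler sketches a web page handler choosing a robots
// meta policy based on that preference.
func (m *Module) profileGETHandler(c *gin.Context) {
	robotsMeta := "noindex, nofollow"
	if accountIsDiscoverable(c) {
		robotsMeta = robotsMetaAllowSome
	}

	// The template would emit:
	//   <meta name="robots" content="{{ .robotsMeta }}">
	c.HTML(http.StatusOK, "profile.tmpl", gin.H{
		"robotsMeta": robotsMeta,
	})
}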