mirror of
https://github.com/superseriousbusiness/gotosocial
synced 2025-06-05 21:59:39 +02:00
[feature] Use X-Robots-Tag headers to instruct scrapers/crawlers (#3737)
* [feature] Use `X-Robots-Tag` headers to instruct scrapers/crawlers * use switch for RobotsHeaders
This commit is contained in:
@@ -20,6 +20,7 @@ package api

 import (
 	"github.com/gin-gonic/gin"
 	"github.com/superseriousbusiness/gotosocial/internal/api/nodeinfo"
 	"github.com/superseriousbusiness/gotosocial/internal/config"
+	"github.com/superseriousbusiness/gotosocial/internal/middleware"
 	"github.com/superseriousbusiness/gotosocial/internal/processing"
 	"github.com/superseriousbusiness/gotosocial/internal/router"
@@ -43,6 +44,16 @@ func (w *NodeInfo) Route(r *router.Router, m ...gin.HandlerFunc) {
 		}),
 	)

+	// If instance is configured to serve instance stats
+	// faithfully at nodeinfo, we should allow robots to
+	// crawl nodeinfo endpoints in a limited capacity.
+	// In all other cases, disallow everything.
+	if config.GetInstanceStatsMode() == config.InstanceStatsModeServe {
+		nodeInfoGroup.Use(middleware.RobotsHeaders("allowSome"))
+	} else {
+		nodeInfoGroup.Use(middleware.RobotsHeaders(""))
+	}
+
 	w.nodeInfo.Route(nodeInfoGroup.Handle)
 }
|
||||
|
Reference in New Issue
Block a user