feat: add ai.txt and robots.txt to prevent AI crawlers
This commit is contained in:
21
app/app.go
21
app/app.go
@@ -11,6 +11,7 @@ import (
|
||||
"forge.capytal.company/capytalcode/project-comicverse/lib/router"
|
||||
"forge.capytal.company/capytalcode/project-comicverse/lib/router/rerrors"
|
||||
"github.com/minio/minio-go/v7"
|
||||
"keikos.work/assets"
|
||||
"keikos.work/configs"
|
||||
"keikos.work/handlers/pages"
|
||||
)
|
||||
@@ -70,6 +71,26 @@ func NewWeb(opts Options) App {
|
||||
r.Handle("/", &pages.Home{})
|
||||
r.Handle("/assets/", opts.Assets)
|
||||
|
||||
robots, err := assets.ASSETS.ReadFile("robots.txt")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
r.HandleFunc("/robots.txt", func(w http.ResponseWriter, r *http.Request) {
|
||||
if _, err := w.Write(robots); err != nil {
|
||||
rerrors.InternalError(err).ServeHTTP(w, r)
|
||||
}
|
||||
})
|
||||
|
||||
ai, err := assets.ASSETS.ReadFile("ai.txt")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
r.HandleFunc("/ai.txt", func(w http.ResponseWriter, r *http.Request) {
|
||||
if _, err := w.Write(ai); err != nil {
|
||||
rerrors.InternalError(err).ServeHTTP(w, r)
|
||||
}
|
||||
})
|
||||
|
||||
// imgs := &pages.Images{S3: opts.S3}
|
||||
// r.HandleFunc("GET /images", imgs.List)
|
||||
// r.HandleFunc("GET /images/{name}", imgs.Get)
|
||||
|
||||
6
assets/ai.txt
Normal file
6
assets/ai.txt
Normal file
@@ -0,0 +1,6 @@
|
||||
# Spawning AI
|
||||
# Prevent AI training datasets from using any content on this site
|
||||
|
||||
User-Agent: *
|
||||
Disallow: /
|
||||
Disallow: *
|
||||
@@ -4,5 +4,5 @@ import (
|
||||
"embed"
|
||||
)
|
||||
|
||||
//go:embed css fonts img
|
||||
//go:embed css fonts img robots.txt ai.txt
|
||||
var ASSETS embed.FS
|
||||
|
||||
42
assets/robots.txt
Normal file
42
assets/robots.txt
Normal file
@@ -0,0 +1,42 @@
|
||||
User-agent: AI2Bot
|
||||
User-agent: Ai2Bot-Dolma
|
||||
User-agent: Amazonbot
|
||||
User-agent: anthropic-ai
|
||||
User-agent: Applebot
|
||||
User-agent: Applebot-Extended
|
||||
User-agent: Bytespider
|
||||
User-agent: CCBot
|
||||
User-agent: ChatGPT-User
|
||||
User-agent: Claude-Web
|
||||
User-agent: ClaudeBot
|
||||
User-agent: cohere-ai
|
||||
User-agent: Diffbot
|
||||
User-agent: DuckAssistBot
|
||||
User-agent: FacebookBot
|
||||
User-agent: FriendlyCrawler
|
||||
User-agent: Google-Extended
|
||||
User-agent: GoogleOther
|
||||
User-agent: GoogleOther-Image
|
||||
User-agent: GoogleOther-Video
|
||||
User-agent: GPTBot
|
||||
User-agent: iaskspider/2.0
|
||||
User-agent: ICC-Crawler
|
||||
User-agent: ImagesiftBot
|
||||
User-agent: img2dataset
|
||||
User-agent: ISSCyberRiskCrawler
|
||||
User-agent: Kangaroo Bot
|
||||
User-agent: Meta-ExternalAgent
|
||||
User-agent: Meta-ExternalFetcher
|
||||
User-agent: OAI-SearchBot
|
||||
User-agent: omgili
|
||||
User-agent: omgilibot
|
||||
User-agent: PanguBot
|
||||
User-agent: PerplexityBot
|
||||
User-agent: PetalBot
|
||||
User-agent: Scrapy
|
||||
User-agent: Sidetrade indexer bot
|
||||
User-agent: Timpibot
|
||||
User-agent: VelenPublicWebCrawler
|
||||
User-agent: Webzio-Extended
|
||||
User-agent: YouBot
|
||||
Disallow: /
|
||||
Reference in New Issue
Block a user