/**
 * robots.ts — Robots.txt generation for Novarix Networks
 *
 * Next.js App Router generates /robots.txt automatically from the object
 * returned by this default export.
 *
 * ─── Notes ───────────────────────────────────────────────────────────────
 *
 * • The `sitemap` field should match the canonical domain. If the domain
 *   ever changes, update both this file and sitemap.ts.
 *
 * • To block specific paths (e.g. staging pages or admin routes), add
 *   `disallow` entries to the rules array:
 *
 *       rules: [
 *         { userAgent: "*", allow: "/", disallow: ["/admin/", "/staging/"] },
 *       ],
 *
 * • To block a specific crawler entirely:
 *
 *       rules: [
 *         { userAgent: "*", allow: "/" },
 *         { userAgent: "GPTBot", disallow: "/" },
 *       ],
 *
 * ─── Output ──────────────────────────────────────────────────────────────
 *
 *     User-agent: *
 *     Allow: /
 *     Sitemap: https://novarixnet.com/sitemap.xml
 */
import type { MetadataRoute } from "next";
export default function robots(): MetadataRoute.Robots {
|
||||
return {
|
||||
rules: {
|
||||
userAgent: "*",
|
||||
allow: "/",
|
||||
},
|
||||
sitemap: "https://novarixnet.com/sitemap.xml",
|
||||
};
|
||||
}