This repository has been archived on 2026-05-03. You can view files and clone it. You cannot open issues or pull requests or push a commit.
Files
Kismet Hasanaj 34dc9aec52 .
2026-05-02 20:07:02 +02:00

44 lines
1.4 KiB
TypeScript

/**
* robots.ts — Robots.txt generation for Novarix Networks
*
* Next.js App Router generates /robots.txt automatically from the object
* returned by this default export.
*
* ─── Notes ───────────────────────────────────────────────────────────────
*
* • The `sitemap` field should match the canonical domain. If the domain
* ever changes, update both this file and sitemap.ts.
*
* • To block specific paths (e.g. staging pages or admin routes), add
* `disallow` entries to the rules array:
*
* rules: [
* { userAgent: "*", allow: "/", disallow: ["/admin/", "/staging/"] },
* ],
*
* • To block a specific crawler entirely:
*
* rules: [
* { userAgent: "*", allow: "/" },
* { userAgent: "GPTBot", disallow: "/" },
* ],
*
* ─── Output ──────────────────────────────────────────────────────────────
*
* User-agent: *
* Allow: /
* Sitemap: https://novarixnet.com/sitemap.xml
*/
import type { MetadataRoute } from "next";
/**
 * Produces the robots.txt payload that the Next.js App Router serves
 * at /robots.txt.
 *
 * @returns A {@link MetadataRoute.Robots} object: a single wildcard rule
 *          allowing all crawlers on every path, plus the canonical
 *          sitemap URL.
 */
export default function robots(): MetadataRoute.Robots {
  // One rule covering every crawler; nothing is disallowed.
  const allowEverything = {
    userAgent: "*",
    allow: "/",
  };

  return {
    rules: allowEverything,
    sitemap: "https://novarixnet.com/sitemap.xml",
  };
}