Added generated sitemap

This commit is contained in:
Ryan Freeman 2023-07-31 22:24:30 +01:00
parent 8035f977ae
commit f6e75acc3c
2 changed files with 22 additions and 82 deletions

View File

@ -1,18 +1,25 @@
import {MetadataRoute} from 'next'
import {getAllArticles} from '@/lib/getAllArticles'
export default function sitemap(): MetadataRoute.Sitemap {
return [
{
url: 'https://acme.com',
lastModified: new Date(),
},
{
url: 'https://acme.com/about',
lastModified: new Date(),
},
{
url: 'https://acme.com/blog',
lastModified: new Date(),
},
/**
 * Next.js App Router metadata route: builds the sitemap dynamically
 * (replaces the old build-time fast-glob + fs generator).
 *
 * @returns one sitemap entry per static page plus one per article.
 */
export default async function sitemap(): Promise<MetadataRoute.Sitemap> {
    // Static pages of the site.
    const urls = [
        'https://ryanfreeman.dev/',
        'https://ryanfreeman.dev/about',
        'https://ryanfreeman.dev/dashboard',
        'https://ryanfreeman.dev/writing',
        'https://ryanfreeman.dev/projects',
        'https://ryanfreeman.dev/uses'
    ]

    const pages = urls.map(url => ({
        url,
        lastModified: new Date()
    }))

    // Articles: use each post's own date rather than "now" so crawlers see a
    // meaningful last-modified value.
    // Fix: pages above emit a Date while posts emitted an ISO string —
    // MetadataRoute.Sitemap accepts both, but normalize to Date for
    // consistency across all entries.
    // NOTE(review): the generator removed in this commit published posts at
    // `${BASE_URL}/writing/${slug}` (no trailing slash); confirm articles
    // really resolve at the site root with a trailing slash before relying
    // on these URLs.
    const posts = (await getAllArticles()).map(({slug, date}) => ({
        url: `https://ryanfreeman.dev/${slug}/`,
        lastModified: new Date(date)
    }))

    return [...pages, ...posts]
}

View File

@ -1,67 +0,0 @@
import glob from 'fast-glob'
import path from 'path'
import {getAllArticles} from '@/lib/getAllArticles'
import {writeFile} from 'fs/promises'
const BASE_URL = process.env.NEXT_PUBLIC_SITE_URL
// Render the given absolute URLs as a sitemap.xml document and write it to
// ./public/sitemap.xml. Every entry gets the current timestamp as <lastmod>
// and a fixed changefreq/priority.
async function createSitemap(pages: string[]) {
// NOTE: the trailing .replace collapses every run of 2+ whitespace chars
// (or any tab) — including the template literal's own indentation — into a
// single space, so the emitted XML is effectively single-spaced.
const sitemap =
`<?xml version="1.0" encoding="UTF-8"?>
<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">
${pages.map((url) =>
`<url>
<loc>${url}</loc>
<lastmod>${new Date().toISOString()}</lastmod>
<changefreq>daily</changefreq>
<priority>1.0</priority>
</url>`).join('')}
</urlset>`.replace(/(\s\s+|\t)/g, ' ').trim()
// Written relative to the process cwd — assumes ./public exists.
await writeFile('./public/sitemap.xml', sitemap, 'utf8')
}
// Write ./public/robots.txt allowing all crawlers and pointing them at the
// sitemap under BASE_URL (module constant from NEXT_PUBLIC_SITE_URL).
async function createRobots() {
// NOTE: the trailing .replace collapses any run of 2+ whitespace chars (or a
// tab) into one space — single newlines survive, but consecutive blank lines
// or indentation in the template would be flattened onto one line.
const robots =
`# *
User-agent: *
Allow: /
# Host
Host: ${BASE_URL}
# Sitemaps
Sitemap: ${BASE_URL}/sitemap.xml`.replace(/(\s\s+|\t)/g, ' ').trim()
// Written relative to the process cwd — assumes ./public exists.
await writeFile('./public/robots.txt', robots, 'utf8')
}
/**
 * Collect every public page URL (top-level /pages components plus all
 * article slugs) and emit sitemap.xml and robots.txt into ./public.
 */
export async function generateSitemap() {
    // Framework-internal / special pages that must not appear in the sitemap.
    // index.tsx is excluded here and re-added below as the bare site root.
    const excluded = [
        '_app.tsx',
        '_document.tsx',
        'index.tsx'
    ]

    // Each remaining page component becomes `${BASE_URL}/<name>` with its
    // extension stripped.
    // NOTE(review): dynamic-route files such as `[slug].tsx` are not in the
    // exclusion list and would emit a literal `[slug]` URL — confirm none
    // exist at this directory level.
    const pages = (await glob(['*.tsx', '*.jsx'], {
        cwd: path.join(process.cwd(), '/pages/'),
    })).filter((page) => {
        return !excluded
            .includes(page)
    }).map((page) => {
        return `${BASE_URL}/${page}`
            .replace(/\.(tsx|jsx)$/, '')
    })

    pages.unshift(`${BASE_URL}/`)      // site root (stands in for index.tsx)
    pages.push(`${BASE_URL}/writing`)  // article index page

    const articles = await getAllArticles()
    const slugs = articles.map(({slug}) => `${BASE_URL}/writing/${slug}`)
    const allPages = [...pages, ...slugs]

    // Fix: the original awaited each call *inside* the array literal, which
    // resolved them sequentially and made Promise.all a no-op over two
    // already-settled values. Pass the promises themselves so both files are
    // written concurrently and a rejection from either is surfaced.
    await Promise.all([
        createSitemap(allPages),
        createRobots()
    ])
}