mirror of https://github.com/jakejarvis/jarv.is.git (synced 2025-04-26 18:48:28 -04:00)

generate permalinks when parsing note

parent 6d81939b8b
commit d2b71887b4
@@ -25,9 +25,9 @@ export const buildFeed = () => {
   const notes = getAllNotes();
   notes.forEach((note: any) => {
     feed.addItem({
+      guid: note.permalink,
+      link: note.permalink,
       title: note.title,
-      link: `${config.baseUrl}/notes/${note.slug}/`,
-      guid: `${config.baseUrl}/notes/${note.slug}/`,
       description: note.description,
       image: note.image ? `${config.baseUrl}${note.image}` : "",
       author: [
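Context for the hunk above: the feed item now reuses the permalink computed by the note parser instead of rebuilding it from `config.baseUrl` and the slug. Below is a minimal sketch of how that might look end to end, assuming the jpmonette `feed` package (suggested by the `feed.addItem` call) and placeholder feed metadata; everything outside the item fields shown in the diff is an assumption, not code from the repo.

```ts
import { Feed } from "feed";
import { getAllNotes } from "./parse-notes";
import * as config from "./config";

// Hypothetical, simplified sketch: the permalink is computed once in the
// parser, so the feed builder no longer derives URLs from baseUrl + slug.
export const buildFeed = (): Feed => {
  const feed = new Feed({
    id: config.baseUrl,   // assumed config export
    link: config.baseUrl,
    title: "Notes",       // placeholder title
    copyright: "",        // required by the `feed` package's FeedOptions
  });

  getAllNotes().forEach((note: any) => {
    feed.addItem({
      guid: note.permalink,      // identical string everywhere the note is linked
      link: note.permalink,
      title: note.title,
      description: note.description,
      date: new Date(note.date), // `date` is required by addItem
    });
  });

  return feed;
};
```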
@@ -2,17 +2,18 @@ import fs from "fs";
 import path from "path";
 import matter from "gray-matter";
 import { format, parseISO } from "date-fns";
-import { NOTES_DIR } from "./config";
+import * as config from "./config";

 export const getNoteData = (file: string) => {
   const slug = file.replace(/\.mdx$/, "");
-  const fullPath = path.join(process.cwd(), NOTES_DIR, `${slug}.mdx`);
+  const fullPath = path.join(process.cwd(), config.NOTES_DIR, `${slug}.mdx`);
   const contents = fs.readFileSync(fullPath, "utf8");
   const { data } = matter(contents);

   return {
     ...data,
     slug,
+    permalink: `${config.baseUrl}/notes/${slug}/`,
     date: parseISO(data.date).toISOString(), // validate/normalize the date string provided from front matter
     year: parseInt(format(parseISO(data.date), "yyyy")), // parse years here so it's easier to group them on list page
   };
@@ -20,7 +21,7 @@ export const getNoteData = (file: string) => {

 // all .mdx files in NOTES_DIR
 export const getNoteFiles = () =>
-  fs.readdirSync(path.join(process.cwd(), NOTES_DIR)).filter((notePath) => /\.mdx$/.test(notePath));
+  fs.readdirSync(path.join(process.cwd(), config.NOTES_DIR)).filter((notePath) => /\.mdx$/.test(notePath));

 export const getAllNotes = () =>
   getNoteFiles()
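For reference, a hedged sketch of the shapes these parser hunks rely on: the two `lib/config` values the code references (`baseUrl`, `NOTES_DIR`) and the object `getNoteData` now returns. The `NoteFrontMatter` name and the example values are illustrative assumptions, not taken from the repo.

```ts
// Assumed lib/config exports referenced by `import * as config from "./config"`:
export const baseUrl = "https://jarv.is"; // example value only
export const NOTES_DIR = "notes";         // example value only

// Illustrative shape of what getNoteData("some-note.mdx") now returns,
// with the permalink computed once at parse time:
export type NoteFrontMatter = {
  title?: string;
  description?: string;
  image?: string;
  slug: string;      // "some-note"
  permalink: string; // `${baseUrl}/notes/${slug}/`
  date: string;      // normalized ISO 8601 string from front matter
  year: number;      // pre-parsed for grouping on the notes list page
};
```

Centralizing the permalink here means the feed builder, the analytics matcher, and the note page all consume the same string instead of each re-deriving it.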
@@ -1,5 +1,4 @@
 import * as Sentry from "@sentry/node";
-import * as config from "../../lib/config";
 import { getAllNotes } from "../../lib/parse-notes";
 import pRetry from "p-retry";
 import faunadb from "faunadb";
@@ -101,7 +100,7 @@ const getSiteStats = async (client) => {
     const match: any = notes.find((note) => `notes/${note.slug}` === page.slug);
     if (match) {
       page.title = match.title;
-      page.url = `${config.baseUrl}/${page.slug}/`;
+      page.url = match.permalink;
       page.date = match.date;
     }

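The hit-counter enrichment above now consumes that same `permalink`. A rough sketch of the surrounding logic, under the assumption that the stats records are an array keyed by `"notes/<slug>"`; only the lines shown in the hunk are known from the source, the helper name and signature are hypothetical.

```ts
import { getAllNotes } from "../../lib/parse-notes";

// Hypothetical helper: enrich analytics records (keyed by "notes/<slug>")
// with metadata from the parsed notes, reusing the precomputed permalink.
const enrichPages = (pages: any[]) => {
  const notes = getAllNotes();

  pages.forEach((page) => {
    const match: any = notes.find((note: any) => `notes/${note.slug}` === page.slug);
    if (match) {
      page.title = match.title;
      page.url = match.permalink; // was `${config.baseUrl}/${page.slug}/`
      page.date = match.date;
    }
  });

  return pages;
};
```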
@@ -1,95 +0,0 @@
-import * as Sentry from "@sentry/node";
-import { graphql } from "@octokit/graphql";
-import type { NextApiRequest, NextApiResponse } from "next";
-
-Sentry.init({
-  dsn: process.env.SENTRY_DSN || process.env.NEXT_PUBLIC_SENTRY_DSN || "",
-  environment: process.env.NODE_ENV || process.env.VERCEL_ENV || process.env.NEXT_PUBLIC_VERCEL_ENV || "",
-});
-
-const handler = async (req: NextApiRequest, res: NextApiResponse) => {
-  try {
-    if (req.method !== "GET") {
-      return res.status(405).send(""); // 405 Method Not Allowed
-    }
-
-    // allow custom limit, max. 24 results
-    let limit = 24;
-    if (parseInt(req.query.limit as string) > 0 && parseInt(req.query.limit as string) < limit) {
-      limit = parseInt(req.query.limit as string);
-    }
-
-    const result = await fetchRepos(req.query.sort === "top" ? "STARGAZERS" : "PUSHED_AT", limit);
-
-    // let Vercel edge cache results for 15 mins
-    res.setHeader("Cache-Control", "s-maxage=900, stale-while-revalidate");
-
-    return res.status(200).json(result);
-  } catch (error) {
-    console.error(error);
-
-    // log error to sentry, give it 2 seconds to finish sending
-    Sentry.captureException(error);
-    await Sentry.flush(2000);
-
-    const message = error instanceof Error ? error.message : "Unknown error.";
-
-    // 500 Internal Server Error
-    return res.status(500).json({ success: false, message });
-  }
-};
-
-const fetchRepos = async (sort, limit) => {
-  // https://docs.github.com/en/graphql/reference/objects#repository
-  const { user } = await graphql(
-    `
-      query ($username: String!, $sort: String, $limit: Int) {
-        user(login: $username) {
-          repositories(
-            first: $limit
-            isLocked: false
-            isFork: false
-            ownerAffiliations: OWNER
-            privacy: PUBLIC
-            orderBy: { field: $sort, direction: DESC }
-          ) {
-            edges {
-              node {
-                name
-                url
-                description
-                pushedAt
-                stargazerCount
-                forkCount
-                primaryLanguage {
-                  name
-                  color
-                }
-              }
-            }
-          }
-        }
-      }
-    `,
-    {
-      username: "jakejarvis",
-      limit,
-      sort,
-      headers: {
-        authorization: `token ${process.env.GH_PUBLIC_TOKEN}`,
-      },
-    }
-  );
-
-  return user.repositories.edges.map(({ node: repo }) => ({
-    name: repo.name,
-    url: repo.url,
-    description: repo.description,
-    updatedAt: new Date(repo.pushedAt),
-    stars: repo.stargazerCount,
-    forks: repo.forkCount,
-    language: repo.primaryLanguage,
-  }));
-};
-
-export default handler;
@@ -20,15 +20,15 @@ import rehypeExternalLinks from "rehype-external-links";
 import rehypeSlug from "rehype-slug";
 import rehypeAutolinkHeadings from "rehype-autolink-headings";

-const Note = ({ source, frontMatter, slug }) => (
+const Note = ({ frontMatter, source }) => (
   <>
     <NextSeo
       title={frontMatter.title}
       description={frontMatter.description}
-      canonical={`${config.baseUrl}/notes/${slug}/`}
+      canonical={frontMatter.permalink}
       openGraph={{
         title: frontMatter.title,
-        url: `${config.baseUrl}/notes/${slug}/`,
+        url: frontMatter.permalink,
         type: "article",
         article: {
           publishedTime: frontMatter.date,
@@ -47,7 +47,7 @@ const Note = ({ source, frontMatter, slug }) => (
       }}
     />
     <ArticleJsonLd
-      url={`${config.baseUrl}/notes/${slug}/`}
+      url={frontMatter.permalink}
       title={frontMatter.title}
       description={frontMatter.description}
       datePublished={frontMatter.date}
@@ -60,7 +60,7 @@ const Note = ({ source, frontMatter, slug }) => (

     <Layout>
       <Container>
-        <Meta {...frontMatter} slug={slug} />
+        <Meta {...frontMatter} />
         <Content>
           <div className="markdown">
             <MDXRemote {...source} components={mdxComponents} />
@@ -73,9 +73,8 @@ const Note = ({ source, frontMatter, slug }) => (

 export const getStaticProps: GetStaticProps = async ({ params }) => {
   const filePath = path.join(process.cwd(), config.NOTES_DIR, `${params.slug}.mdx`);
-  const source = fs.readFileSync(filePath);
-
-  const { content, data } = matter(source);
+  const rawSource = fs.readFileSync(filePath);
+  const { data, content } = matter(rawSource);

   const mdxSource = await serialize(content, {
     scope: data,
@@ -97,10 +96,11 @@ export const getStaticProps: GetStaticProps = async ({ params }) => {
     props: {
       frontMatter: {
         ...data,
+        slug: params.slug,
+        permalink: `${config.baseUrl}/notes/${params.slug}/`,
         date: parseISO(data.date).toISOString(), // validate/normalize the date string provided from front matter
       },
       source: mdxSource,
-      slug: params.slug,
     },
   };
 };
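Not shown in this diff is the `getStaticPaths` that supplies `params.slug` to the `getStaticProps` above. A hedged sketch of a typical companion, reusing `getNoteFiles` from the parser; the repo's actual implementation may differ.

```ts
import { GetStaticPaths } from "next";
import { getNoteFiles } from "../../lib/parse-notes";

// Hypothetical sketch: one static path per .mdx file in NOTES_DIR,
// so params.slug in getStaticProps always maps to an existing note.
export const getStaticPaths: GetStaticPaths = async () => {
  const paths = getNoteFiles().map((file) => ({
    params: { slug: file.replace(/\.mdx$/, "") },
  }));

  return {
    paths,
    fallback: false, // unknown slugs 404 instead of rendering on demand
  };
};
```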