Mirror of https://github.com/jakejarvis/jarv.is.git, synced 2025-10-30 00:55:49 -04:00
fs -> fs/promises -> asyncify all note parsing
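In short, the commit swaps Node's synchronous fs API for the promise-based fs/promises API, so every helper that touches the filesystem becomes async and every call site has to await it. A minimal sketch of the before/after pattern (the path handling mirrors the code below; nothing here is taken verbatim from the repo):

import fs from "fs/promises";

// before: const raw = fs.readFileSync(fullPath, "utf8"); returned the string directly (from "fs")
// after: fs/promises exposes only promise-returning variants, so readFile resolves to the string
//        and every caller becomes async
const readNote = async (fullPath: string): Promise<string> => fs.readFile(fullPath, "utf8");

Any leftover synchronous call such as fs.readFileSync would now fail at runtime, since "fs/promises" has no *Sync methods; that is why every consumer in the hunks below changes in the same commit.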
@@ -9,11 +9,10 @@ import type { ParsedUrlQuery } from "querystring";
 // handles literally *everything* about building the server-side rss/atom feeds and writing the response.
 // all the page needs to do is `return buildFeed(context, { format: "rss" })` from getServerSideProps.
-export const buildFeed = (
+export const buildFeed = async (
   context: GetServerSidePropsContext<ParsedUrlQuery, PreviewData>,
   options?: { type: "rss" | "atom" }
-): { props: Record<string, unknown> } => {
+): Promise<{ props: Record<string, unknown> }> => {
   const { res } = context;

   // https://github.com/jpmonette/feed#example
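Because buildFeed now returns a Promise, the feed pages that delegate to it have to await (or return) that promise from getServerSideProps. A hedged sketch of such a call site, assuming an import path like ../lib/build-feed and the { type: "rss" } option from the signature above; neither the path nor the page file is shown in this diff:

import type { GetServerSideProps } from "next";
import { buildFeed } from "../lib/build-feed"; // illustrative path

export const getServerSideProps: GetServerSideProps = async (context) => {
  // buildFeed writes the RSS response itself and resolves to a props object,
  // which Next.js awaits because getServerSideProps is async
  return buildFeed(context, { type: "rss" });
};

// the page component renders nothing; the feed already went out on the response
const RssFeed = () => null;
export default RssFeed;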
@@ -37,7 +36,7 @@ export const buildFeed = (
   });

   // add notes separately using their frontmatter
-  const notes = getAllNotes();
+  const notes = await getAllNotes();
   notes.forEach((note) => {
     feed.addItem({
       guid: note.permalink,
@@ -1,4 +1,4 @@
-import fs from "fs";
+import fs from "fs/promises";
 import path from "path";
 import { renderToStaticMarkup } from "react-dom/server";
 import { serialize } from "next-mdx-remote/serialize";
@@ -21,16 +21,17 @@ import rehypePrism from "rehype-prism-plus";
 import type { NoteType } from "../../types";

 // returns all .mdx files in NOTES_DIR (without .mdx extension)
-export const getNoteSlugs = () =>
-  fs
-    .readdirSync(path.join(process.cwd(), NOTES_DIR))
-    .filter((file) => /\.mdx$/.test(file))
-    .map((noteFile) => noteFile.replace(/\.mdx$/, ""));
+export const getNoteSlugs = async () => {
+  // get all files in NOTES_DIR
+  const files = await fs.readdir(path.join(process.cwd(), NOTES_DIR));
+  // narrow to only the .mdx files and strip the .mdx extension
+  return files.filter((file) => /\.mdx$/.test(file)).map((noteFile) => noteFile.replace(/\.mdx$/, ""));
+};

 // returns front matter and/or *raw* markdown contents of a given slug
-export const getNoteData = (slug: string): Omit<NoteType, "source"> & { content: string } => {
+export const getNoteData = async (slug: string): Promise<Omit<NoteType, "source"> & { content: string }> => {
   const fullPath = path.join(process.cwd(), NOTES_DIR, `${slug}.mdx`);
-  const rawContent = fs.readFileSync(fullPath, "utf8");
+  const rawContent = await fs.readFile(fullPath, "utf8");
   const { data, content } = matter(rawContent);

   // carefully allow VERY limited markdown in post titles...
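With getNoteSlugs and getNoteData both async, a consumer has to await them before touching the data. A small sketch of a sequential consumer, assuming the helpers are exported from a module like lib/parse-notes (illustrative path) and that the front matter includes the date field used for sorting further down:

import { getNoteSlugs, getNoteData } from "../lib/parse-notes"; // illustrative path

const listNoteDates = async (): Promise<void> => {
  const slugs = await getNoteSlugs(); // fs.readdir is awaited inside
  for (const slug of slugs) {
    const { frontMatter } = await getNoteData(slug); // fs.readFile is awaited inside
    console.log(`${slug}: ${frontMatter.date}`);
  }
};

This loop reads one note at a time; getAllNotes, rewritten in the next hunk, avoids that by collecting the reads with Promise.all.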
@@ -65,7 +66,7 @@ export const getNoteData = (slug: string): Omit<NoteType, "source"> & { content:

 // fully parses MDX into JS and returns *everything* about a note
 export const getNote = async (slug: string): Promise<NoteType> => {
-  const { frontMatter, content } = getNoteData(slug);
+  const { frontMatter, content } = await getNoteData(slug);
   const source = await serialize(content, {
     parseFrontmatter: false,
     mdxOptions: {
@@ -96,7 +97,15 @@ export const getNote = async (slug: string): Promise<NoteType> => {
 };

 // returns the front matter of ALL notes, sorted reverse chronologically
-export const getAllNotes = () =>
-  getNoteSlugs()
-    .map((slug) => getNoteData(slug).frontMatter)
-    .sort((note1: NoteType["frontMatter"], note2: NoteType["frontMatter"]) => (note1.date > note2.date ? -1 : 1));
+export const getAllNotes = async () => {
+  const slugs = await getNoteSlugs();
+
+  // for each slug, query its front matter
+  // https://stackoverflow.com/a/40140562/1438024
+  const data = await Promise.all(slugs.map(async (slug) => (await getNoteData(slug)).frontMatter));
+
+  // sort the results by date
+  return data.sort((note1: NoteType["frontMatter"], note2: NoteType["frontMatter"]) =>
+    note1.date > note2.date ? -1 : 1
+  );
+};
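The rewritten getAllNotes maps every slug to an async front-matter read and gathers the results with Promise.all (the linked Stack Overflow answer describes this async-map pattern), so the reads run concurrently instead of back to back. The difference in isolation, with a hypothetical readDate stand-in for the per-slug read:

// hypothetical stand-in for an async per-slug read (purely for illustration)
const readDate = async (slug: string): Promise<string> => `date-for-${slug}`;

// sequential: each read only starts after the previous one resolves
const readDatesSequentially = async (slugs: string[]): Promise<string[]> => {
  const dates: string[] = [];
  for (const slug of slugs) {
    dates.push(await readDate(slug));
  }
  return dates;
};

// concurrent: every read starts immediately; Promise.all resolves once they all do
// (and rejects as soon as any one of them rejects)
const readDatesConcurrently = (slugs: string[]): Promise<string[]> =>
  Promise.all(slugs.map((slug) => readDate(slug)));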
@@ -98,7 +98,7 @@ const incrementPageHits = async (slug: string | string[], client: faunadb.Client
 };

 const getSiteStats = async (client: faunadb.Client) => {
-  const notes = getAllNotes();
+  const notes = await getAllNotes();
   const q = faunadb.query;

   const { data: pages }: { data: PageStats[] } = await client.query(
@@ -86,7 +86,8 @@ export const getStaticProps: GetStaticProps = async ({ params }: { params: Pick<
 };

 export const getStaticPaths: GetStaticPaths = async () => {
-  const paths = getNoteSlugs().map((slug) => ({ params: { slug } }));
+  const slugs = await getNoteSlugs();
+  const paths = slugs.map((slug) => ({ params: { slug } }));

   return {
     paths,
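For context, getStaticPaths pairs the generated paths with a fallback flag, which the hunk cuts off before showing. A sketch of the completed shape, with the fallback value and import path assumed purely for illustration:

import type { GetStaticPaths } from "next";
import { getNoteSlugs } from "../../lib/parse-notes"; // illustrative path

export const getStaticPaths: GetStaticPaths = async () => {
  const slugs = await getNoteSlugs();

  return {
    // each slug becomes a { params: { slug } } entry for Next.js to prerender
    paths: slugs.map((slug) => ({ params: { slug } })),
    fallback: false, // assumption for this sketch; the real page keeps whatever value it already used
  };
};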
@@ -22,9 +22,10 @@ const Notes = ({ notesByYear }: NotesListProps) => (

 export const getStaticProps: GetStaticProps = async () => {
   // parse the year of each note and group them together
+  const notes = await getAllNotes();
   const notesByYear: NotesListProps["notesByYear"] = {};

-  getAllNotes().map((note) => {
+  notes.map((note) => {
     const year = new Date(note.date).getUTCFullYear();
     (notesByYear[year] || (notesByYear[year] = [])).push(note);
   });
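The grouping line above leans on a compact idiom: lazily create the bucket for a year if it doesn't exist yet, then push into it. The same idiom in isolation, with made-up sample dates:

const byYear: Record<number, string[]> = {};
const dates = ["2022-01-05", "2021-11-20", "2021-03-14"]; // made-up sample data

for (const date of dates) {
  const year = new Date(date).getUTCFullYear();
  // if there's no array for this year yet, create it, then push either way
  (byYear[year] || (byYear[year] = [])).push(date);
}
// byYear is now { 2021: ["2021-11-20", "2021-03-14"], 2022: ["2022-01-05"] }

Note that .map is used here purely for its side effect; forEach, as in the feed builder above, would express the same loop without producing a throwaway array.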
@@ -32,7 +32,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
   ];

   // push notes separately and use their metadata
-  const notes = getAllNotes();
+  const notes = await getAllNotes();
   notes.map((note) =>
     pages.push({
       loc: urlJoin("/notes/", note.slug, "/"),