7 changed files with 1 addition and 399 deletions
@@ -1,69 +0,0 @@
const fs = require('fs-extra');
const path = require('path');
const fm = require('gray-matter');
const globby = require('globby');
const parseISO = require('date-fns/parseISO');
const readingTime = require('reading-time');
const {markdownToHtml} = require('../plugins/markdownToHtml');

/**
 * This looks at the ./src/pages/blog directory and creates a route manifest that can be used
 * in the sidebar and footers, and (in theory) category and author pages.
 *
 * For now, the blog manifest is a big array in reverse chronological order.
 */
Promise.resolve()
  .then(async () => {
    const routes = [];
    const blogPosts = await globby('src/pages/blog/**/*.md');

    for (let postpath of blogPosts) {
      const [year, month, day, title] = postpath
        .replace('src/pages/blog/', '')
        .split('/');

      const rawStr = await fs.readFile(postpath, 'utf8');
      const {data, excerpt, content} = fm(rawStr, {
        excerpt: function firstLine(file, options) {
          file.excerpt = file.content.split('\n').slice(0, 2).join(' ');
        },
      });
      const rendered = await markdownToHtml(excerpt.trimLeft().trim());

      routes.unshift({
        path: postpath.replace('src/pages', ''),
        date: [year, month, day].join('-'),
        title: data.title,
        author: data.author,
        excerpt: rendered,
        readingTime: readingTime(content).text,
      });
    }

    const sorted = routes.sort((post1, post2) =>
      parseISO(post1.date) > parseISO(post2.date) ? -1 : 1
    );
    const blogManifest = {
      routes: sorted,
    };
    const blogRecentSidebar = {
      routes: [
        {
          title: 'Recent Posts',
          path: '/blog',
          heading: true,
          routes: sorted.slice(0, 25),
        },
      ],
    };

    await fs.writeFile(
      path.resolve('./src/blogIndex.json'),
      JSON.stringify(blogManifest, null, 2)
    );
    await fs.writeFile(
      path.resolve('./src/blogIndexRecent.json'),
      JSON.stringify(blogRecentSidebar, null, 2)
    );
  })
  .catch(console.error);
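For reference, the manifest this script writes is plain JSON whose `routes` array is already sorted newest-first, so a consumer only needs a require and a slice. A minimal consumer sketch; the `getRecentPosts` helper is illustrative, not part of the repo:

// Sketch of reading the manifest written above. Assumes blogIndex.json has
// the shape {routes: [{path, date, title, author, excerpt, readingTime}, ...]}
// in reverse chronological order, per the sort in the script.
const blogIndex = require('./src/blogIndex.json');

function getRecentPosts(count) {
  // routes are newest-first already, so a slice is all the sorting needed
  return blogIndex.routes.slice(0, count).map((post) => ({
    href: post.path,
    label: `${post.title} (${post.readingTime})`,
  }));
}

console.log(getRecentPosts(5));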
@@ -1,46 +0,0 @@
const RSS = require('rss');
const fs = require('fs-extra');
const authorsJson = require('../src/authors.json');
const blogIndexJson = require('../src/blogIndex.json');
const parse = require('date-fns/parse');

function removeFromLast(path, key) {
  const i = path.lastIndexOf(key);
  return i === -1 ? path : path.substring(0, i);
}

const SITE_URL = 'https://reactjs.org';

function generate() {
  const feed = new RSS({
    title: 'React.js Blog',
    site_url: SITE_URL,
    feed_url: SITE_URL + '/feed.xml',
  });

  blogIndexJson.routes.map((meta) => {
    feed.item({
      title: meta.title,
      guid: removeFromLast(meta.path, '.'),
      url: SITE_URL + removeFromLast(meta.path, '.'),
      date: parse(meta.date, 'yyyy-MM-dd', new Date()),
      description: meta.description,
      custom_elements: [].concat(
        meta.author.map((author) => ({
          author: [{ name: authorsJson[author].name }],
        }))
      ),
    });
  });

  const rss = feed.xml({ indent: true });

  fs.writeFileSync('./.next/static/feed.xml', rss);
}

try {
  generate();
} catch (error) {
  console.error('Error generating rss feed');
  throw error;
}
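The `removeFromLast` helper trims everything from the last occurrence of `key` onward, which is how the `.md` suffix is stripped from manifest paths before they become feed URLs. A quick illustration; the sample path values are made up:

// Same implementation as above, shown with example inputs.
function removeFromLast(path, key) {
  const i = path.lastIndexOf(key);
  return i === -1 ? path : path.substring(0, i);
}

console.log(removeFromLast('/blog/2020/10/20/react-v17.md', '.'));
// '/blog/2020/10/20/react-v17'
console.log(removeFromLast('/blog/no-extension', '.'));
// '/blog/no-extension' (unchanged when the key is absent)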
@@ -1,81 +0,0 @@
/**
 * Copyright (c) Facebook, Inc. and its affiliates.
 */

const resolve = require('path').resolve;
const {writeFile} = require('fs-extra');
const readFileSync = require('fs').readFileSync;
const safeLoad = require('js-yaml').safeLoad;
const path = require('path');
const versionsFile = resolve(__dirname, '../../content/versions.yml');
const file = readFileSync(versionsFile, 'utf8');
const versions = safeLoad(file);
const redirectsFilePath = path.join('vercel.json');

function writeRedirectsFile(redirects, redirectsFilePath) {
  if (!redirects.length) {
    return null;
  }

  /**
   * First read the old config to check whether a redirect already exists in the JSON.
   */
  const vercelConfigPath = resolve(__dirname, '../../vercel.json');
  const vercelConfigFile = readFileSync(vercelConfigPath);
  const oldConfigContent = JSON.parse(vercelConfigFile);

  /**
   * Map the data into the shape Vercel expects.
   */
  let vercelRedirects = {};

  redirects.forEach((redirect) => {
    const {fromPath, isPermanent, toPath} = redirect;

    vercelRedirects[fromPath] = {
      destination: toPath,
      permanent: !!isPermanent,
    };
  });

  /**
   * Make sure we don't add a redirect that already exists.
   */
  oldConfigContent.redirects.forEach((data) => {
    if (vercelRedirects[data.source]) {
      delete vercelRedirects[data.source];
    }
  });

  /**
   * Serialize the map back into an array of objects.
   */
  let newRedirects = [];
  Object.keys(vercelRedirects).forEach((value) =>
    newRedirects.push({
      source: value,
      destination: vercelRedirects[value].destination,
      // the map above stores this flag as `permanent`, not `isPermanent`
      permanent: !!vercelRedirects[value].permanent,
    })
  );

  /**
   * We already have a vercel.json, so spread the new redirects in alongside the old contents.
   */
  const newContents = {
    ...oldConfigContent,
    redirects: [...oldConfigContent.redirects, ...newRedirects],
  };
  writeFile(redirectsFilePath, JSON.stringify(newContents, null, 2));
}

// versions.yml structure is [{path: string, url: string, ...}, ...]
writeRedirectsFile(
  versions
    .filter((version) => version.path && version.url)
    .map((version) => ({
      fromPath: version.path,
      toPath: version.url,
    })),
  redirectsFilePath
);
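Concretely, each `{path, url}` row from versions.yml becomes one entry in the `redirects` array of vercel.json. A sketch with made-up values; note that `permanent` comes out `false` because the mapping above never supplies `isPermanent` for version redirects:

// Illustrative values only; real versions.yml rows are not reproduced here.
const row = {path: '/version/16.8.6', url: 'https://16-8-6.example.org'};

const redirect = {
  source: row.path,      // fromPath
  destination: row.url,  // toPath
  permanent: false,      // !!undefined, since isPermanent is never supplied
};

console.log(JSON.stringify({redirects: [redirect]}, null, 2));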
@@ -1,50 +0,0 @@
const fs = require('fs-extra');
const path = require('path');
const fm = require('gray-matter');
const globby = require('globby');
const parse = require('date-fns/parse');

/**
 * This script takes the gatsby blog posts directory and migrates it.
 *
 * In gatsby, blog posts were put in markdown files titled YYYY-MM-DD-post-title.md.
 * This script looks at that directory and then moves posts into folder paths
 * that match the end URL structure of /blog/YYYY/MM/DD/post-title.md
 *
 * This allows us to use MDX in blog posts.
 */

// I dropped them into src/pages/oldblog
// @todo remove after migration
// I am not proud of this. Also, the blog posts needed to be cleaned up for MDX; don't run this again.
Promise.resolve()
  .then(async () => {
    const blogManifest = {};
    const blogPosts = await globby('src/pages/oldblog/*.md');
    // console.log(blogPosts);
    for (let postpath of blogPosts.sort()) {
      const rawStr = await fs.readFile(postpath, 'utf8');
      // console.log(rawStr);
      const {data, content} = fm(rawStr);
      const cleanPath = postpath.replace('src/pages/oldblog/', '');
      const yrStr = parseInt(cleanPath.substr(0, 4), 10); // e.g. 2013 from '2013-06-02-...'
      // console.log(yrStr);
      const dateStr = cleanPath.substr(0, 10); // '2013-06-02'
      const postFileName = cleanPath.substr(11);
      // console.log(postFileName, dateStr);
      const datePath = dateStr.split('-').join('/');
      // console.log(datePath);
      const newPath = './src/pages/blog/' + datePath + '/' + postFileName;
      // console.log(newPath);
      await fs.ensureFile(path.resolve(newPath));
      await fs.writeFile(
        path.resolve(newPath),
        rawStr
          .replace('<br>', '<br/>')
          .replace('<hr>', '<hr/>')
          .replace('layout: post', '')
          .replace('\nauthor', '\nlayout: Post\nauthor')
      );
    }
  })
  .catch(console.error);
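The heart of the migration is a pure string transform from the flat Gatsby filename to the nested date path. Pulled out here as a standalone function for clarity; the script above inlines these same steps:

// Mirrors the substr/split logic in the loop above.
function migratedPath(oldFileName) {
  const dateStr = oldFileName.substr(0, 10);     // 'YYYY-MM-DD'
  const postFileName = oldFileName.substr(11);   // drop the date and its trailing dash
  const datePath = dateStr.split('-').join('/'); // 'YYYY/MM/DD'
  return './src/pages/blog/' + datePath + '/' + postFileName;
}

console.log(migratedPath('2013-06-02-react-v0-3-3.md'));
// './src/pages/blog/2013/06/02/react-v0-3-3.md'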
@@ -1,35 +0,0 @@
const fs = require('fs-extra');
const path = require('path');
const fm = require('gray-matter');
const globby = require('globby');

/**
 * This script ensures that every file in the docs folder is named corresponding
 * to its respective frontmatter permalink. In the old site, the path of a page was set by
 * the `permalink` in its markdown frontmatter, not by the name of the file itself or its id.
 * In the new Next.js site, with its filesystem router, the name of the file must
 * match its `permalink` exactly.
 */
Promise.resolve()
  .then(async () => {
    const pages = await globby('src/pages/docs/**/*.{md,mdx}');
    for (let sourcePath of pages.sort()) {
      const rawStr = await fs.readFile(sourcePath, 'utf8');
      const {data, content} = fm(rawStr);
      const extension = path.extname(sourcePath); // '.md' or '.mdx'
      const currentPath = sourcePath
        .replace('src/pages/', '')
        .replace(extension, '');
      const permalink = data.permalink.replace('.html', '');
      if (permalink !== currentPath) {
        const destPath = 'src/pages/' + permalink + extension;
        try {
          await fs.move(sourcePath, destPath);
          console.log(`MOVED: ${sourcePath} --> ${destPath}`);
        } catch (error) {
          console.error(`ERROR: ${sourcePath} --> ${destPath}`);
          console.error(error);
        }
      }
    }
  })
  .catch(console.error);
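To make the rename condition concrete: a move fires only when the on-disk path disagrees with the frontmatter permalink. A hypothetical before/after; the file and permalink names are invented:

// Hypothetical: src/pages/docs/old-name.md with `permalink: docs/new-name.html`.
const sourcePath = 'src/pages/docs/old-name.md';
const extension = '.md';
const currentPath = sourcePath.replace('src/pages/', '').replace(extension, ''); // 'docs/old-name'
const permalink = 'docs/new-name.html'.replace('.html', '');                     // 'docs/new-name'

if (permalink !== currentPath) {
  // the real script calls fs.move(sourcePath, destPath) here
  console.log('move to', 'src/pages/' + permalink + extension);
}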
@@ -1,117 +0,0 @@
const fs = require('fs-extra');
const path = require('path');
const fm = require('gray-matter');
const globby = require('globby');

/**
 * This script takes a look at all the redirect frontmatter and converts it
 * into a Next.js compatible redirects list. It also merges it with netlify's
 * _redirects, which we moved by hand below.
 *
 * @remarks
 * In the old gatsby site, redirects were specified in docs and blog post
 * frontmatter that looks like:
 *
 * ---
 * redirect_from:
 *   - /docs/old-path.html#maybe-an-anchor
 * ---
 */

const netlifyRedirects = [
  {
    source: '/html-jsx.html',
    destination: 'https://magic.reactjs.net/htmltojsx.htm',
    permanent: true,
  },
  {
    source: '/tips/controlled-input-null-value.html',
    destination: '/docs/forms.html#controlled-input-null-value',
    permanent: false, // @todo why were these not permanent on netlify?
  },
  {
    source: '/concurrent',
    destination: '/docs/concurrent-mode-intro.html',
    permanent: false,
  },
  {
    source: '/hooks',
    destination: '/docs/hooks-intro.html',
    permanent: false,
  },
  {
    source: '/tutorial',
    destination: '/tutorial/tutorial.html',
    permanent: false,
  },
  {
    source: '/your-story',
    destination: 'https://www.surveymonkey.co.uk/r/MVQV2R9',
    permanent: true,
  },
  {
    source: '/stories',
    destination: 'https://medium.com/react-community-stories',
    permanent: true,
  },
];

Promise.resolve()
  .then(async () => {
    let contentRedirects = [];
    let redirectPageCount = 0;

    // Get all markdown pages
    const pages = await globby('src/pages/**/*.{md,mdx}');
    for (let filepath of pages) {
      // Read file as string
      const rawStr = await fs.readFile(filepath, 'utf8');
      // Extract frontmatter
      const {data, content} = fm(rawStr);
      // Look for redirect yaml
      if (data.redirect_from) {
        redirectPageCount++;

        let destinationPath = filepath
          .replace('src/pages', '')
          .replace(/\.mdx?$/, ''); // strip .md or .mdx

        // Fix /docs/index -> /docs
        if (destinationPath === '/docs/index') {
          destinationPath = '/docs';
        }

        if (destinationPath === '/index') {
          destinationPath = '/';
        }

        for (let sourcePath of data.redirect_from) {
          contentRedirects.push({
            source: '/' + sourcePath, // add slash
            destination: destinationPath,
            permanent: true,
          });
        }
      }
    }
    console.log(
      `Found ${redirectPageCount} pages with \`redirect_from\` frontmatter`
    );
    console.log(
      `Writing ${contentRedirects.length} redirects to redirects.json`
    );

    await fs.writeFile(
      path.resolve('./src/redirects.json'),
      JSON.stringify(
        {
          redirects: [...contentRedirects, ...netlifyRedirects],
        },
        null,
        2
      )
    );

    console.log('✅ Done writing redirects');
  })
  .catch(console.error);
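To make the frontmatter-to-redirect transform concrete, here is a hypothetical input page and the entry the loop would emit for it; the paths are invented:

// Given a hypothetical src/pages/docs/forms.md containing:
//   ---
//   redirect_from:
//     - docs/old-forms.html
//   ---
// the loop above pushes:
const entry = {
  source: '/docs/old-forms.html', // leading slash added by the script
  destination: '/docs/forms',     // filepath minus 'src/pages' and the extension
  permanent: true,
};
console.log(entry);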