diff --git a/packages/cli/src/commands/build.js b/packages/cli/src/commands/build.js
index 76e9d9857..b8b9e142a 100644
--- a/packages/cli/src/commands/build.js
+++ b/packages/cli/src/commands/build.js
@@ -1,6 +1,7 @@
import { bundleCompilation } from '../lifecycles/bundle.js';
+import { checkResourceExists } from '../lib/resource-utils.js';
import { copyAssets } from '../lifecycles/copy.js';
-import fs from 'fs';
+import fs from 'fs/promises';
import { preRenderCompilationWorker, preRenderCompilationCustom, staticRenderCompilation } from '../lifecycles/prerender.js';
import { ServerInterface } from '../lib/server-interface.js';
@@ -15,8 +16,10 @@ const runProductionBuild = async (compilation) => {
? compilation.config.plugins.find(plugin => plugin.type === 'renderer').provider(compilation)
: {};
- if (!fs.existsSync(outputDir.pathname)) {
- fs.mkdirSync(outputDir.pathname);
+ if (!await checkResourceExists(outputDir)) {
+ await fs.mkdir(outputDir, {
+ recursive: true
+ });
}
if (prerender || prerenderPlugin.prerender) {
diff --git a/packages/cli/src/commands/eject.js b/packages/cli/src/commands/eject.js
index c0b41d6e7..886b74e2a 100644
--- a/packages/cli/src/commands/eject.js
+++ b/packages/cli/src/commands/eject.js
@@ -1,19 +1,19 @@
-import fs from 'fs';
+import fs from 'fs/promises';
const ejectConfiguration = async (compilation) => {
return new Promise(async (resolve, reject) => {
try {
const configFileDirUrl = new URL('../config/', import.meta.url);
- const configFiles = await fs.promises.readdir(configFileDirUrl);
+ const configFiles = await fs.readdir(configFileDirUrl);
- configFiles.forEach((configFile) => {
- const from = new URL(`./${configFile}`, configFileDirUrl);
- const to = new URL(`./${configFile}`, compilation.context.projectDirectory);
+ for (const file of configFiles) {
+ const from = new URL(`./${file}`, configFileDirUrl);
+ const to = new URL(`./${file}`, compilation.context.projectDirectory);
- fs.copyFileSync(from.pathname, to.pathname);
+ await fs.copyFile(from, to);
- console.log(`Ejected ${configFile} successfully.`);
- });
+ console.log(`Ejected ${file} successfully.`);
+ }
console.debug('all configuration files ejected.');
diff --git a/packages/cli/src/config/rollup.config.js b/packages/cli/src/config/rollup.config.js
index 5e27c7213..0b0c57a7c 100644
--- a/packages/cli/src/config/rollup.config.js
+++ b/packages/cli/src/config/rollup.config.js
@@ -1,4 +1,5 @@
-import fs from 'fs';
+import fs from 'fs/promises';
+import { checkResourceExists, normalizePathnameForWindows } from '../lib/resource-utils.js';
function greenwoodResourceLoader (compilation) {
const resourcePlugins = compilation.config.plugins.filter((plugin) => {
@@ -9,22 +10,25 @@ function greenwoodResourceLoader (compilation) {
return {
name: 'greenwood-resource-loader',
- resolveId(id) {
+ async resolveId(id) {
const normalizedId = id.replace(/\?type=(.*)/, '');
const { userWorkspace } = compilation.context;
- if ((id.indexOf('./') === 0 || id.indexOf('/') === 0) && fs.existsSync(new URL(`./${normalizedId}`, userWorkspace).pathname)) {
- return new URL(`./${normalizedId}`, userWorkspace).pathname;
- }
+ if (id.startsWith('./') || id.startsWith('/')) {
+ const prefix = id.startsWith('/') ? '.' : '';
+ const userWorkspaceUrl = new URL(`${prefix}${normalizedId}`, userWorkspace);
- return null;
+ if (await checkResourceExists(userWorkspaceUrl)) {
+ return normalizePathnameForWindows(userWorkspaceUrl);
+ }
+ }
},
async load(id) {
const pathname = id.indexOf('?') >= 0 ? id.slice(0, id.indexOf('?')) : id;
const extension = pathname.split('.').pop();
if (extension !== '' && extension !== 'js') {
- const url = new URL(`file://${pathname}`);
+ const url = new URL(`file://${pathname}?type=${extension}`);
const request = new Request(url.href);
let response = new Response('');
@@ -49,15 +53,14 @@ function greenwoodResourceLoader (compilation) {
function greenwoodSyncPageResourceBundlesPlugin(compilation) {
return {
name: 'greenwood-sync-page-resource-bundles-plugin',
- writeBundle(outputOptions, bundles) {
+ async writeBundle(outputOptions, bundles) {
const { outputDir } = compilation.context;
for (const resource of compilation.resources.values()) {
- const resourceKey = resource.sourcePathURL.pathname;
+ const resourceKey = normalizePathnameForWindows(resource.sourcePathURL);
for (const bundle in bundles) {
let facadeModuleId = (bundles[bundle].facadeModuleId || '').replace(/\\/g, '/');
-
/*
* this is an odd issue related to symlinking in our Greenwood monorepo when building the website
* and managing packages that we create as "virtual" modules, like for the mpa router
@@ -76,8 +79,11 @@ function greenwoodSyncPageResourceBundlesPlugin(compilation) {
* pathToMatch (before): /node_modules/@greenwood/cli/src/lib/router.js
* pathToMatch (after): /cli/src/lib/router.js
*/
- if (facadeModuleId && resourceKey.indexOf('/node_modules/@greenwood/cli') > 0 && facadeModuleId.indexOf('/packages/cli') > 0 && fs.existsSync(facadeModuleId)) {
- facadeModuleId = facadeModuleId.replace('/packages/cli', '/node_modules/@greenwood/cli');
+
+ if (resourceKey?.indexOf('/node_modules/@greenwood/cli') > 0 && facadeModuleId?.indexOf('/packages/cli') > 0) {
+ if (await checkResourceExists(new URL(`file://${facadeModuleId}`))) {
+ facadeModuleId = facadeModuleId.replace('/packages/cli', '/node_modules/@greenwood/cli');
+ }
}
if (resourceKey === facadeModuleId) {
@@ -86,15 +92,15 @@ function greenwoodSyncPageResourceBundlesPlugin(compilation) {
const noop = rawAttributes && rawAttributes.indexOf('data-gwd-opt="none"') >= 0 || compilation.config.optimization === 'none';
const outputPath = new URL(`./${fileName}`, outputDir);
- compilation.resources.set(resourceKey, {
- ...compilation.resources.get(resourceKey),
+ compilation.resources.set(resource.sourcePathURL.pathname, {
+ ...compilation.resources.get(resource.sourcePathURL.pathname),
optimizedFileName: fileName,
- optimizedFileContents: fs.readFileSync(outputPath, 'utf-8'),
+ optimizedFileContents: await fs.readFile(outputPath, 'utf-8'),
contents: contents.replace(/\.\//g, '/')
});
if (noop) {
- fs.writeFileSync(outputPath.pathname, contents);
+ await fs.writeFile(outputPath, contents);
}
}
}
@@ -107,7 +113,7 @@ const getRollupConfig = async (compilation) => {
const { outputDir } = compilation.context;
const input = [...compilation.resources.values()]
.filter(resource => resource.type === 'script')
- .map(resource => resource.sourcePathURL.pathname);
+ .map(resource => normalizePathnameForWindows(resource.sourcePathURL));
const customRollupPlugins = compilation.config.plugins.filter(plugin => {
return plugin.type === 'rollup';
}).map(plugin => {
@@ -118,7 +124,7 @@ const getRollupConfig = async (compilation) => {
preserveEntrySignatures: 'strict', // https://github.com/ProjectEvergreen/greenwood/pull/990
input,
output: {
- dir: outputDir.pathname,
+ dir: normalizePathnameForWindows(outputDir),
entryFileNames: '[name].[hash].js',
chunkFileNames: '[name].[hash].js',
sourcemap: true
diff --git a/packages/cli/src/index.js b/packages/cli/src/index.js
index f398ed2e7..566b68ac2 100755
--- a/packages/cli/src/index.js
+++ b/packages/cli/src/index.js
@@ -6,11 +6,11 @@
process.setMaxListeners(0);
import { generateCompilation } from './lifecycles/compile.js';
-import fs from 'fs';
+import fs from 'fs/promises';
import program from 'commander';
import { URL } from 'url';
-const greenwoodPackageJson = JSON.parse(await fs.promises.readFile(new URL('../package.json', import.meta.url), 'utf-8'));
+const greenwoodPackageJson = JSON.parse(await fs.readFile(new URL('../package.json', import.meta.url), 'utf-8'));
let cmdOption = {};
let command = '';
diff --git a/packages/cli/src/lib/node-modules-utils.js b/packages/cli/src/lib/node-modules-utils.js
index 07db68c4c..70d0fa4d6 100644
--- a/packages/cli/src/lib/node-modules-utils.js
+++ b/packages/cli/src/lib/node-modules-utils.js
@@ -1,7 +1,6 @@
// TODO convert this to use / return URLs
import { createRequire } from 'module'; // https://stackoverflow.com/a/62499498/417806
-import fs from 'fs';
-import path from 'path';
+import { checkResourceExists } from '../lib/resource-utils.js';
// defer to NodeJS to find where on disk a package is located using import.meta.resolve
// and return the root absolute location
@@ -36,14 +35,14 @@ async function getNodeModulesLocationForPackage(packageName) {
const nodeModulesPackageRoot = `${locations[location]}/${packageName}`;
const packageJsonLocation = `${nodeModulesPackageRoot}/package.json`;
- if (fs.existsSync(packageJsonLocation)) {
+ if (await checkResourceExists(new URL(`file://${packageJsonLocation}`))) {
nodeModulesUrl = nodeModulesPackageRoot;
}
}
if (!nodeModulesUrl) {
console.debug(`Unable to look up ${packageName} using NodeJS require.resolve. Falling back to process.cwd()`);
- nodeModulesUrl = path.join(process.cwd(), 'node_modules', packageName); // force / for consistency and path matching);
+ nodeModulesUrl = new URL(`./node_modules/${packageName}`, `file://${process.cwd()}`).pathname;
}
}
diff --git a/packages/cli/src/lib/resource-interface.js b/packages/cli/src/lib/resource-interface.js
index 57660c992..d2f7bb7dd 100644
--- a/packages/cli/src/lib/resource-interface.js
+++ b/packages/cli/src/lib/resource-interface.js
@@ -1,4 +1,4 @@
-import fs from 'fs';
+import { checkResourceExists } from './resource-utils.js';
class ResourceInterface {
constructor(compilation, options = {}) {
@@ -17,23 +17,25 @@ class ResourceInterface {
// * deep link route - /blog/releases/some-post
// * and a nested path in the template - ../../styles/theme.css
// so will get resolved as `${rootUrl}/styles/theme.css`
- resolveForRelativeUrl(url, rootUrl) {
+ async resolveForRelativeUrl(url, rootUrl) {
+ const search = url.search || '';
let reducedUrl;
- if (fs.existsSync(new URL(`.${url.pathname}`, rootUrl).pathname)) {
- return new URL(`.${url.pathname}`, rootUrl);
+ if (await checkResourceExists(new URL(`.${url.pathname}`, rootUrl))) {
+ return new URL(`.${url.pathname}${search}`, rootUrl);
}
- url.pathname.split('/')
- .filter((segment) => segment !== '')
- .reduce((acc, segment) => {
- const reducedPath = url.pathname.replace(`${acc}/${segment}`, '');
+ const segments = url.pathname.split('/').filter(segment => segment !== '');
+ segments.shift();
- if (reducedPath !== '' && fs.existsSync(new URL(`.${reducedPath}`, rootUrl).pathname)) {
- reducedUrl = new URL(`.${reducedPath}`, rootUrl);
- }
- return `${acc}/${segment}`;
- }, '');
+ for (let i = 0, l = segments.length - 1; i < l; i += 1) {
+ const nextSegments = segments.slice(i);
+ const urlToCheck = new URL(`./${nextSegments.join('/')}`, rootUrl);
+
+ if (await checkResourceExists(urlToCheck)) {
+ reducedUrl = new URL(`${urlToCheck}${search}`);
+ }
+ }
return reducedUrl;
}
diff --git a/packages/cli/src/lib/resource-utils.js b/packages/cli/src/lib/resource-utils.js
index c51afcc27..44cf5b517 100644
--- a/packages/cli/src/lib/resource-utils.js
+++ b/packages/cli/src/lib/resource-utils.js
@@ -1,10 +1,9 @@
-import fs from 'fs';
+import fs from 'fs/promises';
import { hashString } from '../lib/hashing-utils.js';
-function modelResource(context, type, src = undefined, contents = undefined, optimizationAttr = undefined, rawAttributes = undefined) {
+async function modelResource(context, type, src = undefined, contents = undefined, optimizationAttr = undefined, rawAttributes = undefined) {
const { projectDirectory, scratchDir, userWorkspace } = context;
const extension = type === 'script' ? 'js' : 'css';
- const windowsDriveRegex = /\/[a-zA-Z]{1}:\//;
let sourcePathURL;
if (src) {
@@ -14,25 +13,12 @@ function modelResource(context, type, src = undefined, contents = undefined, opt
? new URL(`.${src}`, userWorkspace)
: new URL(`./${src.replace(/\.\.\//g, '').replace('./', '')}`, userWorkspace);
- contents = fs.readFileSync(sourcePathURL, 'utf-8');
+ contents = await fs.readFile(sourcePathURL, 'utf-8');
} else {
const scratchFileName = hashString(contents);
sourcePathURL = new URL(`./${scratchFileName}.${extension}`, scratchDir);
- fs.writeFileSync(sourcePathURL, contents);
- }
-
- // TODO (good first issue) handle for Windows adding extra / in front of drive letter for whatever reason :(
- // e.g. turn /C:/... -> C:/...
- // and also URL is readonly in NodeJS??
- if (windowsDriveRegex.test(sourcePathURL.pathname)) {
- const driveMatch = sourcePathURL.pathname.match(windowsDriveRegex)[0];
-
- sourcePathURL = {
- ...sourcePathURL,
- pathname: sourcePathURL.pathname.replace(driveMatch, driveMatch.replace('/', '')),
- href: sourcePathURL.href.replace(driveMatch, driveMatch.replace('/', ''))
- };
+ await fs.writeFile(sourcePathURL, contents);
}
return {
@@ -65,7 +51,34 @@ function mergeResponse(destination, source) {
});
}
+// On Windows, a URL with a drive letter like C:/ thinks it is a protocol and so prepends a /, e.g. /C:/
+// This is fine with newer fs methods that Greenwood uses, but tools like Rollup and PostCSS will need this handled manually
+// https://github.com/rollup/rollup/issues/3779
+function normalizePathnameForWindows(url) {
+ const windowsDriveRegex = /\/[a-zA-Z]{1}:\//;
+ const { pathname = '' } = url;
+
+ if (windowsDriveRegex.test(pathname)) {
+ const driveMatch = pathname.match(windowsDriveRegex)[0];
+
+ return pathname.replace(driveMatch, driveMatch.replace('/', ''));
+ }
+
+ return pathname;
+}
+
+async function checkResourceExists(url) {
+ try {
+ await fs.access(url);
+ return true;
+ } catch (e) {
+ return false;
+ }
+}
+
export {
mergeResponse,
- modelResource
+ modelResource,
+ normalizePathnameForWindows,
+ checkResourceExists
};
\ No newline at end of file
diff --git a/packages/cli/src/lib/ssr-route-worker.js b/packages/cli/src/lib/ssr-route-worker.js
index 129fecc45..e43c380d6 100644
--- a/packages/cli/src/lib/ssr-route-worker.js
+++ b/packages/cli/src/lib/ssr-route-worker.js
@@ -1,9 +1,8 @@
// https://github.com/nodejs/modules/issues/307#issuecomment-858729422
-import { pathToFileURL } from 'url';
import { parentPort } from 'worker_threads';
import { renderToString, renderFromHTML } from 'wc-compiler';
-async function executeRouteModule({ modulePath, compilation, route, label, id, prerender, htmlContents, scripts }) {
+async function executeRouteModule({ moduleUrl, compilation, route, label, id, prerender, htmlContents, scripts }) {
const parsedCompilation = JSON.parse(compilation);
const data = {
template: null,
@@ -18,11 +17,11 @@ async function executeRouteModule({ modulePath, compilation, route, label, id, p
data.html = html;
} else {
- const module = await import(pathToFileURL(modulePath)).then(module => module);
+ const module = await import(moduleUrl).then(module => module);
const { getTemplate = null, getBody = null, getFrontmatter = null } = module;
if (module.default) {
- const { html } = await renderToString(pathToFileURL(modulePath));
+ const { html } = await renderToString(new URL(moduleUrl));
data.body = html;
} else {
diff --git a/packages/cli/src/lifecycles/bundle.js b/packages/cli/src/lifecycles/bundle.js
index 6f685b143..bc58cf0dd 100644
--- a/packages/cli/src/lifecycles/bundle.js
+++ b/packages/cli/src/lifecycles/bundle.js
@@ -1,8 +1,8 @@
/* eslint-disable max-depth */
-import fs from 'fs';
+import fs from 'fs/promises';
import { getRollupConfig } from '../config/rollup.config.js';
import { hashString } from '../lib/hashing-utils.js';
-import { mergeResponse } from '../lib/resource-utils.js';
+import { checkResourceExists, mergeResponse } from '../lib/resource-utils.js';
import path from 'path';
import { rollup } from 'rollup';
@@ -15,7 +15,7 @@ async function cleanUpResources(compilation) {
const optAttr = ['inline', 'static'].indexOf(optimizationAttr) >= 0;
if (optimizedFileName && (!src || (optAttr || optConfig))) {
- fs.unlinkSync(new URL(`./${optimizedFileName}`, outputDir).pathname);
+ await fs.unlink(new URL(`./${optimizedFileName}`, outputDir));
}
}
}
@@ -27,13 +27,14 @@ async function optimizeStaticPages(compilation, plugins) {
.filter(page => !page.isSSR || (page.isSSR && page.data.static) || (page.isSSR && compilation.config.prerender))
.map(async (page) => {
const { route, outputPath } = page;
+ const outputDirUrl = new URL(`.${route}`, outputDir);
const url = new URL(`http://localhost:${compilation.config.port}${route}`);
- const contents = await fs.promises.readFile(new URL(`./${outputPath}`, scratchDir), 'utf-8');
+ const contents = await fs.readFile(new URL(`./${outputPath}`, scratchDir), 'utf-8');
const headers = new Headers({ 'Content-Type': 'text/html' });
let response = new Response(contents, { headers });
- if (route !== '/404/' && !fs.existsSync(new URL(`.${route}`, outputDir).pathname)) {
- fs.mkdirSync(new URL(`.${route}`, outputDir).pathname, {
+ if (!await checkResourceExists(outputDirUrl)) {
+ await fs.mkdir(outputDirUrl, {
recursive: true
});
}
@@ -49,7 +50,7 @@ async function optimizeStaticPages(compilation, plugins) {
// clean up optimization markers
const body = (await response.text()).replace(/data-gwd-opt=".*[a-z]"/g, '');
- await fs.promises.writeFile(new URL(`./${outputPath}`, outputDir), body);
+ await fs.writeFile(new URL(`./${outputPath}`, outputDir), body);
})
);
}
@@ -78,13 +79,16 @@ async function bundleStyleResources(compilation, resourcePlugins) {
optimizedFileName = `${hashString(contents)}.css`;
}
- const outputPathRoot = new URL(`./${optimizedFileName}`, outputDir).pathname
+ const outputPathRoot = new URL(`./${optimizedFileName}`, outputDir)
+ .pathname
.split('/')
.slice(0, -1)
- .join('/');
+ .join('/')
+ .concat('/');
+ const outputPathRootUrl = new URL(`file://${outputPathRoot}`);
- if (!fs.existsSync(outputPathRoot)) {
- fs.mkdirSync(outputPathRoot, {
+ if (!await checkResourceExists(outputPathRootUrl)) {
+ await fs.mkdir(new URL(`file://${outputPathRoot}`), {
recursive: true
});
}
@@ -132,7 +136,7 @@ async function bundleStyleResources(compilation, resourcePlugins) {
optimizedFileContents
});
- await fs.promises.writeFile(new URL(`./${optimizedFileName}`, outputDir), optimizedFileContents);
+ await fs.writeFile(new URL(`./${optimizedFileName}`, outputDir), optimizedFileContents);
}
}
}
diff --git a/packages/cli/src/lifecycles/config.js b/packages/cli/src/lifecycles/config.js
index 0d5a6c468..191e1fcbe 100644
--- a/packages/cli/src/lifecycles/config.js
+++ b/packages/cli/src/lifecycles/config.js
@@ -1,4 +1,5 @@
-import fs from 'fs';
+import fs from 'fs/promises';
+import { checkResourceExists } from '../lib/resource-utils.js';
const cwd = new URL(`file://${process.cwd()}/`);
const greenwoodPluginsDirectoryUrl = new URL('../plugins/', import.meta.url);
@@ -13,7 +14,7 @@ const greenwoodPlugins = (await Promise.all([
new URL('./resource/', greenwoodPluginsDirectoryUrl),
new URL('./server/', greenwoodPluginsDirectoryUrl)
].map(async (pluginDirectoryUrl) => {
- const files = await fs.promises.readdir(pluginDirectoryUrl);
+ const files = await fs.readdir(pluginDirectoryUrl);
return await Promise.all(files.map(async(file) => {
const importUrl = new URL(`./${file}`, pluginDirectoryUrl);
@@ -57,8 +58,20 @@ const readAndMergeConfig = async() => {
// deep clone of default config
const configUrl = new URL('./greenwood.config.js', cwd);
let customConfig = Object.assign({}, defaultConfig);
+ let hasConfigFile;
+ let isSPA;
- if (fs.existsSync(configUrl.pathname)) {
+ // check for greenwood.config.js
+ if (await checkResourceExists(configUrl)) {
+ hasConfigFile = true;
+ }
+
+ // check for SPA
+ if (await checkResourceExists(new URL('./index.html', customConfig.workspace))) {
+ isSPA = true;
+ }
+
+ if (hasConfigFile) {
const userCfgFile = (await import(configUrl)).default;
const { workspace, devServer, markdown, optimization, plugins, port, prerender, staticRouter, pagesDirectory, templatesDirectory, interpolateFrontmatter } = userCfgFile;
@@ -68,11 +81,11 @@ const readAndMergeConfig = async() => {
reject('Error: greenwood.config.js workspace must be an instance of URL');
}
- if (!fs.existsSync(workspace.pathname)) {
+ if (await checkResourceExists(workspace)) {
+ customConfig.workspace = workspace;
+ } else {
reject('Error: greenwood.config.js workspace doesn\'t exist! Please double check your configuration.');
}
-
- customConfig.workspace = workspace;
}
if (typeof optimization === 'string' && optimizations.indexOf(optimization.toLowerCase()) >= 0) {
@@ -194,7 +207,7 @@ const readAndMergeConfig = async() => {
}
// SPA should _not_ prerender unless if user has specified prerender should be true
- if (prerender === undefined && fs.existsSync(new URL('./index.html', customConfig.workspace))) {
+ if (prerender === undefined && isSPA) {
customConfig.prerender = false;
}
@@ -207,7 +220,7 @@ const readAndMergeConfig = async() => {
}
} else {
// SPA should _not_ prerender unless if user has specified prerender should be true
- if (fs.existsSync(new URL('./index.html', customConfig.workspace).pathname)) {
+ if (isSPA) {
customConfig.prerender = false;
}
}
diff --git a/packages/cli/src/lifecycles/context.js b/packages/cli/src/lifecycles/context.js
index e2bdaf332..1318c343c 100644
--- a/packages/cli/src/lifecycles/context.js
+++ b/packages/cli/src/lifecycles/context.js
@@ -1,4 +1,5 @@
-import fs from 'fs';
+import fs from 'fs/promises';
+import { checkResourceExists } from '../lib/resource-utils.js';
const initContext = async({ config }) => {
@@ -14,7 +15,6 @@ const initContext = async({ config }) => {
const apisDir = new URL('./api/', userWorkspace);
const pagesDir = new URL(`./${pagesDirectory}/`, userWorkspace);
const userTemplatesDir = new URL(`./${templatesDirectory}/`, userWorkspace);
-
const context = {
dataDir,
outputDir,
@@ -26,8 +26,8 @@ const initContext = async({ config }) => {
projectDirectory
};
- if (!fs.existsSync(scratchDir.pathname)) {
- fs.mkdirSync(scratchDir.pathname, {
+ if (!await checkResourceExists(scratchDir)) {
+ await fs.mkdir(scratchDir, {
recursive: true
});
}
diff --git a/packages/cli/src/lifecycles/copy.js b/packages/cli/src/lifecycles/copy.js
index 09c3a35c6..8964354b3 100644
--- a/packages/cli/src/lifecycles/copy.js
+++ b/packages/cli/src/lifecycles/copy.js
@@ -1,13 +1,13 @@
import fs from 'fs';
-import path from 'path';
+import { checkResourceExists } from '../lib/resource-utils.js';
async function rreaddir (dir, allFiles = []) {
- const files = (await fs.promises.readdir(dir)).map(f => path.join(dir, f));
+ const files = (await fs.promises.readdir(dir)).map(f => new URL(`./${f}`, dir));
allFiles.push(...files);
await Promise.all(files.map(async f => (
- await fs.promises.stat(f)).isDirectory() && rreaddir(f, allFiles
+ await fs.promises.stat(f)).isDirectory() && await rreaddir(new URL(`file://${f.pathname}/`), allFiles
)));
return allFiles;
@@ -17,8 +17,8 @@ async function rreaddir (dir, allFiles = []) {
async function copyFile(source, target, projectDirectory) {
try {
console.info(`copying file... ${source.pathname.replace(projectDirectory.pathname, '')}`);
- const rd = fs.createReadStream(source.pathname);
- const wr = fs.createWriteStream(target.pathname);
+ const rd = fs.createReadStream(source);
+ const wr = fs.createWriteStream(target);
return await new Promise((resolve, reject) => {
rd.on('error', reject);
@@ -36,29 +36,27 @@ async function copyFile(source, target, projectDirectory) {
async function copyDirectory(fromUrl, toUrl, projectDirectory) {
try {
console.info(`copying directory... ${fromUrl.pathname.replace(projectDirectory.pathname, '')}`);
- const files = await rreaddir(fromUrl.pathname);
+ const files = await rreaddir(fromUrl);
if (files.length > 0) {
- if (!fs.existsSync(toUrl.pathname)) {
- fs.mkdirSync(toUrl.pathname, {
+ if (!await checkResourceExists(toUrl)) {
+ await fs.promises.mkdir(toUrl, {
recursive: true
});
}
- await Promise.all(files.filter((filePath) => {
- const target = filePath.replace(fromUrl.pathname, toUrl.pathname);
- const isDirectory = fs.lstatSync(filePath).isDirectory();
- if (isDirectory && !fs.existsSync(target)) {
- fs.mkdirSync(target);
+ for (const fileUrl of files) {
+ const targetUrl = new URL(`file://${fileUrl.pathname.replace(fromUrl.pathname, toUrl.pathname)}`);
+ const isDirectory = (await fs.promises.stat(fileUrl)).isDirectory();
+
+ if (isDirectory && !await checkResourceExists(targetUrl)) {
+ await fs.promises.mkdir(targetUrl, {
+ recursive: true
+ });
} else if (!isDirectory) {
- return filePath;
+ await copyFile(fileUrl, targetUrl, projectDirectory);
}
- }).map((filePath) => {
- const sourceUrl = new URL(`file://${filePath}`);
- const targetUrl = new URL(`file://${filePath.replace(fromUrl.pathname, toUrl.pathname)}`);
-
- return copyFile(sourceUrl, targetUrl, projectDirectory);
- }));
+ }
}
} catch (e) {
console.error('ERROR', e);
diff --git a/packages/cli/src/lifecycles/graph.js b/packages/cli/src/lifecycles/graph.js
index ed6d26337..0064ec3b1 100644
--- a/packages/cli/src/lifecycles/graph.js
+++ b/packages/cli/src/lifecycles/graph.js
@@ -1,19 +1,16 @@
/* eslint-disable complexity, max-depth */
-import fs from 'fs';
+import fs from 'fs/promises';
import fm from 'front-matter';
-import { modelResource } from '../lib/resource-utils.js';
-import path from 'path';
+import { checkResourceExists, modelResource } from '../lib/resource-utils.js';
import toc from 'markdown-toc';
import { Worker } from 'worker_threads';
-// TODO convert graph to use URLs
-// https://github.com/ProjectEvergreen/greenwood/issues/952
const generateGraph = async (compilation) => {
return new Promise(async (resolve, reject) => {
try {
const { context } = compilation;
- const { pagesDir, userWorkspace } = context;
+ const { pagesDir, projectDirectory, userWorkspace, scratchDir } = context;
let graph = [{
outputPath: 'index.html',
filename: 'index.html',
@@ -26,23 +23,23 @@ const generateGraph = async (compilation) => {
}];
const walkDirectoryForPages = async function(directory, pages = []) {
- const files = fs.readdirSync(directory);
+ const files = await fs.readdir(directory);
for (const filename of files) {
- const fullPath = path.normalize(`${directory}${path.sep}${filename}`);
-
- if (fs.statSync(fullPath).isDirectory()) {
- pages = await walkDirectoryForPages(fullPath, pages);
+ const filenameUrl = new URL(`./${filename}`, directory);
+ const filenameUrlAsDir = new URL(`./${filename}/`, directory);
+ const isDirectory = await checkResourceExists(filenameUrlAsDir) && (await fs.stat(filenameUrlAsDir)).isDirectory();
+
+ if (isDirectory) {
+ pages = await walkDirectoryForPages(filenameUrlAsDir, pages);
} else {
- const extension = path.extname(filename);
+ const extension = `.${filenameUrl.pathname.split('.').pop()}`;
const isStatic = extension === '.md' || extension === '.html';
const isDynamic = extension === '.js';
- const relativePagePath = fullPath.substring(pagesDir.pathname.length - 1, fullPath.length);
- const relativeWorkspacePath = directory.replace(process.cwd(), '').replace(path.sep, '');
- let route = relativePagePath
- .replace(extension, '')
- .replace(/\\/g, '/');
- let id = filename.split(path.sep)[filename.split(path.sep).length - 1].replace(extension, '');
+ const relativePagePath = filenameUrl.pathname.replace(pagesDir.pathname, '/');
+ const relativeWorkspacePath = directory.pathname.replace(projectDirectory.pathname, '');
+ let route = relativePagePath.replace(extension, '');
+ let id = filename.split('/')[filename.split('/').length - 1].replace(extension, '');
let template = 'page';
let title = null;
let imports = [];
@@ -57,7 +54,7 @@ const generateGraph = async (compilation) => {
* - pages/blog/index.{html,md,js} -> /blog/
* - pages/blog/some-post.{html,md,js} -> /blog/some-post/
*/
- if (relativePagePath.lastIndexOf(path.sep) > 0) {
+ if (relativePagePath.lastIndexOf('/') > 0) {
// https://github.com/ProjectEvergreen/greenwood/issues/455
route = id === 'index' || route.replace('/index', '') === `/${id}`
? route.replace('index', '')
@@ -69,17 +66,14 @@ const generateGraph = async (compilation) => {
}
if (isStatic) {
- const fileContents = fs.readFileSync(fullPath, 'utf8');
+ const fileContents = await fs.readFile(filenameUrl, 'utf8');
const { attributes } = fm(fileContents);
template = attributes.template || 'page';
title = attributes.title || title;
id = attributes.label || id;
imports = attributes.imports || [];
-
- filePath = route === '/' || relativePagePath.lastIndexOf(path.sep) === 0
- ? `${relativeWorkspacePath}${filename}`
- : `${relativeWorkspacePath}${path.sep}${filename}`,
+ filePath = `${relativeWorkspacePath}${filename}`;
// prune "reserved" attributes that are supported by Greenwood
// https://www.greenwoodjs.io/docs/front-matter
@@ -129,13 +123,13 @@ const generateGraph = async (compilation) => {
await new Promise((resolve, reject) => {
const worker = new Worker(routeWorkerUrl);
- worker.on('message', (result) => {
+ worker.on('message', async (result) => {
if (result.frontmatter) {
- const resources = (result.frontmatter.imports || []).map((resource) => {
- const type = path.extname(resource) === '.js' ? 'script' : 'link';
+ const resources = await Promise.all((result.frontmatter.imports || []).map(async (resource) => {
+ const type = resource.split('.').pop() === 'js' ? 'script' : 'link';
- return modelResource(compilation.context, type, resource);
- });
+ return await modelResource(compilation.context, type, resource);
+ }));
result.frontmatter.imports = resources;
ssrFrontmatter = result.frontmatter;
@@ -150,7 +144,7 @@ const generateGraph = async (compilation) => {
});
worker.postMessage({
- modulePath: fullPath,
+ moduleUrl: filenameUrl.href,
compilation: JSON.stringify(compilation),
route
});
@@ -216,7 +210,8 @@ const generateGraph = async (compilation) => {
};
console.debug('building from local sources...');
- if (fs.existsSync(new URL('./index.html', userWorkspace).pathname)) { // SPA
+ // test for SPA
+ if (await checkResourceExists(new URL('./index.html', userWorkspace))) {
graph = [{
...graph[0],
path: `${userWorkspace.pathname}index.html`,
@@ -224,8 +219,8 @@ const generateGraph = async (compilation) => {
}];
} else {
const oldGraph = graph[0];
-
- graph = fs.existsSync(pagesDir.pathname) ? await walkDirectoryForPages(pagesDir.pathname) : graph;
+
+ graph = await checkResourceExists(pagesDir) ? await walkDirectoryForPages(pagesDir) : graph;
const has404Page = graph.filter(page => page.route === '/404/').length === 1;
@@ -271,7 +266,7 @@ const generateGraph = async (compilation) => {
path: null,
data: {},
imports: [],
- outputPath: path.join(node.route, 'index.html'),
+ outputPath: `${node.route}index.html`,
...node,
external: true
});
@@ -281,11 +276,11 @@ const generateGraph = async (compilation) => {
compilation.graph = graph;
- if (!fs.existsSync(context.scratchDir.pathname)) {
- await fs.promises.mkdir(context.scratchDir.pathname);
+ if (!await checkResourceExists(scratchDir)) {
+ await fs.mkdir(scratchDir);
}
- await fs.promises.writeFile(`${context.scratchDir.pathname}graph.json`, JSON.stringify(compilation.graph));
+ await fs.writeFile(new URL('./graph.json', scratchDir), JSON.stringify(compilation.graph));
resolve(compilation);
} catch (err) {
diff --git a/packages/cli/src/lifecycles/prerender.js b/packages/cli/src/lifecycles/prerender.js
index 6b8a30838..0fa273b2a 100644
--- a/packages/cli/src/lifecycles/prerender.js
+++ b/packages/cli/src/lifecycles/prerender.js
@@ -1,6 +1,6 @@
-import fs from 'fs';
+import fs from 'fs/promises';
import htmlparser from 'node-html-parser';
-import { modelResource } from '../lib/resource-utils.js';
+import { checkResourceExists, modelResource } from '../lib/resource-utils.js';
import os from 'os';
import { WorkerPool } from '../lib/threadpool.js';
@@ -8,9 +8,9 @@ function isLocalLink(url = '') {
return url !== '' && (url.indexOf('http') !== 0 && url.indexOf('//') !== 0);
}
-function createOutputDirectory(route, { pathname }) {
- if (route !== '/404/' && !fs.existsSync(pathname)) {
- fs.mkdirSync(pathname, {
+async function createOutputDirectory(route, outputDir) {
+ if (route !== '/404/' && !await checkResourceExists(outputDir)) {
+ await fs.mkdir(outputDir, {
recursive: true
});
}
@@ -21,7 +21,7 @@ function createOutputDirectory(route, { pathname }) {
// or could this be done sooner (like in appTemplate building in html resource plugin)?
// Or do we need to ensure userland code / plugins have gone first
// before we can curate the final list of
- return modelResource(context, 'script', src, null, optimizationAttr, rawAttrs);
+ return await modelResource(context, 'script', src, null, optimizationAttr, rawAttrs);
} else if (script.rawText) {
//
- return modelResource(context, 'script', null, script.rawText, optimizationAttr, rawAttrs);
+ return await modelResource(context, 'script', null, script.rawText, optimizationAttr, rawAttrs);
}
- });
+ }));
- const styles = root.querySelectorAll('style')
+ const styles = await Promise.all(root.querySelectorAll('style')
 .filter(style => !(/\$/).test(style.rawText) && !(/<!-- Shady DOM styles for -->/).test(style.rawText)) // filter out Shady DOM