Use fs and vm for async chunk loading in Node (vercel/turborepo#8722)
### Description

Use `fs` and `vm` when loading chunks asynchronously in Node, so that reading
large chunks does not block the thread.

Closes PACK-411
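
For context, the previous implementation delegated to the synchronous `loadChunk` path inside a `Promise`, so the thread was still blocked while the chunk file was read from disk. The new code reads the file with the async `fs/promises` API and evaluates it with `vm.runInThisContext`. Below is a minimal, self-contained sketch of that pattern; the `evaluateCommonJsFile` helper and the example file name are illustrative only, not part of this commit.

```ts
// Sketch of the async read-and-evaluate pattern used by the new runtime code.
// Assumes @types/node; the helper and file names are hypothetical.
const fs = require("fs/promises");
const vm = require("vm");
const path = require("path");

async function evaluateCommonJsFile(filename: string): Promise<unknown> {
  // Asynchronous read: the event loop keeps running while the file is loaded.
  const contents: string = await fs.readFile(filename, "utf-8");

  const module = { exports: {} };
  // Wrap the source in a function so it receives CommonJS-style arguments,
  // compile it in the current V8 context, then invoke it immediately.
  vm.runInThisContext(
    "(function(module, exports, require, __dirname, __filename) {" +
      contents +
      "\n})",
    filename
  )(module, module.exports, require, path.dirname(filename), filename);

  return module.exports;
}

// Example usage with a hypothetical file next to this script.
evaluateCommonJsFile(path.resolve(__dirname, "example-chunk.js"))
  .then((exported) => console.log("chunk exports:", exported))
  .catch((err) => console.error("failed to evaluate chunk", err));
```

The real `loadChunkAsync` additionally registers each exported module factory in the runtime's `moduleFactories` map, as shown in the diff below.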

### Testing Instructions

Run the `test/e2e/app-dir/dynamic/` Next.js tests.
mischnic authored Jul 16, 2024
1 parent 4636af0 commit fe4d1c9
Showing 3 changed files with 70 additions and 17 deletions.
48 changes: 40 additions & 8 deletions crates/turbopack-ecmascript-runtime/js/src/nodejs/runtime.ts
@@ -49,6 +49,8 @@ type ModuleFactory = (
) => undefined;

const url = require("url");
+const fs = require("fs/promises");
+const vm = require("vm");

const moduleFactories: ModuleFactories = Object.create(null);
const moduleCache: ModuleCache = Object.create(null);
@@ -118,15 +120,45 @@ async function loadChunkAsync(
source: SourceInfo,
chunkData: ChunkData
): Promise<any> {
-  return new Promise<void>((resolve, reject) => {
-    try {
-      loadChunk(chunkData, source);
-    } catch (err) {
-      reject(err);
-      return;
-    }
-    resolve();
-  });
+  const chunkPath = typeof chunkData === "string" ? chunkData : chunkData.path;
+  if (!chunkPath.endsWith(".js")) {
+    // We only support loading JS chunks in Node.js.
+    // This branch can be hit when trying to load a CSS chunk.
+    return;
+  }
+
+  const resolved = path.resolve(RUNTIME_ROOT, chunkPath);
+
+  try {
+    const contents = await fs.readFile(resolved, "utf-8");
+
+    const module = {
+      exports: {},
+    };
+    vm.runInThisContext(
+      "(function(module, exports, require, __dirname, __filename) {" +
+        contents +
+        "\n})",
+      resolved
+    )(module, module.exports, require, path.dirname(resolved), resolved);
+
+    const chunkModules: ModuleFactories = module.exports;
+    for (const [moduleId, moduleFactory] of Object.entries(chunkModules)) {
+      if (!moduleFactories[moduleId]) {
+        moduleFactories[moduleId] = moduleFactory;
+      }
+    }
+  } catch (e) {
+    let errorMessage = `Failed to load chunk ${chunkPath}`;
+
+    if (source) {
+      errorMessage += ` from ${stringifySourceInfo(source)}`;
+    }
+
+    throw new Error(errorMessage, {
+      cause: e,
+    });
+  }
}

function loadWebAssembly(chunkPath: ChunkPath, imports: WebAssembly.Imports) {
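
For reference, the `module.exports` value consumed above is expected to be a `ModuleFactories` map from module IDs to factory functions; `loadChunkAsync` merges it into the runtime's global `moduleFactories` registry, skipping IDs that are already registered. A hypothetical sketch of a chunk file with that shape (the module IDs and factory bodies are invented; the full factory signature is not visible in this diff):

```ts
// Hypothetical chunk file shape: an object mapping module IDs to factories.
// loadChunkAsync evaluates this file via vm and copies each entry into
// moduleFactories unless that ID was already registered by another chunk.
module.exports = {
  "[project]/app/page.tsx (ecmascript)": function () {
    // module body, installed by the runtime when the module is first required
  },
  "[project]/app/util.ts (ecmascript)": function () {
    // ...
  },
};
```

The hunks that follow appear to apply the same change to the precompiled JavaScript copy of the runtime (same logic, without the TypeScript annotations).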
@@ -428,6 +428,8 @@ function stringifySourceInfo(source) {
}
}
const url = require("url");
+const fs = require("fs/promises");
+const vm = require("vm");
const moduleFactories = Object.create(null);
const moduleCache = Object.create(null);
/**
@@ -476,15 +478,34 @@ function loadChunkPath(chunkPath, source) {
}
}
async function loadChunkAsync(source, chunkData) {
-    return new Promise((resolve, reject)=>{
-        try {
-            loadChunk(chunkData, source);
-        } catch (err) {
-            reject(err);
-            return;
-        }
-        resolve();
-    });
+    const chunkPath = typeof chunkData === "string" ? chunkData : chunkData.path;
+    if (!chunkPath.endsWith(".js")) {
+        // We only support loading JS chunks in Node.js.
+        // This branch can be hit when trying to load a CSS chunk.
+        return;
+    }
+    const resolved = path.resolve(RUNTIME_ROOT, chunkPath);
+    try {
+        const contents = await fs.readFile(resolved, "utf-8");
+        const module1 = {
+            exports: {}
+        };
+        vm.runInThisContext("(function(module, exports, require, __dirname, __filename) {" + contents + "\n})", resolved)(module1, module1.exports, require, path.dirname(resolved), resolved);
+        const chunkModules = module1.exports;
+        for (const [moduleId, moduleFactory] of Object.entries(chunkModules)){
+            if (!moduleFactories[moduleId]) {
+                moduleFactories[moduleId] = moduleFactory;
+            }
+        }
+    } catch (e) {
+        let errorMessage = `Failed to load chunk ${chunkPath}`;
+        if (source) {
+            errorMessage += ` from ${stringifySourceInfo(source)}`;
+        }
+        throw new Error(errorMessage, {
+            cause: e
+        });
+    }
}
function loadWebAssembly(chunkPath, imports) {
const resolved = path.resolve(RUNTIME_ROOT, chunkPath);