initial commit
21 node_modules/ts-node/LICENSE generated vendored Normal file
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2014 Blake Embrey (hello@blakeembrey.com)

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
1442 node_modules/ts-node/README.md generated vendored Normal file
File diff suppressed because it is too large
8 node_modules/ts-node/child-loader.mjs generated vendored Normal file
@@ -0,0 +1,8 @@
import { fileURLToPath } from 'url';
import { createRequire } from 'module';
const require = createRequire(fileURLToPath(import.meta.url));

// TODO why use require() here? I think we can just `import`
/** @type {import('./dist/child-loader')} */
const childLoader = require('./dist/child/child-loader');
export const { resolve, load, getFormat, transformSource } = childLoader;
24 node_modules/ts-node/dist-raw/NODE-LICENSE.md generated vendored Normal file
@@ -0,0 +1,24 @@
This directory contains portions of Node.js source code which is licensed as follows:

---

Copyright Joyent, Inc. and other Node contributors.

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to permit
persons to whom the Software is furnished to do so, subject to the
following conditions:

The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
36 node_modules/ts-node/dist-raw/README.md generated vendored Normal file
@@ -0,0 +1,36 @@
The `dist-raw` directory contains JS sources that are distributed verbatim, not compiled nor typechecked via TS.

To implement ESM support, we unfortunately must duplicate some of node's built-in functionality that is not
exposed via an API. We have copy-pasted the necessary code from https://github.com/nodejs/node/tree/master/lib
then modified it to suit our needs.

Formatting may be intentionally bad to keep the diff as small as possible, to make it easier to merge
upstream changes and understand our modifications. For example, when we need to wrap node's source code
in a factory function, we will not indent the function body, to avoid whitespace changes in the diff.

One obvious problem with this approach: the code has been pulled from one version of node, whereas users of ts-node
run multiple versions of node.
Users running node 12 may see that ts-node behaves like node 14, for example.

## `raw` directory

Within the `raw` directory, we keep unmodified copies of the node source files. This allows us to use diffing tools to
compare files in `raw` to those in `dist-raw`, which will highlight all of the changes we have made. Hopefully, these
changes are as minimal as possible.

## Naming convention

Not used consistently, but the idea is:

`node-<directory>(...-<directory>)-<filename>.js`

`node-internal-errors.js` -> `github.com/nodejs/node/blob/TAG/lib/internal/errors.js`

So, take the path within node's `lib/` directory, and replace slashes with hyphens.

In the `raw` directory, files are suffixed with the version number or revision from which
they were downloaded.

If they have a `stripped` suffix, this means they have large chunks of code deleted, but no other modifications.
This is useful when diffing. Sometimes our `dist-raw` files only have a small part of a much larger node source file.
It is easier to diff `raw/*-stripped.js` against `dist-raw/*.js`.
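The naming convention described in the README above maps a path under node's `lib/` directory to a single flat filename by replacing slashes with hyphens. A minimal sketch of that mapping, for illustration only; the helper name below is made up and does not exist in ts-node:

// Illustration only: turn a path under node's lib/ directory into the
// dist-raw filename described by the naming convention above (hypothetical helper).
function distRawName(libRelativePath) {
  return 'node-' + libRelativePath.replace(/\.js$/, '').split('/').join('-') + '.js';
}

console.log(distRawName('internal/errors.js'));             // node-internal-errors.js
console.log(distRawName('internal/modules/cjs/loader.js'));  // node-internal-modules-cjs-loader.js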
4 node_modules/ts-node/dist-raw/node-internal-constants.js generated vendored Normal file
@@ -0,0 +1,4 @@
// Copied from https://github.com/nodejs/node/blob/master/lib/internal/constants.js
module.exports = {
  CHAR_FORWARD_SLASH: 47, /* / */
};
82 node_modules/ts-node/dist-raw/node-internal-errors.js generated vendored Normal file
@@ -0,0 +1,82 @@
'use strict';

const path = require('path');

exports.codes = {
  ERR_INPUT_TYPE_NOT_ALLOWED: createErrorCtor(joinArgs('ERR_INPUT_TYPE_NOT_ALLOWED')),
  ERR_INVALID_ARG_VALUE: createErrorCtor(joinArgs('ERR_INVALID_ARG_VALUE')),
  ERR_INVALID_MODULE_SPECIFIER: createErrorCtor(joinArgs('ERR_INVALID_MODULE_SPECIFIER')),
  ERR_INVALID_PACKAGE_CONFIG: createErrorCtor(joinArgs('ERR_INVALID_PACKAGE_CONFIG')),
  ERR_INVALID_PACKAGE_TARGET: createErrorCtor(joinArgs('ERR_INVALID_PACKAGE_TARGET')),
  ERR_MANIFEST_DEPENDENCY_MISSING: createErrorCtor(joinArgs('ERR_MANIFEST_DEPENDENCY_MISSING')),
  ERR_MODULE_NOT_FOUND: createErrorCtor((path, base, type = 'package') => {
    return `Cannot find ${type} '${path}' imported from ${base}`
  }),
  ERR_PACKAGE_IMPORT_NOT_DEFINED: createErrorCtor(joinArgs('ERR_PACKAGE_IMPORT_NOT_DEFINED')),
  ERR_PACKAGE_PATH_NOT_EXPORTED: createErrorCtor(joinArgs('ERR_PACKAGE_PATH_NOT_EXPORTED')),
  ERR_UNSUPPORTED_DIR_IMPORT: createErrorCtor(joinArgs('ERR_UNSUPPORTED_DIR_IMPORT')),
  ERR_UNSUPPORTED_ESM_URL_SCHEME: createErrorCtor(joinArgs('ERR_UNSUPPORTED_ESM_URL_SCHEME')),
  ERR_UNKNOWN_FILE_EXTENSION: createErrorCtor(joinArgs('ERR_UNKNOWN_FILE_EXTENSION')),
}

function joinArgs(name) {
  return (...args) => {
    return [name, ...args].join(' ')
  }
}

function createErrorCtor(errorMessageCreator) {
  return class CustomError extends Error {
    constructor(...args) {
      super(errorMessageCreator(...args))
    }
  }
}
exports.createErrRequireEsm = createErrRequireEsm;

// Native ERR_REQUIRE_ESM Error is declared here:
// https://github.com/nodejs/node/blob/2d5d77306f6dff9110c1f77fefab25f973415770/lib/internal/errors.js#L1294-L1313
// Error class factory is implemented here:
// function E: https://github.com/nodejs/node/blob/2d5d77306f6dff9110c1f77fefab25f973415770/lib/internal/errors.js#L323-L341
// function makeNodeErrorWithCode: https://github.com/nodejs/node/blob/2d5d77306f6dff9110c1f77fefab25f973415770/lib/internal/errors.js#L251-L278
// The code below should create an error that matches the native error as closely as possible.
// Third-party libraries which attempt to catch the native ERR_REQUIRE_ESM should recognize our imitation error.
function createErrRequireEsm(filename, parentPath, packageJsonPath) {
  const code = 'ERR_REQUIRE_ESM'
  const err = new Error(getErrRequireEsmMessage(filename, parentPath, packageJsonPath))
  // Set `name` to be used in stack trace, generate stack trace with that name baked in, then re-declare the `name` field.
  // This trick is copied from node's source.
  err.name = `Error [${ code }]`
  err.stack
  Object.defineProperty(err, 'name', {
    value: 'Error',
    enumerable: false,
    writable: true,
    configurable: true
  })
  err.code = code
  return err
}

// Copy-pasted from https://github.com/nodejs/node/blob/b533fb3508009e5f567cc776daba8fbf665386a6/lib/internal/errors.js#L1293-L1311
// so that our error message is identical to the native message.
function getErrRequireEsmMessage(filename, parentPath = null, packageJsonPath = null) {
  const ext = path.extname(filename)
  let msg = `Must use import to load ES Module: ${filename}`;
  if (parentPath && packageJsonPath) {
    const path = require('path');
    const basename = path.basename(filename) === path.basename(parentPath) ?
      filename : path.basename(filename);
    msg +=
      '\nrequire() of ES modules is not supported.\nrequire() of ' +
      `${filename} ${parentPath ? `from ${parentPath} ` : ''}` +
      `is an ES module file as it is a ${ext} file whose nearest parent ` +
      `package.json contains "type": "module" which defines all ${ext} ` +
      'files in that package scope as ES modules.\nInstead ' +
      'change the requiring code to use ' +
      'import(), or remove "type": "module" from ' +
      `${packageJsonPath}.\n`;
    return msg;
  }
  return msg;
}
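The comments in the file above explain the goal: the imitation error should look enough like node's native ERR_REQUIRE_ESM that third-party code inspecting `err.code` accepts it. A minimal usage sketch, assuming it is run from inside the `dist-raw` directory; the file paths passed in are hypothetical:

// Illustration only; the paths below are made up.
const { createErrRequireEsm } = require('./node-internal-errors');

const err = createErrRequireEsm(
  '/project/src/lib.js',       // file that was require()d
  '/project/src/index.js',     // module that called require()
  '/project/package.json'      // nearest package.json containing "type": "module"
);

// Third-party code written for the native error typically keys off `code`:
if (err.code === 'ERR_REQUIRE_ESM') {
  console.log(err.message.split('\n')[0]);
  // "Must use import to load ES Module: /project/src/lib.js"
}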
89 node_modules/ts-node/dist-raw/node-internal-modules-cjs-helpers.js generated vendored Normal file
@@ -0,0 +1,89 @@
// Copied from https://github.com/nodejs/node/blob/v17.0.1/lib/internal/modules/cjs/helpers.js

'use strict';

const {
  ArrayPrototypeForEach,
  ObjectDefineProperty,
  ObjectPrototypeHasOwnProperty,
  SafeSet,
  StringPrototypeIncludes,
  StringPrototypeStartsWith,
} = require('./node-primordials');

const { getOptionValue } = require('./node-options');
const userConditions = getOptionValue('--conditions');

const noAddons = getOptionValue('--no-addons');
const addonConditions = noAddons ? [] : ['node-addons'];

// TODO: Use this set when resolving pkg#exports conditions in loader.js.
const cjsConditions = new SafeSet([
  'require',
  'node',
  ...addonConditions,
  ...userConditions,
]);

/**
 * @param {any} object
 * @param {string} [dummyModuleName]
 * @return {void}
 */
function addBuiltinLibsToObject(object, dummyModuleName) {
  // Make built-in modules available directly (loaded lazily).
  const Module = require('module').Module;
  const { builtinModules } = Module;

  // To require built-in modules in user-land and ignore modules whose
  // `canBeRequiredByUsers` is false. So we create a dummy module object and not
  // use `require()` directly.
  const dummyModule = new Module(dummyModuleName);

  ArrayPrototypeForEach(builtinModules, (name) => {
    // Neither add underscored modules, nor ones that contain slashes (e.g.,
    // 'fs/promises') or ones that are already defined.
    if (StringPrototypeStartsWith(name, '_') ||
        StringPrototypeIncludes(name, '/') ||
        ObjectPrototypeHasOwnProperty(object, name)) {
      return;
    }
    // Goals of this mechanism are:
    // - Lazy loading of built-in modules
    // - Having all built-in modules available as non-enumerable properties
    // - Allowing the user to re-assign these variables as if there were no
    //   pre-existing globals with the same name.

    const setReal = (val) => {
      // Deleting the property before re-assigning it disables the
      // getter/setter mechanism.
      delete object[name];
      object[name] = val;
    };

    ObjectDefineProperty(object, name, {
      get: () => {
        // Node 12 hack; remove when we drop node12 support
        const lib = (dummyModule.require || require)(name);

        // Disable the current getter/setter and set up a new
        // non-enumerable property.
        delete object[name];
        ObjectDefineProperty(object, name, {
          get: () => lib,
          set: setReal,
          configurable: true,
          enumerable: false
        });

        return lib;
      },
      set: setReal,
      configurable: true,
      enumerable: false
    });
  });
}

exports.addBuiltinLibsToObject = addBuiltinLibsToObject;
exports.cjsConditions = cjsConditions;
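A small usage sketch of the lazy-getter mechanism the comments above describe, assuming it is run from inside the `dist-raw` directory; the target object here is a stand-in for something like a REPL context and is not part of ts-node:

// Illustration only; `context` is a hypothetical REPL-style context object.
const { addBuiltinLibsToObject } = require('./node-internal-modules-cjs-helpers');

const context = {};
addBuiltinLibsToObject(context, '<repl>');

// Builtins are defined as non-enumerable getters, so nothing is loaded yet.
console.log(Object.keys(context).length); // 0

// First access triggers the lazy require() and swaps the getter for the real module.
console.log(typeof context.path.join);    // 'function'

// Re-assignment behaves as if there were no pre-existing global with that name.
context.path = 'my own value';
console.log(context.path);                // 'my own value'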
593 node_modules/ts-node/dist-raw/node-internal-modules-cjs-loader.js generated vendored Normal file
@@ -0,0 +1,593 @@
|
||||
// Copied from several files in node's source code.
|
||||
// https://github.com/nodejs/node/blob/2d5d77306f6dff9110c1f77fefab25f973415770/lib/internal/modules/cjs/loader.js
|
||||
// Each function and variable below must have a comment linking to the source in node's github repo.
|
||||
|
||||
'use strict';
|
||||
|
||||
const {
|
||||
ArrayIsArray,
|
||||
ArrayPrototypeIncludes,
|
||||
ArrayPrototypeJoin,
|
||||
ArrayPrototypePush,
|
||||
JSONParse,
|
||||
ObjectKeys,
|
||||
RegExpPrototypeTest,
|
||||
SafeMap,
|
||||
SafeWeakMap,
|
||||
StringPrototypeCharCodeAt,
|
||||
StringPrototypeEndsWith,
|
||||
StringPrototypeLastIndexOf,
|
||||
StringPrototypeIndexOf,
|
||||
StringPrototypeMatch,
|
||||
StringPrototypeSlice,
|
||||
StringPrototypeStartsWith,
|
||||
} = require('./node-primordials');
|
||||
const { NativeModule } = require('./node-nativemodule');
|
||||
const { pathToFileURL, fileURLToPath } = require('url');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { sep } = path;
|
||||
const { internalModuleStat } = require('./node-internalBinding-fs');
|
||||
const packageJsonReader = require('./node-internal-modules-package_json_reader');
|
||||
const {
|
||||
cjsConditions,
|
||||
} = require('./node-internal-modules-cjs-helpers');
|
||||
const { getOptionValue } = require('./node-options');
|
||||
const preserveSymlinks = getOptionValue('--preserve-symlinks');
|
||||
const preserveSymlinksMain = getOptionValue('--preserve-symlinks-main');
|
||||
const {normalizeSlashes} = require('../dist/util');
|
||||
const {createErrRequireEsm} = require('./node-internal-errors');
|
||||
const {
|
||||
codes: {
|
||||
ERR_INVALID_MODULE_SPECIFIER,
|
||||
},
|
||||
} = require('./node-internal-errors');
|
||||
|
||||
const {
|
||||
CHAR_FORWARD_SLASH,
|
||||
} = require('./node-internal-constants');
|
||||
|
||||
const Module = require('module');
|
||||
|
||||
const isWindows = process.platform === 'win32';
|
||||
|
||||
let statCache = null;
|
||||
|
||||
function stat(filename) {
|
||||
filename = path.toNamespacedPath(filename);
|
||||
if (statCache !== null) {
|
||||
const result = statCache.get(filename);
|
||||
if (result !== undefined) return result;
|
||||
}
|
||||
const result = internalModuleStat(filename);
|
||||
if (statCache !== null && result >= 0) {
|
||||
// Only set cache when `internalModuleStat(filename)` succeeds.
|
||||
statCache.set(filename, result);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
// Note:
|
||||
// we cannot get access to node's internal cache, which is populated from
|
||||
// within node's Module constructor. So the cache here will always be empty.
|
||||
// It's possible we could approximate our own cache by building it up with
|
||||
// hacky workarounds, but it's not worth the complexity and flakiness.
|
||||
const moduleParentCache = new SafeWeakMap();
|
||||
|
||||
// Given a module name, and a list of paths to test, returns the first
|
||||
// matching file in the following precedence.
|
||||
//
|
||||
// require("a.<ext>")
|
||||
// -> a.<ext>
|
||||
//
|
||||
// require("a")
|
||||
// -> a
|
||||
// -> a.<ext>
|
||||
// -> a/index.<ext>
|
||||
|
||||
const packageJsonCache = new SafeMap();
|
||||
|
||||
function readPackage(requestPath) {
|
||||
const jsonPath = path.resolve(requestPath, 'package.json');
|
||||
|
||||
const existing = packageJsonCache.get(jsonPath);
|
||||
if (existing !== undefined) return existing;
|
||||
|
||||
const result = packageJsonReader.read(jsonPath);
|
||||
const json = result.containsKeys === false ? '{}' : result.string;
|
||||
if (json === undefined) {
|
||||
packageJsonCache.set(jsonPath, false);
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = JSONParse(json);
|
||||
const filtered = {
|
||||
name: parsed.name,
|
||||
main: parsed.main,
|
||||
exports: parsed.exports,
|
||||
imports: parsed.imports,
|
||||
type: parsed.type
|
||||
};
|
||||
packageJsonCache.set(jsonPath, filtered);
|
||||
return filtered;
|
||||
} catch (e) {
|
||||
e.path = jsonPath;
|
||||
e.message = 'Error parsing ' + jsonPath + ': ' + e.message;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
function readPackageScope(checkPath) {
|
||||
const rootSeparatorIndex = StringPrototypeIndexOf(checkPath, sep);
|
||||
let separatorIndex;
|
||||
do {
|
||||
separatorIndex = StringPrototypeLastIndexOf(checkPath, sep);
|
||||
checkPath = StringPrototypeSlice(checkPath, 0, separatorIndex);
|
||||
if (StringPrototypeEndsWith(checkPath, sep + 'node_modules'))
|
||||
return false;
|
||||
const pjson = readPackage(checkPath + sep);
|
||||
if (pjson) return {
|
||||
data: pjson,
|
||||
path: checkPath,
|
||||
};
|
||||
} while (separatorIndex > rootSeparatorIndex);
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {{
|
||||
* nodeEsmResolver: ReturnType<typeof import('./node-internal-modules-esm-resolve').createResolve>,
|
||||
* extensions: import('../src/file-extensions').Extensions,
|
||||
* preferTsExts
|
||||
* }} opts
|
||||
*/
|
||||
function createCjsLoader(opts) {
|
||||
const {nodeEsmResolver, preferTsExts} = opts;
|
||||
const {replacementsForCjs, replacementsForJs, replacementsForMjs, replacementsForJsx} = opts.extensions;
|
||||
const {
|
||||
encodedSepRegEx,
|
||||
packageExportsResolve,
|
||||
packageImportsResolve
|
||||
} = nodeEsmResolver;
|
||||
|
||||
function tryPackage(requestPath, exts, isMain, originalPath) {
|
||||
// const pkg = readPackage(requestPath)?.main;
|
||||
const tmp = readPackage(requestPath)
|
||||
const pkg = tmp != null ? tmp.main : undefined;
|
||||
|
||||
if (!pkg) {
|
||||
return tryExtensions(path.resolve(requestPath, 'index'), exts, isMain);
|
||||
}
|
||||
|
||||
const filename = path.resolve(requestPath, pkg);
|
||||
let actual = tryReplacementExtensions(filename, isMain) ||
|
||||
tryFile(filename, isMain) ||
|
||||
tryExtensions(filename, exts, isMain) ||
|
||||
tryExtensions(path.resolve(filename, 'index'), exts, isMain);
|
||||
if (actual === false) {
|
||||
actual = tryExtensions(path.resolve(requestPath, 'index'), exts, isMain);
|
||||
if (!actual) {
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
const err = new Error(
|
||||
`Cannot find module '${filename}'. ` +
|
||||
'Please verify that the package.json has a valid "main" entry'
|
||||
);
|
||||
err.code = 'MODULE_NOT_FOUND';
|
||||
err.path = path.resolve(requestPath, 'package.json');
|
||||
err.requestPath = originalPath;
|
||||
// TODO(BridgeAR): Add the requireStack as well.
|
||||
throw err;
|
||||
} else {
|
||||
const jsonPath = path.resolve(requestPath, 'package.json');
|
||||
process.emitWarning(
|
||||
`Invalid 'main' field in '${jsonPath}' of '${pkg}'. ` +
|
||||
'Please either fix that or report it to the module author',
|
||||
'DeprecationWarning',
|
||||
'DEP0128'
|
||||
);
|
||||
}
|
||||
}
|
||||
return actual;
|
||||
}
|
||||
|
||||
// In order to minimize unnecessary lstat() calls,
|
||||
// this cache is a list of known-real paths.
|
||||
// Set to an empty Map to reset.
|
||||
const realpathCache = new SafeMap();
|
||||
|
||||
// Check if the file exists and is not a directory
|
||||
// if using --preserve-symlinks and isMain is false,
|
||||
// keep symlinks intact, otherwise resolve to the
|
||||
// absolute realpath.
|
||||
function tryFile(requestPath, isMain) {
|
||||
const rc = stat(requestPath);
|
||||
if (rc !== 0) return;
|
||||
if (preserveSymlinks && !isMain) {
|
||||
return path.resolve(requestPath);
|
||||
}
|
||||
return toRealPath(requestPath);
|
||||
}
|
||||
|
||||
function toRealPath(requestPath) {
|
||||
return fs.realpathSync(requestPath, {
|
||||
// [internalFS.realpathCacheKey]: realpathCache
|
||||
});
|
||||
}
|
||||
|
||||
function statReplacementExtensions(p) {
|
||||
const lastDotIndex = p.lastIndexOf('.');
|
||||
if(lastDotIndex >= 0) {
|
||||
const ext = p.slice(lastDotIndex);
|
||||
if (ext === '.js' || ext === '.jsx' || ext === '.mjs' || ext === '.cjs') {
|
||||
const pathnameWithoutExtension = p.slice(0, lastDotIndex);
|
||||
const replacementExts =
|
||||
ext === '.js' ? replacementsForJs
|
||||
: ext === '.jsx' ? replacementsForJsx
|
||||
: ext === '.mjs' ? replacementsForMjs
|
||||
: replacementsForCjs;
|
||||
for (let i = 0; i < replacementExts.length; i++) {
|
||||
const filename = pathnameWithoutExtension + replacementExts[i];
|
||||
const rc = stat(filename);
|
||||
if (rc === 0) {
|
||||
return [rc, filename];
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return [stat(p), p];
|
||||
}
|
||||
function tryReplacementExtensions(p, isMain) {
|
||||
const lastDotIndex = p.lastIndexOf('.');
|
||||
if(lastDotIndex >= 0) {
|
||||
const ext = p.slice(lastDotIndex);
|
||||
if (ext === '.js' || ext === '.jsx' || ext === '.mjs' || ext === '.cjs') {
|
||||
const pathnameWithoutExtension = p.slice(0, lastDotIndex);
|
||||
const replacementExts =
|
||||
ext === '.js' ? replacementsForJs
|
||||
: ext === '.jsx' ? replacementsForJsx
|
||||
: ext === '.mjs' ? replacementsForMjs
|
||||
: replacementsForCjs;
|
||||
for (let i = 0; i < replacementExts.length; i++) {
|
||||
const filename = tryFile(pathnameWithoutExtension + replacementExts[i], isMain);
|
||||
if (filename) {
|
||||
return filename;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
// Given a path, check if the file exists with any of the set extensions
|
||||
function tryExtensions(p, exts, isMain) {
|
||||
for (let i = 0; i < exts.length; i++) {
|
||||
const filename = tryFile(p + exts[i], isMain);
|
||||
|
||||
if (filename) {
|
||||
return filename;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function trySelfParentPath(parent) {
|
||||
if (!parent) return false;
|
||||
|
||||
if (parent.filename) {
|
||||
return parent.filename;
|
||||
} else if (parent.id === '<repl>' || parent.id === 'internal/preload') {
|
||||
try {
|
||||
return process.cwd() + path.sep;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function trySelf(parentPath, request) {
|
||||
if (!parentPath) return false;
|
||||
|
||||
const { data: pkg, path: pkgPath } = readPackageScope(parentPath) || {};
|
||||
if (!pkg || pkg.exports === undefined) return false;
|
||||
if (typeof pkg.name !== 'string') return false;
|
||||
|
||||
let expansion;
|
||||
if (request === pkg.name) {
|
||||
expansion = '.';
|
||||
} else if (StringPrototypeStartsWith(request, `${pkg.name}/`)) {
|
||||
expansion = '.' + StringPrototypeSlice(request, pkg.name.length);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
return finalizeEsmResolution(packageExportsResolve(
|
||||
pathToFileURL(pkgPath + '/package.json'), expansion, pkg,
|
||||
pathToFileURL(parentPath), cjsConditions).resolved, parentPath, pkgPath);
|
||||
} catch (e) {
|
||||
if (e.code === 'ERR_MODULE_NOT_FOUND')
|
||||
throw createEsmNotFoundErr(request, pkgPath + '/package.json');
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
// This only applies to requests of a specific form:
|
||||
// 1. name/.*
|
||||
// 2. @scope/name/.*
|
||||
const EXPORTS_PATTERN = /^((?:@[^/\\%]+\/)?[^./\\%][^/\\%]*)(\/.*)?$/;
|
||||
function resolveExports(nmPath, request) {
|
||||
// The implementation's behavior is meant to mirror resolution in ESM.
|
||||
const { 1: name, 2: expansion = '' } =
|
||||
StringPrototypeMatch(request, EXPORTS_PATTERN) || [];
|
||||
if (!name)
|
||||
return;
|
||||
const pkgPath = path.resolve(nmPath, name);
|
||||
const pkg = readPackage(pkgPath);
|
||||
// if (pkg?.exports != null) {
|
||||
if (pkg != null && pkg.exports != null) {
|
||||
try {
|
||||
return finalizeEsmResolution(packageExportsResolve(
|
||||
pathToFileURL(pkgPath + '/package.json'), '.' + expansion, pkg, null,
|
||||
cjsConditions).resolved, null, pkgPath);
|
||||
} catch (e) {
|
||||
if (e.code === 'ERR_MODULE_NOT_FOUND')
|
||||
throw createEsmNotFoundErr(request, pkgPath + '/package.json');
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Backwards compat for old node versions
|
||||
const hasModulePathCache = !!require('module')._pathCache;
|
||||
const Module_pathCache = Object.create(null);
|
||||
const Module_pathCache_get = hasModulePathCache ? (cacheKey) => Module._pathCache[cacheKey] : (cacheKey) => Module_pathCache[cacheKey];
|
||||
const Module_pathCache_set = hasModulePathCache ? (cacheKey, value) => (Module._pathCache[cacheKey] = value) : (cacheKey) => (Module_pathCache[cacheKey] = value);
|
||||
|
||||
const trailingSlashRegex = /(?:^|\/)\.?\.$/;
|
||||
const Module_findPath = function _findPath(request, paths, isMain) {
|
||||
const absoluteRequest = path.isAbsolute(request);
|
||||
if (absoluteRequest) {
|
||||
paths = [''];
|
||||
} else if (!paths || paths.length === 0) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const cacheKey = request + '\x00' + ArrayPrototypeJoin(paths, '\x00');
|
||||
const entry = Module_pathCache_get(cacheKey);
|
||||
if (entry)
|
||||
return entry;
|
||||
|
||||
let exts;
|
||||
let trailingSlash = request.length > 0 &&
|
||||
StringPrototypeCharCodeAt(request, request.length - 1) ===
|
||||
CHAR_FORWARD_SLASH;
|
||||
if (!trailingSlash) {
|
||||
trailingSlash = RegExpPrototypeTest(trailingSlashRegex, request);
|
||||
}
|
||||
|
||||
// For each path
|
||||
for (let i = 0; i < paths.length; i++) {
|
||||
// Don't search further if path doesn't exist
|
||||
const curPath = paths[i];
|
||||
if (curPath && stat(curPath) < 1) continue;
|
||||
|
||||
if (!absoluteRequest) {
|
||||
const exportsResolved = resolveExports(curPath, request);
|
||||
if (exportsResolved)
|
||||
return exportsResolved;
|
||||
}
|
||||
|
||||
const _basePath = path.resolve(curPath, request);
|
||||
let filename;
|
||||
|
||||
const [rc, basePath] = statReplacementExtensions(_basePath);
|
||||
if (!trailingSlash) {
|
||||
if (rc === 0) { // File.
|
||||
if (!isMain) {
|
||||
if (preserveSymlinks) {
|
||||
filename = path.resolve(basePath);
|
||||
} else {
|
||||
filename = toRealPath(basePath);
|
||||
}
|
||||
} else if (preserveSymlinksMain) {
|
||||
// For the main module, we use the preserveSymlinksMain flag instead
|
||||
// mainly for backward compatibility, as the preserveSymlinks flag
|
||||
// historically has not applied to the main module. Most likely this
|
||||
// was intended to keep .bin/ binaries working, as following those
|
||||
// symlinks is usually required for the imports in the corresponding
|
||||
// files to resolve; that said, in some use cases following symlinks
|
||||
// causes bigger problems which is why the preserveSymlinksMain option
|
||||
// is needed.
|
||||
filename = path.resolve(basePath);
|
||||
} else {
|
||||
filename = toRealPath(basePath);
|
||||
}
|
||||
}
|
||||
|
||||
if (!filename) {
|
||||
// Try it with each of the extensions
|
||||
if (exts === undefined)
|
||||
exts = ObjectKeys(Module._extensions);
|
||||
filename = tryExtensions(basePath, exts, isMain);
|
||||
}
|
||||
}
|
||||
|
||||
if (!filename && rc === 1) { // Directory.
|
||||
// try it with each of the extensions at "index"
|
||||
if (exts === undefined)
|
||||
exts = ObjectKeys(Module._extensions);
|
||||
filename = tryPackage(basePath, exts, isMain, request);
|
||||
}
|
||||
|
||||
if (filename) {
|
||||
Module_pathCache_set(cacheKey, filename);
|
||||
return filename;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
const Module_resolveFilename = function _resolveFilename(request, parent, isMain, options) {
|
||||
if (StringPrototypeStartsWith(request, 'node:') ||
|
||||
NativeModule.canBeRequiredByUsers(request)) {
|
||||
return request;
|
||||
}
|
||||
|
||||
let paths;
|
||||
|
||||
if (typeof options === 'object' && options !== null) {
|
||||
if (ArrayIsArray(options.paths)) {
|
||||
const isRelative = StringPrototypeStartsWith(request, './') ||
|
||||
StringPrototypeStartsWith(request, '../') ||
|
||||
((isWindows && StringPrototypeStartsWith(request, '.\\')) ||
|
||||
StringPrototypeStartsWith(request, '..\\'));
|
||||
|
||||
if (isRelative) {
|
||||
paths = options.paths;
|
||||
} else {
|
||||
const fakeParent = new Module('', null);
|
||||
|
||||
paths = [];
|
||||
|
||||
for (let i = 0; i < options.paths.length; i++) {
|
||||
const path = options.paths[i];
|
||||
fakeParent.paths = Module._nodeModulePaths(path);
|
||||
const lookupPaths = Module._resolveLookupPaths(request, fakeParent);
|
||||
|
||||
for (let j = 0; j < lookupPaths.length; j++) {
|
||||
if (!ArrayPrototypeIncludes(paths, lookupPaths[j]))
|
||||
ArrayPrototypePush(paths, lookupPaths[j]);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (options.paths === undefined) {
|
||||
paths = Module._resolveLookupPaths(request, parent);
|
||||
} else {
|
||||
throw new ERR_INVALID_ARG_VALUE('options.paths', options.paths);
|
||||
}
|
||||
} else {
|
||||
paths = Module._resolveLookupPaths(request, parent);
|
||||
}
|
||||
|
||||
// if (parent?.filename) {
|
||||
// node 12 hack
|
||||
if (parent != null && parent.filename) {
|
||||
if (request[0] === '#') {
|
||||
const pkg = readPackageScope(parent.filename) || {};
|
||||
|
||||
// if (pkg.data?.imports != null) {
|
||||
// node 12 hack
|
||||
if (pkg.data != null && pkg.data.imports != null) {
|
||||
try {
|
||||
return finalizeEsmResolution(
|
||||
packageImportsResolve(request, pathToFileURL(parent.filename),
|
||||
cjsConditions), parent.filename,
|
||||
pkg.path);
|
||||
} catch (e) {
|
||||
if (e.code === 'ERR_MODULE_NOT_FOUND')
|
||||
throw createEsmNotFoundErr(request);
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try module self resolution first
|
||||
const parentPath = trySelfParentPath(parent);
|
||||
const selfResolved = trySelf(parentPath, request);
|
||||
if (selfResolved) {
|
||||
const cacheKey = request + '\x00' +
|
||||
(paths.length === 1 ? paths[0] : ArrayPrototypeJoin(paths, '\x00'));
|
||||
Module._pathCache[cacheKey] = selfResolved;
|
||||
return selfResolved;
|
||||
}
|
||||
|
||||
// Look up the filename first, since that's the cache key.
|
||||
const filename = Module._findPath(request, paths, isMain, false);
|
||||
if (filename) return filename;
|
||||
const requireStack = [];
|
||||
for (let cursor = parent;
|
||||
cursor;
|
||||
cursor = moduleParentCache.get(cursor)) {
|
||||
ArrayPrototypePush(requireStack, cursor.filename || cursor.id);
|
||||
}
|
||||
let message = `Cannot find module '${request}'`;
|
||||
if (requireStack.length > 0) {
|
||||
message = message + '\nRequire stack:\n- ' +
|
||||
ArrayPrototypeJoin(requireStack, '\n- ');
|
||||
}
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
const err = new Error(message);
|
||||
err.code = 'MODULE_NOT_FOUND';
|
||||
err.requireStack = requireStack;
|
||||
throw err;
|
||||
};
|
||||
|
||||
function finalizeEsmResolution(resolved, parentPath, pkgPath) {
|
||||
if (RegExpPrototypeTest(encodedSepRegEx, resolved))
|
||||
throw new ERR_INVALID_MODULE_SPECIFIER(
|
||||
resolved, 'must not include encoded "/" or "\\" characters', parentPath);
|
||||
const filename = fileURLToPath(resolved);
|
||||
const actual = tryReplacementExtensions(filename) || tryFile(filename);
|
||||
if (actual)
|
||||
return actual;
|
||||
const err = createEsmNotFoundErr(filename,
|
||||
path.resolve(pkgPath, 'package.json'));
|
||||
throw err;
|
||||
}
|
||||
|
||||
function createEsmNotFoundErr(request, path) {
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
const err = new Error(`Cannot find module '${request}'`);
|
||||
err.code = 'MODULE_NOT_FOUND';
|
||||
if (path)
|
||||
err.path = path;
|
||||
// TODO(BridgeAR): Add the requireStack as well.
|
||||
return err;
|
||||
}
|
||||
|
||||
|
||||
return {
|
||||
Module_findPath,
|
||||
Module_resolveFilename
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* copied from Module._extensions['.js']
|
||||
* https://github.com/nodejs/node/blob/v15.3.0/lib/internal/modules/cjs/loader.js#L1113-L1120
|
||||
* @param {import('../src/index').Service} service
|
||||
* @param {NodeJS.Module} module
|
||||
* @param {string} filename
|
||||
*/
|
||||
function assertScriptCanLoadAsCJSImpl(service, module, filename) {
|
||||
const pkg = readPackageScope(filename);
|
||||
|
||||
// ts-node modification: allow our configuration to override
|
||||
const tsNodeClassification = service.moduleTypeClassifier.classifyModuleByModuleTypeOverrides(normalizeSlashes(filename));
|
||||
if(tsNodeClassification.moduleType === 'cjs') return;
|
||||
|
||||
// ignore package.json when file extension is ESM-only or CJS-only
|
||||
// [MUST_UPDATE_FOR_NEW_FILE_EXTENSIONS]
|
||||
const lastDotIndex = filename.lastIndexOf('.');
|
||||
const ext = lastDotIndex >= 0 ? filename.slice(lastDotIndex) : '';
|
||||
|
||||
if((ext === '.cts' || ext === '.cjs') && tsNodeClassification.moduleType === 'auto') return;
|
||||
|
||||
// Function require shouldn't be used in ES modules.
|
||||
if (ext === '.mts' || ext === '.mjs' || tsNodeClassification.moduleType === 'esm' || (pkg && pkg.data && pkg.data.type === 'module')) {
|
||||
const parentPath = module.parent && module.parent.filename;
|
||||
const packageJsonPath = pkg ? path.resolve(pkg.path, 'package.json') : null;
|
||||
throw createErrRequireEsm(filename, parentPath, packageJsonPath);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
module.exports = {
|
||||
createCjsLoader,
|
||||
assertScriptCanLoadAsCJSImpl,
|
||||
readPackageScope
|
||||
};
|
106 node_modules/ts-node/dist-raw/node-internal-modules-esm-get_format.js generated vendored Normal file
@@ -0,0 +1,106 @@
// Copied from https://raw.githubusercontent.com/nodejs/node/v15.3.0/lib/internal/modules/esm/get_format.js

'use strict';
const {
  RegExpPrototypeExec,
  StringPrototypeStartsWith,
} = require('./node-primordials');
const { extname } = require('path');
const { getOptionValue } = require('./node-options');

const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(s => parseInt(s, 10));
const experimentalJsonModules =
  nodeMajor > 17
  || (nodeMajor === 17 && nodeMinor >= 5)
  || (nodeMajor === 16 && nodeMinor >= 15)
  || getOptionValue('--experimental-json-modules');
const experimentalWasmModules = getOptionValue('--experimental-wasm-modules');
const { URL, fileURLToPath } = require('url');
const { ERR_UNKNOWN_FILE_EXTENSION } = require('./node-internal-errors').codes;

const extensionFormatMap = {
  '__proto__': null,
  '.cjs': 'commonjs',
  '.js': 'module',
  '.mjs': 'module'
};

const legacyExtensionFormatMap = {
  '__proto__': null,
  '.cjs': 'commonjs',
  '.js': 'commonjs',
  '.json': 'commonjs',
  '.mjs': 'module',
  '.node': 'commonjs'
};

if (experimentalWasmModules)
  extensionFormatMap['.wasm'] = legacyExtensionFormatMap['.wasm'] = 'wasm';

if (experimentalJsonModules)
  extensionFormatMap['.json'] = legacyExtensionFormatMap['.json'] = 'json';

/**
 *
 * @param {'node' | 'explicit'} [tsNodeExperimentalSpecifierResolution]
 * @param {ReturnType<
 *   typeof import('../dist-raw/node-internal-modules-esm-resolve').createResolve
 * >} nodeEsmResolver
 */
function createGetFormat(tsNodeExperimentalSpecifierResolution, nodeEsmResolver) {
  // const experimentalSpeciferResolution = tsNodeExperimentalSpecifierResolution ?? getOptionValue('--experimental-specifier-resolution');
  let experimentalSpeciferResolution = tsNodeExperimentalSpecifierResolution != null ? tsNodeExperimentalSpecifierResolution : getOptionValue('--experimental-specifier-resolution');
  const { getPackageType } = nodeEsmResolver;

  /**
   * @param {string} url
   * @param {{}} context
   * @param {any} defaultGetFormatUnused
   * @returns {ReturnType<import('../src/esm').NodeLoaderHooksAPI1.GetFormatHook>}
   */
  function defaultGetFormat(url, context, defaultGetFormatUnused) {
    if (StringPrototypeStartsWith(url, 'node:')) {
      return { format: 'builtin' };
    }
    const parsed = new URL(url);
    if (parsed.protocol === 'data:') {
      const [ , mime ] = RegExpPrototypeExec(
        /^([^/]+\/[^;,]+)(?:[^,]*?)(;base64)?,/,
        parsed.pathname,
      ) || [ null, null, null ];
      const format = ({
        '__proto__': null,
        'text/javascript': 'module',
        'application/json': experimentalJsonModules ? 'json' : null,
        'application/wasm': experimentalWasmModules ? 'wasm' : null
      })[mime] || null;
      return { format };
    } else if (parsed.protocol === 'file:') {
      const ext = extname(parsed.pathname);
      let format;
      if (ext === '.js') {
        format = getPackageType(parsed.href) === 'module' ? 'module' : 'commonjs';
      } else {
        format = extensionFormatMap[ext];
      }
      if (!format) {
        if (experimentalSpeciferResolution === 'node') {
          process.emitWarning(
            'The Node.js specifier resolution in ESM is experimental.',
            'ExperimentalWarning');
          format = legacyExtensionFormatMap[ext];
        } else {
          throw new ERR_UNKNOWN_FILE_EXTENSION(ext, fileURLToPath(url));
        }
      }
      return { format: format || null };
    }
    return { format: null };
  }

  return {defaultGetFormat};
}

module.exports = {
  createGetFormat
};
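A quick sketch of how `defaultGetFormat` above classifies URLs, assuming it is run from inside the `dist-raw` directory; the resolver stub and URLs are made up for illustration:

// Illustration only; `fakeResolver` stands in for the real createResolve() result.
const { createGetFormat } = require('./node-internal-modules-esm-get_format');

const fakeResolver = { getPackageType: () => 'module' };
const { defaultGetFormat } = createGetFormat(undefined, fakeResolver);

console.log(defaultGetFormat('node:path'));                  // { format: 'builtin' }
console.log(defaultGetFormat('file:///app/src/entry.js'));   // { format: 'module' } because the stub reports "type": "module"
console.log(defaultGetFormat('file:///app/src/legacy.cjs')); // { format: 'commonjs' }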
962 node_modules/ts-node/dist-raw/node-internal-modules-esm-resolve.js generated vendored Normal file
@@ -0,0 +1,962 @@
|
||||
// Copied from https://raw.githubusercontent.com/nodejs/node/v15.3.0/lib/internal/modules/esm/resolve.js
|
||||
|
||||
'use strict';
|
||||
|
||||
const {versionGteLt} = require('../dist/util');
|
||||
|
||||
// Test for node >14.13.1 || (>=12.20.0 && <13)
|
||||
const builtinModuleProtocol =
|
||||
versionGteLt(process.versions.node, '14.13.1') ||
|
||||
versionGteLt(process.versions.node, '12.20.0', '13.0.0')
|
||||
? 'node:'
|
||||
: 'nodejs:';
|
||||
|
||||
const {
|
||||
ArrayIsArray,
|
||||
ArrayPrototypeJoin,
|
||||
ArrayPrototypeShift,
|
||||
JSONParse,
|
||||
JSONStringify,
|
||||
ObjectFreeze,
|
||||
ObjectGetOwnPropertyNames,
|
||||
ObjectPrototypeHasOwnProperty,
|
||||
RegExpPrototypeTest,
|
||||
SafeMap,
|
||||
SafeSet,
|
||||
StringPrototypeEndsWith,
|
||||
StringPrototypeIndexOf,
|
||||
StringPrototypeLastIndexOf,
|
||||
StringPrototypeReplace,
|
||||
StringPrototypeSlice,
|
||||
StringPrototypeSplit,
|
||||
StringPrototypeStartsWith,
|
||||
StringPrototypeSubstr,
|
||||
} = require('./node-primordials');
|
||||
|
||||
// const internalFS = require('internal/fs/utils');
|
||||
const Module = require('module');
|
||||
const { NativeModule } = require('./node-nativemodule');
|
||||
const {
|
||||
realpathSync,
|
||||
statSync,
|
||||
Stats,
|
||||
} = require('fs');
|
||||
// const { getOptionValue } = require('internal/options');
|
||||
const { getOptionValue } = require('./node-options');
|
||||
// // Do not eagerly grab .manifest, it may be in TDZ
|
||||
// const policy = getOptionValue('--experimental-policy') ?
|
||||
// require('internal/process/policy') :
|
||||
// null;
|
||||
// disabled for now. I am not sure if/how we should support this
|
||||
const policy = null;
|
||||
const { sep, relative } = require('path');
|
||||
const preserveSymlinks = getOptionValue('--preserve-symlinks');
|
||||
const preserveSymlinksMain = getOptionValue('--preserve-symlinks-main');
|
||||
const typeFlag = getOptionValue('--input-type');
|
||||
// const { URL, pathToFileURL, fileURLToPath } = require('internal/url');
|
||||
const { URL, pathToFileURL, fileURLToPath } = require('url');
|
||||
const {
|
||||
ERR_INPUT_TYPE_NOT_ALLOWED,
|
||||
ERR_INVALID_ARG_VALUE,
|
||||
ERR_INVALID_MODULE_SPECIFIER,
|
||||
ERR_INVALID_PACKAGE_CONFIG,
|
||||
ERR_INVALID_PACKAGE_TARGET,
|
||||
ERR_MANIFEST_DEPENDENCY_MISSING,
|
||||
ERR_MODULE_NOT_FOUND,
|
||||
ERR_PACKAGE_IMPORT_NOT_DEFINED,
|
||||
ERR_PACKAGE_PATH_NOT_EXPORTED,
|
||||
ERR_UNSUPPORTED_DIR_IMPORT,
|
||||
ERR_UNSUPPORTED_ESM_URL_SCHEME,
|
||||
// } = require('internal/errors').codes;
|
||||
} = require('./node-internal-errors').codes;
|
||||
|
||||
// const { Module: CJSModule } = require('internal/modules/cjs/loader');
|
||||
const CJSModule = Module;
|
||||
|
||||
// const packageJsonReader = require('internal/modules/package_json_reader');
|
||||
const packageJsonReader = require('./node-internal-modules-package_json_reader');
|
||||
const userConditions = getOptionValue('--conditions');
|
||||
const DEFAULT_CONDITIONS = ObjectFreeze(['node', 'import', ...userConditions]);
|
||||
const DEFAULT_CONDITIONS_SET = new SafeSet(DEFAULT_CONDITIONS);
|
||||
|
||||
const pendingDeprecation = getOptionValue('--pending-deprecation');
|
||||
|
||||
/**
|
||||
* @param {{
|
||||
* extensions: import('../src/file-extensions').Extensions,
|
||||
* preferTsExts: boolean | undefined;
|
||||
* tsNodeExperimentalSpecifierResolution: import('../src/index').ExperimentalSpecifierResolution | undefined;
|
||||
* }} opts
|
||||
*/
|
||||
function createResolve(opts) {
|
||||
// TODO receive cached fs implementations here
|
||||
const {preferTsExts, tsNodeExperimentalSpecifierResolution, extensions} = opts;
|
||||
const esrnExtensions = extensions.experimentalSpecifierResolutionAddsIfOmitted;
|
||||
const {legacyMainResolveAddsIfOmitted, replacementsForCjs, replacementsForJs, replacementsForMjs, replacementsForJsx} = extensions;
|
||||
// const experimentalSpecifierResolution = tsNodeExperimentalSpecifierResolution ?? getOptionValue('--experimental-specifier-resolution');
|
||||
const experimentalSpecifierResolution = tsNodeExperimentalSpecifierResolution != null ? tsNodeExperimentalSpecifierResolution : getOptionValue('--experimental-specifier-resolution');
|
||||
|
||||
const emittedPackageWarnings = new SafeSet();
|
||||
function emitFolderMapDeprecation(match, pjsonUrl, isExports, base) {
|
||||
const pjsonPath = fileURLToPath(pjsonUrl);
|
||||
if (!pendingDeprecation) {
|
||||
const nodeModulesIndex = StringPrototypeLastIndexOf(pjsonPath,
|
||||
'/node_modules/');
|
||||
if (nodeModulesIndex !== -1) {
|
||||
const afterNodeModulesPath = StringPrototypeSlice(pjsonPath,
|
||||
nodeModulesIndex + 14,
|
||||
-13);
|
||||
try {
|
||||
const { packageSubpath } = parsePackageName(afterNodeModulesPath);
|
||||
if (packageSubpath === '.')
|
||||
return;
|
||||
} catch {}
|
||||
}
|
||||
}
|
||||
if (emittedPackageWarnings.has(pjsonPath + '|' + match))
|
||||
return;
|
||||
emittedPackageWarnings.add(pjsonPath + '|' + match);
|
||||
process.emitWarning(
|
||||
`Use of deprecated folder mapping "${match}" in the ${isExports ?
|
||||
'"exports"' : '"imports"'} field module resolution of the package at ${
|
||||
pjsonPath}${base ? ` imported from ${fileURLToPath(base)}` : ''}.\n` +
|
||||
`Update this package.json to use a subpath pattern like "${match}*".`,
|
||||
'DeprecationWarning',
|
||||
'DEP0148'
|
||||
);
|
||||
}
|
||||
|
||||
function getConditionsSet(conditions) {
|
||||
if (conditions !== undefined && conditions !== DEFAULT_CONDITIONS) {
|
||||
if (!ArrayIsArray(conditions)) {
|
||||
throw new ERR_INVALID_ARG_VALUE('conditions', conditions,
|
||||
'expected an array');
|
||||
}
|
||||
return new SafeSet(conditions);
|
||||
}
|
||||
return DEFAULT_CONDITIONS_SET;
|
||||
}
|
||||
|
||||
const realpathCache = new SafeMap();
|
||||
const packageJSONCache = new SafeMap(); /* string -> PackageConfig */
|
||||
|
||||
const statSupportsThrowIfNoEntry = versionGteLt(process.versions.node, '15.3.0') ||
|
||||
versionGteLt(process.versions.node, '14.17.0', '15.0.0');
|
||||
const tryStatSync = statSupportsThrowIfNoEntry ? tryStatSyncWithoutErrors : tryStatSyncWithErrors;
|
||||
const statsIfNotFound = new Stats();
|
||||
function tryStatSyncWithoutErrors(path) {
|
||||
const stats = statSync(path, { throwIfNoEntry: false });
|
||||
if(stats != null) return stats;
|
||||
return statsIfNotFound;
|
||||
}
|
||||
function tryStatSyncWithErrors(path) {
|
||||
try {
|
||||
return statSync(path);
|
||||
} catch {
|
||||
return statsIfNotFound;
|
||||
}
|
||||
}
|
||||
|
||||
function getPackageConfig(path, specifier, base) {
|
||||
const existing = packageJSONCache.get(path);
|
||||
if (existing !== undefined) {
|
||||
return existing;
|
||||
}
|
||||
const source = packageJsonReader.read(path).string;
|
||||
if (source === undefined) {
|
||||
const packageConfig = {
|
||||
pjsonPath: path,
|
||||
exists: false,
|
||||
main: undefined,
|
||||
name: undefined,
|
||||
type: 'none',
|
||||
exports: undefined,
|
||||
imports: undefined,
|
||||
};
|
||||
packageJSONCache.set(path, packageConfig);
|
||||
return packageConfig;
|
||||
}
|
||||
|
||||
let packageJSON;
|
||||
try {
|
||||
packageJSON = JSONParse(source);
|
||||
} catch (error) {
|
||||
throw new ERR_INVALID_PACKAGE_CONFIG(
|
||||
path,
|
||||
(base ? `"${specifier}" from ` : '') + fileURLToPath(base || specifier),
|
||||
error.message
|
||||
);
|
||||
}
|
||||
|
||||
let { imports, main, name, type } = packageJSON;
|
||||
const { exports } = packageJSON;
|
||||
if (typeof imports !== 'object' || imports === null) imports = undefined;
|
||||
if (typeof main !== 'string') main = undefined;
|
||||
if (typeof name !== 'string') name = undefined;
|
||||
// Ignore unknown types for forwards compatibility
|
||||
if (type !== 'module' && type !== 'commonjs') type = 'none';
|
||||
|
||||
const packageConfig = {
|
||||
pjsonPath: path,
|
||||
exists: true,
|
||||
main,
|
||||
name,
|
||||
type,
|
||||
exports,
|
||||
imports,
|
||||
};
|
||||
packageJSONCache.set(path, packageConfig);
|
||||
return packageConfig;
|
||||
}
|
||||
|
||||
function getPackageScopeConfig(resolved) {
|
||||
let packageJSONUrl = new URL('./package.json', resolved);
|
||||
while (true) {
|
||||
const packageJSONPath = packageJSONUrl.pathname;
|
||||
if (StringPrototypeEndsWith(packageJSONPath, 'node_modules/package.json'))
|
||||
break;
|
||||
const packageConfig = getPackageConfig(fileURLToPath(packageJSONUrl),
|
||||
resolved);
|
||||
if (packageConfig.exists) return packageConfig;
|
||||
|
||||
const lastPackageJSONUrl = packageJSONUrl;
|
||||
packageJSONUrl = new URL('../package.json', packageJSONUrl);
|
||||
|
||||
// Terminates at root where ../package.json equals ../../package.json
|
||||
// (can't just check "/package.json" for Windows support).
|
||||
if (packageJSONUrl.pathname === lastPackageJSONUrl.pathname) break;
|
||||
}
|
||||
const packageJSONPath = fileURLToPath(packageJSONUrl);
|
||||
const packageConfig = {
|
||||
pjsonPath: packageJSONPath,
|
||||
exists: false,
|
||||
main: undefined,
|
||||
name: undefined,
|
||||
type: 'none',
|
||||
exports: undefined,
|
||||
imports: undefined,
|
||||
};
|
||||
packageJSONCache.set(packageJSONPath, packageConfig);
|
||||
return packageConfig;
|
||||
}
|
||||
|
||||
/*
|
||||
* Legacy CommonJS main resolution:
|
||||
* 1. let M = pkg_url + (json main field)
|
||||
* 2. TRY(M, M.js, M.json, M.node)
|
||||
* 3. TRY(M/index.js, M/index.json, M/index.node)
|
||||
* 4. TRY(pkg_url/index.js, pkg_url/index.json, pkg_url/index.node)
|
||||
* 5. NOT_FOUND
|
||||
*/
|
||||
function fileExists(url) {
|
||||
return tryStatSync(fileURLToPath(url)).isFile();
|
||||
}
|
||||
|
||||
function legacyMainResolve(packageJSONUrl, packageConfig, base) {
|
||||
let guess;
|
||||
if (packageConfig.main !== undefined) {
|
||||
// Note: fs check redundances will be handled by Descriptor cache here.
|
||||
if(guess = resolveReplacementExtensions(new URL(`./${packageConfig.main}`, packageJSONUrl))) {
|
||||
return guess;
|
||||
}
|
||||
if (fileExists(guess = new URL(`./${packageConfig.main}`,
|
||||
packageJSONUrl))) {
|
||||
return guess;
|
||||
}
|
||||
for(const extension of legacyMainResolveAddsIfOmitted) {
|
||||
if (fileExists(guess = new URL(`./${packageConfig.main}${extension}`,
|
||||
packageJSONUrl))) {
|
||||
return guess;
|
||||
}
|
||||
}
|
||||
for(const extension of legacyMainResolveAddsIfOmitted) {
|
||||
if (fileExists(guess = new URL(`./${packageConfig.main}/index${extension}`,
|
||||
packageJSONUrl))) {
|
||||
return guess;
|
||||
}
|
||||
}
|
||||
// Fallthrough.
|
||||
}
|
||||
for(const extension of legacyMainResolveAddsIfOmitted) {
|
||||
if (fileExists(guess = new URL(`./index${extension}`, packageJSONUrl))) {
|
||||
return guess;
|
||||
}
|
||||
}
|
||||
// Not found.
|
||||
throw new ERR_MODULE_NOT_FOUND(
|
||||
fileURLToPath(new URL('.', packageJSONUrl)), fileURLToPath(base));
|
||||
}
|
||||
|
||||
/** attempts replacement extensions, then tries exact name, then attempts appending extensions */
|
||||
function resolveExtensionsWithTryExactName(search) {
|
||||
const resolvedReplacementExtension = resolveReplacementExtensions(search);
|
||||
if(resolvedReplacementExtension) return resolvedReplacementExtension;
|
||||
if (fileExists(search)) return search;
|
||||
return resolveExtensions(search);
|
||||
}
|
||||
|
||||
// This appends missing extensions
|
||||
function resolveExtensions(search) {
|
||||
for (let i = 0; i < esrnExtensions.length; i++) {
|
||||
const extension = esrnExtensions[i];
|
||||
const guess = new URL(`${search.pathname}${extension}`, search);
|
||||
if (fileExists(guess)) return guess;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/** This replaces JS with TS extensions */
|
||||
function resolveReplacementExtensions(search) {
|
||||
const lastDotIndex = search.pathname.lastIndexOf('.');
|
||||
if(lastDotIndex >= 0) {
|
||||
const ext = search.pathname.slice(lastDotIndex);
|
||||
if (ext === '.js' || ext === '.jsx' || ext === '.mjs' || ext === '.cjs') {
|
||||
const pathnameWithoutExtension = search.pathname.slice(0, lastDotIndex);
|
||||
const replacementExts =
|
||||
ext === '.js' ? replacementsForJs
|
||||
: ext === '.jsx' ? replacementsForJsx
|
||||
: ext === '.mjs' ? replacementsForMjs
|
||||
: replacementsForCjs;
|
||||
const guess = new URL(search.toString());
|
||||
for (let i = 0; i < replacementExts.length; i++) {
|
||||
const extension = replacementExts[i];
|
||||
guess.pathname = `${pathnameWithoutExtension}${extension}`;
|
||||
if (fileExists(guess)) return guess;
|
||||
}
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function resolveIndex(search) {
|
||||
return resolveExtensions(new URL('index', search));
|
||||
}
|
||||
|
||||
const encodedSepRegEx = /%2F|%2C/i;
|
||||
function finalizeResolution(resolved, base) {
|
||||
if (RegExpPrototypeTest(encodedSepRegEx, resolved.pathname))
|
||||
throw new ERR_INVALID_MODULE_SPECIFIER(
|
||||
resolved.pathname, 'must not include encoded "/" or "\\" characters',
|
||||
fileURLToPath(base));
|
||||
|
||||
if (experimentalSpecifierResolution === 'node') {
|
||||
const path = fileURLToPath(resolved);
|
||||
let file = resolveExtensionsWithTryExactName(resolved);
|
||||
if (file !== undefined) return file;
|
||||
if (!StringPrototypeEndsWith(path, '/')) {
|
||||
file = resolveIndex(new URL(`${resolved}/`));
|
||||
if (file !== undefined) return file;
|
||||
} else {
|
||||
return resolveIndex(resolved) || resolved;
|
||||
}
|
||||
throw new ERR_MODULE_NOT_FOUND(
|
||||
resolved.pathname, fileURLToPath(base), 'module');
|
||||
}
|
||||
|
||||
const file = resolveReplacementExtensions(resolved) || resolved;
|
||||
const path = fileURLToPath(file);
|
||||
|
||||
const stats = tryStatSync(StringPrototypeEndsWith(path, '/') ?
|
||||
StringPrototypeSlice(path, -1) : path);
|
||||
if (stats.isDirectory()) {
|
||||
const err = new ERR_UNSUPPORTED_DIR_IMPORT(path, fileURLToPath(base));
|
||||
err.url = String(resolved);
|
||||
throw err;
|
||||
} else if (!stats.isFile()) {
|
||||
throw new ERR_MODULE_NOT_FOUND(
|
||||
path || resolved.pathname, fileURLToPath(base), 'module');
|
||||
}
|
||||
|
||||
return file;
|
||||
}
|
||||
|
||||
function throwImportNotDefined(specifier, packageJSONUrl, base) {
|
||||
throw new ERR_PACKAGE_IMPORT_NOT_DEFINED(
|
||||
specifier, packageJSONUrl && fileURLToPath(new URL('.', packageJSONUrl)),
|
||||
fileURLToPath(base));
|
||||
}
|
||||
|
||||
function throwExportsNotFound(subpath, packageJSONUrl, base) {
|
||||
throw new ERR_PACKAGE_PATH_NOT_EXPORTED(
|
||||
fileURLToPath(new URL('.', packageJSONUrl)), subpath,
|
||||
base && fileURLToPath(base));
|
||||
}
|
||||
|
||||
function throwInvalidSubpath(subpath, packageJSONUrl, internal, base) {
|
||||
const reason = `request is not a valid subpath for the "${internal ?
|
||||
'imports' : 'exports'}" resolution of ${fileURLToPath(packageJSONUrl)}`;
|
||||
throw new ERR_INVALID_MODULE_SPECIFIER(subpath, reason,
|
||||
base && fileURLToPath(base));
|
||||
}
|
||||
|
||||
function throwInvalidPackageTarget(
|
||||
subpath, target, packageJSONUrl, internal, base) {
|
||||
if (typeof target === 'object' && target !== null) {
|
||||
target = JSONStringify(target, null, '');
|
||||
} else {
|
||||
target = `${target}`;
|
||||
}
|
||||
throw new ERR_INVALID_PACKAGE_TARGET(
|
||||
fileURLToPath(new URL('.', packageJSONUrl)), subpath, target,
|
||||
internal, base && fileURLToPath(base));
|
||||
}
|
||||
|
||||
const invalidSegmentRegEx = /(^|\\|\/)(\.\.?|node_modules)(\\|\/|$)/;
|
||||
const patternRegEx = /\*/g;
|
||||
|
||||
function resolvePackageTargetString(
|
||||
target, subpath, match, packageJSONUrl, base, pattern, internal, conditions) {
|
||||
if (subpath !== '' && !pattern && target[target.length - 1] !== '/')
|
||||
throwInvalidPackageTarget(match, target, packageJSONUrl, internal, base);
|
||||
|
||||
if (!StringPrototypeStartsWith(target, './')) {
|
||||
if (internal && !StringPrototypeStartsWith(target, '../') &&
|
||||
!StringPrototypeStartsWith(target, '/')) {
|
||||
let isURL = false;
|
||||
try {
|
||||
new URL(target);
|
||||
isURL = true;
|
||||
} catch {}
|
||||
if (!isURL) {
|
||||
const exportTarget = pattern ?
|
||||
StringPrototypeReplace(target, patternRegEx, subpath) :
|
||||
target + subpath;
|
||||
return packageResolve(exportTarget, packageJSONUrl, conditions);
|
||||
}
|
||||
}
|
||||
throwInvalidPackageTarget(match, target, packageJSONUrl, internal, base);
|
||||
}
|
||||
|
||||
if (RegExpPrototypeTest(invalidSegmentRegEx, StringPrototypeSlice(target, 2)))
|
||||
throwInvalidPackageTarget(match, target, packageJSONUrl, internal, base);
|
||||
|
||||
const resolved = new URL(target, packageJSONUrl);
|
||||
const resolvedPath = resolved.pathname;
|
||||
const packagePath = new URL('.', packageJSONUrl).pathname;
|
||||
|
||||
if (!StringPrototypeStartsWith(resolvedPath, packagePath))
|
||||
throwInvalidPackageTarget(match, target, packageJSONUrl, internal, base);
|
||||
|
||||
if (subpath === '') return resolved;
|
||||
|
||||
if (RegExpPrototypeTest(invalidSegmentRegEx, subpath))
|
||||
throwInvalidSubpath(match + subpath, packageJSONUrl, internal, base);
|
||||
|
||||
if (pattern)
|
||||
return new URL(StringPrototypeReplace(resolved.href, patternRegEx,
|
||||
subpath));
|
||||
return new URL(subpath, resolved);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} key
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isArrayIndex(key) {
|
||||
const keyNum = +key;
|
||||
if (`${keyNum}` !== key) return false;
|
||||
return keyNum >= 0 && keyNum < 0xFFFF_FFFF;
|
||||
}
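// Illustrative behaviour of the check above (values chosen for demonstration):
//   isArrayIndex('1')  === true
//   isArrayIndex('01') === false  (round-tripping through Number yields '1', not '01')
//   isArrayIndex('-1') === false  (negative indices are rejected)
// Keys that look like array indices are rejected in "exports" objects by
// resolvePackageTarget() below.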
|
||||
|
||||
function resolvePackageTarget(packageJSONUrl, target, subpath, packageSubpath,
|
||||
base, pattern, internal, conditions) {
|
||||
if (typeof target === 'string') {
|
||||
return resolvePackageTargetString(
|
||||
target, subpath, packageSubpath, packageJSONUrl, base, pattern, internal,
|
||||
conditions);
|
||||
} else if (ArrayIsArray(target)) {
|
||||
if (target.length === 0)
|
||||
return null;
|
||||
|
||||
let lastException;
|
||||
for (let i = 0; i < target.length; i++) {
|
||||
const targetItem = target[i];
|
||||
let resolved;
|
||||
try {
|
||||
resolved = resolvePackageTarget(
|
||||
packageJSONUrl, targetItem, subpath, packageSubpath, base, pattern,
|
||||
internal, conditions);
|
||||
} catch (e) {
|
||||
lastException = e;
|
||||
if (e.code === 'ERR_INVALID_PACKAGE_TARGET')
|
||||
continue;
|
||||
throw e;
|
||||
}
|
||||
if (resolved === undefined)
|
||||
continue;
|
||||
if (resolved === null) {
|
||||
lastException = null;
|
||||
continue;
|
||||
}
|
||||
return resolved;
|
||||
}
|
||||
if (lastException === undefined || lastException === null)
|
||||
return lastException;
|
||||
throw lastException;
|
||||
} else if (typeof target === 'object' && target !== null) {
|
||||
const keys = ObjectGetOwnPropertyNames(target);
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i];
|
||||
if (isArrayIndex(key)) {
|
||||
throw new ERR_INVALID_PACKAGE_CONFIG(
|
||||
fileURLToPath(packageJSONUrl), base,
|
||||
'"exports" cannot contain numeric property keys.');
|
||||
}
|
||||
}
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i];
|
||||
if (key === 'default' || conditions.has(key)) {
|
||||
const conditionalTarget = target[key];
|
||||
const resolved = resolvePackageTarget(
|
||||
packageJSONUrl, conditionalTarget, subpath, packageSubpath, base,
|
||||
pattern, internal, conditions);
|
||||
if (resolved === undefined)
|
||||
continue;
|
||||
return resolved;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
} else if (target === null) {
|
||||
return null;
|
||||
}
|
||||
throwInvalidPackageTarget(packageSubpath, target, packageJSONUrl, internal,
|
||||
base);
|
||||
}
|
||||
|
||||
function isConditionalExportsMainSugar(exports, packageJSONUrl, base) {
|
||||
if (typeof exports === 'string' || ArrayIsArray(exports)) return true;
|
||||
if (typeof exports !== 'object' || exports === null) return false;
|
||||
|
||||
const keys = ObjectGetOwnPropertyNames(exports);
|
||||
let isConditionalSugar = false;
|
||||
let i = 0;
|
||||
for (let j = 0; j < keys.length; j++) {
|
||||
const key = keys[j];
|
||||
const curIsConditionalSugar = key === '' || key[0] !== '.';
|
||||
if (i++ === 0) {
|
||||
isConditionalSugar = curIsConditionalSugar;
|
||||
} else if (isConditionalSugar !== curIsConditionalSugar) {
|
||||
throw new ERR_INVALID_PACKAGE_CONFIG(
|
||||
fileURLToPath(packageJSONUrl), base,
|
||||
'"exports" cannot contain some keys starting with \'.\' and some not.' +
|
||||
' The exports object must either be an object of package subpath keys' +
|
||||
' or an object of main entry condition name keys only.');
|
||||
}
|
||||
}
|
||||
return isConditionalSugar;
|
||||
}
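// Illustrative examples for the sugar detection above (package.json snippets are hypothetical):
//   "exports": "./index.js"                                    -> sugar, treated as { ".": "./index.js" }
//   "exports": { "import": "./esm.js", "require": "./cjs.js" } -> sugar (no key starts with '.')
//   "exports": { ".": "./a.js", "import": "./b.js" }           -> mixed keys, throws ERR_INVALID_PACKAGE_CONFIG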
|
||||
|
||||
/**
|
||||
* @param {URL} packageJSONUrl
|
||||
* @param {string} packageSubpath
|
||||
* @param {object} packageConfig
|
||||
* @param {string} base
|
||||
* @param {Set<string>} conditions
|
||||
* @returns {{resolved: URL, exact: boolean}}
|
||||
*/
|
||||
function packageExportsResolve(
|
||||
packageJSONUrl, packageSubpath, packageConfig, base, conditions) {
|
||||
let exports = packageConfig.exports;
|
||||
if (isConditionalExportsMainSugar(exports, packageJSONUrl, base))
|
||||
exports = { '.': exports };
|
||||
|
||||
if (ObjectPrototypeHasOwnProperty(exports, packageSubpath)) {
|
||||
const target = exports[packageSubpath];
|
||||
const resolved = resolvePackageTarget(
|
||||
packageJSONUrl, target, '', packageSubpath, base, false, false, conditions
|
||||
);
|
||||
if (resolved === null || resolved === undefined)
|
||||
throwExportsNotFound(packageSubpath, packageJSONUrl, base);
|
||||
return { resolved, exact: true };
|
||||
}
|
||||
|
||||
let bestMatch = '';
|
||||
const keys = ObjectGetOwnPropertyNames(exports);
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i];
|
||||
if (key[key.length - 1] === '*' &&
|
||||
StringPrototypeStartsWith(packageSubpath,
|
||||
StringPrototypeSlice(key, 0, -1)) &&
|
||||
packageSubpath.length >= key.length &&
|
||||
key.length > bestMatch.length) {
|
||||
bestMatch = key;
|
||||
} else if (key[key.length - 1] === '/' &&
|
||||
StringPrototypeStartsWith(packageSubpath, key) &&
|
||||
key.length > bestMatch.length) {
|
||||
bestMatch = key;
|
||||
}
|
||||
}
|
||||
|
||||
if (bestMatch) {
|
||||
const target = exports[bestMatch];
|
||||
const pattern = bestMatch[bestMatch.length - 1] === '*';
|
||||
const subpath = StringPrototypeSubstr(packageSubpath, bestMatch.length -
|
||||
(pattern ? 1 : 0));
|
||||
const resolved = resolvePackageTarget(packageJSONUrl, target, subpath,
|
||||
bestMatch, base, pattern, false,
|
||||
conditions);
|
||||
if (resolved === null || resolved === undefined)
|
||||
throwExportsNotFound(packageSubpath, packageJSONUrl, base);
|
||||
if (!pattern)
|
||||
emitFolderMapDeprecation(bestMatch, packageJSONUrl, true, base);
|
||||
return { resolved, exact: pattern };
|
||||
}
|
||||
|
||||
throwExportsNotFound(packageSubpath, packageJSONUrl, base);
|
||||
}
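// Worked example for the matching above (package layout is hypothetical): with
//   "exports": { "./features/*": "./src/features/*.js" }
// a request for "pkg/features/x" gives packageSubpath "./features/x"; the "*" key wins
// as bestMatch, subpath becomes "x", and resolvePackageTarget yields
// <package root>/src/features/x.js with { exact: true }. A trailing-slash key such as
// "./lib/" resolves similarly but returns { exact: false } and triggers
// emitFolderMapDeprecation.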
|
||||
|
||||
function packageImportsResolve(name, base, conditions) {
|
||||
if (name === '#' || StringPrototypeStartsWith(name, '#/')) {
|
||||
const reason = 'is not a valid internal imports specifier name';
|
||||
throw new ERR_INVALID_MODULE_SPECIFIER(name, reason, fileURLToPath(base));
|
||||
}
|
||||
let packageJSONUrl;
|
||||
const packageConfig = getPackageScopeConfig(base);
|
||||
if (packageConfig.exists) {
|
||||
packageJSONUrl = pathToFileURL(packageConfig.pjsonPath);
|
||||
const imports = packageConfig.imports;
|
||||
if (imports) {
|
||||
if (ObjectPrototypeHasOwnProperty(imports, name)) {
|
||||
const resolved = resolvePackageTarget(
|
||||
packageJSONUrl, imports[name], '', name, base, false, true, conditions
|
||||
);
|
||||
if (resolved !== null)
|
||||
return { resolved, exact: true };
|
||||
} else {
|
||||
let bestMatch = '';
|
||||
const keys = ObjectGetOwnPropertyNames(imports);
|
||||
for (let i = 0; i < keys.length; i++) {
|
||||
const key = keys[i];
|
||||
if (key[key.length - 1] === '*' &&
|
||||
StringPrototypeStartsWith(name,
|
||||
StringPrototypeSlice(key, 0, -1)) &&
|
||||
name.length >= key.length &&
|
||||
key.length > bestMatch.length) {
|
||||
bestMatch = key;
|
||||
} else if (key[key.length - 1] === '/' &&
|
||||
StringPrototypeStartsWith(name, key) &&
|
||||
key.length > bestMatch.length) {
|
||||
bestMatch = key;
|
||||
}
|
||||
}
|
||||
|
||||
if (bestMatch) {
|
||||
const target = imports[bestMatch];
|
||||
const pattern = bestMatch[bestMatch.length - 1] === '*';
|
||||
const subpath = StringPrototypeSubstr(name, bestMatch.length -
|
||||
(pattern ? 1 : 0));
|
||||
const resolved = resolvePackageTarget(
|
||||
packageJSONUrl, target, subpath, bestMatch, base, pattern, true,
|
||||
conditions);
|
||||
if (resolved !== null) {
|
||||
if (!pattern)
|
||||
emitFolderMapDeprecation(bestMatch, packageJSONUrl, false, base);
|
||||
return { resolved, exact: pattern };
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
throwImportNotDefined(name, packageJSONUrl, base);
|
||||
}
|
||||
|
||||
function getPackageType(url) {
|
||||
const packageConfig = getPackageScopeConfig(url);
|
||||
return packageConfig.type;
|
||||
}
|
||||
|
||||
function parsePackageName(specifier, base) {
|
||||
let separatorIndex = StringPrototypeIndexOf(specifier, '/');
|
||||
let validPackageName = true;
|
||||
let isScoped = false;
|
||||
if (specifier[0] === '@') {
|
||||
isScoped = true;
|
||||
if (separatorIndex === -1 || specifier.length === 0) {
|
||||
validPackageName = false;
|
||||
} else {
|
||||
separatorIndex = StringPrototypeIndexOf(
|
||||
specifier, '/', separatorIndex + 1);
|
||||
}
|
||||
}
|
||||
|
||||
const packageName = separatorIndex === -1 ?
|
||||
specifier : StringPrototypeSlice(specifier, 0, separatorIndex);
|
||||
|
||||
// Package name cannot have leading . and cannot have percent-encoding or
|
||||
// separators.
|
||||
for (let i = 0; i < packageName.length; i++) {
|
||||
if (packageName[i] === '%' || packageName[i] === '\\') {
|
||||
validPackageName = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!validPackageName) {
|
||||
throw new ERR_INVALID_MODULE_SPECIFIER(
|
||||
specifier, 'is not a valid package name', fileURLToPath(base));
|
||||
}
|
||||
|
||||
const packageSubpath = '.' + (separatorIndex === -1 ? '' :
|
||||
StringPrototypeSlice(specifier, separatorIndex));
|
||||
|
||||
return { packageName, packageSubpath, isScoped };
|
||||
}
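// Illustrative results of the parsing above (specifiers are hypothetical):
//   'lodash'          -> { packageName: 'lodash',   packageSubpath: '.',          isScoped: false }
//   'lodash/fp/curry' -> { packageName: 'lodash',   packageSubpath: './fp/curry', isScoped: false }
//   '@org/pkg/sub'    -> { packageName: '@org/pkg', packageSubpath: './sub',      isScoped: true }
//   '@org' or 'a%2Fb' -> ERR_INVALID_MODULE_SPECIFIER (missing scope separator / encoded separator)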
|
||||
|
||||
/**
|
||||
* @param {string} specifier
|
||||
* @param {URL} base
|
||||
* @param {Set<string>} conditions
|
||||
* @returns {URL}
|
||||
*/
|
||||
function packageResolve(specifier, base, conditions) {
|
||||
const { packageName, packageSubpath, isScoped } =
|
||||
parsePackageName(specifier, base);
|
||||
|
||||
// ResolveSelf
|
||||
const packageConfig = getPackageScopeConfig(base);
|
||||
if (packageConfig.exists) {
|
||||
const packageJSONUrl = pathToFileURL(packageConfig.pjsonPath);
|
||||
if (packageConfig.name === packageName &&
|
||||
packageConfig.exports !== undefined && packageConfig.exports !== null) {
|
||||
return packageExportsResolve(
|
||||
packageJSONUrl, packageSubpath, packageConfig, base, conditions
|
||||
).resolved;
|
||||
}
|
||||
}
|
||||
|
||||
let packageJSONUrl =
|
||||
new URL('./node_modules/' + packageName + '/package.json', base);
|
||||
let packageJSONPath = fileURLToPath(packageJSONUrl);
|
||||
let lastPath;
|
||||
do {
|
||||
const stat = tryStatSync(StringPrototypeSlice(packageJSONPath, 0,
|
||||
packageJSONPath.length - 13));
|
||||
if (!stat.isDirectory()) {
|
||||
lastPath = packageJSONPath;
|
||||
packageJSONUrl = new URL((isScoped ?
|
||||
'../../../../node_modules/' : '../../../node_modules/') +
|
||||
packageName + '/package.json', packageJSONUrl);
|
||||
packageJSONPath = fileURLToPath(packageJSONUrl);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Package match.
|
||||
const packageConfig = getPackageConfig(packageJSONPath, specifier, base);
|
||||
if (packageConfig.exports !== undefined && packageConfig.exports !== null)
|
||||
return packageExportsResolve(
|
||||
packageJSONUrl, packageSubpath, packageConfig, base, conditions
|
||||
).resolved;
|
||||
if (packageSubpath === '.')
|
||||
return legacyMainResolve(packageJSONUrl, packageConfig, base);
|
||||
return new URL(packageSubpath, packageJSONUrl);
|
||||
// Cross-platform root check.
|
||||
} while (packageJSONPath.length !== lastPath.length);
|
||||
|
||||
// eslint can't handle the above code.
|
||||
// eslint-disable-next-line no-unreachable
|
||||
throw new ERR_MODULE_NOT_FOUND(packageName, fileURLToPath(base));
|
||||
}
|
||||
|
||||
function isBareSpecifier(specifier) {
|
||||
return specifier[0] && specifier[0] !== '/' && specifier[0] !== '.';
|
||||
}
|
||||
|
||||
function isRelativeSpecifier(specifier) {
|
||||
if (specifier[0] === '.') {
|
||||
if (specifier.length === 1 || specifier[1] === '/') return true;
|
||||
if (specifier[1] === '.') {
|
||||
if (specifier.length === 2 || specifier[2] === '/') return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function shouldBeTreatedAsRelativeOrAbsolutePath(specifier) {
|
||||
if (specifier === '') return false;
|
||||
if (specifier[0] === '/') return true;
|
||||
return isRelativeSpecifier(specifier);
|
||||
}
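// Illustrative classification by the helpers above:
//   './x.js', '../x.js', '.', '..', '/abs/x.js' -> relative or absolute; resolved directly against `base`
//   'pkg', '@scope/pkg/x'                       -> bare specifiers; routed to packageResolve()
//   '#internal/util'                            -> imports specifier; routed to packageImportsResolve()
// (see moduleResolve() below for the dispatch)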
|
||||
|
||||
/**
|
||||
* @param {string} specifier
|
||||
* @param {URL} base
|
||||
* @param {Set<string>} conditions
|
||||
* @returns {URL}
|
||||
*/
|
||||
function moduleResolve(specifier, base, conditions) {
|
||||
// Order swapped from spec for minor perf gain.
|
||||
// Ok since relative URLs cannot parse as URLs.
|
||||
let resolved;
|
||||
if (shouldBeTreatedAsRelativeOrAbsolutePath(specifier)) {
|
||||
resolved = new URL(specifier, base);
|
||||
} else if (specifier[0] === '#') {
|
||||
({ resolved } = packageImportsResolve(specifier, base, conditions));
|
||||
} else {
|
||||
try {
|
||||
resolved = new URL(specifier);
|
||||
} catch {
|
||||
resolved = packageResolve(specifier, base, conditions);
|
||||
}
|
||||
}
|
||||
return finalizeResolution(resolved, base);
|
||||
}
|
||||
|
||||
/**
|
||||
* Try to resolve an import as a CommonJS module
|
||||
* @param {string} specifier
|
||||
* @param {string} parentURL
|
||||
* @returns {boolean|string}
|
||||
*/
|
||||
function resolveAsCommonJS(specifier, parentURL) {
|
||||
try {
|
||||
const parent = fileURLToPath(parentURL);
|
||||
const tmpModule = new CJSModule(parent, null);
|
||||
tmpModule.paths = CJSModule._nodeModulePaths(parent);
|
||||
|
||||
let found = CJSModule._resolveFilename(specifier, tmpModule, false);
|
||||
|
||||
// If it is a relative specifier return the relative path
|
||||
// to the parent
|
||||
if (isRelativeSpecifier(specifier)) {
|
||||
found = relative(parent, found);
|
||||
// Prepend '.' + sep when the path does not already start with '..' + sep
|
||||
// This should be a safe assumption because when loading
|
||||
// ESM modules a file should always be specified, so
|
||||
// there should not be a specifier like '..' or '.'
|
||||
if (!StringPrototypeStartsWith(found, `..${sep}`)) {
|
||||
found = `.${sep}${found}`;
|
||||
}
|
||||
} else if (isBareSpecifier(specifier)) {
|
||||
// If it is a bare specifier return the relative path within the
|
||||
// module
|
||||
const pkg = StringPrototypeSplit(specifier, '/')[0];
|
||||
const index = StringPrototypeIndexOf(found, pkg);
|
||||
if (index !== -1) {
|
||||
found = StringPrototypeSlice(found, index);
|
||||
}
|
||||
}
|
||||
// Normalize the path separator to give a valid suggestion
|
||||
// on Windows
|
||||
if (process.platform === 'win32') {
|
||||
found = StringPrototypeReplace(found, new RegExp(`\\${sep}`, 'g'), '/');
|
||||
}
|
||||
return found;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function defaultResolve(specifier, context = {}, defaultResolveUnused) {
|
||||
let { parentURL, conditions } = context;
|
||||
if (parentURL && policy != null && policy.manifest) {
|
||||
const redirects = policy.manifest.getDependencyMapper(parentURL);
|
||||
if (redirects) {
|
||||
const { resolve, reaction } = redirects;
|
||||
const destination = resolve(specifier, new SafeSet(conditions));
|
||||
let missing = true;
|
||||
if (destination === true) {
|
||||
missing = false;
|
||||
} else if (destination) {
|
||||
const href = destination.href;
|
||||
return { url: href };
|
||||
}
|
||||
if (missing) {
|
||||
reaction(new ERR_MANIFEST_DEPENDENCY_MISSING(
|
||||
parentURL,
|
||||
specifier,
|
||||
ArrayPrototypeJoin([...conditions], ', '))
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
let parsed;
|
||||
try {
|
||||
parsed = new URL(specifier);
|
||||
if (parsed.protocol === 'data:') {
|
||||
return {
|
||||
url: specifier
|
||||
};
|
||||
}
|
||||
} catch {}
|
||||
if (parsed && parsed.protocol === builtinModuleProtocol)
|
||||
return { url: specifier };
|
||||
if (parsed && parsed.protocol !== 'file:' && parsed.protocol !== 'data:')
|
||||
throw new ERR_UNSUPPORTED_ESM_URL_SCHEME(parsed);
|
||||
if (NativeModule.canBeRequiredByUsers(specifier)) {
|
||||
return {
|
||||
url: builtinModuleProtocol + specifier
|
||||
};
|
||||
}
|
||||
if (parentURL && StringPrototypeStartsWith(parentURL, 'data:')) {
|
||||
// This will throw; we want the error to surface
|
||||
new URL(specifier, parentURL);
|
||||
}
|
||||
|
||||
const isMain = parentURL === undefined;
|
||||
if (isMain) {
|
||||
parentURL = pathToFileURL(`${process.cwd()}/`).href;
|
||||
|
||||
// This is the initial entry point to the program, and --input-type has
|
||||
// been passed as an option; but --input-type can only be used with
|
||||
// --eval, --print or STDIN string input. It is not allowed with file
|
||||
// input, to avoid user confusion over how expansive the effect of the
|
||||
// flag should be (i.e. entry point only, package scope surrounding the
|
||||
// entry point, etc.).
|
||||
if (typeFlag)
|
||||
throw new ERR_INPUT_TYPE_NOT_ALLOWED();
|
||||
}
|
||||
|
||||
conditions = getConditionsSet(conditions);
|
||||
let url;
|
||||
try {
|
||||
url = moduleResolve(specifier, parentURL, conditions);
|
||||
} catch (error) {
|
||||
// Try to give the user a hint of what would have been the
|
||||
// resolved CommonJS module
|
||||
if (error.code === 'ERR_MODULE_NOT_FOUND' ||
|
||||
error.code === 'ERR_UNSUPPORTED_DIR_IMPORT') {
|
||||
if (StringPrototypeStartsWith(specifier, 'file://')) {
|
||||
specifier = fileURLToPath(specifier);
|
||||
}
|
||||
const found = resolveAsCommonJS(specifier, parentURL);
|
||||
if (found) {
|
||||
// Modify the stack and message string to include the hint
|
||||
const lines = StringPrototypeSplit(error.stack, '\n');
|
||||
const hint = `Did you mean to import ${found}?`;
|
||||
error.stack =
|
||||
ArrayPrototypeShift(lines) + '\n' +
|
||||
hint + '\n' +
|
||||
ArrayPrototypeJoin(lines, '\n');
|
||||
error.message += `\n${hint}`;
|
||||
}
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
if (isMain ? !preserveSymlinksMain : !preserveSymlinks) {
|
||||
const urlPath = fileURLToPath(url);
|
||||
const real = realpathSync(urlPath, {
|
||||
// [internalFS.realpathCacheKey]: realpathCache
|
||||
});
|
||||
const old = url;
|
||||
url = pathToFileURL(
|
||||
real + (StringPrototypeEndsWith(urlPath, sep) ? '/' : ''));
|
||||
url.search = old.search;
|
||||
url.hash = old.hash;
|
||||
}
|
||||
|
||||
return { url: `${url}` };
|
||||
}
|
||||
|
||||
return {
|
||||
DEFAULT_CONDITIONS,
|
||||
defaultResolve,
|
||||
encodedSepRegEx,
|
||||
getPackageType,
|
||||
packageExportsResolve,
|
||||
packageImportsResolve
|
||||
};
|
||||
}
|
||||
module.exports = {
|
||||
createResolve
|
||||
};
|
44
node_modules/ts-node/dist-raw/node-internal-modules-package_json_reader.js
generated
vendored
Normal file
44
node_modules/ts-node/dist-raw/node-internal-modules-package_json_reader.js
generated
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
// copied from https://github.com/nodejs/node/blob/v15.3.0/lib/internal/modules/package_json_reader.js
|
||||
'use strict';
|
||||
|
||||
const { SafeMap } = require('./node-primordials');
|
||||
const { internalModuleReadJSON } = require('./node-internalBinding-fs');
|
||||
const { pathToFileURL } = require('url');
|
||||
const { toNamespacedPath } = require('path');
|
||||
// const { getOptionValue } = require('./node-options');
|
||||
|
||||
const cache = new SafeMap();
|
||||
|
||||
let manifest;
|
||||
|
||||
/**
|
||||
* @param {string} jsonPath
|
||||
* @return {{string: string, containsKeys: boolean}}
|
||||
*/
|
||||
function read(jsonPath) {
|
||||
if (cache.has(jsonPath)) {
|
||||
return cache.get(jsonPath);
|
||||
}
|
||||
|
||||
const [string, containsKeys] = internalModuleReadJSON(
|
||||
toNamespacedPath(jsonPath)
|
||||
);
|
||||
const result = { string, containsKeys };
|
||||
if (string !== undefined) {
|
||||
if (manifest === undefined) {
|
||||
// manifest = getOptionValue('--experimental-policy') ?
|
||||
// require('internal/process/policy').manifest :
|
||||
// null;
|
||||
// disabled for now. I am not sure if/how we should support this
|
||||
manifest = null;
|
||||
}
|
||||
if (manifest !== null) {
|
||||
const jsonURL = pathToFileURL(jsonPath);
|
||||
manifest.assertIntegrity(jsonURL, string);
|
||||
}
|
||||
}
|
||||
cache.set(jsonPath, result);
|
||||
return result;
|
||||
}
|
||||
|
||||
module.exports = { read };
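// A minimal usage sketch, assuming this file is loaded as a module; the path below is
// hypothetical. `read()` memoizes results per jsonPath in `cache`, and `containsKeys`
// is hard-coded to true by the internalModuleReadJSON shim whenever the file exists.
if (require.main === module) {
  const { string, containsKeys } = read('/tmp/example/package.json');
  // `string` is the raw file contents, or undefined when the file does not exist.
  console.log(string === undefined ? 'not found' : `read ${string.length} characters`);
  console.log('containsKeys:', containsKeys);
}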
|
254
node_modules/ts-node/dist-raw/node-internal-repl-await.js
generated
vendored
Normal file
254
node_modules/ts-node/dist-raw/node-internal-repl-await.js
generated
vendored
Normal file
@ -0,0 +1,254 @@
|
||||
// copied from https://github.com/nodejs/node/blob/88799930794045795e8abac874730f9eba7e2300/lib/internal/repl/await.js
|
||||
'use strict';
|
||||
|
||||
const {
|
||||
ArrayFrom,
|
||||
ArrayPrototypeForEach,
|
||||
ArrayPrototypeIncludes,
|
||||
ArrayPrototypeJoin,
|
||||
ArrayPrototypePop,
|
||||
ArrayPrototypePush,
|
||||
FunctionPrototype,
|
||||
ObjectKeys,
|
||||
RegExpPrototypeSymbolReplace,
|
||||
StringPrototypeEndsWith,
|
||||
StringPrototypeIncludes,
|
||||
StringPrototypeIndexOf,
|
||||
StringPrototypeRepeat,
|
||||
StringPrototypeSplit,
|
||||
StringPrototypeStartsWith,
|
||||
SyntaxError,
|
||||
} = require('./node-primordials');
|
||||
|
||||
const parser = require('acorn').Parser;
|
||||
const walk = require('acorn-walk');
|
||||
const { Recoverable } = require('repl');
|
||||
|
||||
function isTopLevelDeclaration(state) {
|
||||
return state.ancestors[state.ancestors.length - 2] === state.body;
|
||||
}
|
||||
|
||||
const noop = FunctionPrototype;
|
||||
const visitorsWithoutAncestors = {
|
||||
ClassDeclaration(node, state, c) {
|
||||
if (isTopLevelDeclaration(state)) {
|
||||
state.prepend(node, `${node.id.name}=`);
|
||||
ArrayPrototypePush(
|
||||
state.hoistedDeclarationStatements,
|
||||
`let ${node.id.name}; `
|
||||
);
|
||||
}
|
||||
|
||||
walk.base.ClassDeclaration(node, state, c);
|
||||
},
|
||||
ForOfStatement(node, state, c) {
|
||||
if (node.await === true) {
|
||||
state.containsAwait = true;
|
||||
}
|
||||
walk.base.ForOfStatement(node, state, c);
|
||||
},
|
||||
FunctionDeclaration(node, state, c) {
|
||||
state.prepend(node, `${node.id.name}=`);
|
||||
ArrayPrototypePush(
|
||||
state.hoistedDeclarationStatements,
|
||||
`var ${node.id.name}; `
|
||||
);
|
||||
},
|
||||
FunctionExpression: noop,
|
||||
ArrowFunctionExpression: noop,
|
||||
MethodDefinition: noop,
|
||||
AwaitExpression(node, state, c) {
|
||||
state.containsAwait = true;
|
||||
walk.base.AwaitExpression(node, state, c);
|
||||
},
|
||||
ReturnStatement(node, state, c) {
|
||||
state.containsReturn = true;
|
||||
walk.base.ReturnStatement(node, state, c);
|
||||
},
|
||||
VariableDeclaration(node, state, c) {
|
||||
const variableKind = node.kind;
|
||||
const isIterableForDeclaration = ArrayPrototypeIncludes(
|
||||
['ForOfStatement', 'ForInStatement'],
|
||||
state.ancestors[state.ancestors.length - 2].type
|
||||
);
|
||||
|
||||
if (variableKind === 'var' || isTopLevelDeclaration(state)) {
|
||||
state.replace(
|
||||
node.start,
|
||||
node.start + variableKind.length + (isIterableForDeclaration ? 1 : 0),
|
||||
variableKind === 'var' && isIterableForDeclaration ?
|
||||
'' :
|
||||
'void' + (node.declarations.length === 1 ? '' : ' (')
|
||||
);
|
||||
|
||||
if (!isIterableForDeclaration) {
|
||||
ArrayPrototypeForEach(node.declarations, (decl) => {
|
||||
state.prepend(decl, '(');
|
||||
state.append(decl, decl.init ? ')' : '=undefined)');
|
||||
});
|
||||
|
||||
if (node.declarations.length !== 1) {
|
||||
state.append(node.declarations[node.declarations.length - 1], ')');
|
||||
}
|
||||
}
|
||||
|
||||
const variableIdentifiersToHoist = [
|
||||
['var', []],
|
||||
['let', []],
|
||||
];
|
||||
function registerVariableDeclarationIdentifiers(node) {
|
||||
switch (node.type) {
|
||||
case 'Identifier':
|
||||
ArrayPrototypePush(
|
||||
variableIdentifiersToHoist[variableKind === 'var' ? 0 : 1][1],
|
||||
node.name
|
||||
);
|
||||
break;
|
||||
case 'ObjectPattern':
|
||||
ArrayPrototypeForEach(node.properties, (property) => {
|
||||
registerVariableDeclarationIdentifiers(property.value);
|
||||
});
|
||||
break;
|
||||
case 'ArrayPattern':
|
||||
ArrayPrototypeForEach(node.elements, (element) => {
|
||||
registerVariableDeclarationIdentifiers(element);
|
||||
});
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
ArrayPrototypeForEach(node.declarations, (decl) => {
|
||||
registerVariableDeclarationIdentifiers(decl.id);
|
||||
});
|
||||
|
||||
ArrayPrototypeForEach(
|
||||
variableIdentifiersToHoist,
|
||||
({ 0: kind, 1: identifiers }) => {
|
||||
if (identifiers.length > 0) {
|
||||
ArrayPrototypePush(
|
||||
state.hoistedDeclarationStatements,
|
||||
`${kind} ${ArrayPrototypeJoin(identifiers, ', ')}; `
|
||||
);
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
walk.base.VariableDeclaration(node, state, c);
|
||||
}
|
||||
};
|
||||
|
||||
const visitors = {};
|
||||
for (const nodeType of ObjectKeys(walk.base)) {
|
||||
const callback = visitorsWithoutAncestors[nodeType] || walk.base[nodeType];
|
||||
visitors[nodeType] = (node, state, c) => {
|
||||
const isNew = node !== state.ancestors[state.ancestors.length - 1];
|
||||
if (isNew) {
|
||||
ArrayPrototypePush(state.ancestors, node);
|
||||
}
|
||||
callback(node, state, c);
|
||||
if (isNew) {
|
||||
ArrayPrototypePop(state.ancestors);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function processTopLevelAwait(src) {
|
||||
const wrapPrefix = '(async () => { ';
|
||||
const wrapped = `${wrapPrefix}${src} })()`;
|
||||
const wrappedArray = ArrayFrom(wrapped);
|
||||
let root;
|
||||
try {
|
||||
root = parser.parse(wrapped, { ecmaVersion: 'latest' });
|
||||
} catch (e) {
|
||||
if (StringPrototypeStartsWith(e.message, 'Unterminated '))
|
||||
throw new Recoverable(e);
|
||||
// If the parse error is before the first "await", then use the execution
|
||||
// error. Otherwise we must emit this parse error, making it look like a
|
||||
// proper syntax error.
|
||||
const awaitPos = StringPrototypeIndexOf(src, 'await');
|
||||
const errPos = e.pos - wrapPrefix.length;
|
||||
if (awaitPos > errPos)
|
||||
return null;
|
||||
// Convert keyword parse errors on await into their original errors when
|
||||
// possible.
|
||||
if (errPos === awaitPos + 6 &&
|
||||
StringPrototypeIncludes(e.message, 'Expecting Unicode escape sequence'))
|
||||
return null;
|
||||
if (errPos === awaitPos + 7 &&
|
||||
StringPrototypeIncludes(e.message, 'Unexpected token'))
|
||||
return null;
|
||||
const line = e.loc.line;
|
||||
const column = line === 1 ? e.loc.column - wrapPrefix.length : e.loc.column;
|
||||
let message = '\n' + StringPrototypeSplit(src, '\n')[line - 1] + '\n' +
|
||||
StringPrototypeRepeat(' ', column) +
|
||||
'^\n\n' + RegExpPrototypeSymbolReplace(/ \([^)]+\)/, e.message, '');
|
||||
// V8 unexpected token errors include the token string.
|
||||
if (StringPrototypeEndsWith(message, 'Unexpected token'))
|
||||
message += " '" +
|
||||
// Wrapper end may cause acorn to report error position after the source
|
||||
((src.length - 1) >= (e.pos - wrapPrefix.length)
|
||||
? src[e.pos - wrapPrefix.length]
|
||||
: src[src.length - 1]) +
|
||||
"'";
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
throw new SyntaxError(message);
|
||||
}
|
||||
const body = root.body[0].expression.callee.body;
|
||||
const state = {
|
||||
body,
|
||||
ancestors: [],
|
||||
hoistedDeclarationStatements: [],
|
||||
replace(from, to, str) {
|
||||
for (let i = from; i < to; i++) {
|
||||
wrappedArray[i] = '';
|
||||
}
|
||||
if (from === to) str += wrappedArray[from];
|
||||
wrappedArray[from] = str;
|
||||
},
|
||||
prepend(node, str) {
|
||||
wrappedArray[node.start] = str + wrappedArray[node.start];
|
||||
},
|
||||
append(node, str) {
|
||||
wrappedArray[node.end - 1] += str;
|
||||
},
|
||||
containsAwait: false,
|
||||
containsReturn: false
|
||||
};
|
||||
|
||||
walk.recursive(body, state, visitors);
|
||||
|
||||
// Do not transform if
|
||||
// 1. False alarm: there isn't actually an await expression.
|
||||
// 2. There is a top-level return, which is not allowed.
|
||||
if (!state.containsAwait || state.containsReturn) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const last = body.body[body.body.length - 1];
|
||||
if (last.type === 'ExpressionStatement') {
|
||||
// For an expression statement of the form
|
||||
// ( expr ) ;
|
||||
// ^^^^^^^^^^ // last
|
||||
// ^^^^ // last.expression
|
||||
//
|
||||
// We do not want the left parenthesis before the `return` keyword;
|
||||
// therefore we prepend the `return (` to `last`.
|
||||
//
|
||||
// On the other hand, we do not want the right parenthesis after the
|
||||
// semicolon. Since there can only be more right parentheses between
|
||||
// last.expression.end and the semicolon, appending one more to
|
||||
// last.expression should be fine.
|
||||
state.prepend(last, 'return (');
|
||||
state.append(last.expression, ')');
|
||||
}
|
||||
|
||||
return (
|
||||
ArrayPrototypeJoin(state.hoistedDeclarationStatements, '') +
|
||||
ArrayPrototypeJoin(wrappedArray, '')
|
||||
);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
processTopLevelAwait
|
||||
};
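// A minimal usage sketch (the inputs below are illustrative, not from ts-node):
// processTopLevelAwait() rewrites REPL input containing top-level `await` into an
// async IIFE so the REPL can evaluate it, and returns null when no rewrite is needed.
if (require.main === module) {
  // Expected: "(async () => { return (await Promise.resolve(1)) })()"
  console.log(processTopLevelAwait('await Promise.resolve(1)'));
  // Expected: null -- no top-level await, so the caller evaluates the source as-is.
  console.log(processTopLevelAwait('1 + 1'));
}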
|
58
node_modules/ts-node/dist-raw/node-internalBinding-fs.js
generated
vendored
Normal file
58
node_modules/ts-node/dist-raw/node-internalBinding-fs.js
generated
vendored
Normal file
@ -0,0 +1,58 @@
|
||||
const fs = require('fs');
|
||||
const {versionGteLt} = require('../dist/util');
|
||||
|
||||
// In node's core, this is implemented in C
|
||||
// https://github.com/nodejs/node/blob/v15.3.0/src/node_file.cc#L891-L985
|
||||
/**
|
||||
* @param {string} path
|
||||
* @returns {[] | [string, boolean]}
|
||||
*/
|
||||
function internalModuleReadJSON(path) {
|
||||
let string;
|
||||
try {
|
||||
string = fs.readFileSync(path, 'utf8');
|
||||
} catch (e) {
|
||||
if (e.code === 'ENOENT') return [];
|
||||
throw e;
|
||||
}
|
||||
// Node's implementation checks for the presence of relevant keys: main, name, type, exports, imports
|
||||
// Node does this for performance to skip unnecessary parsing.
|
||||
// This would slow us down and, based on our usage, we can skip it.
|
||||
const containsKeys = true;
|
||||
return [string, containsKeys];
|
||||
}
|
||||
|
||||
// In node's core, this is implemented in C
|
||||
// https://github.com/nodejs/node/blob/63e7dc1e5c71b70c80ed9eda230991edb00811e2/src/node_file.cc#L987-L1005
|
||||
/**
|
||||
* @param {string} path
|
||||
* @returns {number} 0 = file, 1 = dir, negative = error
|
||||
*/
|
||||
function internalModuleStat(path) {
|
||||
const stat = fs.statSync(path, { throwIfNoEntry: false });
|
||||
if(!stat) return -1;
|
||||
if(stat.isFile()) return 0;
|
||||
if(stat.isDirectory()) return 1;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} path
|
||||
* @returns {number} 0 = file, 1 = dir, negative = error
|
||||
*/
|
||||
function internalModuleStatInefficient(path) {
|
||||
try {
|
||||
const stat = fs.statSync(path);
|
||||
if(stat.isFile()) return 0;
|
||||
if(stat.isDirectory()) return 1;
|
||||
} catch(e) {
|
||||
return -e.errno || -1;
|
||||
}
|
||||
}
|
||||
|
||||
const statSupportsThrowIfNoEntry = versionGteLt(process.versions.node, '15.3.0') ||
|
||||
versionGteLt(process.versions.node, '14.17.0', '15.0.0');
|
||||
|
||||
module.exports = {
|
||||
internalModuleReadJSON,
|
||||
internalModuleStat: statSupportsThrowIfNoEntry ? internalModuleStat : internalModuleStatInefficient
|
||||
};
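// A minimal usage sketch (the missing path below is hypothetical):
if (require.main === module) {
  console.log(internalModuleReadJSON('/no/such/package.json')); // [] when the file is missing
  console.log(module.exports.internalModuleStat(__dirname));    // 1 (directory)
  console.log(module.exports.internalModuleStat(__filename));   // 0 (file)
}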
|
9
node_modules/ts-node/dist-raw/node-nativemodule.js
generated
vendored
Normal file
9
node_modules/ts-node/dist-raw/node-nativemodule.js
generated
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
|
||||
// Node imports this from 'internal/bootstrap/loaders'
|
||||
const Module = require('module');
|
||||
const NativeModule = {
|
||||
canBeRequiredByUsers(specifier) {
|
||||
return Module.builtinModules.includes(specifier)
|
||||
}
|
||||
};
|
||||
exports.NativeModule = NativeModule;
|
103
node_modules/ts-node/dist-raw/node-options.js
generated
vendored
Normal file
103
node_modules/ts-node/dist-raw/node-options.js
generated
vendored
Normal file
@ -0,0 +1,103 @@
|
||||
// Replacement for node's internal 'internal/options' module
|
||||
|
||||
exports.getOptionValue = getOptionValue;
|
||||
function getOptionValue(opt) {
|
||||
parseOptions();
|
||||
return options[opt];
|
||||
}
|
||||
|
||||
let options;
|
||||
function parseOptions() {
|
||||
if (!options) {
|
||||
options = {
|
||||
'--preserve-symlinks': false,
|
||||
'--preserve-symlinks-main': false,
|
||||
'--input-type': undefined,
|
||||
'--experimental-specifier-resolution': 'explicit',
|
||||
'--experimental-policy': undefined,
|
||||
'--conditions': [],
|
||||
'--pending-deprecation': false,
|
||||
...parseArgv(getNodeOptionsEnvArgv()),
|
||||
...parseArgv(process.execArgv),
|
||||
...getOptionValuesFromOtherEnvVars()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function parseArgv(argv) {
|
||||
return require('arg')({
|
||||
'--preserve-symlinks': Boolean,
|
||||
'--preserve-symlinks-main': Boolean,
|
||||
'--input-type': String,
|
||||
'--experimental-specifier-resolution': String,
|
||||
// Legacy alias for node versions prior to 12.16
|
||||
'--es-module-specifier-resolution': '--experimental-specifier-resolution',
|
||||
'--experimental-policy': String,
|
||||
'--conditions': [String],
|
||||
'--pending-deprecation': Boolean,
|
||||
'--experimental-json-modules': Boolean,
|
||||
'--experimental-wasm-modules': Boolean,
|
||||
}, {
|
||||
argv,
|
||||
permissive: true
|
||||
});
|
||||
}
|
||||
|
||||
function getNodeOptionsEnvArgv() {
|
||||
const errors = [];
|
||||
const envArgv = ParseNodeOptionsEnvVar(process.env.NODE_OPTIONS || '', errors);
|
||||
if (errors.length !== 0) {
|
||||
// TODO: handle errors somehow
|
||||
}
|
||||
return envArgv;
|
||||
}
|
||||
|
||||
// Direct JS port of C implementation: https://github.com/nodejs/node/blob/67ba825037b4082d5d16f922fb9ce54516b4a869/src/node_options.cc#L1024-L1063
|
||||
function ParseNodeOptionsEnvVar(node_options, errors) {
|
||||
const env_argv = [];
|
||||
|
||||
let is_in_string = false;
|
||||
let will_start_new_arg = true;
|
||||
for (let index = 0; index < node_options.length; ++index) {
|
||||
let c = node_options[index];
|
||||
|
||||
// Backslashes escape the following character
|
||||
if (c === '\\' && is_in_string) {
|
||||
if (index + 1 === node_options.length) {
|
||||
errors.push("invalid value for NODE_OPTIONS " +
|
||||
"(invalid escape)\n");
|
||||
return env_argv;
|
||||
} else {
|
||||
c = node_options[++index];
|
||||
}
|
||||
} else if (c === ' ' && !is_in_string) {
|
||||
will_start_new_arg = true;
|
||||
continue;
|
||||
} else if (c === '"') {
|
||||
is_in_string = !is_in_string;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (will_start_new_arg) {
|
||||
env_argv.push(c);
|
||||
will_start_new_arg = false;
|
||||
} else {
|
||||
env_argv[env_argv.length - 1] += c;
|
||||
}
|
||||
}
|
||||
|
||||
if (is_in_string) {
|
||||
errors.push("invalid value for NODE_OPTIONS " +
|
||||
"(unterminated string)\n");
|
||||
}
|
||||
return env_argv;
|
||||
}
|
||||
|
||||
// Get option values that can be specified via env vars besides NODE_OPTIONS
|
||||
function getOptionValuesFromOtherEnvVars() {
|
||||
const options = {};
|
||||
if(process.env.NODE_PENDING_DEPRECATION === '1') {
|
||||
options['--pending-deprecation'] = true;
|
||||
}
|
||||
return options;
|
||||
}
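// Worked example of the NODE_OPTIONS tokenizer above (the flag values are hypothetical):
//   ParseNodeOptionsEnvVar('--require "/app dir/hook.js" --conditions=dev', [])
//   -> ['--require', '/app dir/hook.js', '--conditions=dev']
// Double quotes group a spaced argument; backslash escapes apply only inside quotes.
// Consumers normally go through the exported accessor instead:
if (require.main === module) {
  console.log(getOptionValue('--preserve-symlinks'));                  // false unless set via execArgv/NODE_OPTIONS
  console.log(getOptionValue('--experimental-specifier-resolution'));  // 'explicit' by default
}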
|
37
node_modules/ts-node/dist-raw/node-primordials.js
generated
vendored
Normal file
37
node_modules/ts-node/dist-raw/node-primordials.js
generated
vendored
Normal file
@ -0,0 +1,37 @@
|
||||
module.exports = {
|
||||
ArrayFrom: Array.from,
|
||||
ArrayIsArray: Array.isArray,
|
||||
ArrayPrototypeShift: (obj) => Array.prototype.shift.call(obj),
|
||||
ArrayPrototypeForEach: (arr, ...rest) => Array.prototype.forEach.apply(arr, rest),
|
||||
ArrayPrototypeIncludes: (arr, ...rest) => Array.prototype.includes.apply(arr, rest),
|
||||
ArrayPrototypeJoin: (arr, ...rest) => Array.prototype.join.apply(arr, rest),
|
||||
ArrayPrototypePop: (arr, ...rest) => Array.prototype.pop.apply(arr, rest),
|
||||
ArrayPrototypePush: (arr, ...rest) => Array.prototype.push.apply(arr, rest),
|
||||
FunctionPrototype: Function.prototype,
|
||||
JSONParse: JSON.parse,
|
||||
JSONStringify: JSON.stringify,
|
||||
ObjectFreeze: Object.freeze,
|
||||
ObjectKeys: Object.keys,
|
||||
ObjectGetOwnPropertyNames: Object.getOwnPropertyNames,
|
||||
ObjectDefineProperty: Object.defineProperty,
|
||||
ObjectPrototypeHasOwnProperty: (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop),
|
||||
RegExpPrototypeExec: (obj, string) => RegExp.prototype.exec.call(obj, string),
|
||||
RegExpPrototypeTest: (obj, string) => RegExp.prototype.test.call(obj, string),
|
||||
RegExpPrototypeSymbolReplace: (obj, ...rest) => RegExp.prototype[Symbol.replace].apply(obj, rest),
|
||||
SafeMap: Map,
|
||||
SafeSet: Set,
|
||||
SafeWeakMap: WeakMap,
|
||||
StringPrototypeEndsWith: (str, ...rest) => String.prototype.endsWith.apply(str, rest),
|
||||
StringPrototypeIncludes: (str, ...rest) => String.prototype.includes.apply(str, rest),
|
||||
StringPrototypeLastIndexOf: (str, ...rest) => String.prototype.lastIndexOf.apply(str, rest),
|
||||
StringPrototypeIndexOf: (str, ...rest) => String.prototype.indexOf.apply(str, rest),
|
||||
StringPrototypeRepeat: (str, ...rest) => String.prototype.repeat.apply(str, rest),
|
||||
StringPrototypeReplace: (str, ...rest) => String.prototype.replace.apply(str, rest),
|
||||
StringPrototypeSlice: (str, ...rest) => String.prototype.slice.apply(str, rest),
|
||||
StringPrototypeSplit: (str, ...rest) => String.prototype.split.apply(str, rest),
|
||||
StringPrototypeStartsWith: (str, ...rest) => String.prototype.startsWith.apply(str, rest),
|
||||
StringPrototypeSubstr: (str, ...rest) => String.prototype.substr.apply(str, rest),
|
||||
StringPrototypeCharCodeAt: (str, ...rest) => String.prototype.charCodeAt.apply(str, rest),
|
||||
StringPrototypeMatch: (str, ...rest) => String.prototype.match.apply(str, rest),
|
||||
SyntaxError: SyntaxError
|
||||
};
|
9
node_modules/ts-node/dist-raw/runmain-hack.js
generated
vendored
Normal file
9
node_modules/ts-node/dist-raw/runmain-hack.js
generated
vendored
Normal file
@ -0,0 +1,9 @@
|
||||
const {pathToFileURL} = require('url');
|
||||
|
||||
// Hack to avoid Module.runMain on node 18.6.0
|
||||
// Keeping it simple for now, isolated in this file.
|
||||
// Could theoretically probe `getFormat` impl to determine if `import()` or `Module._load()` is best
|
||||
// Note that I attempted a try-catch around `Module._load`, but it poisons some sort of cache such that subsequent `import()` is impossible.
|
||||
exports.run = function(entryPointPath) {
|
||||
import(pathToFileURL(entryPointPath));
|
||||
}
|
2
node_modules/ts-node/dist/bin-cwd.d.ts
generated
vendored
Normal file
2
node_modules/ts-node/dist/bin-cwd.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
#!/usr/bin/env node
|
||||
export {};
|
6
node_modules/ts-node/dist/bin-cwd.js
generated
vendored
Executable file
6
node_modules/ts-node/dist/bin-cwd.js
generated
vendored
Executable file
@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const bin_1 = require("./bin");
|
||||
(0, bin_1.main)(undefined, { '--cwdMode': true });
|
||||
//# sourceMappingURL=bin-cwd.js.map
|
1
node_modules/ts-node/dist/bin-cwd.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/bin-cwd.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"bin-cwd.js","sourceRoot":"","sources":["../src/bin-cwd.ts"],"names":[],"mappings":";;;AAEA,+BAA6B;AAE7B,IAAA,UAAI,EAAC,SAAS,EAAE,EAAE,WAAW,EAAE,IAAI,EAAE,CAAC,CAAC","sourcesContent":["#!/usr/bin/env node\n\nimport { main } from './bin';\n\nmain(undefined, { '--cwdMode': true });\n"]}
|
2
node_modules/ts-node/dist/bin-esm.d.ts
generated
vendored
Normal file
2
node_modules/ts-node/dist/bin-esm.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
#!/usr/bin/env node
|
||||
export {};
|
6
node_modules/ts-node/dist/bin-esm.js
generated
vendored
Executable file
6
node_modules/ts-node/dist/bin-esm.js
generated
vendored
Executable file
@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const bin_1 = require("./bin");
|
||||
(0, bin_1.main)(undefined, { '--esm': true });
|
||||
//# sourceMappingURL=bin-esm.js.map
|
1
node_modules/ts-node/dist/bin-esm.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/bin-esm.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"bin-esm.js","sourceRoot":"","sources":["../src/bin-esm.ts"],"names":[],"mappings":";;;AAEA,+BAA6B;AAE7B,IAAA,UAAI,EAAC,SAAS,EAAE,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC,CAAC","sourcesContent":["#!/usr/bin/env node\n\nimport { main } from './bin';\n\nmain(undefined, { '--esm': true });\n"]}
|
2
node_modules/ts-node/dist/bin-script-deprecated.d.ts
generated
vendored
Normal file
2
node_modules/ts-node/dist/bin-script-deprecated.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
#!/usr/bin/env node
|
||||
export {};
|
7
node_modules/ts-node/dist/bin-script-deprecated.js
generated
vendored
Executable file
7
node_modules/ts-node/dist/bin-script-deprecated.js
generated
vendored
Executable file
@ -0,0 +1,7 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const bin_1 = require("./bin");
|
||||
console.warn('ts-script has been deprecated and will be removed in the next major release.', 'Please use ts-node-script instead');
|
||||
(0, bin_1.main)(undefined, { '--scriptMode': true });
|
||||
//# sourceMappingURL=bin-script-deprecated.js.map
|
1
node_modules/ts-node/dist/bin-script-deprecated.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/bin-script-deprecated.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"bin-script-deprecated.js","sourceRoot":"","sources":["../src/bin-script-deprecated.ts"],"names":[],"mappings":";;;AAEA,+BAA6B;AAE7B,OAAO,CAAC,IAAI,CACV,8EAA8E,EAC9E,mCAAmC,CACpC,CAAC;AAEF,IAAA,UAAI,EAAC,SAAS,EAAE,EAAE,cAAc,EAAE,IAAI,EAAE,CAAC,CAAC","sourcesContent":["#!/usr/bin/env node\n\nimport { main } from './bin';\n\nconsole.warn(\n 'ts-script has been deprecated and will be removed in the next major release.',\n 'Please use ts-node-script instead'\n);\n\nmain(undefined, { '--scriptMode': true });\n"]}
|
2
node_modules/ts-node/dist/bin-script.d.ts
generated
vendored
Normal file
2
node_modules/ts-node/dist/bin-script.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
#!/usr/bin/env node
|
||||
export {};
|
6
node_modules/ts-node/dist/bin-script.js
generated
vendored
Executable file
6
node_modules/ts-node/dist/bin-script.js
generated
vendored
Executable file
@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const bin_1 = require("./bin");
|
||||
(0, bin_1.main)(undefined, { '--scriptMode': true });
|
||||
//# sourceMappingURL=bin-script.js.map
|
1
node_modules/ts-node/dist/bin-script.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/bin-script.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"bin-script.js","sourceRoot":"","sources":["../src/bin-script.ts"],"names":[],"mappings":";;;AAEA,+BAA6B;AAE7B,IAAA,UAAI,EAAC,SAAS,EAAE,EAAE,cAAc,EAAE,IAAI,EAAE,CAAC,CAAC","sourcesContent":["#!/usr/bin/env node\n\nimport { main } from './bin';\n\nmain(undefined, { '--scriptMode': true });\n"]}
|
2
node_modules/ts-node/dist/bin-transpile.d.ts
generated
vendored
Normal file
2
node_modules/ts-node/dist/bin-transpile.d.ts
generated
vendored
Normal file
@ -0,0 +1,2 @@
|
||||
#!/usr/bin/env node
|
||||
export {};
|
6
node_modules/ts-node/dist/bin-transpile.js
generated
vendored
Executable file
6
node_modules/ts-node/dist/bin-transpile.js
generated
vendored
Executable file
@ -0,0 +1,6 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const bin_1 = require("./bin");
|
||||
(0, bin_1.main)(undefined, { '--transpileOnly': true });
|
||||
//# sourceMappingURL=bin-transpile.js.map
|
1
node_modules/ts-node/dist/bin-transpile.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/bin-transpile.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"bin-transpile.js","sourceRoot":"","sources":["../src/bin-transpile.ts"],"names":[],"mappings":";;;AAEA,+BAA6B;AAE7B,IAAA,UAAI,EAAC,SAAS,EAAE,EAAE,iBAAiB,EAAE,IAAI,EAAE,CAAC,CAAC","sourcesContent":["#!/usr/bin/env node\n\nimport { main } from './bin';\n\nmain(undefined, { '--transpileOnly': true });\n"]}
|
11
node_modules/ts-node/dist/bin.d.ts
generated
vendored
Normal file
11
node_modules/ts-node/dist/bin.d.ts
generated
vendored
Normal file
@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Main `bin` functionality.
|
||||
*
|
||||
* This file is split into a chain of functions (phases), each one adding to a shared state object.
|
||||
* This is done so that the next function can either be invoked in-process or, if necessary, invoked in a child process.
|
||||
*
|
||||
* The functions are intentionally given uncreative names and left in the same order as the original code, to make a
|
||||
* smaller git diff.
|
||||
*/
|
||||
export declare function main(argv?: string[], entrypointArgs?: Record<string, any>): void;
|
581
node_modules/ts-node/dist/bin.js
generated
vendored
Executable file
581
node_modules/ts-node/dist/bin.js
generated
vendored
Executable file
@ -0,0 +1,581 @@
|
||||
#!/usr/bin/env node
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.bootstrap = exports.main = void 0;
|
||||
const path_1 = require("path");
|
||||
const util_1 = require("util");
|
||||
const Module = require("module");
|
||||
let arg;
|
||||
const util_2 = require("./util");
|
||||
const repl_1 = require("./repl");
|
||||
const index_1 = require("./index");
|
||||
const node_internal_modules_cjs_helpers_1 = require("../dist-raw/node-internal-modules-cjs-helpers");
|
||||
const spawn_child_1 = require("./child/spawn-child");
|
||||
const configuration_1 = require("./configuration");
|
||||
/**
|
||||
* Main `bin` functionality.
|
||||
*
|
||||
* This file is split into a chain of functions (phases), each one adding to a shared state object.
|
||||
* This is done so that the next function can either be invoked in-process or, if necessary, invoked in a child process.
|
||||
*
|
||||
* The functions are intentionally given uncreative names and left in the same order as the original code, to make a
|
||||
* smaller git diff.
|
||||
*/
|
||||
function main(argv = process.argv.slice(2), entrypointArgs = {}) {
|
||||
const args = parseArgv(argv, entrypointArgs);
|
||||
const state = {
|
||||
shouldUseChildProcess: false,
|
||||
isInChildProcess: false,
|
||||
isCli: true,
|
||||
tsNodeScript: __filename,
|
||||
parseArgvResult: args,
|
||||
};
|
||||
return bootstrap(state);
|
||||
}
|
||||
exports.main = main;
|
||||
/** @internal */
|
||||
function bootstrap(state) {
|
||||
if (!state.phase2Result) {
|
||||
state.phase2Result = phase2(state);
|
||||
if (state.shouldUseChildProcess && !state.isInChildProcess) {
|
||||
// Note: When transitioning into the child-process after `phase2`,
|
||||
// the updated working directory needs to be preserved.
|
||||
return (0, spawn_child_1.callInChild)(state);
|
||||
}
|
||||
}
|
||||
if (!state.phase3Result) {
|
||||
state.phase3Result = phase3(state);
|
||||
if (state.shouldUseChildProcess && !state.isInChildProcess) {
|
||||
// Note: When transitioning into the child-process after `phase3`,
|
||||
// the updated working directory needs to be preserved.
|
||||
return (0, spawn_child_1.callInChild)(state);
|
||||
}
|
||||
}
|
||||
return phase4(state);
|
||||
}
|
||||
exports.bootstrap = bootstrap;
|
||||
function parseArgv(argv, entrypointArgs) {
|
||||
arg !== null && arg !== void 0 ? arg : (arg = require('arg'));
|
||||
// HACK: technically, this function is not marked @internal so it's possible
|
||||
// that libraries in the wild are doing `require('ts-node/dist/bin').main({'--transpile-only': true})`
|
||||
// We can mark this function @internal in next major release.
|
||||
// For now, rewrite args to avoid a breaking change.
|
||||
entrypointArgs = { ...entrypointArgs };
|
||||
for (const key of Object.keys(entrypointArgs)) {
|
||||
entrypointArgs[key.replace(/([a-z])-([a-z])/g, (_$0, $1, $2) => `${$1}${$2.toUpperCase()}`)] = entrypointArgs[key];
|
||||
}
|
||||
const args = {
|
||||
...entrypointArgs,
|
||||
...arg({
|
||||
// Node.js-like options.
|
||||
'--eval': String,
|
||||
'--interactive': Boolean,
|
||||
'--print': Boolean,
|
||||
'--require': [String],
|
||||
// CLI options.
|
||||
'--help': Boolean,
|
||||
'--cwdMode': Boolean,
|
||||
'--scriptMode': Boolean,
|
||||
'--version': arg.COUNT,
|
||||
'--showConfig': Boolean,
|
||||
'--esm': Boolean,
|
||||
// Project options.
|
||||
'--cwd': String,
|
||||
'--files': Boolean,
|
||||
'--compiler': String,
|
||||
'--compilerOptions': util_2.parse,
|
||||
'--project': String,
|
||||
'--ignoreDiagnostics': [String],
|
||||
'--ignore': [String],
|
||||
'--transpileOnly': Boolean,
|
||||
'--transpiler': String,
|
||||
'--swc': Boolean,
|
||||
'--typeCheck': Boolean,
|
||||
'--compilerHost': Boolean,
|
||||
'--pretty': Boolean,
|
||||
'--skipProject': Boolean,
|
||||
'--skipIgnore': Boolean,
|
||||
'--preferTsExts': Boolean,
|
||||
'--logError': Boolean,
|
||||
'--emit': Boolean,
|
||||
'--scope': Boolean,
|
||||
'--scopeDir': String,
|
||||
'--noExperimentalReplAwait': Boolean,
|
||||
'--experimentalSpecifierResolution': String,
|
||||
// Aliases.
|
||||
'-e': '--eval',
|
||||
'-i': '--interactive',
|
||||
'-p': '--print',
|
||||
'-r': '--require',
|
||||
'-h': '--help',
|
||||
'-s': '--script-mode',
|
||||
'-v': '--version',
|
||||
'-T': '--transpileOnly',
|
||||
'-H': '--compilerHost',
|
||||
'-I': '--ignore',
|
||||
'-P': '--project',
|
||||
'-C': '--compiler',
|
||||
'-D': '--ignoreDiagnostics',
|
||||
'-O': '--compilerOptions',
|
||||
'--dir': '--cwd',
|
||||
// Support both tsc-style camelCase and node-style hyphen-case for *all* flags
|
||||
'--cwd-mode': '--cwdMode',
|
||||
'--script-mode': '--scriptMode',
|
||||
'--show-config': '--showConfig',
|
||||
'--compiler-options': '--compilerOptions',
|
||||
'--ignore-diagnostics': '--ignoreDiagnostics',
|
||||
'--transpile-only': '--transpileOnly',
|
||||
'--type-check': '--typeCheck',
|
||||
'--compiler-host': '--compilerHost',
|
||||
'--skip-project': '--skipProject',
|
||||
'--skip-ignore': '--skipIgnore',
|
||||
'--prefer-ts-exts': '--preferTsExts',
|
||||
'--log-error': '--logError',
|
||||
'--scope-dir': '--scopeDir',
|
||||
'--no-experimental-repl-await': '--noExperimentalReplAwait',
|
||||
'--experimental-specifier-resolution': '--experimentalSpecifierResolution',
|
||||
}, {
|
||||
argv,
|
||||
stopAtPositional: true,
|
||||
}),
|
||||
};
|
||||
// Only setting defaults for CLI-specific flags
|
||||
// Anything passed to `register()` can be `undefined`; `create()` will apply
|
||||
// defaults.
|
||||
const { '--cwd': cwdArg, '--help': help = false, '--scriptMode': scriptMode, '--cwdMode': cwdMode, '--version': version = 0, '--showConfig': showConfig, '--require': argsRequire = [], '--eval': code = undefined, '--print': print = false, '--interactive': interactive = false, '--files': files, '--compiler': compiler, '--compilerOptions': compilerOptions, '--project': project, '--ignoreDiagnostics': ignoreDiagnostics, '--ignore': ignore, '--transpileOnly': transpileOnly, '--typeCheck': typeCheck, '--transpiler': transpiler, '--swc': swc, '--compilerHost': compilerHost, '--pretty': pretty, '--skipProject': skipProject, '--skipIgnore': skipIgnore, '--preferTsExts': preferTsExts, '--logError': logError, '--emit': emit, '--scope': scope = undefined, '--scopeDir': scopeDir = undefined, '--noExperimentalReplAwait': noExperimentalReplAwait, '--experimentalSpecifierResolution': experimentalSpecifierResolution, '--esm': esm, _: restArgs, } = args;
|
||||
return {
|
||||
// Note: argv and restArgs may be overwritten by child process
|
||||
argv: process.argv,
|
||||
restArgs,
|
||||
cwdArg,
|
||||
help,
|
||||
scriptMode,
|
||||
cwdMode,
|
||||
version,
|
||||
showConfig,
|
||||
argsRequire,
|
||||
code,
|
||||
print,
|
||||
interactive,
|
||||
files,
|
||||
compiler,
|
||||
compilerOptions,
|
||||
project,
|
||||
ignoreDiagnostics,
|
||||
ignore,
|
||||
transpileOnly,
|
||||
typeCheck,
|
||||
transpiler,
|
||||
swc,
|
||||
compilerHost,
|
||||
pretty,
|
||||
skipProject,
|
||||
skipIgnore,
|
||||
preferTsExts,
|
||||
logError,
|
||||
emit,
|
||||
scope,
|
||||
scopeDir,
|
||||
noExperimentalReplAwait,
|
||||
experimentalSpecifierResolution,
|
||||
esm,
|
||||
};
|
||||
}
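// Worked example of the camelCase rewrite above (the caller below is hypothetical):
//   parseArgv(argv, { '--transpile-only': true })
// adds '--transpileOnly': true alongside the original key before merging with arg(),
// so both the node-style and tsc-style spellings reach the flag destructuring above.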
|
||||
function phase2(payload) {
|
||||
const { help, version, cwdArg, esm } = payload.parseArgvResult;
|
||||
if (help) {
|
||||
console.log(`
|
||||
Usage: ts-node [options] [ -e script | script.ts ] [arguments]
|
||||
|
||||
Options:
|
||||
|
||||
-e, --eval [code] Evaluate code
|
||||
-p, --print Print result of \`--eval\`
|
||||
-r, --require [path] Require a node module before execution
|
||||
-i, --interactive Opens the REPL even if stdin does not appear to be a terminal
|
||||
|
||||
--esm Bootstrap with the ESM loader, enabling full ESM support
|
||||
--swc Use the faster swc transpiler
|
||||
|
||||
-h, --help Print CLI usage
|
||||
-v, --version Print module version information. -vvv to print additional information
|
||||
--showConfig Print resolved configuration and exit
|
||||
|
||||
-T, --transpileOnly Use TypeScript's faster \`transpileModule\` or a third-party transpiler
|
||||
-H, --compilerHost Use TypeScript's compiler host API
|
||||
-I, --ignore [pattern] Override the path patterns to skip compilation
|
||||
-P, --project [path] Path to TypeScript JSON project file
|
||||
-C, --compiler [name] Specify a custom TypeScript compiler
|
||||
--transpiler [name] Specify a third-party, non-typechecking transpiler
|
||||
-D, --ignoreDiagnostics [code] Ignore TypeScript warnings by diagnostic code
|
||||
-O, --compilerOptions [opts] JSON object to merge with compiler options
|
||||
|
||||
--cwd Behave as if invoked within this working directory.
|
||||
--files Load \`files\`, \`include\` and \`exclude\` from \`tsconfig.json\` on startup
|
||||
--pretty Use pretty diagnostic formatter (usually enabled by default)
|
||||
--cwdMode Use current directory instead of <script.ts> for config resolution
|
||||
--skipProject Skip reading \`tsconfig.json\`
|
||||
--skipIgnore Skip \`--ignore\` checks
|
||||
--emit Emit output files into \`.ts-node\` directory
|
||||
--scope Scope compiler to files within \`scopeDir\`. Anything outside this directory is ignored.
|
||||
--scopeDir Directory for \`--scope\`
|
||||
--preferTsExts Prefer importing TypeScript files over JavaScript files
|
||||
--logError Logs TypeScript errors to stderr instead of throwing exceptions
|
||||
--noExperimentalReplAwait Disable top-level await in REPL. Equivalent to node's --no-experimental-repl-await
|
||||
--experimentalSpecifierResolution [node|explicit]
|
||||
Equivalent to node's --experimental-specifier-resolution
|
||||
`);
|
||||
process.exit(0);
|
||||
}
|
||||
// Output project information.
|
||||
if (version === 1) {
|
||||
console.log(`v${index_1.VERSION}`);
|
||||
process.exit(0);
|
||||
}
|
||||
const cwd = cwdArg ? (0, path_1.resolve)(cwdArg) : process.cwd();
|
||||
// If ESM is explicitly enabled through the flag, stage3 should be run in a child process
|
||||
// with the ESM loaders configured.
|
||||
if (esm)
|
||||
payload.shouldUseChildProcess = true;
|
||||
return {
|
||||
cwd,
|
||||
};
|
||||
}
|
||||
function phase3(payload) {
|
||||
const { emit, files, pretty, transpileOnly, transpiler, noExperimentalReplAwait, typeCheck, swc, compilerHost, ignore, preferTsExts, logError, scriptMode, cwdMode, project, skipProject, skipIgnore, compiler, ignoreDiagnostics, compilerOptions, argsRequire, scope, scopeDir, esm, experimentalSpecifierResolution, } = payload.parseArgvResult;
|
||||
const { cwd } = payload.phase2Result;
|
||||
// NOTE: When we transition to a child process for ESM, the entry-point script determined
|
||||
// here might not be the one used later in `phase4`. This can happen when we execute the
|
||||
// original entry-point but then the process forks itself using e.g. `child_process.fork`.
|
||||
// We will always use the original TS project in forked processes anyway, so it is
|
||||
// expected and acceptable to retrieve the entry-point information here in `phase2`.
|
||||
// See: https://github.com/TypeStrong/ts-node/issues/1812.
|
||||
const { entryPointPath } = getEntryPointInfo(payload);
|
||||
const preloadedConfig = (0, configuration_1.findAndReadConfig)({
|
||||
cwd,
|
||||
emit,
|
||||
files,
|
||||
pretty,
|
||||
transpileOnly: (transpileOnly !== null && transpileOnly !== void 0 ? transpileOnly : transpiler != null) ? true : undefined,
|
||||
experimentalReplAwait: noExperimentalReplAwait ? false : undefined,
|
||||
typeCheck,
|
||||
transpiler,
|
||||
swc,
|
||||
compilerHost,
|
||||
ignore,
|
||||
logError,
|
||||
projectSearchDir: getProjectSearchDir(cwd, scriptMode, cwdMode, entryPointPath),
|
||||
project,
|
||||
skipProject,
|
||||
skipIgnore,
|
||||
compiler,
|
||||
ignoreDiagnostics,
|
||||
compilerOptions,
|
||||
require: argsRequire,
|
||||
scope,
|
||||
scopeDir,
|
||||
preferTsExts,
|
||||
esm,
|
||||
experimentalSpecifierResolution: experimentalSpecifierResolution,
|
||||
});
|
||||
// If ESM is enabled through the parsed tsconfig, stage4 should be run in a child
|
||||
// process with the ESM loaders configured.
|
||||
if (preloadedConfig.options.esm)
|
||||
payload.shouldUseChildProcess = true;
|
||||
return { preloadedConfig };
|
||||
}
|
||||
/**
|
||||
* Determines the entry-point information from the argv and phase2 result. This
|
||||
* method will be invoked in two places:
|
||||
*
|
||||
* 1. In phase 3 to be able to find a project from the potential entry-point script.
|
||||
* 2. In phase 4 to determine the actual entry-point script.
|
||||
*
|
||||
* Note that we need to explicitly re-resolve the entry-point information in the final
|
||||
* stage because the previous stage information could be modified when the bootstrap
|
||||
* invocation transitioned into a child process for ESM.
|
||||
*
|
||||
* Stages before (phase 4) can and will be cached by the child process through the Brotli
|
||||
* configuration and entry-point information is only reliable in the final phase. More
|
||||
* details can be found in here: https://github.com/TypeStrong/ts-node/issues/1812.
|
||||
*/
|
||||
function getEntryPointInfo(state) {
|
||||
const { code, interactive, restArgs } = state.parseArgvResult;
|
||||
const { cwd } = state.phase2Result;
|
||||
const { isCli } = state;
|
||||
// Figure out which we are executing: piped stdin, --eval, REPL, and/or entrypoint
|
||||
// This is complicated because node's behavior is complicated
|
||||
// `node -e code -i ./script.js` ignores -e
|
||||
const executeEval = code != null && !(interactive && restArgs.length);
|
||||
const executeEntrypoint = !executeEval && restArgs.length > 0;
|
||||
const executeRepl = !executeEntrypoint &&
|
||||
(interactive || (process.stdin.isTTY && !executeEval));
|
||||
const executeStdin = !executeEval && !executeRepl && !executeEntrypoint;
|
||||
/**
|
||||
* Unresolved. May point to a symlink, not realpath. May be missing file extension
|
||||
* NOTE: resolution relative to cwd option (not `process.cwd()`) is legacy backwards-compat; should be changed in next major: https://github.com/TypeStrong/ts-node/issues/1834
|
||||
*/
|
||||
const entryPointPath = executeEntrypoint
|
||||
? isCli
|
||||
? (0, path_1.resolve)(cwd, restArgs[0])
|
||||
: (0, path_1.resolve)(restArgs[0])
|
||||
: undefined;
|
||||
return {
|
||||
executeEval,
|
||||
executeEntrypoint,
|
||||
executeRepl,
|
||||
executeStdin,
|
||||
entryPointPath,
|
||||
};
|
||||
}
|
||||
function phase4(payload) {
|
||||
var _a, _b, _c, _d, _e, _f, _g;
|
||||
const { isInChildProcess, tsNodeScript } = payload;
|
||||
const { version, showConfig, restArgs, code, print, argv } = payload.parseArgvResult;
|
||||
const { cwd } = payload.phase2Result;
|
||||
const { preloadedConfig } = payload.phase3Result;
|
||||
const { entryPointPath, executeEntrypoint, executeEval, executeRepl, executeStdin, } = getEntryPointInfo(payload);
|
||||
let evalStuff;
|
||||
let replStuff;
|
||||
let stdinStuff;
|
||||
let evalAwarePartialHost = undefined;
|
||||
if (executeEval) {
|
||||
const state = new repl_1.EvalState((0, path_1.join)(cwd, repl_1.EVAL_FILENAME));
|
||||
evalStuff = {
|
||||
state,
|
||||
repl: (0, repl_1.createRepl)({
|
||||
state,
|
||||
composeWithEvalAwarePartialHost: evalAwarePartialHost,
|
||||
ignoreDiagnosticsThatAreAnnoyingInInteractiveRepl: false,
|
||||
}),
|
||||
};
|
||||
({ evalAwarePartialHost } = evalStuff.repl);
|
||||
// Create a local module instance based on `cwd`.
|
||||
const module = (evalStuff.module = new Module(repl_1.EVAL_NAME));
|
||||
module.filename = evalStuff.state.path;
|
||||
module.paths = Module._nodeModulePaths(cwd);
|
||||
}
|
||||
if (executeStdin) {
|
||||
const state = new repl_1.EvalState((0, path_1.join)(cwd, repl_1.STDIN_FILENAME));
|
||||
stdinStuff = {
|
||||
state,
|
||||
repl: (0, repl_1.createRepl)({
|
||||
state,
|
||||
composeWithEvalAwarePartialHost: evalAwarePartialHost,
|
||||
ignoreDiagnosticsThatAreAnnoyingInInteractiveRepl: false,
|
||||
}),
|
||||
};
|
||||
({ evalAwarePartialHost } = stdinStuff.repl);
|
||||
// Create a local module instance based on `cwd`.
|
||||
const module = (stdinStuff.module = new Module(repl_1.STDIN_NAME));
|
||||
module.filename = stdinStuff.state.path;
|
||||
module.paths = Module._nodeModulePaths(cwd);
|
||||
}
|
||||
if (executeRepl) {
|
||||
const state = new repl_1.EvalState((0, path_1.join)(cwd, repl_1.REPL_FILENAME));
|
||||
replStuff = {
|
||||
state,
|
||||
repl: (0, repl_1.createRepl)({
|
||||
state,
|
||||
composeWithEvalAwarePartialHost: evalAwarePartialHost,
|
||||
}),
|
||||
};
|
||||
({ evalAwarePartialHost } = replStuff.repl);
|
||||
}
|
||||
// Register the TypeScript compiler instance.
|
||||
const service = (0, index_1.createFromPreloadedConfig)({
|
||||
// Since this struct may have been marshalled across thread or process boundaries, we must restore
|
||||
// un-marshall-able values.
|
||||
...preloadedConfig,
|
||||
options: {
|
||||
...preloadedConfig.options,
|
||||
readFile: (_a = evalAwarePartialHost === null || evalAwarePartialHost === void 0 ? void 0 : evalAwarePartialHost.readFile) !== null && _a !== void 0 ? _a : undefined,
|
||||
fileExists: (_b = evalAwarePartialHost === null || evalAwarePartialHost === void 0 ? void 0 : evalAwarePartialHost.fileExists) !== null && _b !== void 0 ? _b : undefined,
|
||||
tsTrace: index_1.DEFAULTS.tsTrace,
|
||||
},
|
||||
});
|
||||
(0, index_1.register)(service);
|
||||
if (isInChildProcess)
|
||||
require('./child/child-loader').lateBindHooks((0, index_1.createEsmHooks)(service));
|
||||
// Bind REPL service to ts-node compiler service (chicken-and-egg problem)
|
||||
replStuff === null || replStuff === void 0 ? void 0 : replStuff.repl.setService(service);
|
||||
evalStuff === null || evalStuff === void 0 ? void 0 : evalStuff.repl.setService(service);
|
||||
stdinStuff === null || stdinStuff === void 0 ? void 0 : stdinStuff.repl.setService(service);
|
||||
// Output project information.
|
||||
if (version === 2) {
|
||||
console.log(`ts-node v${index_1.VERSION}`);
|
||||
console.log(`node ${process.version}`);
|
||||
console.log(`compiler v${service.ts.version}`);
|
||||
process.exit(0);
|
||||
}
|
||||
if (version >= 3) {
|
||||
console.log(`ts-node v${index_1.VERSION} ${(0, path_1.dirname)(__dirname)}`);
|
||||
console.log(`node ${process.version}`);
|
||||
console.log(`compiler v${service.ts.version} ${(_c = service.compilerPath) !== null && _c !== void 0 ? _c : ''}`);
|
||||
process.exit(0);
|
||||
}
|
||||
if (showConfig) {
|
||||
const ts = service.ts;
|
||||
if (typeof ts.convertToTSConfig !== 'function') {
|
||||
console.error('Error: --showConfig requires a TypeScript version >=3.2 that supports --showConfig');
|
||||
process.exit(1);
|
||||
}
|
||||
let moduleTypes = undefined;
|
||||
if (service.options.moduleTypes) {
|
||||
// Assumption: this codepath requires CLI invocation, so moduleTypes must have come from a tsconfig, not API.
|
||||
const showRelativeTo = (0, path_1.dirname)(service.configFilePath);
|
||||
moduleTypes = {};
|
||||
for (const [key, value] of Object.entries(service.options.moduleTypes)) {
|
||||
moduleTypes[(0, path_1.relative)(showRelativeTo, (0, path_1.resolve)((_d = service.options.optionBasePaths) === null || _d === void 0 ? void 0 : _d.moduleTypes, key))] = value;
|
||||
}
|
||||
}
|
||||
const json = {
|
||||
['ts-node']: {
|
||||
...service.options,
|
||||
require: ((_e = service.options.require) === null || _e === void 0 ? void 0 : _e.length)
|
||||
? service.options.require
|
||||
: undefined,
|
||||
moduleTypes,
|
||||
optionBasePaths: undefined,
|
||||
compilerOptions: undefined,
|
||||
project: (_f = service.configFilePath) !== null && _f !== void 0 ? _f : service.options.project,
|
||||
},
|
||||
...ts.convertToTSConfig(service.config, (_g = service.configFilePath) !== null && _g !== void 0 ? _g : (0, path_1.join)(cwd, 'ts-node-implicit-tsconfig.json'), service.ts.sys),
|
||||
};
|
||||
console.log(
|
||||
// Assumes that all configuration options which can possibly be specified via the CLI are JSON-compatible.
|
||||
// If, in the future, we must log functions, for example readFile and fileExists, then we can implement a JSON
|
||||
// replacer function.
|
||||
JSON.stringify(json, null, 2));
|
||||
process.exit(0);
|
||||
}
|
||||
// Prepend `ts-node` arguments to CLI for child processes.
|
||||
process.execArgv.push(tsNodeScript, ...argv.slice(2, argv.length - restArgs.length));
|
||||
// TODO this comes from BootstrapState
|
||||
process.argv = [process.argv[1]]
|
||||
.concat(executeEntrypoint ? [entryPointPath] : [])
|
||||
.concat(restArgs.slice(executeEntrypoint ? 1 : 0));
|
||||
// Execute the main contents (either eval, script or piped).
|
||||
if (executeEntrypoint) {
|
||||
if (payload.isInChildProcess &&
|
||||
(0, util_2.versionGteLt)(process.versions.node, '18.6.0')) {
|
||||
// HACK workaround node regression
|
||||
require('../dist-raw/runmain-hack.js').run(entryPointPath);
|
||||
}
|
||||
else {
|
||||
Module.runMain();
|
||||
}
|
||||
}
|
||||
else {
|
||||
// Note: eval and repl may both run, but never with stdin.
|
||||
// If stdin runs, eval and repl will not.
|
||||
if (executeEval) {
|
||||
(0, node_internal_modules_cjs_helpers_1.addBuiltinLibsToObject)(global);
|
||||
evalAndExitOnTsError(evalStuff.repl, evalStuff.module, code, print, 'eval');
|
||||
}
|
||||
if (executeRepl) {
|
||||
replStuff.repl.start();
|
||||
}
|
||||
if (executeStdin) {
|
||||
let buffer = code || '';
|
||||
process.stdin.on('data', (chunk) => (buffer += chunk));
|
||||
process.stdin.on('end', () => {
|
||||
evalAndExitOnTsError(stdinStuff.repl, stdinStuff.module, buffer,
|
||||
// `echo 123 | node -p` still prints 123
|
||||
print, 'stdin');
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Get project search path from args.
|
||||
*/
|
||||
function getProjectSearchDir(cwd, scriptMode, cwdMode, scriptPath) {
|
||||
// Validate `--script-mode` / `--cwd-mode` / `--cwd` usage is correct.
|
||||
if (scriptMode && cwdMode) {
|
||||
throw new TypeError('--cwd-mode cannot be combined with --script-mode');
|
||||
}
|
||||
if (scriptMode && !scriptPath) {
|
||||
throw new TypeError('--script-mode must be used with a script name, e.g. `ts-node --script-mode <script.ts>`');
|
||||
}
|
||||
const doScriptMode = scriptMode === true ? true : cwdMode === true ? false : !!scriptPath;
|
||||
if (doScriptMode) {
|
||||
// Use node's own resolution behavior to ensure we follow symlinks.
|
||||
// scriptPath may omit file extension or point to a directory with or without package.json.
|
||||
// This happens before we are registered, so we tell node's resolver to consider ts, tsx, and jsx files.
|
||||
// In extremely rare cases, it is technically possible to resolve the wrong directory,
|
||||
// because we do not yet know preferTsExts, jsx, nor allowJs.
|
||||
// See also, justification why this will not happen in real-world situations:
|
||||
// https://github.com/TypeStrong/ts-node/pull/1009#issuecomment-613017081
|
||||
const exts = ['.js', '.jsx', '.ts', '.tsx'];
|
||||
const extsTemporarilyInstalled = [];
|
||||
for (const ext of exts) {
|
||||
if (!(0, util_2.hasOwnProperty)(require.extensions, ext)) {
|
||||
extsTemporarilyInstalled.push(ext);
|
||||
require.extensions[ext] = function () { };
|
||||
}
|
||||
}
|
||||
try {
|
||||
return (0, path_1.dirname)(requireResolveNonCached(scriptPath));
|
||||
}
|
||||
finally {
|
||||
for (const ext of extsTemporarilyInstalled) {
|
||||
delete require.extensions[ext];
|
||||
}
|
||||
}
|
||||
}
|
||||
return cwd;
|
||||
}
|
||||
const guaranteedNonexistentDirectoryPrefix = (0, path_1.resolve)(__dirname, 'doesnotexist');
|
||||
let guaranteedNonexistentDirectorySuffix = 0;
|
||||
/**
|
||||
* require.resolve an absolute path, tricking node into *not* caching the results.
|
||||
* Necessary so that we do not pollute require.resolve cache prior to installing require.extensions
|
||||
*
|
||||
* This is a terrible hack, because node does not expose the necessary cache invalidation APIs
|
||||
* https://stackoverflow.com/questions/59865584/how-to-invalidate-cached-require-resolve-results
|
||||
*/
|
||||
function requireResolveNonCached(absoluteModuleSpecifier) {
|
||||
// node <= 12.1.x fallback: The trick below triggers a node bug on old versions.
|
||||
// On these old versions, pollute the require cache instead. This is a deliberate
|
||||
// ts-node limitation that will *rarely* manifest, and will not matter once node 12
|
||||
// is end-of-life'd on 2022-04-30
|
||||
const isSupportedNodeVersion = (0, util_2.versionGteLt)(process.versions.node, '12.2.0');
|
||||
if (!isSupportedNodeVersion)
|
||||
return require.resolve(absoluteModuleSpecifier);
|
||||
const { dir, base } = (0, path_1.parse)(absoluteModuleSpecifier);
|
||||
const relativeModuleSpecifier = `./${base}`;
|
||||
const req = (0, util_2.createRequire)((0, path_1.join)(dir, 'imaginaryUncacheableRequireResolveScript'));
|
||||
return req.resolve(relativeModuleSpecifier, {
|
||||
paths: [
|
||||
`${guaranteedNonexistentDirectoryPrefix}${guaranteedNonexistentDirectorySuffix++}`,
|
||||
...(req.resolve.paths(relativeModuleSpecifier) || []),
|
||||
],
|
||||
});
|
||||
}
|
||||
/**
|
||||
* Evaluate an [eval] or [stdin] script
|
||||
*/
|
||||
function evalAndExitOnTsError(replService, module, code, isPrinted, filenameAndDirname) {
|
||||
let result;
|
||||
(0, repl_1.setupContext)(global, module, filenameAndDirname);
|
||||
try {
|
||||
result = replService.evalCode(code);
|
||||
}
|
||||
catch (error) {
|
||||
if (error instanceof index_1.TSError) {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
if (isPrinted) {
|
||||
console.log(typeof result === 'string'
|
||||
? result
|
||||
: (0, util_1.inspect)(result, { colors: process.stdout.isTTY }));
|
||||
}
|
||||
}
|
||||
if (require.main === module) {
|
||||
main();
|
||||
}
|
||||
//# sourceMappingURL=bin.js.map
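Editor's note: the entry-point selection in `getEntryPointInfo` above mirrors node's own precedence between `--eval`, `--interactive`, piped stdin, and a script argument. The following is a minimal standalone sketch of that decision table; `classifyInvocation` and its inputs are illustrative names, not part of ts-node's API.

// Sketch only: reproduces the precedence implemented by getEntryPointInfo above.
function classifyInvocation({ code, interactive, restArgs, stdinIsTTY }) {
  // `node -e code -i ./script.js` ignores -e, so eval loses when interactive + script args are present
  const executeEval = code != null && !(interactive && restArgs.length);
  const executeEntrypoint = !executeEval && restArgs.length > 0;
  const executeRepl = !executeEntrypoint && (interactive || (stdinIsTTY && !executeEval));
  const executeStdin = !executeEval && !executeRepl && !executeEntrypoint;
  return { executeEval, executeEntrypoint, executeRepl, executeStdin };
}

// e.g. piping a script: `echo 'console.log(1)' | ts-node`
// classifyInvocation({ code: undefined, interactive: false, restArgs: [], stdinIsTTY: false })
//   -> { executeEval: false, executeEntrypoint: false, executeRepl: false, executeStdin: true }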
|
1
node_modules/ts-node/dist/bin.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/bin.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/ts-node/dist/child/argv-payload.d.ts
generated
vendored
Normal file
1
node_modules/ts-node/dist/child/argv-payload.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
19
node_modules/ts-node/dist/child/argv-payload.js
generated
vendored
Normal file
19
node_modules/ts-node/dist/child/argv-payload.js
generated
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.decompress = exports.compress = exports.argPrefix = void 0;
|
||||
const zlib_1 = require("zlib");
|
||||
/** @internal */
|
||||
exports.argPrefix = '--brotli-base64-config=';
|
||||
/** @internal */
|
||||
function compress(object) {
|
||||
return (0, zlib_1.brotliCompressSync)(Buffer.from(JSON.stringify(object), 'utf8'), {
|
||||
[zlib_1.constants.BROTLI_PARAM_QUALITY]: zlib_1.constants.BROTLI_MIN_QUALITY,
|
||||
}).toString('base64');
|
||||
}
|
||||
exports.compress = compress;
|
||||
/** @internal */
|
||||
function decompress(str) {
|
||||
return JSON.parse((0, zlib_1.brotliDecompressSync)(Buffer.from(str, 'base64')).toString());
|
||||
}
|
||||
exports.decompress = decompress;
|
||||
//# sourceMappingURL=argv-payload.js.map
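Editor's note: the `compress`/`decompress` pair above is how the bootstrap state survives the hop into a child process: JSON-serialized, Brotli-compressed, base64-encoded, and carried on argv behind the `--brotli-base64-config=` prefix. A small round-trip sketch with a hypothetical payload (not the real `BootstrapState`):

const { brotliCompressSync, brotliDecompressSync } = require('zlib');

const argPrefix = '--brotli-base64-config=';

// Illustrative payload only; ts-node ships a much larger BootstrapState object.
const payload = { parseArgvResult: { esm: true }, isCli: true };
const packed = brotliCompressSync(Buffer.from(JSON.stringify(payload), 'utf8')).toString('base64');

// This is roughly what ends up in the child's process.argv[2]:
const arg = `${argPrefix}${packed}`;

// The child strips the prefix and reverses the transformation.
const restored = JSON.parse(
  brotliDecompressSync(Buffer.from(arg.slice(argPrefix.length), 'base64')).toString()
);
console.log(restored.parseArgvResult.esm); // true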
|
1
node_modules/ts-node/dist/child/argv-payload.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/child/argv-payload.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"argv-payload.js","sourceRoot":"","sources":["../../src/child/argv-payload.ts"],"names":[],"mappings":";;;AAAA,+BAA2E;AAE3E,gBAAgB;AACH,QAAA,SAAS,GAAG,yBAAyB,CAAC;AAEnD,gBAAgB;AAChB,SAAgB,QAAQ,CAAC,MAAW;IAClC,OAAO,IAAA,yBAAkB,EAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC,EAAE;QACrE,CAAC,gBAAS,CAAC,oBAAoB,CAAC,EAAE,gBAAS,CAAC,kBAAkB;KAC/D,CAAC,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC;AACxB,CAAC;AAJD,4BAIC;AAED,gBAAgB;AAChB,SAAgB,UAAU,CAAC,GAAW;IACpC,OAAO,IAAI,CAAC,KAAK,CACf,IAAA,2BAAoB,EAAC,MAAM,CAAC,IAAI,CAAC,GAAG,EAAE,QAAQ,CAAC,CAAC,CAAC,QAAQ,EAAE,CAC5D,CAAC;AACJ,CAAC;AAJD,gCAIC","sourcesContent":["import { brotliCompressSync, brotliDecompressSync, constants } from 'zlib';\n\n/** @internal */\nexport const argPrefix = '--brotli-base64-config=';\n\n/** @internal */\nexport function compress(object: any) {\n return brotliCompressSync(Buffer.from(JSON.stringify(object), 'utf8'), {\n [constants.BROTLI_PARAM_QUALITY]: constants.BROTLI_MIN_QUALITY,\n }).toString('base64');\n}\n\n/** @internal */\nexport function decompress(str: string) {\n return JSON.parse(\n brotliDecompressSync(Buffer.from(str, 'base64')).toString()\n );\n}\n"]}
|
1
node_modules/ts-node/dist/child/child-entrypoint.d.ts
generated
vendored
Normal file
1
node_modules/ts-node/dist/child/child-entrypoint.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
24
node_modules/ts-node/dist/child/child-entrypoint.js
generated
vendored
Normal file
24
node_modules/ts-node/dist/child/child-entrypoint.js
generated
vendored
Normal file
@ -0,0 +1,24 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const bin_1 = require("../bin");
|
||||
const argv_payload_1 = require("./argv-payload");
|
||||
const base64ConfigArg = process.argv[2];
|
||||
if (!base64ConfigArg.startsWith(argv_payload_1.argPrefix))
|
||||
throw new Error('unexpected argv');
|
||||
const base64Payload = base64ConfigArg.slice(argv_payload_1.argPrefix.length);
|
||||
const state = (0, argv_payload_1.decompress)(base64Payload);
|
||||
state.isInChildProcess = true;
|
||||
state.tsNodeScript = __filename;
|
||||
state.parseArgvResult.argv = process.argv;
|
||||
state.parseArgvResult.restArgs = process.argv.slice(3);
|
||||
// Modify and re-compress the payload delivered to subsequent child processes.
|
||||
// This logic may be refactored into bin.ts by https://github.com/TypeStrong/ts-node/issues/1831
|
||||
if (state.isCli) {
|
||||
const stateForChildren = {
|
||||
...state,
|
||||
isCli: false,
|
||||
};
|
||||
state.parseArgvResult.argv[2] = `${argv_payload_1.argPrefix}${(0, argv_payload_1.compress)(stateForChildren)}`;
|
||||
}
|
||||
(0, bin_1.bootstrap)(state);
|
||||
//# sourceMappingURL=child-entrypoint.js.map
|
1
node_modules/ts-node/dist/child/child-entrypoint.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/child/child-entrypoint.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"child-entrypoint.js","sourceRoot":"","sources":["../../src/child/child-entrypoint.ts"],"names":[],"mappings":";;AAAA,gCAAmD;AACnD,iDAAiE;AAEjE,MAAM,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AACxC,IAAI,CAAC,eAAe,CAAC,UAAU,CAAC,wBAAS,CAAC;IAAE,MAAM,IAAI,KAAK,CAAC,iBAAiB,CAAC,CAAC;AAC/E,MAAM,aAAa,GAAG,eAAe,CAAC,KAAK,CAAC,wBAAS,CAAC,MAAM,CAAC,CAAC;AAC9D,MAAM,KAAK,GAAG,IAAA,yBAAU,EAAC,aAAa,CAAmB,CAAC;AAE1D,KAAK,CAAC,gBAAgB,GAAG,IAAI,CAAC;AAC9B,KAAK,CAAC,YAAY,GAAG,UAAU,CAAC;AAChC,KAAK,CAAC,eAAe,CAAC,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC;AAC1C,KAAK,CAAC,eAAe,CAAC,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AAEvD,8EAA8E;AAC9E,gGAAgG;AAChG,IAAI,KAAK,CAAC,KAAK,EAAE;IACf,MAAM,gBAAgB,GAAmB;QACvC,GAAG,KAAK;QACR,KAAK,EAAE,KAAK;KACb,CAAC;IACF,KAAK,CAAC,eAAe,CAAC,IAAI,CAAC,CAAC,CAAC,GAAG,GAAG,wBAAS,GAAG,IAAA,uBAAQ,EAAC,gBAAgB,CAAC,EAAE,CAAC;CAC7E;AAED,IAAA,eAAS,EAAC,KAAK,CAAC,CAAC","sourcesContent":["import { BootstrapState, bootstrap } from '../bin';\nimport { argPrefix, compress, decompress } from './argv-payload';\n\nconst base64ConfigArg = process.argv[2];\nif (!base64ConfigArg.startsWith(argPrefix)) throw new Error('unexpected argv');\nconst base64Payload = base64ConfigArg.slice(argPrefix.length);\nconst state = decompress(base64Payload) as BootstrapState;\n\nstate.isInChildProcess = true;\nstate.tsNodeScript = __filename;\nstate.parseArgvResult.argv = process.argv;\nstate.parseArgvResult.restArgs = process.argv.slice(3);\n\n// Modify and re-compress the payload delivered to subsequent child processes.\n// This logic may be refactored into bin.ts by https://github.com/TypeStrong/ts-node/issues/1831\nif (state.isCli) {\n const stateForChildren: BootstrapState = {\n ...state,\n isCli: false,\n };\n state.parseArgvResult.argv[2] = `${argPrefix}${compress(stateForChildren)}`;\n}\n\nbootstrap(state);\n"]}
|
1
node_modules/ts-node/dist/child/child-loader.d.ts
generated
vendored
Normal file
1
node_modules/ts-node/dist/child/child-loader.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
32
node_modules/ts-node/dist/child/child-loader.js
generated
vendored
Normal file
32
node_modules/ts-node/dist/child/child-loader.js
generated
vendored
Normal file
@ -0,0 +1,32 @@
|
||||
"use strict";
|
||||
var _a;
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.transformSource = exports.getFormat = exports.load = exports.resolve = exports.lateBindHooks = void 0;
|
||||
const esm_1 = require("../esm");
|
||||
let hooks;
|
||||
/** @internal */
|
||||
function lateBindHooks(_hooks) {
|
||||
hooks = _hooks;
|
||||
}
|
||||
exports.lateBindHooks = lateBindHooks;
|
||||
const proxy = {
|
||||
resolve(...args) {
|
||||
var _a;
|
||||
return ((_a = hooks === null || hooks === void 0 ? void 0 : hooks.resolve) !== null && _a !== void 0 ? _a : args[2])(...args);
|
||||
},
|
||||
load(...args) {
|
||||
var _a;
|
||||
return ((_a = hooks === null || hooks === void 0 ? void 0 : hooks.load) !== null && _a !== void 0 ? _a : args[2])(...args);
|
||||
},
|
||||
getFormat(...args) {
|
||||
var _a;
|
||||
return ((_a = hooks === null || hooks === void 0 ? void 0 : hooks.getFormat) !== null && _a !== void 0 ? _a : args[2])(...args);
|
||||
},
|
||||
transformSource(...args) {
|
||||
var _a;
|
||||
return ((_a = hooks === null || hooks === void 0 ? void 0 : hooks.transformSource) !== null && _a !== void 0 ? _a : args[2])(...args);
|
||||
},
|
||||
};
|
||||
/** @internal */
|
||||
_a = (0, esm_1.filterHooksByAPIVersion)(proxy), exports.resolve = _a.resolve, exports.load = _a.load, exports.getFormat = _a.getFormat, exports.transformSource = _a.transformSource;
|
||||
//# sourceMappingURL=child-loader.js.map
|
1
node_modules/ts-node/dist/child/child-loader.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/child/child-loader.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"child-loader.js","sourceRoot":"","sources":["../../src/child/child-loader.ts"],"names":[],"mappings":";;;;AACA,gCAAiD;AAEjD,IAAI,KAAgD,CAAC;AAErD,gBAAgB;AAChB,SAAgB,aAAa,CAC3B,MAAiD;IAEjD,KAAK,GAAG,MAAmD,CAAC;AAC9D,CAAC;AAJD,sCAIC;AAED,MAAM,KAAK,GAA8C;IACvD,OAAO,CAAC,GAAG,IAAgD;;QACzD,OAAO,CAAC,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,OAAO,mCAAI,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC9C,CAAC;IACD,IAAI,CAAC,GAAG,IAA6C;;QACnD,OAAO,CAAC,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,IAAI,mCAAI,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAC3C,CAAC;IACD,SAAS,CAAC,GAAG,IAAkD;;QAC7D,OAAO,CAAC,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,SAAS,mCAAI,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IAChD,CAAC;IACD,eAAe,CAAC,GAAG,IAAwD;;QACzE,OAAO,CAAC,MAAA,KAAK,aAAL,KAAK,uBAAL,KAAK,CAAE,eAAe,mCAAI,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAAC;IACtD,CAAC;CACF,CAAC;AAEF,gBAAgB;AACH,KACX,IAAA,6BAAuB,EAAC,KAAK,CAA8C,EAD9D,eAAO,eAAE,YAAI,YAAE,iBAAS,iBAAE,uBAAe,sBACsB","sourcesContent":["import type { NodeLoaderHooksAPI1, NodeLoaderHooksAPI2 } from '..';\nimport { filterHooksByAPIVersion } from '../esm';\n\nlet hooks: NodeLoaderHooksAPI1 & NodeLoaderHooksAPI2;\n\n/** @internal */\nexport function lateBindHooks(\n _hooks: NodeLoaderHooksAPI1 | NodeLoaderHooksAPI2\n) {\n hooks = _hooks as NodeLoaderHooksAPI1 & NodeLoaderHooksAPI2;\n}\n\nconst proxy: NodeLoaderHooksAPI1 & NodeLoaderHooksAPI2 = {\n resolve(...args: Parameters<NodeLoaderHooksAPI1['resolve']>) {\n return (hooks?.resolve ?? args[2])(...args);\n },\n load(...args: Parameters<NodeLoaderHooksAPI2['load']>) {\n return (hooks?.load ?? args[2])(...args);\n },\n getFormat(...args: Parameters<NodeLoaderHooksAPI1['getFormat']>) {\n return (hooks?.getFormat ?? args[2])(...args);\n },\n transformSource(...args: Parameters<NodeLoaderHooksAPI1['transformSource']>) {\n return (hooks?.transformSource ?? args[2])(...args);\n },\n};\n\n/** @internal */\nexport const { resolve, load, getFormat, transformSource } =\n filterHooksByAPIVersion(proxy) as NodeLoaderHooksAPI1 & NodeLoaderHooksAPI2;\n"]}
|
7
node_modules/ts-node/dist/child/child-require.d.ts
generated
vendored
Normal file
7
node_modules/ts-node/dist/child/child-require.d.ts
generated
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
interface EventEmitterInternals {
|
||||
_events: Record<string, Function | Array<Function>>;
|
||||
}
|
||||
declare const _process: EventEmitterInternals;
|
||||
declare let originalOnWarning: Function | undefined;
|
||||
declare const messageMatch: RegExp;
|
||||
declare function onWarning(this: any, warning: Error, ...rest: any[]): any;
|
22
node_modules/ts-node/dist/child/child-require.js
generated
vendored
Normal file
22
node_modules/ts-node/dist/child/child-require.js
generated
vendored
Normal file
@ -0,0 +1,22 @@
|
||||
"use strict";
|
||||
const _process = process;
|
||||
// Not shown here: Additional logic to correctly interact with process's events, either using this direct manipulation, or via the API
|
||||
let originalOnWarning;
|
||||
if (Array.isArray(_process._events.warning)) {
|
||||
originalOnWarning = _process._events.warning[0];
|
||||
_process._events.warning[0] = onWarning;
|
||||
}
|
||||
else {
|
||||
originalOnWarning = _process._events.warning;
|
||||
_process._events.warning = onWarning;
|
||||
}
|
||||
const messageMatch = /(?:--(?:experimental-)?loader\b|\bCustom ESM Loaders\b)/;
|
||||
function onWarning(warning, ...rest) {
|
||||
// Suppress warning about how `--loader` is experimental
|
||||
if ((warning === null || warning === void 0 ? void 0 : warning.name) === 'ExperimentalWarning' &&
|
||||
messageMatch.test(warning === null || warning === void 0 ? void 0 : warning.message))
|
||||
return;
|
||||
// Will be undefined if `--no-warnings`
|
||||
return originalOnWarning === null || originalOnWarning === void 0 ? void 0 : originalOnWarning.call(this, warning, ...rest);
|
||||
}
|
||||
//# sourceMappingURL=child-require.js.map
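Editor's note: child-require.js above swallows node's "ExperimentalWarning" about `--loader` by replacing the first 'warning' listener via direct `_events` manipulation. A sketch of the same idea using only the public events API is shown below; this is an assumption-level alternative, not what ts-node ships.

// Sketch: suppress the experimental-loader warning via the public listener API.
const loaderWarning = /(?:--(?:experimental-)?loader\b|\bCustom ESM Loaders\b)/;
const original = process.listeners('warning')[0];
if (original) {
  process.removeListener('warning', original);
  process.on('warning', function (warning, ...rest) {
    if (warning?.name === 'ExperimentalWarning' && loaderWarning.test(warning?.message)) return;
    // Forward everything else to node's default handler.
    return original.call(this, warning, ...rest);
  });
}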
|
1
node_modules/ts-node/dist/child/child-require.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/child/child-require.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"child-require.js","sourceRoot":"","sources":["../../src/child/child-require.ts"],"names":[],"mappings":";AAGA,MAAM,QAAQ,GAAG,OAAuC,CAAC;AAEzD,sIAAsI;AAEtI,IAAI,iBAAuC,CAAC;AAC5C,IAAI,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;IAC3C,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;IAChD,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,SAAS,CAAC;CACzC;KAAM;IACL,iBAAiB,GAAG,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC;IAC7C,QAAQ,CAAC,OAAO,CAAC,OAAO,GAAG,SAAS,CAAC;CACtC;AAED,MAAM,YAAY,GAAG,yDAAyD,CAAC;AAC/E,SAAS,SAAS,CAAY,OAAc,EAAE,GAAG,IAAW;IAC1D,wDAAwD;IACxD,IACE,CAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,IAAI,MAAK,qBAAqB;QACvC,YAAY,CAAC,IAAI,CAAC,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,OAAO,CAAC;QAEnC,OAAO;IACT,uCAAuC;IACvC,OAAO,iBAAiB,aAAjB,iBAAiB,uBAAjB,iBAAiB,CAAE,IAAI,CAAC,IAAI,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,CAAC;AACzD,CAAC","sourcesContent":["interface EventEmitterInternals {\n _events: Record<string, Function | Array<Function>>;\n}\nconst _process = process as any as EventEmitterInternals;\n\n// Not shown here: Additional logic to correctly interact with process's events, either using this direct manipulation, or via the API\n\nlet originalOnWarning: Function | undefined;\nif (Array.isArray(_process._events.warning)) {\n originalOnWarning = _process._events.warning[0];\n _process._events.warning[0] = onWarning;\n} else {\n originalOnWarning = _process._events.warning;\n _process._events.warning = onWarning;\n}\n\nconst messageMatch = /(?:--(?:experimental-)?loader\\b|\\bCustom ESM Loaders\\b)/;\nfunction onWarning(this: any, warning: Error, ...rest: any[]) {\n // Suppress warning about how `--loader` is experimental\n if (\n warning?.name === 'ExperimentalWarning' &&\n messageMatch.test(warning?.message)\n )\n return;\n // Will be undefined if `--no-warnings`\n return originalOnWarning?.call(this, warning, ...rest);\n}\n"]}
|
1
node_modules/ts-node/dist/child/spawn-child.d.ts
generated
vendored
Normal file
1
node_modules/ts-node/dist/child/spawn-child.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
49
node_modules/ts-node/dist/child/spawn-child.js
generated
vendored
Normal file
49
node_modules/ts-node/dist/child/spawn-child.js
generated
vendored
Normal file
@ -0,0 +1,49 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.callInChild = void 0;
|
||||
const child_process_1 = require("child_process");
|
||||
const url_1 = require("url");
|
||||
const util_1 = require("../util");
|
||||
const argv_payload_1 = require("./argv-payload");
|
||||
/**
|
||||
* @internal
|
||||
* @param state Bootstrap state to be transferred into the child process.
|
||||
* @param targetCwd Working directory to be preserved when transitioning to
|
||||
* the child process.
|
||||
*/
|
||||
function callInChild(state) {
|
||||
if (!(0, util_1.versionGteLt)(process.versions.node, '12.17.0')) {
|
||||
throw new Error('`ts-node-esm` and `ts-node --esm` require node version 12.17.0 or newer.');
|
||||
}
|
||||
const child = (0, child_process_1.spawn)(process.execPath, [
|
||||
'--require',
|
||||
require.resolve('./child-require.js'),
|
||||
'--loader',
|
||||
// Node on Windows doesn't like `c:\` absolute paths here; must be `file:///c:/`
|
||||
(0, url_1.pathToFileURL)(require.resolve('../../child-loader.mjs')).toString(),
|
||||
require.resolve('./child-entrypoint.js'),
|
||||
`${argv_payload_1.argPrefix}${(0, argv_payload_1.compress)(state)}`,
|
||||
...state.parseArgvResult.restArgs,
|
||||
], {
|
||||
stdio: 'inherit',
|
||||
argv0: process.argv0,
|
||||
});
|
||||
child.on('error', (error) => {
|
||||
console.error(error);
|
||||
process.exit(1);
|
||||
});
|
||||
child.on('exit', (code) => {
|
||||
child.removeAllListeners();
|
||||
process.off('SIGINT', sendSignalToChild);
|
||||
process.off('SIGTERM', sendSignalToChild);
|
||||
process.exitCode = code === null ? 1 : code;
|
||||
});
|
||||
// Ignore sigint and sigterm in parent; pass them to child
|
||||
process.on('SIGINT', sendSignalToChild);
|
||||
process.on('SIGTERM', sendSignalToChild);
|
||||
function sendSignalToChild(signal) {
|
||||
process.kill(child.pid, signal);
|
||||
}
|
||||
}
|
||||
exports.callInChild = callInChild;
|
||||
//# sourceMappingURL=spawn-child.js.map
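Editor's note: `callInChild` above re-launches node with the ESM loader installed and the compressed bootstrap state on argv. The sketch below assembles roughly the same argv by hand; the paths are resolved from a hypothetical install location rather than via `require.resolve`, and the config value is a placeholder.

const { pathToFileURL } = require('url');
const path = require('path');

const distChild = path.join(__dirname, 'node_modules/ts-node/dist/child');
const childArgv = [
  '--require', path.join(distChild, 'child-require.js'),            // suppress loader warnings (see child-require.js)
  '--loader', pathToFileURL(path.join(distChild, '../../child-loader.mjs')).toString(), // file:// URL required on Windows
  path.join(distChild, 'child-entrypoint.js'),                      // re-enters bootstrap() with the decoded state
  '--brotli-base64-config=<compressed BootstrapState>',             // see argv-payload.js above
  // ...followed by the user's own script arguments (restArgs)
];
console.log(childArgv.join(' '));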
|
1
node_modules/ts-node/dist/child/spawn-child.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/child/spawn-child.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"spawn-child.js","sourceRoot":"","sources":["../../src/child/spawn-child.ts"],"names":[],"mappings":";;;AACA,iDAAsC;AACtC,6BAAoC;AACpC,kCAAuC;AACvC,iDAAqD;AAErD;;;;;GAKG;AACH,SAAgB,WAAW,CAAC,KAAqB;IAC/C,IAAI,CAAC,IAAA,mBAAY,EAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,EAAE,SAAS,CAAC,EAAE;QACnD,MAAM,IAAI,KAAK,CACb,0EAA0E,CAC3E,CAAC;KACH;IACD,MAAM,KAAK,GAAG,IAAA,qBAAK,EACjB,OAAO,CAAC,QAAQ,EAChB;QACE,WAAW;QACX,OAAO,CAAC,OAAO,CAAC,oBAAoB,CAAC;QACrC,UAAU;QACV,gFAAgF;QAChF,IAAA,mBAAa,EAAC,OAAO,CAAC,OAAO,CAAC,wBAAwB,CAAC,CAAC,CAAC,QAAQ,EAAE;QACnE,OAAO,CAAC,OAAO,CAAC,uBAAuB,CAAC;QACxC,GAAG,wBAAS,GAAG,IAAA,uBAAQ,EAAC,KAAK,CAAC,EAAE;QAChC,GAAG,KAAK,CAAC,eAAe,CAAC,QAAQ;KAClC,EACD;QACE,KAAK,EAAE,SAAS;QAChB,KAAK,EAAE,OAAO,CAAC,KAAK;KACrB,CACF,CAAC;IACF,KAAK,CAAC,EAAE,CAAC,OAAO,EAAE,CAAC,KAAK,EAAE,EAAE;QAC1B,OAAO,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;QACrB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;IAClB,CAAC,CAAC,CAAC;IACH,KAAK,CAAC,EAAE,CAAC,MAAM,EAAE,CAAC,IAAI,EAAE,EAAE;QACxB,KAAK,CAAC,kBAAkB,EAAE,CAAC;QAC3B,OAAO,CAAC,GAAG,CAAC,QAAQ,EAAE,iBAAiB,CAAC,CAAC;QACzC,OAAO,CAAC,GAAG,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;QAC1C,OAAO,CAAC,QAAQ,GAAG,IAAI,KAAK,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC;IAC9C,CAAC,CAAC,CAAC;IACH,0DAA0D;IAC1D,OAAO,CAAC,EAAE,CAAC,QAAQ,EAAE,iBAAiB,CAAC,CAAC;IACxC,OAAO,CAAC,EAAE,CAAC,SAAS,EAAE,iBAAiB,CAAC,CAAC;IACzC,SAAS,iBAAiB,CAAC,MAAc;QACvC,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,EAAE,MAAM,CAAC,CAAC;IAClC,CAAC;AACH,CAAC;AAvCD,kCAuCC","sourcesContent":["import type { BootstrapState } from '../bin';\nimport { spawn } from 'child_process';\nimport { pathToFileURL } from 'url';\nimport { versionGteLt } from '../util';\nimport { argPrefix, compress } from './argv-payload';\n\n/**\n * @internal\n * @param state Bootstrap state to be transferred into the child process.\n * @param targetCwd Working directory to be preserved when transitioning to\n * the child process.\n */\nexport function callInChild(state: BootstrapState) {\n if (!versionGteLt(process.versions.node, '12.17.0')) {\n throw new Error(\n '`ts-node-esm` and `ts-node --esm` require node version 12.17.0 or newer.'\n );\n }\n const child = spawn(\n process.execPath,\n [\n '--require',\n require.resolve('./child-require.js'),\n '--loader',\n // Node on Windows doesn't like `c:\\` absolute paths here; must be `file:///c:/`\n pathToFileURL(require.resolve('../../child-loader.mjs')).toString(),\n require.resolve('./child-entrypoint.js'),\n `${argPrefix}${compress(state)}`,\n ...state.parseArgvResult.restArgs,\n ],\n {\n stdio: 'inherit',\n argv0: process.argv0,\n }\n );\n child.on('error', (error) => {\n console.error(error);\n process.exit(1);\n });\n child.on('exit', (code) => {\n child.removeAllListeners();\n process.off('SIGINT', sendSignalToChild);\n process.off('SIGTERM', sendSignalToChild);\n process.exitCode = code === null ? 1 : code;\n });\n // Ignore sigint and sigterm in parent; pass them to child\n process.on('SIGINT', sendSignalToChild);\n process.on('SIGTERM', sendSignalToChild);\n function sendSignalToChild(signal: string) {\n process.kill(child.pid, signal);\n }\n}\n"]}
|
1
node_modules/ts-node/dist/cjs-resolve-hooks.d.ts
generated
vendored
Normal file
1
node_modules/ts-node/dist/cjs-resolve-hooks.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
29
node_modules/ts-node/dist/cjs-resolve-hooks.js
generated
vendored
Normal file
29
node_modules/ts-node/dist/cjs-resolve-hooks.js
generated
vendored
Normal file
@ -0,0 +1,29 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.installCommonjsResolveHooksIfNecessary = void 0;
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
function installCommonjsResolveHooksIfNecessary(tsNodeService) {
|
||||
const Module = require('module');
|
||||
const originalResolveFilename = Module._resolveFilename;
|
||||
const originalFindPath = Module._findPath;
|
||||
const shouldInstallHook = tsNodeService.options.experimentalResolver;
|
||||
if (shouldInstallHook) {
|
||||
const { Module_findPath, Module_resolveFilename } = tsNodeService.getNodeCjsLoader();
|
||||
Module._resolveFilename = _resolveFilename;
|
||||
Module._findPath = _findPath;
|
||||
function _resolveFilename(request, parent, isMain, options, ...rest) {
|
||||
if (!tsNodeService.enabled())
|
||||
return originalResolveFilename.call(this, request, parent, isMain, options, ...rest);
|
||||
return Module_resolveFilename.call(this, request, parent, isMain, options, ...rest);
|
||||
}
|
||||
function _findPath() {
|
||||
if (!tsNodeService.enabled())
|
||||
return originalFindPath.apply(this, arguments);
|
||||
return Module_findPath.apply(this, arguments);
|
||||
}
|
||||
}
|
||||
}
|
||||
exports.installCommonjsResolveHooksIfNecessary = installCommonjsResolveHooksIfNecessary;
|
||||
//# sourceMappingURL=cjs-resolve-hooks.js.map
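Editor's note: the hook installer above follows a common monkey-patching pattern: keep a reference to the internal `Module._resolveFilename`, install a wrapper, and delegate to the original whenever the service is disabled. A minimal sketch of that shape; `isEnabled` is a stand-in for `tsNodeService.enabled()`, and the real hook swaps in ts-node's own resolver instead of calling the original in both branches.

const Module = require('module');
const originalResolveFilename = Module._resolveFilename;
let isEnabled = true;
Module._resolveFilename = function (request, parent, isMain, options, ...rest) {
  if (!isEnabled)
    return originalResolveFilename.call(this, request, parent, isMain, options, ...rest);
  // A real hook would consult its own resolution logic here before falling back.
  return originalResolveFilename.call(this, request, parent, isMain, options, ...rest);
};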
|
1
node_modules/ts-node/dist/cjs-resolve-hooks.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/cjs-resolve-hooks.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"cjs-resolve-hooks.js","sourceRoot":"","sources":["../src/cjs-resolve-hooks.ts"],"names":[],"mappings":";;;AAoBA;;GAEG;AACH,SAAgB,sCAAsC,CAAC,aAAsB;IAC3E,MAAM,MAAM,GAAG,OAAO,CAAC,QAAQ,CAAmC,CAAC;IACnE,MAAM,uBAAuB,GAAG,MAAM,CAAC,gBAAgB,CAAC;IACxD,MAAM,gBAAgB,GAAG,MAAM,CAAC,SAAS,CAAC;IAC1C,MAAM,iBAAiB,GAAG,aAAa,CAAC,OAAO,CAAC,oBAAoB,CAAC;IACrE,IAAI,iBAAiB,EAAE;QACrB,MAAM,EAAE,eAAe,EAAE,sBAAsB,EAAE,GAC/C,aAAa,CAAC,gBAAgB,EAAE,CAAC;QACnC,MAAM,CAAC,gBAAgB,GAAG,gBAAgB,CAAC;QAC3C,MAAM,CAAC,SAAS,GAAG,SAAS,CAAC;QAC7B,SAAS,gBAAgB,CAEvB,OAAe,EACf,MAAe,EACf,MAAgB,EAChB,OAAsC,EACtC,GAAG,IAAQ;YAEX,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE;gBAC1B,OAAO,uBAAuB,CAAC,IAAI,CACjC,IAAI,EACJ,OAAO,EACP,MAAM,EACN,MAAM,EACN,OAAO,EACP,GAAG,IAAI,CACR,CAAC;YAEJ,OAAO,sBAAsB,CAAC,IAAI,CAChC,IAAI,EACJ,OAAO,EACP,MAAM,EACN,MAAM,EACN,OAAO,EACP,GAAG,IAAI,CACR,CAAC;QACJ,CAAC;QACD,SAAS,SAAS;YAChB,IAAI,CAAC,aAAa,CAAC,OAAO,EAAE;gBAC1B,OAAO,gBAAgB,CAAC,KAAK,CAAC,IAAI,EAAE,SAAgB,CAAC,CAAC;YACxD,OAAO,eAAe,CAAC,KAAK,CAAC,IAAI,EAAE,SAAgB,CAAC,CAAC;QACvD,CAAC;KACF;AACH,CAAC;AA3CD,wFA2CC","sourcesContent":["import type Module = require('module');\nimport type { Service } from '.';\n\n/** @internal */\nexport type ModuleConstructorWithInternals = typeof Module & {\n _resolveFilename(\n request: string,\n parent?: Module,\n isMain?: boolean,\n options?: ModuleResolveFilenameOptions,\n ...rest: any[]\n ): string;\n _preloadModules(requests?: string[]): void;\n _findPath(request: string, paths: string[], isMain: boolean): string;\n};\n\ninterface ModuleResolveFilenameOptions {\n paths?: Array<string>;\n}\n\n/**\n * @internal\n */\nexport function installCommonjsResolveHooksIfNecessary(tsNodeService: Service) {\n const Module = require('module') as ModuleConstructorWithInternals;\n const originalResolveFilename = Module._resolveFilename;\n const originalFindPath = Module._findPath;\n const shouldInstallHook = tsNodeService.options.experimentalResolver;\n if (shouldInstallHook) {\n const { Module_findPath, Module_resolveFilename } =\n tsNodeService.getNodeCjsLoader();\n Module._resolveFilename = _resolveFilename;\n Module._findPath = _findPath;\n function _resolveFilename(\n this: any,\n request: string,\n parent?: Module,\n isMain?: boolean,\n options?: ModuleResolveFilenameOptions,\n ...rest: []\n ): string {\n if (!tsNodeService.enabled())\n return originalResolveFilename.call(\n this,\n request,\n parent,\n isMain,\n options,\n ...rest\n );\n\n return Module_resolveFilename.call(\n this,\n request,\n parent,\n isMain,\n options,\n ...rest\n );\n }\n function _findPath(this: any): string {\n if (!tsNodeService.enabled())\n return originalFindPath.apply(this, arguments as any);\n return Module_findPath.apply(this, arguments as any);\n }\n }\n}\n"]}
|
1
node_modules/ts-node/dist/configuration.d.ts
generated
vendored
Normal file
1
node_modules/ts-node/dist/configuration.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
308
node_modules/ts-node/dist/configuration.js
generated
vendored
Normal file
308
node_modules/ts-node/dist/configuration.js
generated
vendored
Normal file
@ -0,0 +1,308 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getTsConfigDefaults = exports.ComputeAsCommonRootOfFiles = exports.loadCompiler = exports.resolveAndLoadCompiler = exports.readConfig = exports.findAndReadConfig = void 0;
|
||||
const path_1 = require("path");
|
||||
const index_1 = require("./index");
|
||||
const ts_internals_1 = require("./ts-internals");
|
||||
const tsconfigs_1 = require("./tsconfigs");
|
||||
const util_1 = require("./util");
|
||||
/**
|
||||
* TypeScript compiler option values required by `ts-node` which cannot be overridden.
|
||||
*/
|
||||
const TS_NODE_COMPILER_OPTIONS = {
|
||||
sourceMap: true,
|
||||
inlineSourceMap: false,
|
||||
inlineSources: true,
|
||||
declaration: false,
|
||||
noEmit: false,
|
||||
outDir: '.ts-node',
|
||||
};
|
||||
/*
|
||||
* Do post-processing on config options to support `ts-node`.
|
||||
*/
|
||||
function fixConfig(ts, config) {
|
||||
// Delete options that *should not* be passed through.
|
||||
delete config.options.out;
|
||||
delete config.options.outFile;
|
||||
delete config.options.composite;
|
||||
delete config.options.declarationDir;
|
||||
delete config.options.declarationMap;
|
||||
delete config.options.emitDeclarationOnly;
|
||||
// Target ES5 output by default (instead of ES3).
|
||||
if (config.options.target === undefined) {
|
||||
config.options.target = ts.ScriptTarget.ES5;
|
||||
}
|
||||
// Target CommonJS modules by default (instead of magically switching to ES6 when the target is ES6).
|
||||
if (config.options.module === undefined) {
|
||||
config.options.module = ts.ModuleKind.CommonJS;
|
||||
}
|
||||
return config;
|
||||
}
|
||||
/** @internal */
|
||||
function findAndReadConfig(rawOptions) {
|
||||
var _a, _b, _c, _d, _e;
|
||||
const cwd = (0, path_1.resolve)((_c = (_b = (_a = rawOptions.cwd) !== null && _a !== void 0 ? _a : rawOptions.dir) !== null && _b !== void 0 ? _b : index_1.DEFAULTS.cwd) !== null && _c !== void 0 ? _c : process.cwd());
|
||||
const compilerName = (_d = rawOptions.compiler) !== null && _d !== void 0 ? _d : index_1.DEFAULTS.compiler;
|
||||
// Compute minimum options to read the config file.
|
||||
let projectLocalResolveDir = (0, util_1.getBasePathForProjectLocalDependencyResolution)(undefined, rawOptions.projectSearchDir, rawOptions.project, cwd);
|
||||
let { compiler, ts } = resolveAndLoadCompiler(compilerName, projectLocalResolveDir);
|
||||
// Read config file and merge new options between env and CLI options.
|
||||
const { configFilePath, config, tsNodeOptionsFromTsconfig, optionBasePaths } = readConfig(cwd, ts, rawOptions);
|
||||
const options = (0, util_1.assign)({}, index_1.DEFAULTS, tsNodeOptionsFromTsconfig || {}, { optionBasePaths }, rawOptions);
|
||||
options.require = [
|
||||
...(tsNodeOptionsFromTsconfig.require || []),
|
||||
...(rawOptions.require || []),
|
||||
];
|
||||
// Re-resolve the compiler in case it has changed.
|
||||
// Compiler is loaded relative to tsconfig.json, so tsconfig discovery may cause us to load a
|
||||
// different compiler than we did above, even if the name has not changed.
|
||||
if (configFilePath) {
|
||||
projectLocalResolveDir = (0, util_1.getBasePathForProjectLocalDependencyResolution)(configFilePath, rawOptions.projectSearchDir, rawOptions.project, cwd);
|
||||
({ compiler } = resolveCompiler(options.compiler, (_e = optionBasePaths.compiler) !== null && _e !== void 0 ? _e : projectLocalResolveDir));
|
||||
}
|
||||
return {
|
||||
options,
|
||||
config,
|
||||
projectLocalResolveDir,
|
||||
optionBasePaths,
|
||||
configFilePath,
|
||||
cwd,
|
||||
compiler,
|
||||
};
|
||||
}
|
||||
exports.findAndReadConfig = findAndReadConfig;
|
||||
/**
|
||||
* Load TypeScript configuration. Returns the parsed TypeScript config and
|
||||
* any `ts-node` options specified in the config file.
|
||||
*
|
||||
* Even when a tsconfig.json is not loaded, this function still handles merging
|
||||
* compilerOptions from various sources: API, environment variables, etc.
|
||||
*
|
||||
* @internal
|
||||
*/
|
||||
function readConfig(cwd, ts, rawApiOptions) {
|
||||
var _a, _b, _c;
|
||||
// Ordered [a, b, c] where config a extends b extends c
|
||||
const configChain = [];
|
||||
let config = { compilerOptions: {} };
|
||||
let basePath = cwd;
|
||||
let configFilePath = undefined;
|
||||
const projectSearchDir = (0, path_1.resolve)(cwd, (_a = rawApiOptions.projectSearchDir) !== null && _a !== void 0 ? _a : cwd);
|
||||
const { fileExists = ts.sys.fileExists, readFile = ts.sys.readFile, skipProject = index_1.DEFAULTS.skipProject, project = index_1.DEFAULTS.project, tsTrace = index_1.DEFAULTS.tsTrace, } = rawApiOptions;
|
||||
// Read project configuration when available.
|
||||
if (!skipProject) {
|
||||
if (project) {
|
||||
const resolved = (0, path_1.resolve)(cwd, project);
|
||||
const nested = (0, path_1.join)(resolved, 'tsconfig.json');
|
||||
configFilePath = fileExists(nested) ? nested : resolved;
|
||||
}
|
||||
else {
|
||||
configFilePath = ts.findConfigFile(projectSearchDir, fileExists);
|
||||
}
|
||||
if (configFilePath) {
|
||||
let pathToNextConfigInChain = configFilePath;
|
||||
const tsInternals = (0, ts_internals_1.createTsInternals)(ts);
|
||||
const errors = [];
|
||||
// Follow chain of "extends"
|
||||
while (true) {
|
||||
const result = ts.readConfigFile(pathToNextConfigInChain, readFile);
|
||||
// Return diagnostics.
|
||||
if (result.error) {
|
||||
return {
|
||||
configFilePath,
|
||||
config: { errors: [result.error], fileNames: [], options: {} },
|
||||
tsNodeOptionsFromTsconfig: {},
|
||||
optionBasePaths: {},
|
||||
};
|
||||
}
|
||||
const c = result.config;
|
||||
const bp = (0, path_1.dirname)(pathToNextConfigInChain);
|
||||
configChain.push({
|
||||
config: c,
|
||||
basePath: bp,
|
||||
configPath: pathToNextConfigInChain,
|
||||
});
|
||||
if (c.extends == null)
|
||||
break;
|
||||
const resolvedExtendedConfigPath = tsInternals.getExtendsConfigPath(c.extends, {
|
||||
fileExists,
|
||||
readDirectory: ts.sys.readDirectory,
|
||||
readFile,
|
||||
useCaseSensitiveFileNames: ts.sys.useCaseSensitiveFileNames,
|
||||
trace: tsTrace,
|
||||
}, bp, errors, ts.createCompilerDiagnostic);
|
||||
if (errors.length) {
|
||||
return {
|
||||
configFilePath,
|
||||
config: { errors, fileNames: [], options: {} },
|
||||
tsNodeOptionsFromTsconfig: {},
|
||||
optionBasePaths: {},
|
||||
};
|
||||
}
|
||||
if (resolvedExtendedConfigPath == null)
|
||||
break;
|
||||
pathToNextConfigInChain = resolvedExtendedConfigPath;
|
||||
}
|
||||
({ config, basePath } = configChain[0]);
|
||||
}
|
||||
}
|
||||
// Merge and fix ts-node options that come from tsconfig.json(s)
|
||||
const tsNodeOptionsFromTsconfig = {};
|
||||
const optionBasePaths = {};
|
||||
for (let i = configChain.length - 1; i >= 0; i--) {
|
||||
const { config, basePath, configPath } = configChain[i];
|
||||
const options = filterRecognizedTsConfigTsNodeOptions(config['ts-node']).recognized;
|
||||
// Some options are relative to the config file, so must be converted to absolute paths here
|
||||
if (options.require) {
|
||||
// Modules are found relative to the tsconfig file, not the `dir` option
|
||||
const tsconfigRelativeResolver = (0, util_1.createProjectLocalResolveHelper)((0, path_1.dirname)(configPath));
|
||||
options.require = options.require.map((path) => tsconfigRelativeResolver(path, false));
|
||||
}
|
||||
if (options.scopeDir) {
|
||||
options.scopeDir = (0, path_1.resolve)(basePath, options.scopeDir);
|
||||
}
|
||||
// Downstream code uses the basePath; we do not do that here.
|
||||
if (options.moduleTypes) {
|
||||
optionBasePaths.moduleTypes = basePath;
|
||||
}
|
||||
if (options.transpiler != null) {
|
||||
optionBasePaths.transpiler = basePath;
|
||||
}
|
||||
if (options.compiler != null) {
|
||||
optionBasePaths.compiler = basePath;
|
||||
}
|
||||
if (options.swc != null) {
|
||||
optionBasePaths.swc = basePath;
|
||||
}
|
||||
(0, util_1.assign)(tsNodeOptionsFromTsconfig, options);
|
||||
}
|
||||
// Remove resolution of "files".
|
||||
const files = (_c = (_b = rawApiOptions.files) !== null && _b !== void 0 ? _b : tsNodeOptionsFromTsconfig.files) !== null && _c !== void 0 ? _c : index_1.DEFAULTS.files;
|
||||
// Only if a config file is *not* loaded, load an implicit configuration from @tsconfig/bases
|
||||
const skipDefaultCompilerOptions = configFilePath != null;
|
||||
const defaultCompilerOptionsForNodeVersion = skipDefaultCompilerOptions
|
||||
? undefined
|
||||
: {
|
||||
...(0, tsconfigs_1.getDefaultTsconfigJsonForNodeVersion)(ts).compilerOptions,
|
||||
types: ['node'],
|
||||
};
|
||||
// Merge compilerOptions from all sources
|
||||
config.compilerOptions = Object.assign({},
|
||||
// automatically-applied options from @tsconfig/bases
|
||||
defaultCompilerOptionsForNodeVersion,
|
||||
// tsconfig.json "compilerOptions"
|
||||
config.compilerOptions,
|
||||
// from env var
|
||||
index_1.DEFAULTS.compilerOptions,
|
||||
// tsconfig.json "ts-node": "compilerOptions"
|
||||
tsNodeOptionsFromTsconfig.compilerOptions,
|
||||
// passed programmatically
|
||||
rawApiOptions.compilerOptions,
|
||||
// overrides required by ts-node, cannot be changed
|
||||
TS_NODE_COMPILER_OPTIONS);
|
||||
const fixedConfig = fixConfig(ts, ts.parseJsonConfigFileContent(config, {
|
||||
fileExists,
|
||||
readFile,
|
||||
// Only used for globbing "files", "include", "exclude"
|
||||
// When `files` option disabled, we want to avoid the fs calls
|
||||
readDirectory: files ? ts.sys.readDirectory : () => [],
|
||||
useCaseSensitiveFileNames: ts.sys.useCaseSensitiveFileNames,
|
||||
}, basePath, undefined, configFilePath));
|
||||
return {
|
||||
configFilePath,
|
||||
config: fixedConfig,
|
||||
tsNodeOptionsFromTsconfig,
|
||||
optionBasePaths,
|
||||
};
|
||||
}
|
||||
exports.readConfig = readConfig;
|
||||
/**
|
||||
* Load the typescript compiler. It is required to load the tsconfig but might
|
||||
* be changed by the tsconfig, so we have to do this twice.
|
||||
* @internal
|
||||
*/
|
||||
function resolveAndLoadCompiler(name, relativeToPath) {
|
||||
const { compiler } = resolveCompiler(name, relativeToPath);
|
||||
const ts = loadCompiler(compiler);
|
||||
return { compiler, ts };
|
||||
}
|
||||
exports.resolveAndLoadCompiler = resolveAndLoadCompiler;
|
||||
function resolveCompiler(name, relativeToPath) {
|
||||
const projectLocalResolveHelper = (0, util_1.createProjectLocalResolveHelper)(relativeToPath);
|
||||
const compiler = projectLocalResolveHelper(name || 'typescript', true);
|
||||
return { compiler };
|
||||
}
|
||||
/** @internal */
|
||||
function loadCompiler(compiler) {
|
||||
return (0, util_1.attemptRequireWithV8CompileCache)(require, compiler);
|
||||
}
|
||||
exports.loadCompiler = loadCompiler;
|
||||
/**
|
||||
* Given the raw "ts-node" sub-object from a tsconfig, return an object with only the properties
|
||||
* recognized by "ts-node"
|
||||
*/
|
||||
function filterRecognizedTsConfigTsNodeOptions(jsonObject) {
|
||||
if (jsonObject == null)
|
||||
return { recognized: {}, unrecognized: {} };
|
||||
const { compiler, compilerHost, compilerOptions, emit, files, ignore, ignoreDiagnostics, logError, preferTsExts, pretty, require, skipIgnore, transpileOnly, typeCheck, transpiler, scope, scopeDir, moduleTypes, experimentalReplAwait, swc, experimentalResolver, esm, experimentalSpecifierResolution, experimentalTsImportSpecifiers, ...unrecognized } = jsonObject;
|
||||
const filteredTsConfigOptions = {
|
||||
compiler,
|
||||
compilerHost,
|
||||
compilerOptions,
|
||||
emit,
|
||||
experimentalReplAwait,
|
||||
files,
|
||||
ignore,
|
||||
ignoreDiagnostics,
|
||||
logError,
|
||||
preferTsExts,
|
||||
pretty,
|
||||
require,
|
||||
skipIgnore,
|
||||
transpileOnly,
|
||||
typeCheck,
|
||||
transpiler,
|
||||
scope,
|
||||
scopeDir,
|
||||
moduleTypes,
|
||||
swc,
|
||||
experimentalResolver,
|
||||
esm,
|
||||
experimentalSpecifierResolution,
|
||||
experimentalTsImportSpecifiers,
|
||||
};
|
||||
// Use the typechecker to make sure this implementation has the correct set of properties
|
||||
const catchExtraneousProps = null;
|
||||
const catchMissingProps = null;
|
||||
return { recognized: filteredTsConfigOptions, unrecognized };
|
||||
}
|
||||
/** @internal */
|
||||
exports.ComputeAsCommonRootOfFiles = Symbol();
|
||||
/**
|
||||
* Some TS compiler options have defaults which are not provided by TS's config parsing functions.
|
||||
* This function centralizes the logic for computing those defaults.
|
||||
* @internal
|
||||
*/
|
||||
function getTsConfigDefaults(config, basePath, _files, _include, _exclude) {
|
||||
const { composite = false } = config.options;
|
||||
let rootDir = config.options.rootDir;
|
||||
if (rootDir == null) {
|
||||
if (composite)
|
||||
rootDir = basePath;
|
||||
// Return this symbol to avoid computing from `files`, which would require fs calls
|
||||
else
|
||||
rootDir = exports.ComputeAsCommonRootOfFiles;
|
||||
}
|
||||
const { outDir = rootDir } = config.options;
|
||||
// Docs are wrong: https://www.typescriptlang.org/tsconfig#include
|
||||
// Docs say **, but it's actually **/*; compiler throws error for **
|
||||
const include = _files ? [] : ['**/*'];
|
||||
const files = _files !== null && _files !== void 0 ? _files : [];
|
||||
// Docs are misleading: https://www.typescriptlang.org/tsconfig#exclude
|
||||
// Docs say it excludes node_modules, bower_components, jspm_packages, but actually those are excluded via behavior of "include"
|
||||
const exclude = _exclude !== null && _exclude !== void 0 ? _exclude : [outDir]; // TODO technically, outDir is absolute path, but exclude should be relative glob pattern?
|
||||
// TODO compute baseUrl
|
||||
return { rootDir, outDir, include, files, exclude, composite };
|
||||
}
|
||||
exports.getTsConfigDefaults = getTsConfigDefaults;
|
||||
//# sourceMappingURL=configuration.js.map
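Editor's note: the `Object.assign` in `readConfig` above merges compilerOptions from several sources, later arguments winning. The sketch below restates that precedence with placeholder objects; only the ordering matters, the option values are illustrative.

const fromTsconfigBases = { target: 'es2018' };  // @tsconfig/bases defaults (only when no tsconfig was found)
const fromTsconfig = { strict: true };           // tsconfig.json "compilerOptions"
const fromEnvVar = {};                           // TS_NODE_COMPILER_OPTIONS env var (DEFAULTS.compilerOptions)
const fromTsNodeBlock = { module: 'commonjs' };  // tsconfig.json "ts-node": { "compilerOptions": ... }
const fromApiOrCli = {};                         // --compilerOptions / programmatic options
const forcedByTsNode = { sourceMap: true, noEmit: false, outDir: '.ts-node' }; // TS_NODE_COMPILER_OPTIONS above

const merged = Object.assign(
  {},
  fromTsconfigBases,
  fromTsconfig,
  fromEnvVar,
  fromTsNodeBlock,
  fromApiOrCli,
  forcedByTsNode
);
console.log(merged);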
|
1
node_modules/ts-node/dist/configuration.js.map
generated
vendored
Normal file
1
node_modules/ts-node/dist/configuration.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
53
node_modules/ts-node/dist/esm.d.ts
generated
vendored
Normal file
53
node_modules/ts-node/dist/esm.d.ts
generated
vendored
Normal file
@ -0,0 +1,53 @@
|
||||
/// <reference types="node" />
|
||||
/// <reference types="node" />
|
||||
import { Service } from './index';
|
||||
export interface NodeLoaderHooksAPI1 {
|
||||
resolve: NodeLoaderHooksAPI1.ResolveHook;
|
||||
getFormat: NodeLoaderHooksAPI1.GetFormatHook;
|
||||
transformSource: NodeLoaderHooksAPI1.TransformSourceHook;
|
||||
}
|
||||
export declare namespace NodeLoaderHooksAPI1 {
|
||||
type ResolveHook = NodeLoaderHooksAPI2.ResolveHook;
|
||||
type GetFormatHook = (url: string, context: {}, defaultGetFormat: GetFormatHook) => Promise<{
|
||||
format: NodeLoaderHooksFormat;
|
||||
}>;
|
||||
type TransformSourceHook = (source: string | Buffer, context: {
|
||||
url: string;
|
||||
format: NodeLoaderHooksFormat;
|
||||
}, defaultTransformSource: NodeLoaderHooksAPI1.TransformSourceHook) => Promise<{
|
||||
source: string | Buffer;
|
||||
}>;
|
||||
}
|
||||
export interface NodeLoaderHooksAPI2 {
|
||||
resolve: NodeLoaderHooksAPI2.ResolveHook;
|
||||
load: NodeLoaderHooksAPI2.LoadHook;
|
||||
}
|
||||
export declare namespace NodeLoaderHooksAPI2 {
|
||||
type ResolveHook = (specifier: string, context: {
|
||||
conditions?: NodeImportConditions;
|
||||
importAssertions?: NodeImportAssertions;
|
||||
parentURL: string;
|
||||
}, defaultResolve: ResolveHook) => Promise<{
|
||||
url: string;
|
||||
format?: NodeLoaderHooksFormat;
|
||||
shortCircuit?: boolean;
|
||||
}>;
|
||||
type LoadHook = (url: string, context: {
|
||||
format: NodeLoaderHooksFormat | null | undefined;
|
||||
importAssertions?: NodeImportAssertions;
|
||||
}, defaultLoad: NodeLoaderHooksAPI2['load']) => Promise<{
|
||||
format: NodeLoaderHooksFormat;
|
||||
source: string | Buffer | undefined;
|
||||
shortCircuit?: boolean;
|
||||
}>;
|
||||
type NodeImportConditions = unknown;
|
||||
interface NodeImportAssertions {
|
||||
type?: 'json';
|
||||
}
|
||||
}
|
||||
export declare type NodeLoaderHooksFormat = 'builtin' | 'commonjs' | 'dynamic' | 'json' | 'module' | 'wasm';
|
||||
export declare type NodeImportConditions = unknown;
|
||||
export interface NodeImportAssertions {
|
||||
type?: 'json';
|
||||
}
|
||||
export declare function createEsmHooks(tsNodeService: Service): NodeLoaderHooksAPI1 | NodeLoaderHooksAPI2;
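The two hook shapes above can be composed from user code. Below is a minimal sketch of a custom loader that delegates to ts-node's exported hooks, assuming a node version new enough to use the `resolve`/`load` API (`NodeLoaderHooksAPI2`); the file name `loader.mjs` and the pass-through structure are illustrative, not part of ts-node.

```js
// loader.mjs -- run with: node --loader ./loader.mjs app.ts
import { resolve as tsNodeResolve, load as tsNodeLoad } from 'ts-node/esm';

export async function resolve(specifier, context, nextResolve) {
  // Delegate to ts-node so .ts/.tsx/.mts/.cts specifiers resolve correctly.
  return tsNodeResolve(specifier, context, nextResolve);
}

export async function load(url, context, nextLoad) {
  // A real loader could inspect or rewrite the result here before returning it.
  return tsNodeLoad(url, context, nextLoad);
}
```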
228
node_modules/ts-node/dist/esm.js
generated
vendored
Normal file
@ -0,0 +1,228 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createEsmHooks = exports.registerAndCreateEsmHooks = exports.filterHooksByAPIVersion = void 0;
|
||||
const index_1 = require("./index");
|
||||
const url_1 = require("url");
|
||||
const path_1 = require("path");
|
||||
const assert = require("assert");
|
||||
const util_1 = require("./util");
|
||||
const module_1 = require("module");
|
||||
// The hooks API changed in node 16.12.0, so we need to check for backwards compatibility.
|
||||
const newHooksAPI = (0, util_1.versionGteLt)(process.versions.node, '16.12.0');
|
||||
/** @internal */
|
||||
function filterHooksByAPIVersion(hooks) {
|
||||
const { getFormat, load, resolve, transformSource } = hooks;
|
||||
// Explicit return type to avoid TS's non-ideal inferred type
|
||||
const hooksAPI = newHooksAPI
|
||||
? { resolve, load, getFormat: undefined, transformSource: undefined }
|
||||
: { resolve, getFormat, transformSource, load: undefined };
|
||||
return hooksAPI;
|
||||
}
|
||||
exports.filterHooksByAPIVersion = filterHooksByAPIVersion;
|
||||
/** @internal */
|
||||
function registerAndCreateEsmHooks(opts) {
|
||||
// Automatically performs registration just like `-r ts-node/register`
|
||||
const tsNodeInstance = (0, index_1.register)(opts);
|
||||
return createEsmHooks(tsNodeInstance);
|
||||
}
|
||||
exports.registerAndCreateEsmHooks = registerAndCreateEsmHooks;
|
||||
function createEsmHooks(tsNodeService) {
|
||||
tsNodeService.enableExperimentalEsmLoaderInterop();
|
||||
// Custom implementation that considers additional file extensions and automatically adds file extensions
|
||||
const nodeResolveImplementation = tsNodeService.getNodeEsmResolver();
|
||||
const nodeGetFormatImplementation = tsNodeService.getNodeEsmGetFormat();
|
||||
const extensions = tsNodeService.extensions;
|
||||
const hooksAPI = filterHooksByAPIVersion({
|
||||
resolve,
|
||||
load,
|
||||
getFormat,
|
||||
transformSource,
|
||||
});
|
||||
function isFileUrlOrNodeStyleSpecifier(parsed) {
|
||||
// We only understand file:// URLs, but in node, the specifier can be a node-style `./foo` or `foo`
|
||||
const { protocol } = parsed;
|
||||
return protocol === null || protocol === 'file:';
|
||||
}
|
||||
/**
|
||||
* Named "probably" as a reminder that this is a guess.
|
||||
* node does not explicitly tell us if we're resolving the entrypoint or not.
|
||||
*/
|
||||
function isProbablyEntrypoint(specifier, parentURL) {
|
||||
return parentURL === undefined && specifier.startsWith('file://');
|
||||
}
|
||||
// Side-channel between `resolve()` and `load()` hooks
|
||||
const rememberIsProbablyEntrypoint = new Set();
|
||||
const rememberResolvedViaCommonjsFallback = new Set();
|
||||
async function resolve(specifier, context, defaultResolve) {
|
||||
const defer = async () => {
|
||||
const r = await defaultResolve(specifier, context, defaultResolve);
|
||||
return r;
|
||||
};
|
||||
// See: https://github.com/nodejs/node/discussions/41711
|
||||
// nodejs will likely implement a similar fallback. Till then, we can do our users a favor and fall back today.
|
||||
async function entrypointFallback(cb) {
|
||||
try {
|
||||
const resolution = await cb();
|
||||
if ((resolution === null || resolution === void 0 ? void 0 : resolution.url) &&
|
||||
isProbablyEntrypoint(specifier, context.parentURL))
|
||||
rememberIsProbablyEntrypoint.add(resolution.url);
|
||||
return resolution;
|
||||
}
|
||||
catch (esmResolverError) {
|
||||
if (!isProbablyEntrypoint(specifier, context.parentURL))
|
||||
throw esmResolverError;
|
||||
try {
|
||||
let cjsSpecifier = specifier;
|
||||
// Attempt to convert from ESM file:// to CommonJS path
|
||||
try {
|
||||
if (specifier.startsWith('file://'))
|
||||
cjsSpecifier = (0, url_1.fileURLToPath)(specifier);
|
||||
}
|
||||
catch { }
|
||||
const resolution = (0, url_1.pathToFileURL)((0, module_1.createRequire)(process.cwd()).resolve(cjsSpecifier)).toString();
|
||||
rememberIsProbablyEntrypoint.add(resolution);
|
||||
rememberResolvedViaCommonjsFallback.add(resolution);
|
||||
return { url: resolution, format: 'commonjs' };
|
||||
}
|
||||
catch (commonjsResolverError) {
|
||||
throw esmResolverError;
|
||||
}
|
||||
}
|
||||
}
|
||||
return addShortCircuitFlag(async () => {
|
||||
const parsed = (0, url_1.parse)(specifier);
|
||||
const { pathname, protocol, hostname } = parsed;
|
||||
if (!isFileUrlOrNodeStyleSpecifier(parsed)) {
|
||||
return entrypointFallback(defer);
|
||||
}
|
||||
if (protocol !== null && protocol !== 'file:') {
|
||||
return entrypointFallback(defer);
|
||||
}
|
||||
// Malformed file:// URL? We should always see `null` or `''`
|
||||
if (hostname) {
|
||||
// TODO file://./foo sets `hostname` to `'.'`. Perhaps we should special-case this.
|
||||
return entrypointFallback(defer);
|
||||
}
|
||||
// pathname is the path to be resolved
|
||||
return entrypointFallback(() => nodeResolveImplementation.defaultResolve(specifier, context, defaultResolve));
|
||||
});
|
||||
}
|
||||
// `load` from new loader hook API (See description at the top of this file)
|
||||
async function load(url, context, defaultLoad) {
|
||||
return addShortCircuitFlag(async () => {
|
||||
var _a;
|
||||
// If we get a format hint from resolve() on the context then use it
|
||||
// otherwise call the old getFormat() hook using node's old built-in defaultGetFormat() that ships with ts-node
|
||||
const format = (_a = context.format) !== null && _a !== void 0 ? _a : (await getFormat(url, context, nodeGetFormatImplementation.defaultGetFormat)).format;
|
||||
let source = undefined;
|
||||
if (format !== 'builtin' && format !== 'commonjs') {
|
||||
// Call the new defaultLoad() to get the source
|
||||
const { source: rawSource } = await defaultLoad(url, {
|
||||
...context,
|
||||
format,
|
||||
}, defaultLoad);
|
||||
if (rawSource === undefined || rawSource === null) {
|
||||
throw new Error(`Failed to load raw source: Format was '${format}' and url was '${url}'.`);
|
||||
}
|
||||
// Emulate node's built-in old defaultTransformSource() so we can re-use the old transformSource() hook
|
||||
const defaultTransformSource = async (source, _context, _defaultTransformSource) => ({ source });
|
||||
// Call the old hook
|
||||
const { source: transformedSource } = await transformSource(rawSource, { url, format }, defaultTransformSource);
|
||||
source = transformedSource;
|
||||
}
|
||||
return { format, source };
|
||||
});
|
||||
}
|
||||
async function getFormat(url, context, defaultGetFormat) {
|
||||
const defer = (overrideUrl = url) => defaultGetFormat(overrideUrl, context, defaultGetFormat);
|
||||
// See: https://github.com/nodejs/node/discussions/41711
|
||||
// nodejs will likely implement a similar fallback. Till then, we can do our users a favor and fall back today.
|
||||
async function entrypointFallback(cb) {
|
||||
try {
|
||||
return await cb();
|
||||
}
|
||||
catch (getFormatError) {
|
||||
if (!rememberIsProbablyEntrypoint.has(url))
|
||||
throw getFormatError;
|
||||
return { format: 'commonjs' };
|
||||
}
|
||||
}
|
||||
const parsed = (0, url_1.parse)(url);
|
||||
if (!isFileUrlOrNodeStyleSpecifier(parsed)) {
|
||||
return entrypointFallback(defer);
|
||||
}
|
||||
const { pathname } = parsed;
|
||||
assert(pathname !== null, 'ESM getFormat() hook: URL should never have null pathname');
|
||||
const nativePath = (0, url_1.fileURLToPath)(url);
|
||||
let nodeSays;
|
||||
// If file has extension not understood by node, then ask node how it would treat the emitted extension.
|
||||
// E.g. .mts compiles to .mjs, so ask node how to classify an .mjs file.
|
||||
const ext = (0, path_1.extname)(nativePath);
|
||||
const tsNodeIgnored = tsNodeService.ignored(nativePath);
|
||||
const nodeEquivalentExt = extensions.nodeEquivalents.get(ext);
|
||||
if (nodeEquivalentExt && !tsNodeIgnored) {
|
||||
nodeSays = await entrypointFallback(() => defer((0, url_1.format)((0, url_1.pathToFileURL)(nativePath + nodeEquivalentExt))));
|
||||
}
|
||||
else {
|
||||
try {
|
||||
nodeSays = await entrypointFallback(defer);
|
||||
}
|
||||
catch (e) {
|
||||
if (e instanceof Error &&
|
||||
tsNodeIgnored &&
|
||||
extensions.nodeDoesNotUnderstand.includes(ext)) {
|
||||
e.message +=
|
||||
`\n\n` +
|
||||
`Hint:\n` +
|
||||
`ts-node is configured to ignore this file.\n` +
|
||||
`If you want ts-node to handle this file, consider enabling the "skipIgnore" option or adjusting your "ignore" patterns.\n` +
|
||||
`https://typestrong.org/ts-node/docs/scope\n`;
|
||||
}
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
// For files compiled by ts-node that node believes are either CJS or ESM, check if we should override that classification
|
||||
if (!tsNodeService.ignored(nativePath) &&
|
||||
(nodeSays.format === 'commonjs' || nodeSays.format === 'module')) {
|
||||
const { moduleType } = tsNodeService.moduleTypeClassifier.classifyModuleByModuleTypeOverrides((0, util_1.normalizeSlashes)(nativePath));
|
||||
if (moduleType === 'cjs') {
|
||||
return { format: 'commonjs' };
|
||||
}
|
||||
else if (moduleType === 'esm') {
|
||||
return { format: 'module' };
|
||||
}
|
||||
}
|
||||
return nodeSays;
|
||||
}
|
||||
async function transformSource(source, context, defaultTransformSource) {
|
||||
if (source === null || source === undefined) {
|
||||
throw new Error('No source');
|
||||
}
|
||||
const defer = () => defaultTransformSource(source, context, defaultTransformSource);
|
||||
const sourceAsString = typeof source === 'string' ? source : source.toString('utf8');
|
||||
const { url } = context;
|
||||
const parsed = (0, url_1.parse)(url);
|
||||
if (!isFileUrlOrNodeStyleSpecifier(parsed)) {
|
||||
return defer();
|
||||
}
|
||||
const nativePath = (0, url_1.fileURLToPath)(url);
|
||||
if (tsNodeService.ignored(nativePath)) {
|
||||
return defer();
|
||||
}
|
||||
const emittedJs = tsNodeService.compile(sourceAsString, nativePath);
|
||||
return { source: emittedJs };
|
||||
}
|
||||
return hooksAPI;
|
||||
}
|
||||
exports.createEsmHooks = createEsmHooks;
|
||||
async function addShortCircuitFlag(fn) {
|
||||
const ret = await fn();
|
||||
// Not sure if this is necessary; being lazy. Can revisit in the future.
|
||||
if (ret == null)
|
||||
return ret;
|
||||
return {
|
||||
...ret,
|
||||
shortCircuit: true,
|
||||
};
|
||||
}
|
||||
//# sourceMappingURL=esm.js.map
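As a usage note, the hooks above can also be built programmatically; the sketch below mirrors `registerAndCreateEsmHooks()` and relies on the public `register` and `createEsmHooks` exports, with option values chosen only for illustration.

```js
const { register, createEsmHooks } = require('ts-node');

// Create and register a service, then derive loader hooks from it.
const service = register({ transpileOnly: true });
const hooks = createEsmHooks(service);

// On node >= 16.12 `hooks` has { resolve, load }; on older versions it has
// { resolve, getFormat, transformSource } (see filterHooksByAPIVersion above).
```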
1
node_modules/ts-node/dist/esm.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/ts-node/dist/file-extensions.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
export {};
133
node_modules/ts-node/dist/file-extensions.js
generated
vendored
Normal file
@ -0,0 +1,133 @@
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getExtensions = void 0;
|
||||
const util_1 = require("./util");
|
||||
const nodeEquivalents = new Map([
|
||||
['.ts', '.js'],
|
||||
['.tsx', '.js'],
|
||||
['.jsx', '.js'],
|
||||
['.mts', '.mjs'],
|
||||
['.cts', '.cjs'],
|
||||
]);
|
||||
const tsResolverEquivalents = new Map([
|
||||
['.ts', ['.js']],
|
||||
['.tsx', ['.js', '.jsx']],
|
||||
['.mts', ['.mjs']],
|
||||
['.cts', ['.cjs']],
|
||||
]);
|
||||
// All extensions understood by vanilla node
|
||||
const vanillaNodeExtensions = [
|
||||
'.js',
|
||||
'.json',
|
||||
'.node',
|
||||
'.mjs',
|
||||
'.cjs',
|
||||
];
|
||||
// Extensions added by vanilla node's require() if you omit them:
|
||||
// js, json, node
|
||||
// Extensions added by vanilla node if you omit them with --experimental-specifier-resolution=node
|
||||
// js, json, node, mjs
|
||||
// Extensions added by ESM codepath's legacy package.json "main" resolver
|
||||
// js, json, node (not mjs!)
|
||||
const nodeDoesNotUnderstand = [
|
||||
'.ts',
|
||||
'.tsx',
|
||||
'.jsx',
|
||||
'.cts',
|
||||
'.mts',
|
||||
];
|
||||
/**
|
||||
* [MUST_UPDATE_FOR_NEW_FILE_EXTENSIONS]
|
||||
* @internal
|
||||
*/
|
||||
function getExtensions(config, options, tsVersion) {
|
||||
// TS 4.5 is first version to understand .cts, .mts, .cjs, and .mjs extensions
|
||||
const tsSupportsMtsCtsExts = (0, util_1.versionGteLt)(tsVersion, '4.5.0');
|
||||
const requiresHigherTypescriptVersion = [];
|
||||
if (!tsSupportsMtsCtsExts)
|
||||
requiresHigherTypescriptVersion.push('.cts', '.cjs', '.mts', '.mjs');
|
||||
const allPossibleExtensionsSortedByPreference = Array.from(new Set([
|
||||
...(options.preferTsExts ? nodeDoesNotUnderstand : []),
|
||||
...vanillaNodeExtensions,
|
||||
...nodeDoesNotUnderstand,
|
||||
]));
|
||||
const compiledJsUnsorted = ['.ts'];
|
||||
const compiledJsxUnsorted = [];
|
||||
if (config.options.jsx)
|
||||
compiledJsxUnsorted.push('.tsx');
|
||||
if (tsSupportsMtsCtsExts)
|
||||
compiledJsUnsorted.push('.mts', '.cts');
|
||||
if (config.options.allowJs) {
|
||||
compiledJsUnsorted.push('.js');
|
||||
if (config.options.jsx)
|
||||
compiledJsxUnsorted.push('.jsx');
|
||||
if (tsSupportsMtsCtsExts)
|
||||
compiledJsUnsorted.push('.mjs', '.cjs');
|
||||
}
|
||||
const compiledUnsorted = [...compiledJsUnsorted, ...compiledJsxUnsorted];
|
||||
const compiled = allPossibleExtensionsSortedByPreference.filter((ext) => compiledUnsorted.includes(ext));
|
||||
const compiledNodeDoesNotUnderstand = nodeDoesNotUnderstand.filter((ext) => compiled.includes(ext));
|
||||
/**
|
||||
* TS's resolver can resolve foo.js to foo.ts, by replacing .js extension with several source extensions.
|
||||
* IMPORTANT: Must preserve ordering according to preferTsExts!
|
||||
* Must include the .js/.mjs/.cjs extension in the array!
|
||||
* This affects resolution behavior!
|
||||
* [MUST_UPDATE_FOR_NEW_FILE_EXTENSIONS]
|
||||
*/
|
||||
const r = allPossibleExtensionsSortedByPreference.filter((ext) => [...compiledUnsorted, '.js', '.mjs', '.cjs', '.mts', '.cts'].includes(ext));
|
||||
const replacementsForJs = r.filter((ext) => ['.js', '.jsx', '.ts', '.tsx'].includes(ext));
|
||||
const replacementsForJsx = r.filter((ext) => ['.jsx', '.tsx'].includes(ext));
|
||||
const replacementsForMjs = r.filter((ext) => ['.mjs', '.mts'].includes(ext));
|
||||
const replacementsForCjs = r.filter((ext) => ['.cjs', '.cts'].includes(ext));
|
||||
const replacementsForJsOrMjs = r.filter((ext) => ['.js', '.jsx', '.ts', '.tsx', '.mjs', '.mts'].includes(ext));
|
||||
// Node allows omitting .js or .mjs extension in certain situations (CJS, ESM w/experimental flag)
|
||||
// So anything that compiles to .js or .mjs can also be omitted.
|
||||
const experimentalSpecifierResolutionAddsIfOmitted = Array.from(new Set([...replacementsForJsOrMjs, '.json', '.node']));
|
||||
// Same as above, except node curiously doesn't do .mjs here
|
||||
const legacyMainResolveAddsIfOmitted = Array.from(new Set([...replacementsForJs, '.json', '.node']));
|
||||
return {
|
||||
/** All file extensions we transform, ordered by resolution preference according to preferTsExts */
|
||||
compiled,
|
||||
/** Resolved extensions that vanilla node will not understand; we should handle them */
|
||||
nodeDoesNotUnderstand,
|
||||
/** Like the above, but only the ones we're compiling */
|
||||
compiledNodeDoesNotUnderstand,
|
||||
/**
|
||||
* Mapping from extensions understood by tsc to the equivalent for node,
|
||||
* as far as getFormat is concerned.
|
||||
*/
|
||||
nodeEquivalents,
|
||||
/**
|
||||
* Mapping from extensions rejected by TSC in import specifiers, to the
|
||||
* possible alternatives that TS's resolver will accept.
|
||||
*
|
||||
* When we allow users to opt-in to .ts extensions in import specifiers, TS's
|
||||
* resolver requires us to replace the .ts extensions with .js alternatives.
|
||||
* Otherwise, resolution fails.
|
||||
*
|
||||
* Note TS's resolver is only used by, and only required for, typechecking.
|
||||
* This is separate from node's resolver, which we hook separately and which
|
||||
* does not require this mapping.
|
||||
*/
|
||||
tsResolverEquivalents,
|
||||
/**
|
||||
* Extensions that we can support if the user upgrades their typescript version.
|
||||
* Used when raising hints.
|
||||
*/
|
||||
requiresHigherTypescriptVersion,
|
||||
/**
|
||||
* --experimental-specifier-resolution=node will add these extensions.
|
||||
*/
|
||||
experimentalSpecifierResolutionAddsIfOmitted,
|
||||
/**
|
||||
* ESM loader will add these extensions to package.json "main" field
|
||||
*/
|
||||
legacyMainResolveAddsIfOmitted,
|
||||
replacementsForMjs,
|
||||
replacementsForCjs,
|
||||
replacementsForJsx,
|
||||
replacementsForJs,
|
||||
};
|
||||
}
|
||||
exports.getExtensions = getExtensions;
|
||||
//# sourceMappingURL=file-extensions.js.map
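The ordering rule above is easiest to see in isolation. This is a simplified sketch, assuming `allowJs` and `jsx` are off, of how `preferTsExts` changes which extension wins for an extensionless specifier; it re-derives the list rather than calling the internal `getExtensions()`.

```js
const vanillaNodeExtensions = ['.js', '.json', '.node', '.mjs', '.cjs'];
const nodeDoesNotUnderstand = ['.ts', '.tsx', '.jsx', '.cts', '.mts'];

function orderedExtensions(preferTsExts) {
  // Same Set trick as above: optional TS-first prefix, then node's own list,
  // then the TS extensions appended once (duplicates collapse).
  return Array.from(new Set([
    ...(preferTsExts ? nodeDoesNotUnderstand : []),
    ...vanillaNodeExtensions,
    ...nodeDoesNotUnderstand,
  ]));
}

console.log(orderedExtensions(true)[0]);  // '.ts'  -- index.ts beats index.js
console.log(orderedExtensions(false)[0]); // '.js'
```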
1
node_modules/ts-node/dist/file-extensions.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
332
node_modules/ts-node/dist/index.d.ts
generated
vendored
Normal file
@ -0,0 +1,332 @@
import { BaseError } from 'make-error';
|
||||
import type * as _ts from 'typescript';
|
||||
import type { TSCommon } from './ts-compiler-types';
|
||||
import type { createEsmHooks as createEsmHooksFn } from './esm';
|
||||
export { TSCommon };
|
||||
export { createRepl, CreateReplOptions, ReplService, EvalAwarePartialHost, } from './repl';
|
||||
export type { TranspilerModule, TranspilerFactory, CreateTranspilerOptions, TranspileOutput, TranspileOptions, Transpiler, } from './transpilers/types';
|
||||
export type { NodeLoaderHooksAPI1, NodeLoaderHooksAPI2, NodeLoaderHooksFormat, } from './esm';
|
||||
/**
|
||||
* Registered `ts-node` instance information.
|
||||
*/
|
||||
export declare const REGISTER_INSTANCE: unique symbol;
|
||||
/**
|
||||
* Expose `REGISTER_INSTANCE` information on node.js `process`.
|
||||
*/
|
||||
declare global {
|
||||
namespace NodeJS {
|
||||
interface Process {
|
||||
[REGISTER_INSTANCE]?: Service;
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Export the current version.
|
||||
*/
|
||||
export declare const VERSION: any;
|
||||
/**
|
||||
* Options for creating a new TypeScript compiler instance.
|
||||
|
||||
* @category Basic
|
||||
*/
|
||||
export interface CreateOptions {
|
||||
/**
|
||||
* Behave as if invoked within this working directory. Roughly equivalent to `cd $dir && ts-node ...`
|
||||
*
|
||||
* @default process.cwd()
|
||||
*/
|
||||
cwd?: string;
|
||||
/**
|
||||
* Legacy alias for `cwd`
|
||||
*
|
||||
* @deprecated use `projectSearchDir` or `cwd`
|
||||
*/
|
||||
dir?: string;
|
||||
/**
|
||||
* Emit output files into `.ts-node` directory.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
emit?: boolean;
|
||||
/**
|
||||
* Scope compiler to files within `scopeDir`.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
scope?: boolean;
|
||||
/**
|
||||
* @default First of: `tsconfig.json` "rootDir" if specified, directory containing `tsconfig.json`, or cwd if no `tsconfig.json` is loaded.
|
||||
*/
|
||||
scopeDir?: string;
|
||||
/**
|
||||
* Use pretty diagnostic formatter.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
pretty?: boolean;
|
||||
/**
|
||||
* Use TypeScript's faster `transpileModule`.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
transpileOnly?: boolean;
|
||||
/**
|
||||
* **DEPRECATED** Specify whether type-checking is enabled (i.e. `transpileOnly == false`).
|
||||
*
|
||||
* @default true
|
||||
*/
|
||||
typeCheck?: boolean;
|
||||
/**
|
||||
* Use TypeScript's compiler host API instead of the language service API.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
compilerHost?: boolean;
|
||||
/**
|
||||
* Logs TypeScript errors to stderr instead of throwing exceptions.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
logError?: boolean;
|
||||
/**
|
||||
* Load "files" and "include" from `tsconfig.json` on startup.
|
||||
*
|
||||
* Default is to override `tsconfig.json` "files" and "include" to only include the entrypoint script.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
files?: boolean;
|
||||
/**
|
||||
* Specify a custom TypeScript compiler.
|
||||
*
|
||||
* @default "typescript"
|
||||
*/
|
||||
compiler?: string;
|
||||
/**
|
||||
* Specify a custom transpiler for use with transpileOnly
|
||||
*/
|
||||
transpiler?: string | [string, object];
|
||||
/**
|
||||
* Transpile with swc instead of the TypeScript compiler, and skip typechecking.
|
||||
*
|
||||
* Equivalent to setting both `transpileOnly: true` and `transpiler: 'ts-node/transpilers/swc'`
|
||||
*
|
||||
* For complete instructions: https://typestrong.org/ts-node/docs/transpilers
|
||||
*/
|
||||
swc?: boolean;
|
||||
/**
|
||||
* Paths which should not be compiled.
|
||||
*
|
||||
* Each string in the array is converted to a regular expression via `new RegExp()` and tested against source paths prior to compilation.
|
||||
*
|
||||
* Source paths are normalized to posix-style separators, relative to the directory containing `tsconfig.json` or to cwd if no `tsconfig.json` is loaded.
|
||||
*
|
||||
* Default is to ignore all node_modules subdirectories.
|
||||
*
|
||||
* @default ["(?:^|/)node_modules/"]
|
||||
*/
|
||||
ignore?: string[];
|
||||
/**
|
||||
* Path to TypeScript config file or directory containing a `tsconfig.json`.
|
||||
* Similar to the `tsc --project` flag: https://www.typescriptlang.org/docs/handbook/compiler-options.html
|
||||
*/
|
||||
project?: string;
|
||||
/**
|
||||
* Search for TypeScript config file (`tsconfig.json`) in this or parent directories.
|
||||
*/
|
||||
projectSearchDir?: string;
|
||||
/**
|
||||
* Skip project config resolution and loading.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
skipProject?: boolean;
|
||||
/**
|
||||
* Skip ignore check, so that compilation will be attempted for all files with matching extensions.
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
skipIgnore?: boolean;
|
||||
/**
|
||||
* JSON object to merge with TypeScript `compilerOptions`.
|
||||
*
|
||||
* @allOf [{"$ref": "https://schemastore.azurewebsites.net/schemas/json/tsconfig.json#definitions/compilerOptionsDefinition/properties/compilerOptions"}]
|
||||
*/
|
||||
compilerOptions?: object;
|
||||
/**
|
||||
* Ignore TypeScript warnings by diagnostic code.
|
||||
*/
|
||||
ignoreDiagnostics?: Array<number | string>;
|
||||
/**
|
||||
* Modules to require, like node's `--require` flag.
|
||||
*
|
||||
* If specified in `tsconfig.json`, the modules will be resolved relative to the `tsconfig.json` file.
|
||||
*
|
||||
* If specified programmatically, each input string should be pre-resolved to an absolute path for
|
||||
* best results.
|
||||
*/
|
||||
require?: Array<string>;
|
||||
readFile?: (path: string) => string | undefined;
|
||||
fileExists?: (path: string) => boolean;
|
||||
transformers?: _ts.CustomTransformers | ((p: _ts.Program) => _ts.CustomTransformers);
|
||||
/**
|
||||
* Allows the usage of top level await in REPL.
|
||||
*
|
||||
* Uses node's implementation which accomplishes this with an AST syntax transformation.
|
||||
*
|
||||
* Enabled by default when tsconfig target is es2018 or above. Set to false to disable.
|
||||
*
|
||||
* **Note**: setting to `true` when tsconfig target is too low will throw an Error. Leave as `undefined`
|
||||
* to get default, automatic behavior.
|
||||
*/
|
||||
experimentalReplAwait?: boolean;
|
||||
/**
|
||||
* Override certain paths to be compiled and executed as CommonJS or ECMAScript modules.
|
||||
* When overridden, the tsconfig "module" and package.json "type" fields are overridden, and
|
||||
* the file extension is ignored.
|
||||
* This is useful if you cannot use .mts, .cts, .mjs, or .cjs file extensions;
|
||||
* it achieves the same effect.
|
||||
*
|
||||
* Each key is a glob pattern following the same rules as tsconfig's "include" array.
|
||||
* When multiple patterns match the same file, the last pattern takes precedence.
|
||||
*
|
||||
* `cjs` overrides matches files to compile and execute as CommonJS.
|
||||
* `esm` overrides matches files to compile and execute as native ECMAScript modules.
|
||||
* `package` overrides either of the above to default behavior, which obeys package.json "type" and
|
||||
* tsconfig.json "module" options.
|
||||
*/
|
||||
moduleTypes?: ModuleTypes;
|
||||
/**
|
||||
* A function to collect trace messages from the TypeScript compiler, for example when `traceResolution` is enabled.
|
||||
*
|
||||
* @default console.log
|
||||
*/
|
||||
tsTrace?: (str: string) => void;
|
||||
/**
|
||||
* Enable native ESM support.
|
||||
*
|
||||
* For details, see https://typestrong.org/ts-node/docs/imports#native-ecmascript-modules
|
||||
*/
|
||||
esm?: boolean;
|
||||
/**
|
||||
* Re-order file extensions so that TypeScript imports are preferred.
|
||||
*
|
||||
* For example, when both `index.js` and `index.ts` exist, enabling this option causes `require('./index')` to resolve to `index.ts` instead of `index.js`
|
||||
*
|
||||
* @default false
|
||||
*/
|
||||
preferTsExts?: boolean;
|
||||
/**
|
||||
* Like node's `--experimental-specifier-resolution`, but can also be set in your `tsconfig.json` for convenience.
|
||||
*
|
||||
* For details, see https://nodejs.org/dist/latest-v18.x/docs/api/esm.html#customizing-esm-specifier-resolution-algorithm
|
||||
*/
|
||||
experimentalSpecifierResolution?: 'node' | 'explicit';
|
||||
/**
|
||||
* Allow using voluntary `.ts` file extension in import specifiers.
|
||||
*
|
||||
* Typically, in ESM projects, import specifiers must have an emit extension, `.js`, `.cjs`, or `.mjs`,
|
||||
* and we automatically map to the corresponding `.ts`, `.cts`, or `.mts` source file. This is the
|
||||
* recommended approach.
|
||||
*
|
||||
* However, if you really want to use `.ts` in import specifiers, and are aware that this may
|
||||
* break tooling, you can enable this flag.
|
||||
*/
|
||||
experimentalTsImportSpecifiers?: boolean;
|
||||
}
|
||||
export declare type ModuleTypes = Record<string, ModuleTypeOverride>;
|
||||
export declare type ModuleTypeOverride = 'cjs' | 'esm' | 'package';
|
||||
/**
|
||||
* Options for registering a TypeScript compiler instance globally.
|
||||
|
||||
* @category Basic
|
||||
*/
|
||||
export interface RegisterOptions extends CreateOptions {
|
||||
/**
|
||||
* Enable experimental features that re-map imports and require calls to support:
|
||||
* `baseUrl`, `paths`, `rootDirs`, `.js` to `.ts` file extension mappings,
|
||||
* `outDir` to `rootDir` mappings for composite projects and monorepos.
|
||||
*
|
||||
* For details, see https://github.com/TypeStrong/ts-node/issues/1514
|
||||
*/
|
||||
experimentalResolver?: boolean;
|
||||
}
|
||||
export declare type ExperimentalSpecifierResolution = 'node' | 'explicit';
|
||||
/**
|
||||
* Must be an interface to support `typescript-json-schema`.
|
||||
*/
|
||||
export interface TsConfigOptions extends Omit<RegisterOptions, 'transformers' | 'readFile' | 'fileExists' | 'skipProject' | 'project' | 'dir' | 'cwd' | 'projectSearchDir' | 'optionBasePaths' | 'tsTrace'> {
|
||||
}
|
||||
/**
|
||||
* Information retrieved from type info check.
|
||||
*/
|
||||
export interface TypeInfo {
|
||||
name: string;
|
||||
comment: string;
|
||||
}
|
||||
/**
|
||||
* TypeScript diagnostics error.
|
||||
*/
|
||||
export declare class TSError extends BaseError {
|
||||
diagnosticCodes: number[];
|
||||
name: string;
|
||||
diagnosticText: string;
|
||||
diagnostics: ReadonlyArray<_ts.Diagnostic>;
|
||||
constructor(diagnosticText: string, diagnosticCodes: number[], diagnostics?: ReadonlyArray<_ts.Diagnostic>);
|
||||
}
|
||||
/**
|
||||
* Primary ts-node service, which wraps the TypeScript API and can compile TypeScript to JavaScript
|
||||
*/
|
||||
export interface Service {
|
||||
ts: TSCommon;
|
||||
config: _ts.ParsedCommandLine;
|
||||
options: RegisterOptions;
|
||||
enabled(enabled?: boolean): boolean;
|
||||
ignored(fileName: string): boolean;
|
||||
compile(code: string, fileName: string, lineOffset?: number): string;
|
||||
getTypeInfo(code: string, fileName: string, position: number): TypeInfo;
|
||||
}
|
||||
/**
|
||||
* Re-export of `Service` interface for backwards-compatibility
|
||||
* @deprecated use `Service` instead
|
||||
* @see {Service}
|
||||
*/
|
||||
export declare type Register = Service;
|
||||
/**
|
||||
* Create a new TypeScript compiler instance and register it onto node.js
|
||||
*
|
||||
* @category Basic
|
||||
*/
|
||||
export declare function register(opts?: RegisterOptions): Service;
|
||||
/**
|
||||
* Register TypeScript compiler instance onto node.js
|
||||
|
||||
* @category Basic
|
||||
*/
|
||||
export declare function register(service: Service): Service;
|
||||
/**
|
||||
* Create TypeScript compiler instance.
|
||||
*
|
||||
* @category Basic
|
||||
*/
|
||||
export declare function create(rawOptions?: CreateOptions): Service;
|
||||
/**
|
||||
* Create an implementation of node's ESM loader hooks.
|
||||
*
|
||||
* This may be useful if you
|
||||
* want to wrap or compose the loader hooks to add additional functionality or
|
||||
* combine with another loader.
|
||||
*
|
||||
* Node changed the hooks API, so there are two possible APIs. This function
|
||||
* detects your node version and returns the appropriate API.
|
||||
*
|
||||
* @category ESM Loader
|
||||
*/
|
||||
export declare const createEsmHooks: typeof createEsmHooksFn;
|
||||
/**
|
||||
* When using `module: nodenext` or `module: node12`, there are two possible styles of emit depending on file extension or package.json "type":
|
||||
*
|
||||
* - CommonJS with dynamic imports preserved (not transformed into `require()` calls)
|
||||
* - ECMAScript modules with `import foo = require()` transformed into `require = createRequire(); const foo = require()`
|
||||
*/
|
||||
export declare type NodeModuleEmitKind = 'nodeesm' | 'nodecjs';
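To tie the declarations above together, here is a minimal sketch of the programmatic API; option values are examples, not defaults.

```js
const { register, create } = require('ts-node');

// Global registration: installs require() hooks so .ts files load directly.
register({
  transpileOnly: true,
  compilerOptions: { module: 'commonjs' },
});

// Or build a standalone Service without touching require():
const service = create({ transpileOnly: true });
const js = service.compile('const n: number = 1;', 'example.ts');
console.log(js); // emitted JavaScript for example.ts
```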
953
node_modules/ts-node/dist/index.js
generated
vendored
Normal file
@ -0,0 +1,953 @@
"use strict";
|
||||
var _a, _b;
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createEsmHooks = exports.createFromPreloadedConfig = exports.create = exports.register = exports.TSError = exports.DEFAULTS = exports.VERSION = exports.debug = exports.INSPECT_CUSTOM = exports.env = exports.REGISTER_INSTANCE = exports.createRepl = void 0;
|
||||
const path_1 = require("path");
|
||||
const module_1 = require("module");
|
||||
const util = require("util");
|
||||
const url_1 = require("url");
|
||||
const make_error_1 = require("make-error");
|
||||
const util_1 = require("./util");
|
||||
const configuration_1 = require("./configuration");
|
||||
const module_type_classifier_1 = require("./module-type-classifier");
|
||||
const resolver_functions_1 = require("./resolver-functions");
|
||||
const cjs_resolve_hooks_1 = require("./cjs-resolve-hooks");
|
||||
const node_module_type_classifier_1 = require("./node-module-type-classifier");
|
||||
const file_extensions_1 = require("./file-extensions");
|
||||
const ts_transpile_module_1 = require("./ts-transpile-module");
|
||||
var repl_1 = require("./repl");
|
||||
Object.defineProperty(exports, "createRepl", { enumerable: true, get: function () { return repl_1.createRepl; } });
|
||||
/**
|
||||
* Does this version of node obey the package.json "type" field
|
||||
* and throw ERR_REQUIRE_ESM when attempting to require() an ESM module.
|
||||
*/
|
||||
const engineSupportsPackageTypeField = parseInt(process.versions.node.split('.')[0], 10) >= 12;
|
||||
/**
|
||||
* Assert that script can be loaded as CommonJS when we attempt to require it.
|
||||
* If it should be loaded as ESM, throw ERR_REQUIRE_ESM like node does.
|
||||
*
|
||||
* Loaded conditionally so we don't need to support older node versions
|
||||
*/
|
||||
let assertScriptCanLoadAsCJS = engineSupportsPackageTypeField
|
||||
? require('../dist-raw/node-internal-modules-cjs-loader').assertScriptCanLoadAsCJSImpl
|
||||
: () => {
|
||||
/* noop */
|
||||
};
|
||||
/**
|
||||
* Registered `ts-node` instance information.
|
||||
*/
|
||||
exports.REGISTER_INSTANCE = Symbol.for('ts-node.register.instance');
|
||||
/** @internal */
|
||||
exports.env = process.env;
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
exports.INSPECT_CUSTOM = util.inspect.custom || 'inspect';
|
||||
/**
|
||||
* Debugging `ts-node`.
|
||||
*/
|
||||
const shouldDebug = (0, util_1.yn)(exports.env.TS_NODE_DEBUG);
|
||||
/** @internal */
|
||||
exports.debug = shouldDebug
|
||||
? (...args) => console.log(`[ts-node ${new Date().toISOString()}]`, ...args)
|
||||
: () => undefined;
|
||||
const debugFn = shouldDebug
|
||||
? (key, fn) => {
|
||||
let i = 0;
|
||||
return (x) => {
|
||||
(0, exports.debug)(key, x, ++i);
|
||||
return fn(x);
|
||||
};
|
||||
}
|
||||
: (_, fn) => fn;
|
||||
/**
|
||||
* Export the current version.
|
||||
*/
|
||||
exports.VERSION = require('../package.json').version;
|
||||
/**
|
||||
* Default register options, including values specified via environment
|
||||
* variables.
|
||||
* @internal
|
||||
*/
|
||||
exports.DEFAULTS = {
|
||||
cwd: (_a = exports.env.TS_NODE_CWD) !== null && _a !== void 0 ? _a : exports.env.TS_NODE_DIR,
|
||||
emit: (0, util_1.yn)(exports.env.TS_NODE_EMIT),
|
||||
scope: (0, util_1.yn)(exports.env.TS_NODE_SCOPE),
|
||||
scopeDir: exports.env.TS_NODE_SCOPE_DIR,
|
||||
files: (0, util_1.yn)(exports.env.TS_NODE_FILES),
|
||||
pretty: (0, util_1.yn)(exports.env.TS_NODE_PRETTY),
|
||||
compiler: exports.env.TS_NODE_COMPILER,
|
||||
compilerOptions: (0, util_1.parse)(exports.env.TS_NODE_COMPILER_OPTIONS),
|
||||
ignore: (0, util_1.split)(exports.env.TS_NODE_IGNORE),
|
||||
project: exports.env.TS_NODE_PROJECT,
|
||||
skipProject: (0, util_1.yn)(exports.env.TS_NODE_SKIP_PROJECT),
|
||||
skipIgnore: (0, util_1.yn)(exports.env.TS_NODE_SKIP_IGNORE),
|
||||
preferTsExts: (0, util_1.yn)(exports.env.TS_NODE_PREFER_TS_EXTS),
|
||||
ignoreDiagnostics: (0, util_1.split)(exports.env.TS_NODE_IGNORE_DIAGNOSTICS),
|
||||
transpileOnly: (0, util_1.yn)(exports.env.TS_NODE_TRANSPILE_ONLY),
|
||||
typeCheck: (0, util_1.yn)(exports.env.TS_NODE_TYPE_CHECK),
|
||||
compilerHost: (0, util_1.yn)(exports.env.TS_NODE_COMPILER_HOST),
|
||||
logError: (0, util_1.yn)(exports.env.TS_NODE_LOG_ERROR),
|
||||
experimentalReplAwait: (_b = (0, util_1.yn)(exports.env.TS_NODE_EXPERIMENTAL_REPL_AWAIT)) !== null && _b !== void 0 ? _b : undefined,
|
||||
tsTrace: console.log.bind(console),
|
||||
};
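The mapping above means most options can be supplied through the environment instead of code; a small illustration follows (values chosen for the example, and the env var must be set before ts-node is first required, since `DEFAULTS` is computed at module load time).

```js
// Equivalent ways of enabling transpile-only mode: via environment (read into
// DEFAULTS above) or via explicit options.
process.env.TS_NODE_TRANSPILE_ONLY = 'true'; // picked up by util_1.yn(...)
require('ts-node').register();               // uses DEFAULTS, so transpileOnly is on

// ...or equivalently, pass the option directly:
// require('ts-node').register({ transpileOnly: true });
```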
|
||||
/**
|
||||
* TypeScript diagnostics error.
|
||||
*/
|
||||
class TSError extends make_error_1.BaseError {
|
||||
constructor(diagnosticText, diagnosticCodes, diagnostics = []) {
|
||||
super(`⨯ Unable to compile TypeScript:\n${diagnosticText}`);
|
||||
this.diagnosticCodes = diagnosticCodes;
|
||||
this.name = 'TSError';
|
||||
Object.defineProperty(this, 'diagnosticText', {
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: diagnosticText,
|
||||
});
|
||||
Object.defineProperty(this, 'diagnostics', {
|
||||
configurable: true,
|
||||
writable: true,
|
||||
value: diagnostics,
|
||||
});
|
||||
}
|
||||
/**
|
||||
* @internal
|
||||
*/
|
||||
[exports.INSPECT_CUSTOM]() {
|
||||
return this.diagnosticText;
|
||||
}
|
||||
}
|
||||
exports.TSError = TSError;
|
||||
const TS_NODE_SERVICE_BRAND = Symbol('TS_NODE_SERVICE_BRAND');
|
||||
function register(serviceOrOpts) {
|
||||
// Is this a Service or a RegisterOptions?
|
||||
let service = serviceOrOpts;
|
||||
if (!(serviceOrOpts === null || serviceOrOpts === void 0 ? void 0 : serviceOrOpts[TS_NODE_SERVICE_BRAND])) {
|
||||
// Not a service; is options
|
||||
service = create((serviceOrOpts !== null && serviceOrOpts !== void 0 ? serviceOrOpts : {}));
|
||||
}
|
||||
const originalJsHandler = require.extensions['.js'];
|
||||
// Expose registered instance globally.
|
||||
process[exports.REGISTER_INSTANCE] = service;
|
||||
// Register the extensions.
|
||||
registerExtensions(service.options.preferTsExts, service.extensions.compiled, service, originalJsHandler);
|
||||
(0, cjs_resolve_hooks_1.installCommonjsResolveHooksIfNecessary)(service);
|
||||
// Require specified modules before start-up.
|
||||
module_1.Module._preloadModules(service.options.require);
|
||||
return service;
|
||||
}
|
||||
exports.register = register;
|
||||
/**
|
||||
* Create TypeScript compiler instance.
|
||||
*
|
||||
* @category Basic
|
||||
*/
|
||||
function create(rawOptions = {}) {
|
||||
const foundConfigResult = (0, configuration_1.findAndReadConfig)(rawOptions);
|
||||
return createFromPreloadedConfig(foundConfigResult);
|
||||
}
|
||||
exports.create = create;
|
||||
/** @internal */
|
||||
function createFromPreloadedConfig(foundConfigResult) {
|
||||
var _a, _b, _c, _d;
|
||||
const { configFilePath, cwd, options, config, compiler, projectLocalResolveDir, optionBasePaths, } = foundConfigResult;
|
||||
const projectLocalResolveHelper = (0, util_1.createProjectLocalResolveHelper)(projectLocalResolveDir);
|
||||
const ts = (0, configuration_1.loadCompiler)(compiler);
|
||||
// Experimental REPL await is not compatible with targets lower than ES2018
|
||||
const targetSupportsTla = config.options.target >= ts.ScriptTarget.ES2018;
|
||||
if (options.experimentalReplAwait === true && !targetSupportsTla) {
|
||||
throw new Error('Experimental REPL await is not compatible with targets lower than ES2018');
|
||||
}
|
||||
// Top-level await was added in TS 3.8
|
||||
const tsVersionSupportsTla = (0, util_1.versionGteLt)(ts.version, '3.8.0');
|
||||
if (options.experimentalReplAwait === true && !tsVersionSupportsTla) {
|
||||
throw new Error('Experimental REPL await is not compatible with TypeScript versions older than 3.8');
|
||||
}
|
||||
const shouldReplAwait = options.experimentalReplAwait !== false &&
|
||||
tsVersionSupportsTla &&
|
||||
targetSupportsTla;
|
||||
// swc implies two other options
|
||||
// typeCheck option was implemented specifically to allow overriding tsconfig transpileOnly from the command-line
|
||||
// So we should allow using typeCheck to override swc
|
||||
if (options.swc && !options.typeCheck) {
|
||||
if (options.transpileOnly === false) {
|
||||
throw new Error("Cannot enable 'swc' option with 'transpileOnly: false'. 'swc' implies 'transpileOnly'.");
|
||||
}
|
||||
if (options.transpiler) {
|
||||
throw new Error("Cannot specify both 'swc' and 'transpiler' options. 'swc' uses the built-in swc transpiler.");
|
||||
}
|
||||
}
|
||||
const readFile = options.readFile || ts.sys.readFile;
|
||||
const fileExists = options.fileExists || ts.sys.fileExists;
|
||||
// typeCheck can override transpileOnly, useful for CLI flag to override config file
|
||||
const transpileOnly = (options.transpileOnly === true || options.swc === true) &&
|
||||
options.typeCheck !== true;
|
||||
let transpiler = undefined;
|
||||
let transpilerBasePath = undefined;
|
||||
if (options.transpiler) {
|
||||
transpiler = options.transpiler;
|
||||
transpilerBasePath = optionBasePaths.transpiler;
|
||||
}
|
||||
else if (options.swc) {
|
||||
transpiler = require.resolve('./transpilers/swc.js');
|
||||
transpilerBasePath = optionBasePaths.swc;
|
||||
}
|
||||
const transformers = options.transformers || undefined;
|
||||
const diagnosticFilters = [
|
||||
{
|
||||
appliesToAllFiles: true,
|
||||
filenamesAbsolute: [],
|
||||
diagnosticsIgnored: [
|
||||
6059,
|
||||
18002,
|
||||
18003,
|
||||
...(options.experimentalTsImportSpecifiers
|
||||
? [
|
||||
2691, // "An import path cannot end with a '.ts' extension. Consider importing '<specifier without ext>' instead."
|
||||
]
|
||||
: []),
|
||||
...(options.ignoreDiagnostics || []),
|
||||
].map(Number),
|
||||
},
|
||||
];
|
||||
const configDiagnosticList = filterDiagnostics(config.errors, diagnosticFilters);
|
||||
const outputCache = new Map();
|
||||
const configFileDirname = configFilePath ? (0, path_1.dirname)(configFilePath) : null;
|
||||
const scopeDir = (_c = (_b = (_a = options.scopeDir) !== null && _a !== void 0 ? _a : config.options.rootDir) !== null && _b !== void 0 ? _b : configFileDirname) !== null && _c !== void 0 ? _c : cwd;
|
||||
const ignoreBaseDir = configFileDirname !== null && configFileDirname !== void 0 ? configFileDirname : cwd;
|
||||
const isScoped = options.scope
|
||||
? (fileName) => (0, path_1.relative)(scopeDir, fileName).charAt(0) !== '.'
|
||||
: () => true;
|
||||
const shouldIgnore = createIgnore(ignoreBaseDir, options.skipIgnore
|
||||
? []
|
||||
: (options.ignore || ['(?:^|/)node_modules/']).map((str) => new RegExp(str)));
|
||||
const diagnosticHost = {
|
||||
getNewLine: () => ts.sys.newLine,
|
||||
getCurrentDirectory: () => cwd,
|
||||
// TODO switch to getCanonicalFileName we already create later in scope
|
||||
getCanonicalFileName: ts.sys.useCaseSensitiveFileNames
|
||||
? (x) => x
|
||||
: (x) => x.toLowerCase(),
|
||||
};
|
||||
if (options.transpileOnly && typeof transformers === 'function') {
|
||||
throw new TypeError('Transformers function is unavailable in "--transpile-only"');
|
||||
}
|
||||
let createTranspiler = initializeTranspilerFactory();
|
||||
function initializeTranspilerFactory() {
|
||||
var _a;
|
||||
if (transpiler) {
|
||||
if (!transpileOnly)
|
||||
throw new Error('Custom transpiler can only be used when transpileOnly is enabled.');
|
||||
const transpilerName = typeof transpiler === 'string' ? transpiler : transpiler[0];
|
||||
const transpilerOptions = typeof transpiler === 'string' ? {} : (_a = transpiler[1]) !== null && _a !== void 0 ? _a : {};
|
||||
const transpilerConfigLocalResolveHelper = transpilerBasePath
|
||||
? (0, util_1.createProjectLocalResolveHelper)(transpilerBasePath)
|
||||
: projectLocalResolveHelper;
|
||||
const transpilerPath = transpilerConfigLocalResolveHelper(transpilerName, true);
|
||||
const transpilerFactory = require(transpilerPath)
|
||||
.create;
|
||||
return createTranspiler;
|
||||
function createTranspiler(compilerOptions, nodeModuleEmitKind) {
|
||||
return transpilerFactory === null || transpilerFactory === void 0 ? void 0 : transpilerFactory({
|
||||
service: {
|
||||
options,
|
||||
config: {
|
||||
...config,
|
||||
options: compilerOptions,
|
||||
},
|
||||
projectLocalResolveHelper,
|
||||
},
|
||||
transpilerConfigLocalResolveHelper,
|
||||
nodeModuleEmitKind,
|
||||
...transpilerOptions,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* True if require() hooks should interop with experimental ESM loader.
|
||||
* Enabled explicitly via a flag since it is a breaking change.
|
||||
*/
|
||||
let experimentalEsmLoader = false;
|
||||
function enableExperimentalEsmLoaderInterop() {
|
||||
experimentalEsmLoader = true;
|
||||
}
|
||||
// Install source map support and read from memory cache.
|
||||
installSourceMapSupport();
|
||||
function installSourceMapSupport() {
|
||||
const sourceMapSupport = require('@cspotcode/source-map-support');
|
||||
sourceMapSupport.install({
|
||||
environment: 'node',
|
||||
retrieveFile(pathOrUrl) {
|
||||
var _a;
|
||||
let path = pathOrUrl;
|
||||
// If it's a file URL, convert to local path
|
||||
// Note: fileURLToPath does not exist on early node v10
|
||||
// I could not find a way to handle non-URLs except to swallow an error
|
||||
if (experimentalEsmLoader && path.startsWith('file://')) {
|
||||
try {
|
||||
path = (0, url_1.fileURLToPath)(path);
|
||||
}
|
||||
catch (e) {
|
||||
/* swallow error */
|
||||
}
|
||||
}
|
||||
path = (0, util_1.normalizeSlashes)(path);
|
||||
return ((_a = outputCache.get(path)) === null || _a === void 0 ? void 0 : _a.content) || '';
|
||||
},
|
||||
redirectConflictingLibrary: true,
|
||||
onConflictingLibraryRedirect(request, parent, isMain, options, redirectedRequest) {
|
||||
(0, exports.debug)(`Redirected an attempt to require source-map-support to instead receive @cspotcode/source-map-support. "${parent.filename}" attempted to require or resolve "${request}" and was redirected to "${redirectedRequest}".`);
|
||||
},
|
||||
});
|
||||
}
|
||||
const shouldHavePrettyErrors = options.pretty === undefined ? process.stdout.isTTY : options.pretty;
|
||||
const formatDiagnostics = shouldHavePrettyErrors
|
||||
? ts.formatDiagnosticsWithColorAndContext || ts.formatDiagnostics
|
||||
: ts.formatDiagnostics;
|
||||
function createTSError(diagnostics) {
|
||||
const diagnosticText = formatDiagnostics(diagnostics, diagnosticHost);
|
||||
const diagnosticCodes = diagnostics.map((x) => x.code);
|
||||
return new TSError(diagnosticText, diagnosticCodes, diagnostics);
|
||||
}
|
||||
function reportTSError(configDiagnosticList) {
|
||||
const error = createTSError(configDiagnosticList);
|
||||
if (options.logError) {
|
||||
// Print error in red color and continue execution.
|
||||
console.error('\x1b[31m%s\x1b[0m', error);
|
||||
}
|
||||
else {
|
||||
// Throw error and exit the script.
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
// Render the configuration errors.
|
||||
if (configDiagnosticList.length)
|
||||
reportTSError(configDiagnosticList);
|
||||
const jsxEmitPreserve = config.options.jsx === ts.JsxEmit.Preserve;
|
||||
/**
|
||||
* Get the extension for a transpiled file.
|
||||
* [MUST_UPDATE_FOR_NEW_FILE_EXTENSIONS]
|
||||
*/
|
||||
function getEmitExtension(path) {
|
||||
const lastDotIndex = path.lastIndexOf('.');
|
||||
if (lastDotIndex >= 0) {
|
||||
const ext = path.slice(lastDotIndex);
|
||||
switch (ext) {
|
||||
case '.js':
|
||||
case '.ts':
|
||||
return '.js';
|
||||
case '.jsx':
|
||||
case '.tsx':
|
||||
return jsxEmitPreserve ? '.jsx' : '.js';
|
||||
case '.mjs':
|
||||
case '.mts':
|
||||
return '.mjs';
|
||||
case '.cjs':
|
||||
case '.cts':
|
||||
return '.cjs';
|
||||
}
|
||||
}
|
||||
return '.js';
|
||||
}
|
||||
/**
|
||||
* Get output from TS compiler w/typechecking. `undefined` in `transpileOnly`
|
||||
* mode.
|
||||
*/
|
||||
let getOutput;
|
||||
let getTypeInfo;
|
||||
const getCanonicalFileName = ts.createGetCanonicalFileName(ts.sys.useCaseSensitiveFileNames);
|
||||
const moduleTypeClassifier = (0, module_type_classifier_1.createModuleTypeClassifier)({
|
||||
basePath: (_d = options.optionBasePaths) === null || _d === void 0 ? void 0 : _d.moduleTypes,
|
||||
patterns: options.moduleTypes,
|
||||
});
|
||||
const extensions = (0, file_extensions_1.getExtensions)(config, options, ts.version);
|
||||
// Use full language services when the fast option is disabled.
|
||||
if (!transpileOnly) {
|
||||
const fileContents = new Map();
|
||||
const rootFileNames = new Set(config.fileNames);
|
||||
const cachedReadFile = (0, util_1.cachedLookup)(debugFn('readFile', readFile));
|
||||
// Use language services by default
|
||||
if (!options.compilerHost) {
|
||||
let projectVersion = 1;
|
||||
const fileVersions = new Map(Array.from(rootFileNames).map((fileName) => [fileName, 0]));
|
||||
const getCustomTransformers = () => {
|
||||
if (typeof transformers === 'function') {
|
||||
const program = service.getProgram();
|
||||
return program ? transformers(program) : undefined;
|
||||
}
|
||||
return transformers;
|
||||
};
|
||||
// Create the compiler host for type checking.
|
||||
const serviceHost = {
|
||||
getProjectVersion: () => String(projectVersion),
|
||||
getScriptFileNames: () => Array.from(rootFileNames),
|
||||
getScriptVersion: (fileName) => {
|
||||
const version = fileVersions.get(fileName);
|
||||
return version ? version.toString() : '';
|
||||
},
|
||||
getScriptSnapshot(fileName) {
|
||||
// TODO ordering of this with getScriptVersion? Should they sync up?
|
||||
let contents = fileContents.get(fileName);
|
||||
// Read contents into TypeScript memory cache.
|
||||
if (contents === undefined) {
|
||||
contents = cachedReadFile(fileName);
|
||||
if (contents === undefined)
|
||||
return;
|
||||
fileVersions.set(fileName, 1);
|
||||
fileContents.set(fileName, contents);
|
||||
projectVersion++;
|
||||
}
|
||||
return ts.ScriptSnapshot.fromString(contents);
|
||||
},
|
||||
readFile: cachedReadFile,
|
||||
readDirectory: ts.sys.readDirectory,
|
||||
getDirectories: (0, util_1.cachedLookup)(debugFn('getDirectories', ts.sys.getDirectories)),
|
||||
fileExists: (0, util_1.cachedLookup)(debugFn('fileExists', fileExists)),
|
||||
directoryExists: (0, util_1.cachedLookup)(debugFn('directoryExists', ts.sys.directoryExists)),
|
||||
realpath: ts.sys.realpath
|
||||
? (0, util_1.cachedLookup)(debugFn('realpath', ts.sys.realpath))
|
||||
: undefined,
|
||||
getNewLine: () => ts.sys.newLine,
|
||||
useCaseSensitiveFileNames: () => ts.sys.useCaseSensitiveFileNames,
|
||||
getCurrentDirectory: () => cwd,
|
||||
getCompilationSettings: () => config.options,
|
||||
getDefaultLibFileName: () => ts.getDefaultLibFilePath(config.options),
|
||||
getCustomTransformers: getCustomTransformers,
|
||||
trace: options.tsTrace,
|
||||
};
|
||||
const { resolveModuleNames, getResolvedModuleWithFailedLookupLocationsFromCache, resolveTypeReferenceDirectives, isFileKnownToBeInternal, markBucketOfFilenameInternal, } = (0, resolver_functions_1.createResolverFunctions)({
|
||||
host: serviceHost,
|
||||
getCanonicalFileName,
|
||||
ts,
|
||||
cwd,
|
||||
config,
|
||||
projectLocalResolveHelper,
|
||||
options,
|
||||
extensions,
|
||||
});
|
||||
serviceHost.resolveModuleNames = resolveModuleNames;
|
||||
serviceHost.getResolvedModuleWithFailedLookupLocationsFromCache =
|
||||
getResolvedModuleWithFailedLookupLocationsFromCache;
|
||||
serviceHost.resolveTypeReferenceDirectives =
|
||||
resolveTypeReferenceDirectives;
|
||||
const registry = ts.createDocumentRegistry(ts.sys.useCaseSensitiveFileNames, cwd);
|
||||
const service = ts.createLanguageService(serviceHost, registry);
|
||||
const updateMemoryCache = (contents, fileName) => {
|
||||
// Add to `rootFiles` as necessary, either to make TS include a file it has not seen,
|
||||
// or to trigger a re-classification of files from external to internal.
|
||||
if (!rootFileNames.has(fileName) &&
|
||||
!isFileKnownToBeInternal(fileName)) {
|
||||
markBucketOfFilenameInternal(fileName);
|
||||
rootFileNames.add(fileName);
|
||||
// Increment project version for every change to rootFileNames.
|
||||
projectVersion++;
|
||||
}
|
||||
const previousVersion = fileVersions.get(fileName) || 0;
|
||||
const previousContents = fileContents.get(fileName);
|
||||
// Avoid incrementing cache when nothing has changed.
|
||||
if (contents !== previousContents) {
|
||||
fileVersions.set(fileName, previousVersion + 1);
|
||||
fileContents.set(fileName, contents);
|
||||
// Increment project version for every file change.
|
||||
projectVersion++;
|
||||
}
|
||||
};
|
||||
let previousProgram = undefined;
|
||||
getOutput = (code, fileName) => {
|
||||
updateMemoryCache(code, fileName);
|
||||
const programBefore = service.getProgram();
|
||||
if (programBefore !== previousProgram) {
|
||||
(0, exports.debug)(`compiler rebuilt Program instance when getting output for ${fileName}`);
|
||||
}
|
||||
const output = service.getEmitOutput(fileName);
|
||||
// Get the relevant diagnostics - this is 3x faster than `getPreEmitDiagnostics`.
|
||||
const diagnostics = service
|
||||
.getSemanticDiagnostics(fileName)
|
||||
.concat(service.getSyntacticDiagnostics(fileName));
|
||||
const programAfter = service.getProgram();
|
||||
(0, exports.debug)('invariant: Is service.getProgram() identical before and after getting emit output and diagnostics? (should always be true) ', programBefore === programAfter);
|
||||
previousProgram = programAfter;
|
||||
const diagnosticList = filterDiagnostics(diagnostics, diagnosticFilters);
|
||||
if (diagnosticList.length)
|
||||
reportTSError(diagnosticList);
|
||||
if (output.emitSkipped) {
|
||||
return [undefined, undefined, true];
|
||||
}
|
||||
// Throw an error when requiring `.d.ts` files.
|
||||
if (output.outputFiles.length === 0) {
|
||||
throw new TypeError(`Unable to require file: ${(0, path_1.relative)(cwd, fileName)}\n` +
|
||||
'This is usually the result of a faulty configuration or import. ' +
|
||||
'Make sure there is a `.js`, `.json` or other executable extension with ' +
|
||||
'loader attached before `ts-node` available.');
|
||||
}
|
||||
return [output.outputFiles[1].text, output.outputFiles[0].text, false];
|
||||
};
|
||||
getTypeInfo = (code, fileName, position) => {
|
||||
const normalizedFileName = (0, util_1.normalizeSlashes)(fileName);
|
||||
updateMemoryCache(code, normalizedFileName);
|
||||
const info = service.getQuickInfoAtPosition(normalizedFileName, position);
|
||||
const name = ts.displayPartsToString(info ? info.displayParts : []);
|
||||
const comment = ts.displayPartsToString(info ? info.documentation : []);
|
||||
return { name, comment };
|
||||
};
|
||||
}
|
||||
else {
|
||||
const sys = {
|
||||
...ts.sys,
|
||||
...diagnosticHost,
|
||||
readFile: (fileName) => {
|
||||
const cacheContents = fileContents.get(fileName);
|
||||
if (cacheContents !== undefined)
|
||||
return cacheContents;
|
||||
const contents = cachedReadFile(fileName);
|
||||
if (contents)
|
||||
fileContents.set(fileName, contents);
|
||||
return contents;
|
||||
},
|
||||
readDirectory: ts.sys.readDirectory,
|
||||
getDirectories: (0, util_1.cachedLookup)(debugFn('getDirectories', ts.sys.getDirectories)),
|
||||
fileExists: (0, util_1.cachedLookup)(debugFn('fileExists', fileExists)),
|
||||
directoryExists: (0, util_1.cachedLookup)(debugFn('directoryExists', ts.sys.directoryExists)),
|
||||
resolvePath: (0, util_1.cachedLookup)(debugFn('resolvePath', ts.sys.resolvePath)),
|
||||
realpath: ts.sys.realpath
|
||||
? (0, util_1.cachedLookup)(debugFn('realpath', ts.sys.realpath))
|
||||
: undefined,
|
||||
};
|
||||
const host = ts.createIncrementalCompilerHost
|
||||
? ts.createIncrementalCompilerHost(config.options, sys)
|
||||
: {
|
||||
...sys,
|
||||
getSourceFile: (fileName, languageVersion) => {
|
||||
const contents = sys.readFile(fileName);
|
||||
if (contents === undefined)
|
||||
return;
|
||||
return ts.createSourceFile(fileName, contents, languageVersion);
|
||||
},
|
||||
getDefaultLibLocation: () => (0, util_1.normalizeSlashes)((0, path_1.dirname)(compiler)),
|
||||
getDefaultLibFileName: () => (0, util_1.normalizeSlashes)((0, path_1.join)((0, path_1.dirname)(compiler), ts.getDefaultLibFileName(config.options))),
|
||||
useCaseSensitiveFileNames: () => sys.useCaseSensitiveFileNames,
|
||||
};
|
||||
host.trace = options.tsTrace;
|
||||
const { resolveModuleNames, resolveTypeReferenceDirectives, isFileKnownToBeInternal, markBucketOfFilenameInternal, } = (0, resolver_functions_1.createResolverFunctions)({
|
||||
host,
|
||||
cwd,
|
||||
config,
|
||||
ts,
|
||||
getCanonicalFileName,
|
||||
projectLocalResolveHelper,
|
||||
options,
|
||||
extensions,
|
||||
});
|
||||
host.resolveModuleNames = resolveModuleNames;
|
||||
host.resolveTypeReferenceDirectives = resolveTypeReferenceDirectives;
|
||||
// Fallback for older TypeScript releases without incremental API.
|
||||
let builderProgram = ts.createIncrementalProgram
|
||||
? ts.createIncrementalProgram({
|
||||
rootNames: Array.from(rootFileNames),
|
||||
options: config.options,
|
||||
host,
|
||||
configFileParsingDiagnostics: config.errors,
|
||||
projectReferences: config.projectReferences,
|
||||
})
|
||||
: ts.createEmitAndSemanticDiagnosticsBuilderProgram(Array.from(rootFileNames), config.options, host, undefined, config.errors, config.projectReferences);
|
||||
// Read and cache custom transformers.
|
||||
const customTransformers = typeof transformers === 'function'
|
||||
? transformers(builderProgram.getProgram())
|
||||
: transformers;
|
||||
// Set the file contents into cache manually.
|
||||
const updateMemoryCache = (contents, fileName) => {
|
||||
const previousContents = fileContents.get(fileName);
|
||||
const contentsChanged = previousContents !== contents;
|
||||
if (contentsChanged) {
|
||||
fileContents.set(fileName, contents);
|
||||
}
|
||||
// Add to `rootFiles` when discovered by compiler for the first time.
|
||||
let addedToRootFileNames = false;
|
||||
if (!rootFileNames.has(fileName) &&
|
||||
!isFileKnownToBeInternal(fileName)) {
|
||||
markBucketOfFilenameInternal(fileName);
|
||||
rootFileNames.add(fileName);
|
||||
addedToRootFileNames = true;
|
||||
}
|
||||
// Update program when file changes.
|
||||
if (addedToRootFileNames || contentsChanged) {
|
||||
builderProgram = ts.createEmitAndSemanticDiagnosticsBuilderProgram(Array.from(rootFileNames), config.options, host, builderProgram, config.errors, config.projectReferences);
|
||||
}
|
||||
};
|
||||
getOutput = (code, fileName) => {
|
||||
let outText = '';
|
||||
let outMap = '';
|
||||
updateMemoryCache(code, fileName);
|
||||
const sourceFile = builderProgram.getSourceFile(fileName);
|
||||
if (!sourceFile)
|
||||
throw new TypeError(`Unable to read file: ${fileName}`);
|
||||
const program = builderProgram.getProgram();
|
||||
const diagnostics = ts.getPreEmitDiagnostics(program, sourceFile);
|
||||
const diagnosticList = filterDiagnostics(diagnostics, diagnosticFilters);
|
||||
if (diagnosticList.length)
|
||||
reportTSError(diagnosticList);
|
||||
const result = builderProgram.emit(sourceFile, (path, file, writeByteOrderMark) => {
|
||||
if (path.endsWith('.map')) {
|
||||
outMap = file;
|
||||
}
|
||||
else {
|
||||
outText = file;
|
||||
}
|
||||
if (options.emit)
|
||||
sys.writeFile(path, file, writeByteOrderMark);
|
||||
}, undefined, undefined, customTransformers);
|
||||
if (result.emitSkipped) {
|
||||
return [undefined, undefined, true];
|
||||
}
|
||||
// Throw an error when requiring files that cannot be compiled.
|
||||
if (outText === '') {
|
||||
if (program.isSourceFileFromExternalLibrary(sourceFile)) {
|
||||
throw new TypeError(`Unable to compile file from external library: ${(0, path_1.relative)(cwd, fileName)}`);
|
||||
}
|
||||
throw new TypeError(`Unable to require file: ${(0, path_1.relative)(cwd, fileName)}\n` +
|
||||
'This is usually the result of a faulty configuration or import. ' +
|
||||
'Make sure there is a `.js`, `.json` or other executable extension with ' +
|
||||
'loader attached before `ts-node` available.');
|
||||
}
|
||||
return [outText, outMap, false];
|
||||
};
|
||||
getTypeInfo = (code, fileName, position) => {
|
||||
const normalizedFileName = (0, util_1.normalizeSlashes)(fileName);
|
||||
updateMemoryCache(code, normalizedFileName);
|
||||
const sourceFile = builderProgram.getSourceFile(normalizedFileName);
|
||||
if (!sourceFile)
|
||||
throw new TypeError(`Unable to read file: ${fileName}`);
|
||||
const node = getTokenAtPosition(ts, sourceFile, position);
|
||||
const checker = builderProgram.getProgram().getTypeChecker();
|
||||
const symbol = checker.getSymbolAtLocation(node);
|
||||
if (!symbol)
|
||||
return { name: '', comment: '' };
|
||||
const type = checker.getTypeOfSymbolAtLocation(symbol, node);
|
||||
const signatures = [
|
||||
...type.getConstructSignatures(),
|
||||
...type.getCallSignatures(),
|
||||
];
|
||||
return {
|
||||
name: signatures.length
|
||||
? signatures.map((x) => checker.signatureToString(x)).join('\n')
|
||||
: checker.typeToString(type),
|
||||
comment: ts.displayPartsToString(symbol ? symbol.getDocumentationComment(checker) : []),
|
||||
};
|
||||
};
|
||||
// Write `.tsbuildinfo` when `--build` is enabled.
|
||||
if (options.emit && config.options.incremental) {
|
||||
process.on('exit', () => {
|
||||
// Emits `.tsbuildinfo` to filesystem.
|
||||
builderProgram.getProgram().emitBuildInfo();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
getTypeInfo = () => {
|
||||
throw new TypeError('Type information is unavailable in "--transpile-only"');
|
||||
};
|
||||
}
|
||||
function createTranspileOnlyGetOutputFunction(overrideModuleType, nodeModuleEmitKind) {
|
||||
const compilerOptions = { ...config.options };
|
||||
if (overrideModuleType !== undefined)
|
||||
compilerOptions.module = overrideModuleType;
|
||||
let customTranspiler = createTranspiler === null || createTranspiler === void 0 ? void 0 : createTranspiler(compilerOptions, nodeModuleEmitKind);
|
||||
let tsTranspileModule = (0, util_1.versionGteLt)(ts.version, '4.7.0')
|
||||
? (0, ts_transpile_module_1.createTsTranspileModule)(ts, {
|
||||
compilerOptions,
|
||||
reportDiagnostics: true,
|
||||
transformers: transformers,
|
||||
})
|
||||
: undefined;
|
||||
return (code, fileName) => {
|
||||
let result;
|
||||
if (customTranspiler) {
|
||||
result = customTranspiler.transpile(code, {
|
||||
fileName,
|
||||
});
|
||||
}
|
||||
else if (tsTranspileModule) {
|
||||
result = tsTranspileModule(code, {
|
||||
fileName,
|
||||
}, nodeModuleEmitKind === 'nodeesm' ? 'module' : 'commonjs');
|
||||
}
|
||||
else {
|
||||
result = ts.transpileModule(code, {
|
||||
fileName,
|
||||
compilerOptions,
|
||||
reportDiagnostics: true,
|
||||
transformers: transformers,
|
||||
});
|
||||
}
|
||||
const diagnosticList = filterDiagnostics(result.diagnostics || [], diagnosticFilters);
|
||||
if (diagnosticList.length)
|
||||
reportTSError(diagnosticList);
|
||||
return [result.outputText, result.sourceMapText, false];
|
||||
};
|
||||
}
|
||||
// When true, these mean that a `moduleType` override will cause a different emit
|
||||
// than the TypeScript compiler, so we *must* overwrite the emit.
|
||||
const shouldOverwriteEmitWhenForcingCommonJS = config.options.module !== ts.ModuleKind.CommonJS;
|
||||
// [MUST_UPDATE_FOR_NEW_MODULEKIND]
|
||||
const shouldOverwriteEmitWhenForcingEsm = !(config.options.module === ts.ModuleKind.ES2015 ||
|
||||
(ts.ModuleKind.ES2020 && config.options.module === ts.ModuleKind.ES2020) ||
|
||||
(ts.ModuleKind.ES2022 && config.options.module === ts.ModuleKind.ES2022) ||
|
||||
config.options.module === ts.ModuleKind.ESNext);
|
||||
/**
|
||||
* node16 or nodenext
|
||||
* [MUST_UPDATE_FOR_NEW_MODULEKIND]
|
||||
*/
|
||||
const isNodeModuleType = (ts.ModuleKind.Node16 && config.options.module === ts.ModuleKind.Node16) ||
|
||||
(ts.ModuleKind.NodeNext &&
|
||||
config.options.module === ts.ModuleKind.NodeNext);
|
||||
const getOutputForceCommonJS = createTranspileOnlyGetOutputFunction(ts.ModuleKind.CommonJS);
|
||||
const getOutputForceNodeCommonJS = createTranspileOnlyGetOutputFunction(ts.ModuleKind.NodeNext, 'nodecjs');
|
||||
const getOutputForceNodeESM = createTranspileOnlyGetOutputFunction(ts.ModuleKind.NodeNext, 'nodeesm');
|
||||
// [MUST_UPDATE_FOR_NEW_MODULEKIND]
|
||||
const getOutputForceESM = createTranspileOnlyGetOutputFunction(ts.ModuleKind.ES2022 || ts.ModuleKind.ES2020 || ts.ModuleKind.ES2015);
|
||||
const getOutputTranspileOnly = createTranspileOnlyGetOutputFunction();
|
||||
// Create a simple TypeScript compiler proxy.
|
||||
function compile(code, fileName, lineOffset = 0) {
|
||||
const normalizedFileName = (0, util_1.normalizeSlashes)(fileName);
|
||||
const classification = moduleTypeClassifier.classifyModuleByModuleTypeOverrides(normalizedFileName);
|
||||
let value = '';
|
||||
let sourceMap = '';
|
||||
let emitSkipped = true;
|
||||
if (getOutput) {
|
||||
// Must always call normal getOutput to throw typechecking errors
|
||||
[value, sourceMap, emitSkipped] = getOutput(code, normalizedFileName);
|
||||
}
|
||||
// If module classification contradicts the above, call the relevant transpiler
|
||||
if (classification.moduleType === 'cjs' &&
|
||||
(shouldOverwriteEmitWhenForcingCommonJS || emitSkipped)) {
|
||||
[value, sourceMap] = getOutputForceCommonJS(code, normalizedFileName);
|
||||
}
|
||||
else if (classification.moduleType === 'esm' &&
|
||||
(shouldOverwriteEmitWhenForcingEsm || emitSkipped)) {
|
||||
[value, sourceMap] = getOutputForceESM(code, normalizedFileName);
|
||||
}
|
||||
else if (emitSkipped) {
|
||||
// Happens when ts compiler skips emit or in transpileOnly mode
|
||||
const classification = (0, node_module_type_classifier_1.classifyModule)(fileName, isNodeModuleType);
|
||||
[value, sourceMap] =
|
||||
classification === 'nodecjs'
|
||||
? getOutputForceNodeCommonJS(code, normalizedFileName)
|
||||
: classification === 'nodeesm'
|
||||
? getOutputForceNodeESM(code, normalizedFileName)
|
||||
: classification === 'cjs'
|
||||
? getOutputForceCommonJS(code, normalizedFileName)
|
||||
: classification === 'esm'
|
||||
? getOutputForceESM(code, normalizedFileName)
|
||||
: getOutputTranspileOnly(code, normalizedFileName);
|
||||
}
|
||||
const output = updateOutput(value, normalizedFileName, sourceMap, getEmitExtension);
|
||||
outputCache.set(normalizedFileName, { content: output });
|
||||
return output;
|
||||
}
|
||||
let active = true;
|
||||
const enabled = (enabled) => enabled === undefined ? active : (active = !!enabled);
|
||||
const ignored = (fileName) => {
|
||||
if (!active)
|
||||
return true;
|
||||
const ext = (0, path_1.extname)(fileName);
|
||||
if (extensions.compiled.includes(ext)) {
|
||||
return !isScoped(fileName) || shouldIgnore(fileName);
|
||||
}
|
||||
return true;
|
||||
};
|
||||
function addDiagnosticFilter(filter) {
|
||||
diagnosticFilters.push({
|
||||
...filter,
|
||||
filenamesAbsolute: filter.filenamesAbsolute.map((f) => (0, util_1.normalizeSlashes)(f)),
|
||||
});
|
||||
}
|
||||
const getNodeEsmResolver = (0, util_1.once)(() => require('../dist-raw/node-internal-modules-esm-resolve').createResolve({
|
||||
extensions,
|
||||
preferTsExts: options.preferTsExts,
|
||||
tsNodeExperimentalSpecifierResolution: options.experimentalSpecifierResolution,
|
||||
}));
|
||||
const getNodeEsmGetFormat = (0, util_1.once)(() => require('../dist-raw/node-internal-modules-esm-get_format').createGetFormat(options.experimentalSpecifierResolution, getNodeEsmResolver()));
|
||||
const getNodeCjsLoader = (0, util_1.once)(() => require('../dist-raw/node-internal-modules-cjs-loader').createCjsLoader({
|
||||
extensions,
|
||||
preferTsExts: options.preferTsExts,
|
||||
nodeEsmResolver: getNodeEsmResolver(),
|
||||
}));
|
||||
return {
|
||||
[TS_NODE_SERVICE_BRAND]: true,
|
||||
ts,
|
||||
compilerPath: compiler,
|
||||
config,
|
||||
compile,
|
||||
getTypeInfo,
|
||||
ignored,
|
||||
enabled,
|
||||
options,
|
||||
configFilePath,
|
||||
moduleTypeClassifier,
|
||||
shouldReplAwait,
|
||||
addDiagnosticFilter,
|
||||
installSourceMapSupport,
|
||||
enableExperimentalEsmLoaderInterop,
|
||||
transpileOnly,
|
||||
projectLocalResolveHelper,
|
||||
getNodeEsmResolver,
|
||||
getNodeEsmGetFormat,
|
||||
getNodeCjsLoader,
|
||||
extensions,
|
||||
};
|
||||
}
|
||||
exports.createFromPreloadedConfig = createFromPreloadedConfig;
|
||||
/**
|
||||
* Check if the filename should be ignored.
|
||||
*/
|
||||
function createIgnore(ignoreBaseDir, ignore) {
|
||||
return (fileName) => {
|
||||
const relname = (0, path_1.relative)(ignoreBaseDir, fileName);
|
||||
const path = (0, util_1.normalizeSlashes)(relname);
|
||||
return ignore.some((x) => x.test(path));
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Register the extensions to support when importing files.
|
||||
*/
|
||||
function registerExtensions(preferTsExts, extensions, service, originalJsHandler) {
|
||||
const exts = new Set(extensions);
|
||||
// Can't add these extensions, because doing so would allow omitting the file extension; node requires an explicit extension for .cjs and .mjs
|
||||
// Unless they're already registered by something else (nyc does this):
|
||||
// then we *must* hook them or else our transformer will not be called.
|
||||
for (const cannotAdd of ['.mts', '.cts', '.mjs', '.cjs']) {
|
||||
if (exts.has(cannotAdd) && !(0, util_1.hasOwnProperty)(require.extensions, cannotAdd)) {
|
||||
// Unrecognized file exts can be transformed via the `.js` handler.
|
||||
exts.add('.js');
|
||||
exts.delete(cannotAdd);
|
||||
}
|
||||
}
|
||||
// Register new extensions.
|
||||
for (const ext of exts) {
|
||||
registerExtension(ext, service, originalJsHandler);
|
||||
}
|
||||
if (preferTsExts) {
|
||||
const preferredExtensions = new Set([
|
||||
...exts,
|
||||
...Object.keys(require.extensions),
|
||||
]);
|
||||
// Re-sort iteration order of Object.keys()
|
||||
for (const ext of preferredExtensions) {
|
||||
const old = Object.getOwnPropertyDescriptor(require.extensions, ext);
|
||||
delete require.extensions[ext];
|
||||
Object.defineProperty(require.extensions, ext, old);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Register the extension for node.
|
||||
*/
|
||||
function registerExtension(ext, service, originalHandler) {
|
||||
const old = require.extensions[ext] || originalHandler;
|
||||
require.extensions[ext] = function (m, filename) {
|
||||
if (service.ignored(filename))
|
||||
return old(m, filename);
|
||||
assertScriptCanLoadAsCJS(service, m, filename);
|
||||
const _compile = m._compile;
|
||||
m._compile = function (code, fileName) {
|
||||
(0, exports.debug)('module._compile', fileName);
|
||||
const result = service.compile(code, fileName);
|
||||
return _compile.call(this, result, fileName);
|
||||
};
|
||||
return old(m, filename);
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Update the output remapping the source map.
|
||||
*/
|
||||
function updateOutput(outputText, fileName, sourceMap, getEmitExtension) {
|
||||
const base64Map = Buffer.from(updateSourceMap(sourceMap, fileName), 'utf8').toString('base64');
|
||||
const sourceMapContent = `//# sourceMappingURL=data:application/json;charset=utf-8;base64,${base64Map}`;
|
||||
// Expected form: `//# sourceMappingURL=foo bar.js.map` or `//# sourceMappingURL=foo%20bar.js.map` for input file "foo bar.tsx"
|
||||
// Percent-encoding behavior added in TS 4.1.1: https://github.com/microsoft/TypeScript/issues/40951
|
||||
const prefix = '//# sourceMappingURL=';
|
||||
const prefixLength = prefix.length;
|
||||
const baseName = /*foo.tsx*/ (0, path_1.basename)(fileName);
|
||||
const extName = /*.tsx*/ (0, path_1.extname)(fileName);
|
||||
const extension = /*.js*/ getEmitExtension(fileName);
|
||||
const sourcemapFilename = baseName.slice(0, -extName.length) + extension + '.map';
|
||||
const sourceMapLengthWithoutPercentEncoding = prefixLength + sourcemapFilename.length;
|
||||
/*
|
||||
* Only rewrite if existing directive exists at the location we expect, to support:
|
||||
* a) compilers that do not append a sourcemap directive
|
||||
* b) situations where we did the math wrong
|
||||
* Not ideal, but appending our sourcemap *after* a pre-existing sourcemap still overrides, so the end-user is happy.
|
||||
*/
|
||||
if (outputText.substr(-sourceMapLengthWithoutPercentEncoding, prefixLength) ===
|
||||
prefix) {
|
||||
return (outputText.slice(0, -sourceMapLengthWithoutPercentEncoding) +
|
||||
sourceMapContent);
|
||||
}
|
||||
// If anyone asks why we're not using URL, the URL equivalent is: `u = new URL('http://d'); u.pathname = "/" + sourcemapFilename; return u.pathname.slice(1);`
|
||||
const sourceMapLengthWithPercentEncoding = prefixLength + encodeURI(sourcemapFilename).length;
|
||||
if (outputText.substr(-sourceMapLengthWithPercentEncoding, prefixLength) ===
|
||||
prefix) {
|
||||
return (outputText.slice(0, -sourceMapLengthWithPercentEncoding) +
|
||||
sourceMapContent);
|
||||
}
|
||||
return `${outputText}\n${sourceMapContent}`;
|
||||
}
|
||||
/**
|
||||
* Update the source map contents for improved output.
|
||||
*/
|
||||
function updateSourceMap(sourceMapText, fileName) {
|
||||
const sourceMap = JSON.parse(sourceMapText);
|
||||
sourceMap.file = fileName;
|
||||
sourceMap.sources = [fileName];
|
||||
delete sourceMap.sourceRoot;
|
||||
return JSON.stringify(sourceMap);
|
||||
}
|
||||
/**
|
||||
* Filter diagnostics.
|
||||
*/
|
||||
function filterDiagnostics(diagnostics, filters) {
|
||||
return diagnostics.filter((d) => filters.every((f) => {
|
||||
var _a;
|
||||
return (!f.appliesToAllFiles &&
|
||||
f.filenamesAbsolute.indexOf((_a = d.file) === null || _a === void 0 ? void 0 : _a.fileName) === -1) ||
|
||||
f.diagnosticsIgnored.indexOf(d.code) === -1;
|
||||
}));
|
||||
}
|
||||
/**
|
||||
* Get token at file position.
|
||||
*
|
||||
* Reference: https://github.com/microsoft/TypeScript/blob/fcd9334f57d85b73dd66ad2d21c02e84822f4841/src/services/utilities.ts#L705-L731
|
||||
*/
|
||||
function getTokenAtPosition(ts, sourceFile, position) {
|
||||
let current = sourceFile;
|
||||
outer: while (true) {
|
||||
for (const child of current.getChildren(sourceFile)) {
|
||||
const start = child.getFullStart();
|
||||
if (start > position)
|
||||
break;
|
||||
const end = child.getEnd();
|
||||
if (position <= end) {
|
||||
current = child;
|
||||
continue outer;
|
||||
}
|
||||
}
|
||||
return current;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Create an implementation of node's ESM loader hooks.
|
||||
*
|
||||
* This may be useful if you
|
||||
* want to wrap or compose the loader hooks to add additional functionality or
|
||||
* combine with another loader.
|
||||
*
|
||||
* Node changed the hooks API, so there are two possible APIs. This function
|
||||
* detects your node version and returns the appropriate API.
|
||||
*
|
||||
* @category ESM Loader
|
||||
*/
|
||||
const createEsmHooks = (tsNodeService) => require('./esm').createEsmHooks(tsNodeService);
|
||||
exports.createEsmHooks = createEsmHooks;
|
||||
//# sourceMappingURL=index.js.map
|
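The service object assembled above exposes `compile`, `getTypeInfo`, `ignored`, and `enabled`, plus the lazily created ESM/CJS loader helpers. A minimal usage sketch of that surface via ts-node's public `create()` entry point (not part of the vendored file; the snippet and file path are hypothetical):

// Sketch only: assumes ts-node's documented create() API; '/tmp/example.ts' is hypothetical.
import { create } from 'ts-node';

const service = create({ transpileOnly: false });

// compile() returns JavaScript with an inline base64 source map appended
// by updateOutput() above.
const js = service.compile('const n: number = 1;\nconsole.log(n);', '/tmp/example.ts');

// getTypeInfo() resolves { name, comment } for the token at a character offset,
// using either the language service or the builder program shown above.
const info = service.getTypeInfo('const n: number = 1;', '/tmp/example.ts', 6);

console.log(js.length > 0, info.name); // e.g. true, "const n: number"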
1
node_modules/ts-node/dist/index.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/ts-node/dist/module-type-classifier.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
64
node_modules/ts-node/dist/module-type-classifier.js
generated
vendored
Normal file
@ -0,0 +1,64 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.createModuleTypeClassifier = void 0;
|
||||
const ts_internals_1 = require("./ts-internals");
|
||||
const util_1 = require("./util");
|
||||
/**
|
||||
* @internal
|
||||
* May receive non-normalized options -- basePath and patterns -- and will normalize them
|
||||
* internally.
|
||||
* However, calls to `classifyModule` must pass pre-normalized paths!
|
||||
*/
|
||||
function createModuleTypeClassifier(options) {
|
||||
const { patterns, basePath: _basePath } = options;
|
||||
const basePath = _basePath !== undefined
|
||||
? (0, util_1.normalizeSlashes)(_basePath).replace(/\/$/, '')
|
||||
: undefined;
|
||||
const patternTypePairs = Object.entries(patterns !== null && patterns !== void 0 ? patterns : []).map(([_pattern, type]) => {
|
||||
const pattern = (0, util_1.normalizeSlashes)(_pattern);
|
||||
return { pattern: parsePattern(basePath, pattern), type };
|
||||
});
|
||||
const classifications = {
|
||||
package: {
|
||||
moduleType: 'auto',
|
||||
},
|
||||
cjs: {
|
||||
moduleType: 'cjs',
|
||||
},
|
||||
esm: {
|
||||
moduleType: 'esm',
|
||||
},
|
||||
};
|
||||
const auto = classifications.package;
|
||||
// Passed path must be normalized!
|
||||
function classifyModuleNonCached(path) {
|
||||
const matched = matchPatterns(patternTypePairs, (_) => _.pattern, path);
|
||||
if (matched)
|
||||
return classifications[matched.type];
|
||||
return auto;
|
||||
}
|
||||
const classifyModule = (0, util_1.cachedLookup)(classifyModuleNonCached);
|
||||
function classifyModuleAuto(path) {
|
||||
return auto;
|
||||
}
|
||||
return {
|
||||
classifyModuleByModuleTypeOverrides: patternTypePairs.length
|
||||
? classifyModule
|
||||
: classifyModuleAuto,
|
||||
};
|
||||
}
|
||||
exports.createModuleTypeClassifier = createModuleTypeClassifier;
|
||||
function parsePattern(basePath, patternString) {
|
||||
const pattern = (0, ts_internals_1.getPatternFromSpec)(patternString, basePath);
|
||||
return pattern !== undefined ? new RegExp(pattern) : /(?:)/;
|
||||
}
|
||||
function matchPatterns(objects, getPattern, candidate) {
|
||||
for (let i = objects.length - 1; i >= 0; i--) {
|
||||
const object = objects[i];
|
||||
const pattern = getPattern(object);
|
||||
if (pattern === null || pattern === void 0 ? void 0 : pattern.test(candidate)) {
|
||||
return object;
|
||||
}
|
||||
}
|
||||
}
|
||||
//# sourceMappingURL=module-type-classifier.js.map
|
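createModuleTypeClassifier is the factory behind ts-node's `moduleTypes` option: patterns are resolved against `basePath`, the last matching pattern wins, and unmatched files classify as 'auto'. A small sketch under those assumptions (the import path and project layout are illustrative, and the factory is marked @internal):

// Sketch only: shapes inferred from the code above; not an officially supported import.
import { createModuleTypeClassifier } from 'ts-node/dist/module-type-classifier';

const classifier = createModuleTypeClassifier({
  basePath: '/project',
  patterns: {
    'scripts/**': 'cjs', // force CommonJS emit for anything under scripts/
    'src/esm/**': 'esm', // force ESM emit for anything under src/esm/
  },
});

// Paths passed in must already be slash-normalized, per the comment above.
classifier.classifyModuleByModuleTypeOverrides('/project/scripts/build.ts');
// -> { moduleType: 'cjs' }; unmatched paths fall back to { moduleType: 'auto' }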
1
node_modules/ts-node/dist/module-type-classifier.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"module-type-classifier.js","sourceRoot":"","sources":["../src/module-type-classifier.ts"],"names":[],"mappings":";;;AACA,iDAAoD;AACpD,iCAAwD;AAwBxD;;;;;GAKG;AACH,SAAgB,0BAA0B,CACxC,OAAoC;IAEpC,MAAM,EAAE,QAAQ,EAAE,QAAQ,EAAE,SAAS,EAAE,GAAG,OAAO,CAAC;IAClD,MAAM,QAAQ,GACZ,SAAS,KAAK,SAAS;QACrB,CAAC,CAAC,IAAA,uBAAgB,EAAC,SAAS,CAAC,CAAC,OAAO,CAAC,KAAK,EAAE,EAAE,CAAC;QAChD,CAAC,CAAC,SAAS,CAAC;IAEhB,MAAM,gBAAgB,GAAG,MAAM,CAAC,OAAO,CAAC,QAAQ,aAAR,QAAQ,cAAR,QAAQ,GAAI,EAAE,CAAC,CAAC,GAAG,CACzD,CAAC,CAAC,QAAQ,EAAE,IAAI,CAAC,EAAE,EAAE;QACnB,MAAM,OAAO,GAAG,IAAA,uBAAgB,EAAC,QAAQ,CAAC,CAAC;QAC3C,OAAO,EAAE,OAAO,EAAE,YAAY,CAAC,QAAS,EAAE,OAAO,CAAC,EAAE,IAAI,EAAE,CAAC;IAC7D,CAAC,CACF,CAAC;IAEF,MAAM,eAAe,GACnB;QACE,OAAO,EAAE;YACP,UAAU,EAAE,MAAM;SACnB;QACD,GAAG,EAAE;YACH,UAAU,EAAE,KAAK;SAClB;QACD,GAAG,EAAE;YACH,UAAU,EAAE,KAAK;SAClB;KACF,CAAC;IACJ,MAAM,IAAI,GAAG,eAAe,CAAC,OAAO,CAAC;IAErC,kCAAkC;IAClC,SAAS,uBAAuB,CAAC,IAAY;QAC3C,MAAM,OAAO,GAAG,aAAa,CAAC,gBAAgB,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,EAAE,IAAI,CAAC,CAAC;QACxE,IAAI,OAAO;YAAE,OAAO,eAAe,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QAClD,OAAO,IAAI,CAAC;IACd,CAAC;IAED,MAAM,cAAc,GAAG,IAAA,mBAAY,EAAC,uBAAuB,CAAC,CAAC;IAE7D,SAAS,kBAAkB,CAAC,IAAY;QACtC,OAAO,IAAI,CAAC;IACd,CAAC;IAED,OAAO;QACL,mCAAmC,EAAE,gBAAgB,CAAC,MAAM;YAC1D,CAAC,CAAC,cAAc;YAChB,CAAC,CAAC,kBAAkB;KACvB,CAAC;AACJ,CAAC;AAhDD,gEAgDC;AAED,SAAS,YAAY,CAAC,QAAgB,EAAE,aAAqB;IAC3D,MAAM,OAAO,GAAG,IAAA,iCAAkB,EAAC,aAAa,EAAE,QAAQ,CAAC,CAAC;IAC5D,OAAO,OAAO,KAAK,SAAS,CAAC,CAAC,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC;AAC9D,CAAC;AAED,SAAS,aAAa,CACpB,OAAY,EACZ,UAA4B,EAC5B,SAAiB;IAEjB,KAAK,IAAI,CAAC,GAAG,OAAO,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,EAAE,CAAC,EAAE,EAAE;QAC5C,MAAM,MAAM,GAAG,OAAO,CAAC,CAAC,CAAC,CAAC;QAC1B,MAAM,OAAO,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC;QAEnC,IAAI,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,IAAI,CAAC,SAAS,CAAC,EAAE;YAC5B,OAAO,MAAM,CAAC;SACf;KACF;AACH,CAAC","sourcesContent":["import type { ModuleTypeOverride, ModuleTypes } from '.';\nimport { getPatternFromSpec } from './ts-internals';\nimport { cachedLookup, normalizeSlashes } from './util';\n\n// Logic to support our `moduleTypes` option, which allows overriding node's default ESM / CJS\n// classification of `.js` files based on package.json `type` field.\n\n/**\n * Seperate internal type because `auto` is clearer than `package`, but changing\n * the public API is a breaking change.\n * @internal\n */\nexport type InternalModuleTypeOverride = 'cjs' | 'esm' | 'auto';\n/** @internal */\nexport interface ModuleTypeClassification {\n moduleType: InternalModuleTypeOverride;\n}\n/** @internal */\nexport interface ModuleTypeClassifierOptions {\n basePath?: string;\n patterns?: ModuleTypes;\n}\n/** @internal */\nexport type ModuleTypeClassifier = ReturnType<\n typeof createModuleTypeClassifier\n>;\n/**\n * @internal\n * May receive non-normalized options -- basePath and patterns -- and will normalize them\n * internally.\n * However, calls to `classifyModule` must pass pre-normalized paths!\n */\nexport function createModuleTypeClassifier(\n options: ModuleTypeClassifierOptions\n) {\n const { patterns, basePath: _basePath } = options;\n const basePath =\n _basePath !== undefined\n ? normalizeSlashes(_basePath).replace(/\\/$/, '')\n : undefined;\n\n const patternTypePairs = Object.entries(patterns ?? 
[]).map(\n ([_pattern, type]) => {\n const pattern = normalizeSlashes(_pattern);\n return { pattern: parsePattern(basePath!, pattern), type };\n }\n );\n\n const classifications: Record<ModuleTypeOverride, ModuleTypeClassification> =\n {\n package: {\n moduleType: 'auto',\n },\n cjs: {\n moduleType: 'cjs',\n },\n esm: {\n moduleType: 'esm',\n },\n };\n const auto = classifications.package;\n\n // Passed path must be normalized!\n function classifyModuleNonCached(path: string): ModuleTypeClassification {\n const matched = matchPatterns(patternTypePairs, (_) => _.pattern, path);\n if (matched) return classifications[matched.type];\n return auto;\n }\n\n const classifyModule = cachedLookup(classifyModuleNonCached);\n\n function classifyModuleAuto(path: String) {\n return auto;\n }\n\n return {\n classifyModuleByModuleTypeOverrides: patternTypePairs.length\n ? classifyModule\n : classifyModuleAuto,\n };\n}\n\nfunction parsePattern(basePath: string, patternString: string): RegExp {\n const pattern = getPatternFromSpec(patternString, basePath);\n return pattern !== undefined ? new RegExp(pattern) : /(?:)/;\n}\n\nfunction matchPatterns<T>(\n objects: T[],\n getPattern: (t: T) => RegExp,\n candidate: string\n): T | undefined {\n for (let i = objects.length - 1; i >= 0; i--) {\n const object = objects[i];\n const pattern = getPattern(object);\n\n if (pattern?.test(candidate)) {\n return object;\n }\n }\n}\n"]}
|
1
node_modules/ts-node/dist/node-module-type-classifier.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
39
node_modules/ts-node/dist/node-module-type-classifier.js
generated
vendored
Normal file
@ -0,0 +1,39 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.classifyModule = void 0;
|
||||
const node_internal_modules_cjs_loader_1 = require("../dist-raw/node-internal-modules-cjs-loader");
|
||||
/**
|
||||
* Determine how to emit a module based on tsconfig "module" and package.json "type"
|
||||
*
|
||||
* Supports module=nodenext/node16 with transpileOnly, where we cannot ask the
|
||||
* TS typechecker to tell us if a file is CJS or ESM.
|
||||
*
|
||||
* Return values indicate:
|
||||
* - cjs
|
||||
* - esm
|
||||
* - nodecjs == node-flavored cjs where dynamic imports are *not* transformed into `require()`
|
||||
* - undefined == emit according to tsconfig `module` config, whatever that is
|
||||
* @internal
|
||||
*/
|
||||
function classifyModule(nativeFilename, isNodeModuleType) {
|
||||
// [MUST_UPDATE_FOR_NEW_FILE_EXTENSIONS]
|
||||
const lastDotIndex = nativeFilename.lastIndexOf('.');
|
||||
const ext = lastDotIndex >= 0 ? nativeFilename.slice(lastDotIndex) : '';
|
||||
switch (ext) {
|
||||
case '.cjs':
|
||||
case '.cts':
|
||||
return isNodeModuleType ? 'nodecjs' : 'cjs';
|
||||
case '.mjs':
|
||||
case '.mts':
|
||||
return isNodeModuleType ? 'nodeesm' : 'esm';
|
||||
}
|
||||
if (isNodeModuleType) {
|
||||
const packageScope = (0, node_internal_modules_cjs_loader_1.readPackageScope)(nativeFilename);
|
||||
if (packageScope && packageScope.data.type === 'module')
|
||||
return 'nodeesm';
|
||||
return 'nodecjs';
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
exports.classifyModule = classifyModule;
|
||||
//# sourceMappingURL=node-module-type-classifier.js.map
|
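classifyModule maps a filename to the emit flavor used by the transpile-only code paths in dist/index.js: explicit .cts/.mts extensions win, and under node16/nodenext the nearest package.json "type" field breaks the tie. A sketch of the expected results (the helper is @internal; the import path and filenames are illustrative):

// Sketch only: return values follow the switch statement above.
import { classifyModule } from 'ts-node/dist/node-module-type-classifier';

classifyModule('/p/a.cts', false); // 'cjs'     -- explicit CommonJS extension
classifyModule('/p/a.mts', true);  // 'nodeesm' -- ESM extension under node16/nodenext
classifyModule('/p/a.ts', false);  // undefined -- emit per tsconfig `module`
// With isNodeModuleType=true and a plain .ts/.js file, readPackageScope() decides
// between 'nodeesm' (package.json "type": "module") and 'nodecjs'.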
1
node_modules/ts-node/dist/node-module-type-classifier.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
{"version":3,"file":"node-module-type-classifier.js","sourceRoot":"","sources":["../src/node-module-type-classifier.ts"],"names":[],"mappings":";;;AAAA,mGAAgF;AAEhF;;;;;;;;;;;;GAYG;AACH,SAAgB,cAAc,CAC5B,cAAsB,EACtB,gBAAyB;IAEzB,wCAAwC;IACxC,MAAM,YAAY,GAAG,cAAc,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC;IACrD,MAAM,GAAG,GAAG,YAAY,IAAI,CAAC,CAAC,CAAC,CAAC,cAAc,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;IACxE,QAAQ,GAAG,EAAE;QACX,KAAK,MAAM,CAAC;QACZ,KAAK,MAAM;YACT,OAAO,gBAAgB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC;QAC9C,KAAK,MAAM,CAAC;QACZ,KAAK,MAAM;YACT,OAAO,gBAAgB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,CAAC;KAC/C;IACD,IAAI,gBAAgB,EAAE;QACpB,MAAM,YAAY,GAAG,IAAA,mDAAgB,EAAC,cAAc,CAAC,CAAC;QACtD,IAAI,YAAY,IAAI,YAAY,CAAC,IAAI,CAAC,IAAI,KAAK,QAAQ;YAAE,OAAO,SAAS,CAAC;QAC1E,OAAO,SAAS,CAAC;KAClB;IACD,OAAO,SAAS,CAAC;AACnB,CAAC;AArBD,wCAqBC","sourcesContent":["import { readPackageScope } from '../dist-raw/node-internal-modules-cjs-loader';\n\n/**\n * Determine how to emit a module based on tsconfig \"module\" and package.json \"type\"\n *\n * Supports module=nodenext/node16 with transpileOnly, where we cannot ask the\n * TS typechecker to tell us if a file is CJS or ESM.\n *\n * Return values indicate:\n * - cjs\n * - esm\n * - nodecjs == node-flavored cjs where dynamic imports are *not* transformed into `require()`\n * - undefined == emit according to tsconfig `module` config, whatever that is\n * @internal\n */\nexport function classifyModule(\n nativeFilename: string,\n isNodeModuleType: boolean\n): 'nodecjs' | 'cjs' | 'esm' | 'nodeesm' | undefined {\n // [MUST_UPDATE_FOR_NEW_FILE_EXTENSIONS]\n const lastDotIndex = nativeFilename.lastIndexOf('.');\n const ext = lastDotIndex >= 0 ? nativeFilename.slice(lastDotIndex) : '';\n switch (ext) {\n case '.cjs':\n case '.cts':\n return isNodeModuleType ? 'nodecjs' : 'cjs';\n case '.mjs':\n case '.mts':\n return isNodeModuleType ? 'nodeesm' : 'esm';\n }\n if (isNodeModuleType) {\n const packageScope = readPackageScope(nativeFilename);\n if (packageScope && packageScope.data.type === 'module') return 'nodeesm';\n return 'nodecjs';\n }\n return undefined;\n}\n"]}
|
78
node_modules/ts-node/dist/repl.d.ts
generated
vendored
Normal file
@ -0,0 +1,78 @@
|
||||
/// <reference types="node" />
|
||||
import { Service, CreateOptions } from './index';
|
||||
export interface ReplService {
|
||||
readonly state: EvalState;
|
||||
/**
|
||||
* Bind this REPL to a ts-node compiler service. A compiler service must be bound before `eval`-ing code or starting the REPL
|
||||
*/
|
||||
setService(service: Service): void;
|
||||
/**
|
||||
* Append code to the virtual <repl> source file, compile it to JavaScript, throw semantic errors if the typechecker is enabled,
|
||||
* and execute it.
|
||||
*
|
||||
* Note: typically, you will want to call `start()` instead of using this method.
|
||||
*
|
||||
* @param code string of TypeScript.
|
||||
*/
|
||||
evalCode(code: string): any;
|
||||
/**
|
||||
* `eval` implementation compatible with node's REPL API
|
||||
*
|
||||
* Can be used in advanced scenarios if you want to manually create your own
|
||||
* node REPL instance and delegate eval to this `ReplService`.
|
||||
*
|
||||
* Example:
|
||||
*
|
||||
* import {start} from 'repl';
|
||||
* const replService: tsNode.ReplService = ...; // assuming you have already created a ts-node ReplService
|
||||
* const nodeRepl = start({eval: replService.nodeEval});
|
||||
*/
|
||||
nodeEval(code: string, context: any, _filename: string, callback: (err: Error | null, result?: any) => any): void;
|
||||
evalAwarePartialHost: EvalAwarePartialHost;
|
||||
/** Start a node REPL */
|
||||
start(): void;
|
||||
/**
|
||||
* Start a node REPL, evaling a string of TypeScript before it starts.
|
||||
* @deprecated
|
||||
*/
|
||||
start(code: string): void;
|
||||
}
|
||||
/** @category REPL */
|
||||
export interface CreateReplOptions {
|
||||
service?: Service;
|
||||
state?: EvalState;
|
||||
stdin?: NodeJS.ReadableStream;
|
||||
stdout?: NodeJS.WritableStream;
|
||||
stderr?: NodeJS.WritableStream;
|
||||
}
|
||||
/**
|
||||
* Create a ts-node REPL instance.
|
||||
*
|
||||
* Pay close attention to the example below. Today, the API requires a few lines
|
||||
* of boilerplate to correctly bind the `ReplService` to the ts-node `Service` and
|
||||
* vice-versa.
|
||||
*
|
||||
* Usage example:
|
||||
*
|
||||
* const repl = tsNode.createRepl();
|
||||
* const service = tsNode.create({...repl.evalAwarePartialHost});
|
||||
* repl.setService(service);
|
||||
* repl.start();
|
||||
*
|
||||
* @category REPL
|
||||
*/
|
||||
export declare function createRepl(options?: CreateReplOptions): ReplService;
|
||||
/**
|
||||
* Eval state management. Stores virtual `[eval].ts` file
|
||||
*/
|
||||
export declare class EvalState {
|
||||
path: string;
|
||||
__tsNodeEvalStateBrand: unknown;
|
||||
constructor(path: string);
|
||||
}
|
||||
/**
|
||||
* Filesystem host functions which are aware of the "virtual" `[eval].ts`, `<repl>`, or `[stdin].ts` file used to compile REPL inputs.
|
||||
* Must be passed to `create()` to create a ts-node compiler service which can compile REPL inputs.
|
||||
*/
|
||||
export declare type EvalAwarePartialHost = Pick<CreateOptions, 'readFile' | 'fileExists'>;
|
||||
export declare function createEvalAwarePartialHost(state: EvalState, composeWith?: EvalAwarePartialHost): EvalAwarePartialHost;
|
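The createRepl() doc comment above describes the wiring boilerplate; spelled out, a sketch of a programmatically started REPL looks like this (option values are illustrative):

// Sketch of the boilerplate described in the createRepl() docs above.
import * as tsNode from 'ts-node';

const repl = tsNode.createRepl();
// The eval-aware host lets the compiler read the virtual <repl>.ts / [eval].ts file.
const service = tsNode.create({ ...repl.evalAwarePartialHost });
repl.setService(service);
repl.start();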
561
node_modules/ts-node/dist/repl.js
generated
vendored
Normal file
@ -0,0 +1,561 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.setupContext = exports.createEvalAwarePartialHost = exports.EvalState = exports.createRepl = exports.REPL_NAME = exports.REPL_FILENAME = exports.STDIN_NAME = exports.STDIN_FILENAME = exports.EVAL_NAME = exports.EVAL_FILENAME = void 0;
|
||||
const os_1 = require("os");
|
||||
const path_1 = require("path");
|
||||
const repl_1 = require("repl");
|
||||
const vm_1 = require("vm");
|
||||
const index_1 = require("./index");
|
||||
const fs_1 = require("fs");
|
||||
const console_1 = require("console");
|
||||
const assert = require("assert");
|
||||
const module_1 = require("module");
|
||||
// Lazy-loaded.
|
||||
let _processTopLevelAwait;
|
||||
function getProcessTopLevelAwait() {
|
||||
if (_processTopLevelAwait === undefined) {
|
||||
({
|
||||
processTopLevelAwait: _processTopLevelAwait,
|
||||
} = require('../dist-raw/node-internal-repl-await'));
|
||||
}
|
||||
return _processTopLevelAwait;
|
||||
}
|
||||
let diff;
|
||||
function getDiffLines() {
|
||||
if (diff === undefined) {
|
||||
diff = require('diff');
|
||||
}
|
||||
return diff.diffLines;
|
||||
}
|
||||
/** @internal */
|
||||
exports.EVAL_FILENAME = `[eval].ts`;
|
||||
/** @internal */
|
||||
exports.EVAL_NAME = `[eval]`;
|
||||
/** @internal */
|
||||
exports.STDIN_FILENAME = `[stdin].ts`;
|
||||
/** @internal */
|
||||
exports.STDIN_NAME = `[stdin]`;
|
||||
/** @internal */
|
||||
exports.REPL_FILENAME = '<repl>.ts';
|
||||
/** @internal */
|
||||
exports.REPL_NAME = '<repl>';
|
||||
/**
|
||||
* Create a ts-node REPL instance.
|
||||
*
|
||||
* Pay close attention to the example below. Today, the API requires a few lines
|
||||
* of boilerplate to correctly bind the `ReplService` to the ts-node `Service` and
|
||||
* vice-versa.
|
||||
*
|
||||
* Usage example:
|
||||
*
|
||||
* const repl = tsNode.createRepl();
|
||||
* const service = tsNode.create({...repl.evalAwarePartialHost});
|
||||
* repl.setService(service);
|
||||
* repl.start();
|
||||
*
|
||||
* @category REPL
|
||||
*/
|
||||
function createRepl(options = {}) {
|
||||
var _a, _b, _c, _d, _e;
|
||||
const { ignoreDiagnosticsThatAreAnnoyingInInteractiveRepl = true } = options;
|
||||
let service = options.service;
|
||||
let nodeReplServer;
|
||||
// If `useGlobal` is not true, then REPL creates a context when started.
|
||||
// This stores a reference to it or to `global`, whichever is used, after REPL has started.
|
||||
let context;
|
||||
const state = (_a = options.state) !== null && _a !== void 0 ? _a : new EvalState((0, path_1.join)(process.cwd(), exports.REPL_FILENAME));
|
||||
const evalAwarePartialHost = createEvalAwarePartialHost(state, options.composeWithEvalAwarePartialHost);
|
||||
const stdin = (_b = options.stdin) !== null && _b !== void 0 ? _b : process.stdin;
|
||||
const stdout = (_c = options.stdout) !== null && _c !== void 0 ? _c : process.stdout;
|
||||
const stderr = (_d = options.stderr) !== null && _d !== void 0 ? _d : process.stderr;
|
||||
const _console = stdout === process.stdout && stderr === process.stderr
|
||||
? console
|
||||
: new console_1.Console(stdout, stderr);
|
||||
const replService = {
|
||||
state: (_e = options.state) !== null && _e !== void 0 ? _e : new EvalState((0, path_1.join)(process.cwd(), exports.EVAL_FILENAME)),
|
||||
setService,
|
||||
evalCode,
|
||||
evalCodeInternal,
|
||||
nodeEval,
|
||||
evalAwarePartialHost,
|
||||
start,
|
||||
startInternal,
|
||||
stdin,
|
||||
stdout,
|
||||
stderr,
|
||||
console: _console,
|
||||
};
|
||||
return replService;
|
||||
function setService(_service) {
|
||||
service = _service;
|
||||
if (ignoreDiagnosticsThatAreAnnoyingInInteractiveRepl) {
|
||||
service.addDiagnosticFilter({
|
||||
appliesToAllFiles: false,
|
||||
filenamesAbsolute: [state.path],
|
||||
diagnosticsIgnored: [
|
||||
2393,
|
||||
6133,
|
||||
7027,
|
||||
...(service.shouldReplAwait ? topLevelAwaitDiagnosticCodes : []),
|
||||
],
|
||||
});
|
||||
}
|
||||
}
|
||||
function evalCode(code) {
|
||||
const result = appendCompileAndEvalInput({
|
||||
service: service,
|
||||
state,
|
||||
input: code,
|
||||
context,
|
||||
overrideIsCompletion: false,
|
||||
});
|
||||
assert(result.containsTopLevelAwait === false);
|
||||
return result.value;
|
||||
}
|
||||
function evalCodeInternal(options) {
|
||||
const { code, enableTopLevelAwait, context } = options;
|
||||
return appendCompileAndEvalInput({
|
||||
service: service,
|
||||
state,
|
||||
input: code,
|
||||
enableTopLevelAwait,
|
||||
context,
|
||||
});
|
||||
}
|
||||
function nodeEval(code, context, _filename, callback) {
|
||||
// TODO: Figure out how to handle completion here.
|
||||
if (code === '.scope') {
|
||||
callback(null);
|
||||
return;
|
||||
}
|
||||
try {
|
||||
const evalResult = evalCodeInternal({
|
||||
code,
|
||||
enableTopLevelAwait: true,
|
||||
context,
|
||||
});
|
||||
if (evalResult.containsTopLevelAwait) {
|
||||
(async () => {
|
||||
try {
|
||||
callback(null, await evalResult.valuePromise);
|
||||
}
|
||||
catch (promiseError) {
|
||||
handleError(promiseError);
|
||||
}
|
||||
})();
|
||||
}
|
||||
else {
|
||||
callback(null, evalResult.value);
|
||||
}
|
||||
}
|
||||
catch (error) {
|
||||
handleError(error);
|
||||
}
|
||||
// Log TSErrors, check if they're recoverable, log helpful hints for certain
|
||||
// well-known errors, and invoke `callback()`
|
||||
// TODO should evalCode API get the same error-handling benefits?
|
||||
function handleError(error) {
|
||||
var _a, _b;
|
||||
// Don't show TLA hint if the user explicitly disabled repl top level await
|
||||
const canLogTopLevelAwaitHint = service.options.experimentalReplAwait !== false &&
|
||||
!service.shouldReplAwait;
|
||||
if (error instanceof index_1.TSError) {
|
||||
// Support recoverable compilations using >= node 6.
|
||||
if (repl_1.Recoverable && isRecoverable(error)) {
|
||||
callback(new repl_1.Recoverable(error));
|
||||
return;
|
||||
}
|
||||
else {
|
||||
_console.error(error);
|
||||
if (canLogTopLevelAwaitHint &&
|
||||
error.diagnosticCodes.some((dC) => topLevelAwaitDiagnosticCodes.includes(dC))) {
|
||||
_console.error(getTopLevelAwaitHint());
|
||||
}
|
||||
callback(null);
|
||||
}
|
||||
}
|
||||
else {
|
||||
let _error = error;
|
||||
if (canLogTopLevelAwaitHint &&
|
||||
_error instanceof SyntaxError &&
|
||||
((_a = _error.message) === null || _a === void 0 ? void 0 : _a.includes('await is only valid'))) {
|
||||
try {
|
||||
// Only way I know to make our hint appear after the error
|
||||
_error.message += `\n\n${getTopLevelAwaitHint()}`;
|
||||
_error.stack = (_b = _error.stack) === null || _b === void 0 ? void 0 : _b.replace(/(SyntaxError:.*)/, (_, $1) => `${$1}\n\n${getTopLevelAwaitHint()}`);
|
||||
}
|
||||
catch { }
|
||||
}
|
||||
callback(_error);
|
||||
}
|
||||
}
|
||||
function getTopLevelAwaitHint() {
|
||||
return `Hint: REPL top-level await requires TypeScript version 3.8 or higher and target ES2018 or higher. You are using TypeScript ${service.ts.version} and target ${service.ts.ScriptTarget[service.config.options.target]}.`;
|
||||
}
|
||||
}
|
||||
// Note: `code` argument is deprecated
|
||||
function start(code) {
|
||||
startInternal({ code });
|
||||
}
|
||||
// Note: `code` argument is deprecated
|
||||
function startInternal(options) {
|
||||
const { code, forceToBeModule = true, ...optionsOverride } = options !== null && options !== void 0 ? options : {};
|
||||
// TODO assert that `service` is set; remove all `service!` non-null assertions
|
||||
// Eval incoming code before the REPL starts.
|
||||
// Note: deprecated
|
||||
if (code) {
|
||||
try {
|
||||
evalCode(`${code}\n`);
|
||||
}
|
||||
catch (err) {
|
||||
_console.error(err);
|
||||
// Note: should not be killing the process here, but this codepath is deprecated anyway
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
// In case the typescript compiler hasn't compiled anything yet,
|
||||
// make it run through compilation at least once before
|
||||
// the REPL starts for a snappier user experience on startup.
|
||||
service === null || service === void 0 ? void 0 : service.compile('', state.path);
|
||||
const repl = (0, repl_1.start)({
|
||||
prompt: '> ',
|
||||
input: replService.stdin,
|
||||
output: replService.stdout,
|
||||
// Mimicking node's REPL implementation: https://github.com/nodejs/node/blob/168b22ba073ee1cbf8d0bcb4ded7ff3099335d04/lib/internal/repl.js#L28-L30
|
||||
terminal: stdout.isTTY &&
|
||||
!parseInt(index_1.env.NODE_NO_READLINE, 10),
|
||||
eval: nodeEval,
|
||||
useGlobal: true,
|
||||
...optionsOverride,
|
||||
});
|
||||
nodeReplServer = repl;
|
||||
context = repl.context;
|
||||
// Bookmark the point where we should reset the REPL state.
|
||||
const resetEval = appendToEvalState(state, '');
|
||||
function reset() {
|
||||
resetEval();
|
||||
// Hard fix for TypeScript forcing `Object.defineProperty(exports, ...)`.
|
||||
runInContext('exports = module.exports', state.path, context);
|
||||
if (forceToBeModule) {
|
||||
state.input += 'export {};void 0;\n';
|
||||
}
|
||||
// Declare node builtins.
|
||||
// Skip the same builtins as `addBuiltinLibsToObject`:
|
||||
// those starting with _
|
||||
// those containing /
|
||||
// those that already exist as globals
|
||||
// Intentionally suppress type errors in case @types/node does not declare any of them, and because
|
||||
// `declare import` is technically invalid syntax.
|
||||
// Avoid this when in transpileOnly, because third-party transpilers may not handle `declare import`.
|
||||
if (!(service === null || service === void 0 ? void 0 : service.transpileOnly)) {
|
||||
state.input += `// @ts-ignore\n${module_1.builtinModules
|
||||
.filter((name) => !name.startsWith('_') &&
|
||||
!name.includes('/') &&
|
||||
!['console', 'module', 'process'].includes(name))
|
||||
.map((name) => `declare import ${name} = require('${name}')`)
|
||||
.join(';')}\n`;
|
||||
}
|
||||
}
|
||||
reset();
|
||||
repl.on('reset', reset);
|
||||
repl.defineCommand('type', {
|
||||
help: 'Check the type of a TypeScript identifier',
|
||||
action: function (identifier) {
|
||||
if (!identifier) {
|
||||
repl.displayPrompt();
|
||||
return;
|
||||
}
|
||||
const undo = appendToEvalState(state, identifier);
|
||||
const { name, comment } = service.getTypeInfo(state.input, state.path, state.input.length);
|
||||
undo();
|
||||
if (name)
|
||||
repl.outputStream.write(`${name}\n`);
|
||||
if (comment)
|
||||
repl.outputStream.write(`${comment}\n`);
|
||||
repl.displayPrompt();
|
||||
},
|
||||
});
|
||||
// Set up REPL history when available natively via node.js >= 11.
|
||||
if (repl.setupHistory) {
|
||||
const historyPath = index_1.env.TS_NODE_HISTORY || (0, path_1.join)((0, os_1.homedir)(), '.ts_node_repl_history');
|
||||
repl.setupHistory(historyPath, (err) => {
|
||||
if (!err)
|
||||
return;
|
||||
_console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
return repl;
|
||||
}
|
||||
}
|
||||
exports.createRepl = createRepl;
|
||||
/**
|
||||
* Eval state management. Stores virtual `[eval].ts` file
|
||||
*/
|
||||
class EvalState {
|
||||
constructor(path) {
|
||||
this.path = path;
|
||||
/** @internal */
|
||||
this.input = '';
|
||||
/** @internal */
|
||||
this.output = '';
|
||||
/** @internal */
|
||||
this.version = 0;
|
||||
/** @internal */
|
||||
this.lines = 0;
|
||||
}
|
||||
}
|
||||
exports.EvalState = EvalState;
|
||||
function createEvalAwarePartialHost(state, composeWith) {
|
||||
function readFile(path) {
|
||||
if (path === state.path)
|
||||
return state.input;
|
||||
if (composeWith === null || composeWith === void 0 ? void 0 : composeWith.readFile)
|
||||
return composeWith.readFile(path);
|
||||
try {
|
||||
return (0, fs_1.readFileSync)(path, 'utf8');
|
||||
}
|
||||
catch (err) {
|
||||
/* Ignore. */
|
||||
}
|
||||
}
|
||||
function fileExists(path) {
|
||||
if (path === state.path)
|
||||
return true;
|
||||
if (composeWith === null || composeWith === void 0 ? void 0 : composeWith.fileExists)
|
||||
return composeWith.fileExists(path);
|
||||
try {
|
||||
const stats = (0, fs_1.statSync)(path);
|
||||
return stats.isFile() || stats.isFIFO();
|
||||
}
|
||||
catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return { readFile, fileExists };
|
||||
}
|
||||
exports.createEvalAwarePartialHost = createEvalAwarePartialHost;
|
||||
const sourcemapCommentRe = /\/\/# ?sourceMappingURL=\S+[\s\r\n]*$/;
|
||||
/**
|
||||
* Evaluate the code snippet.
|
||||
*
|
||||
* Append it to virtual .ts file, compile, handle compiler errors, compute a diff of the JS, and eval any code that
|
||||
* appears as "added" in the diff.
|
||||
*/
|
||||
function appendCompileAndEvalInput(options) {
|
||||
const { service, state, wrappedErr, enableTopLevelAwait = false, context, overrideIsCompletion, } = options;
|
||||
let { input } = options;
|
||||
// It's confusing for `{ a: 1 }` to be interpreted as a block statement
|
||||
// rather than an object literal. So, we first try to wrap it in
|
||||
// parentheses, so that it will be interpreted as an expression.
|
||||
// Based on https://github.com/nodejs/node/blob/c2e6822153bad023ab7ebd30a6117dcc049e475c/lib/repl.js#L413-L422
|
||||
let wrappedCmd = false;
|
||||
if (!wrappedErr && /^\s*{/.test(input) && !/;\s*$/.test(input)) {
|
||||
input = `(${input.trim()})\n`;
|
||||
wrappedCmd = true;
|
||||
}
|
||||
const lines = state.lines;
|
||||
const isCompletion = overrideIsCompletion !== null && overrideIsCompletion !== void 0 ? overrideIsCompletion : !/\n$/.test(input);
|
||||
const undo = appendToEvalState(state, input);
|
||||
let output;
|
||||
// Based on https://github.com/nodejs/node/blob/92573721c7cff104ccb82b6ed3e8aa69c4b27510/lib/repl.js#L457-L461
|
||||
function adjustUseStrict(code) {
|
||||
// "void 0" keeps the repl from returning "use strict" as the result
|
||||
// value for statements and declarations that don't return a value.
|
||||
return code.replace(/^"use strict";/, '"use strict"; void 0;');
|
||||
}
|
||||
try {
|
||||
output = service.compile(state.input, state.path, -lines);
|
||||
}
|
||||
catch (err) {
|
||||
undo();
|
||||
if (wrappedCmd) {
|
||||
if (err instanceof index_1.TSError && err.diagnosticCodes[0] === 2339) {
|
||||
// Ensure consistent and more sane behavior between { a: 1 }['b'] and ({ a: 1 }['b'])
|
||||
throw err;
|
||||
}
|
||||
// Unwrap and try again
|
||||
return appendCompileAndEvalInput({
|
||||
...options,
|
||||
wrappedErr: err,
|
||||
});
|
||||
}
|
||||
if (wrappedErr)
|
||||
throw wrappedErr;
|
||||
throw err;
|
||||
}
|
||||
output = adjustUseStrict(output);
|
||||
// Note: REPL does not respect sourcemaps!
|
||||
// To properly do that, we'd need to prefix the code we eval -- which comes
|
||||
// from `diffLines` -- with newlines so that it's at the proper line numbers.
|
||||
// Then we'd need to ensure each bit of eval-ed code, if there are multiples,
|
||||
// has the sourcemap appended to it.
|
||||
// We might also need to integrate with our sourcemap hooks' cache; I'm not sure.
|
||||
const outputWithoutSourcemapComment = output.replace(sourcemapCommentRe, '');
|
||||
const oldOutputWithoutSourcemapComment = state.output.replace(sourcemapCommentRe, '');
|
||||
// Use `diff` to check for new JavaScript to execute.
|
||||
const changes = getDiffLines()(oldOutputWithoutSourcemapComment, outputWithoutSourcemapComment);
|
||||
if (isCompletion) {
|
||||
undo();
|
||||
}
|
||||
else {
|
||||
state.output = output;
|
||||
// Insert a semicolon to make sure that the code doesn't interact with the next line,
|
||||
// for example to prevent `2\n+ 2` from producing 4.
|
||||
// This is safe since the output will not change since we can only get here with successful inputs,
|
||||
// and adding a semicolon to the end of a successful input won't ever change the output.
|
||||
state.input = state.input.replace(/([^\n\s])([\n\s]*)$/, (all, lastChar, whitespace) => {
|
||||
if (lastChar !== ';')
|
||||
return `${lastChar};${whitespace}`;
|
||||
return all;
|
||||
});
|
||||
}
|
||||
let commands = [];
|
||||
let containsTopLevelAwait = false;
|
||||
// Build a list of "commands": bits of JS code in the diff that must be executed.
|
||||
for (const change of changes) {
|
||||
if (change.added) {
|
||||
if (enableTopLevelAwait &&
|
||||
service.shouldReplAwait &&
|
||||
change.value.indexOf('await') > -1) {
|
||||
const processTopLevelAwait = getProcessTopLevelAwait();
|
||||
// A trailing newline prevents comments from interfering with the wrapper
|
||||
const wrappedResult = processTopLevelAwait(change.value + '\n');
|
||||
if (wrappedResult !== null) {
|
||||
containsTopLevelAwait = true;
|
||||
commands.push({
|
||||
mustAwait: true,
|
||||
execCommand: () => runInContext(wrappedResult, state.path, context),
|
||||
});
|
||||
continue;
|
||||
}
|
||||
}
|
||||
commands.push({
|
||||
execCommand: () => runInContext(change.value, state.path, context),
|
||||
});
|
||||
}
|
||||
}
|
||||
// Execute all commands asynchronously if necessary, returning the result or a
|
||||
// promise of the result.
|
||||
if (containsTopLevelAwait) {
|
||||
return {
|
||||
containsTopLevelAwait,
|
||||
valuePromise: (async () => {
|
||||
let value;
|
||||
for (const command of commands) {
|
||||
const r = command.execCommand();
|
||||
value = command.mustAwait ? await r : r;
|
||||
}
|
||||
return value;
|
||||
})(),
|
||||
};
|
||||
}
|
||||
else {
|
||||
return {
|
||||
containsTopLevelAwait: false,
|
||||
value: commands.reduce((_, c) => c.execCommand(), undefined),
|
||||
};
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Low-level execution of JS code in context
|
||||
*/
|
||||
function runInContext(code, filename, context) {
|
||||
const script = new vm_1.Script(code, { filename });
|
||||
if (context === undefined || context === global) {
|
||||
return script.runInThisContext();
|
||||
}
|
||||
else {
|
||||
return script.runInContext(context);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Append to the eval instance and return an undo function.
|
||||
*/
|
||||
function appendToEvalState(state, input) {
|
||||
const undoInput = state.input;
|
||||
const undoVersion = state.version;
|
||||
const undoOutput = state.output;
|
||||
const undoLines = state.lines;
|
||||
state.input += input;
|
||||
state.lines += lineCount(input);
|
||||
state.version++;
|
||||
return function () {
|
||||
state.input = undoInput;
|
||||
state.output = undoOutput;
|
||||
state.version = undoVersion;
|
||||
state.lines = undoLines;
|
||||
};
|
||||
}
|
||||
/**
|
||||
* Count the number of lines.
|
||||
*/
|
||||
function lineCount(value) {
|
||||
let count = 0;
|
||||
for (const char of value) {
|
||||
if (char === '\n') {
|
||||
count++;
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
/**
|
||||
* TS diagnostic codes which are recoverable, meaning that the user likely entered an incomplete line of code
|
||||
* and should be prompted for the next. For example, starting a multi-line for() loop and not finishing it.
|
||||
* null value means code is always recoverable. `Set` means code is only recoverable when occurring alongside at least one
|
||||
* of the other codes.
|
||||
*/
|
||||
const RECOVERY_CODES = new Map([
|
||||
[1003, null],
|
||||
[1005, null],
|
||||
[1109, null],
|
||||
[1126, null],
|
||||
[
|
||||
1136,
|
||||
new Set([1005]), // happens when typing out an object literal or block scope across multiple lines: '{ foo: 123,'
|
||||
],
|
||||
[1160, null],
|
||||
[1161, null],
|
||||
[2355, null],
|
||||
[2391, null],
|
||||
[
|
||||
7010,
|
||||
new Set([1005]), // happens when fn signature spread across multiple lines: 'function a(\nb: any\n) {'
|
||||
],
|
||||
]);
|
||||
/**
|
||||
* Diagnostic codes raised when using top-level await.
|
||||
* These are suppressed when top-level await is enabled.
|
||||
* When it is *not* enabled, these trigger a helpful hint about enabling top-level await.
|
||||
*/
|
||||
const topLevelAwaitDiagnosticCodes = [
|
||||
1375,
|
||||
1378,
|
||||
1431,
|
||||
1432, // Top-level 'for await' loops are only allowed when the 'module' option is set to 'esnext' or 'system', and the 'target' option is set to 'es2017' or higher.
|
||||
];
|
||||
/**
|
||||
* Check whether an error is recoverable, i.e. the user likely entered an incomplete input.
|
||||
*/
|
||||
function isRecoverable(error) {
|
||||
return error.diagnosticCodes.every((code) => {
|
||||
const deps = RECOVERY_CODES.get(code);
|
||||
return (deps === null ||
|
||||
(deps && error.diagnosticCodes.some((code) => deps.has(code))));
|
||||
});
|
||||
}
|
||||
/**
|
||||
* @internal
|
||||
* Set properties on `context` before eval-ing [stdin] or [eval] input.
|
||||
*/
|
||||
function setupContext(context, module, filenameAndDirname) {
|
||||
if (filenameAndDirname) {
|
||||
context.__dirname = '.';
|
||||
context.__filename = `[${filenameAndDirname}]`;
|
||||
}
|
||||
context.module = module;
|
||||
context.exports = module.exports;
|
||||
context.require = module.require.bind(module);
|
||||
}
|
||||
exports.setupContext = setupContext;
|
||||
//# sourceMappingURL=repl.js.map
|
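For the advanced case mentioned in repl.d.ts, nodeEval() can be handed directly to a manually created node REPL. A sketch under that assumption (the prompt text is illustrative):

// Sketch only: delegate eval to ts-node, per the nodeEval() doc comment in repl.d.ts.
import { start } from 'repl';
import * as tsNode from 'ts-node';

const replService = tsNode.createRepl();
const service = tsNode.create({ ...replService.evalAwarePartialHost });
replService.setService(service);

// node's REPL calls replService.nodeEval(code, context, filename, callback) for each input.
start({ prompt: 'ts> ', eval: replService.nodeEval });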
1
node_modules/ts-node/dist/repl.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/ts-node/dist/resolver-functions.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
|
||||
export {};
|
143
node_modules/ts-node/dist/resolver-functions.js
generated
vendored
Normal file
@ -0,0 +1,143 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createResolverFunctions = void 0;
const path_1 = require("path");
/**
 * @internal
 * In a factory because these are shared across both CompilerHost and LanguageService codepaths
 */
function createResolverFunctions(kwargs) {
    const { host, ts, config, cwd, getCanonicalFileName, projectLocalResolveHelper, options, extensions, } = kwargs;
    const moduleResolutionCache = ts.createModuleResolutionCache(cwd, getCanonicalFileName, config.options);
    const knownInternalFilenames = new Set();
    /** "Buckets" (module directories) whose contents should be marked "internal" */
    const internalBuckets = new Set();
    // Get bucket for a source filename. Bucket is the containing `./node_modules/*/` directory
    // For '/project/node_modules/foo/node_modules/bar/lib/index.js' bucket is '/project/node_modules/foo/node_modules/bar/'
    // For '/project/node_modules/foo/node_modules/@scope/bar/lib/index.js' bucket is '/project/node_modules/foo/node_modules/@scope/bar/'
    const moduleBucketRe = /.*\/node_modules\/(?:@[^\/]+\/)?[^\/]+\//;
    function getModuleBucket(filename) {
        const find = moduleBucketRe.exec(filename);
        if (find)
            return find[0];
        return '';
    }
    // Mark that this file and all siblings in its bucket should be "internal"
    function markBucketOfFilenameInternal(filename) {
        internalBuckets.add(getModuleBucket(filename));
    }
    function isFileInInternalBucket(filename) {
        return internalBuckets.has(getModuleBucket(filename));
    }
    function isFileKnownToBeInternal(filename) {
        return knownInternalFilenames.has(filename);
    }
    /**
     * If we need to emit JS for a file, force TS to consider it non-external
     */
    const fixupResolvedModule = (resolvedModule) => {
        const { resolvedFileName } = resolvedModule;
        if (resolvedFileName === undefined)
            return;
        // [MUST_UPDATE_FOR_NEW_FILE_EXTENSIONS]
        // .ts,.mts,.cts is always switched to internal
        // .js is switched on-demand
        if (resolvedModule.isExternalLibraryImport &&
            ((resolvedFileName.endsWith('.ts') &&
                !resolvedFileName.endsWith('.d.ts')) ||
                (resolvedFileName.endsWith('.cts') &&
                    !resolvedFileName.endsWith('.d.cts')) ||
                (resolvedFileName.endsWith('.mts') &&
                    !resolvedFileName.endsWith('.d.mts')) ||
                isFileKnownToBeInternal(resolvedFileName) ||
                isFileInInternalBucket(resolvedFileName))) {
            resolvedModule.isExternalLibraryImport = false;
        }
        if (!resolvedModule.isExternalLibraryImport) {
            knownInternalFilenames.add(resolvedFileName);
        }
    };
    /*
     * NOTE:
     * Older ts versions do not pass `redirectedReference` nor `options`.
     * We must pass `redirectedReference` to newer ts versions, but cannot rely on `options`, hence the weird argument name
     */
    const resolveModuleNames = (moduleNames, containingFile, reusedNames, redirectedReference, optionsOnlyWithNewerTsVersions, containingSourceFile) => {
        return moduleNames.map((moduleName, i) => {
            var _a, _b;
            const mode = containingSourceFile
                ? (_b = (_a = ts).getModeForResolutionAtIndex) === null || _b === void 0 ? void 0 : _b.call(_a, containingSourceFile, i)
                : undefined;
            let { resolvedModule } = ts.resolveModuleName(moduleName, containingFile, config.options, host, moduleResolutionCache, redirectedReference, mode);
            if (!resolvedModule && options.experimentalTsImportSpecifiers) {
                const lastDotIndex = moduleName.lastIndexOf('.');
                const ext = lastDotIndex >= 0 ? moduleName.slice(lastDotIndex) : '';
                if (ext) {
                    const replacements = extensions.tsResolverEquivalents.get(ext);
                    for (const replacementExt of replacements !== null && replacements !== void 0 ? replacements : []) {
                        ({ resolvedModule } = ts.resolveModuleName(moduleName.slice(0, -ext.length) + replacementExt, containingFile, config.options, host, moduleResolutionCache, redirectedReference, mode));
                        if (resolvedModule)
                            break;
                    }
                }
            }
            if (resolvedModule) {
                fixupResolvedModule(resolvedModule);
            }
            return resolvedModule;
        });
    };
    // language service never calls this, but TS docs recommend that we implement it
    const getResolvedModuleWithFailedLookupLocationsFromCache = (moduleName, containingFile, resolutionMode) => {
        const ret = ts.resolveModuleNameFromCache(moduleName, containingFile, moduleResolutionCache, resolutionMode);
        if (ret && ret.resolvedModule) {
            fixupResolvedModule(ret.resolvedModule);
        }
        return ret;
    };
    const resolveTypeReferenceDirectives = (typeDirectiveNames, containingFile, redirectedReference, options, containingFileMode // new impliedNodeFormat is accepted by compilerHost
    ) => {
        // Note: seems to be called with empty typeDirectiveNames array for all files.
        // TODO consider using `ts.loadWithTypeDirectiveCache`
        return typeDirectiveNames.map((typeDirectiveName) => {
            // Copy-pasted from TS source:
            const nameIsString = typeof typeDirectiveName === 'string';
            const mode = nameIsString
                ? undefined
                : ts.getModeForFileReference(typeDirectiveName, containingFileMode);
            const strName = nameIsString
                ? typeDirectiveName
                : typeDirectiveName.fileName.toLowerCase();
            let { resolvedTypeReferenceDirective } = ts.resolveTypeReferenceDirective(strName, containingFile, config.options, host, redirectedReference, undefined, mode);
            if (typeDirectiveName === 'node' && !resolvedTypeReferenceDirective) {
                // Resolve @types/node relative to project first, then __dirname (copy logic from elsewhere / refactor into reusable function)
                let typesNodePackageJsonPath;
                try {
                    typesNodePackageJsonPath = projectLocalResolveHelper('@types/node/package.json', true);
                }
                catch { } // gracefully do nothing when @types/node is not installed for any reason
                if (typesNodePackageJsonPath) {
                    const typeRoots = [(0, path_1.resolve)(typesNodePackageJsonPath, '../..')];
                    ({ resolvedTypeReferenceDirective } =
                        ts.resolveTypeReferenceDirective(typeDirectiveName, containingFile, {
                            ...config.options,
                            typeRoots,
                        }, host, redirectedReference));
                }
            }
            if (resolvedTypeReferenceDirective) {
                fixupResolvedModule(resolvedTypeReferenceDirective);
            }
            return resolvedTypeReferenceDirective;
        });
    };
    return {
        resolveModuleNames,
        getResolvedModuleWithFailedLookupLocationsFromCache,
        resolveTypeReferenceDirectives,
        isFileKnownToBeInternal,
        markBucketOfFilenameInternal,
    };
}
exports.createResolverFunctions = createResolverFunctions;
//# sourceMappingURL=resolver-functions.js.map
1
node_modules/ts-node/dist/resolver-functions.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
11
node_modules/ts-node/dist/transpilers/swc.d.ts
generated
vendored
Normal file
@ -0,0 +1,11 @@
import type * as swcWasm from '@swc/wasm';
import type { CreateTranspilerOptions, Transpiler } from './types';
export interface SwcTranspilerOptions extends CreateTranspilerOptions {
    /**
     * swc compiler to use for compilation
     * Set to '@swc/wasm' to use swc's WASM compiler
     * Default: '@swc/core', falling back to '@swc/wasm'
     */
    swc?: string | typeof swcWasm;
}
export declare function create(createOptions: SwcTranspilerOptions): Transpiler;
212
node_modules/ts-node/dist/transpilers/swc.js
generated
vendored
Normal file
@ -0,0 +1,212 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createSwcOptions = exports.targetMapping = exports.create = void 0;
function create(createOptions) {
    const { swc, service: { config, projectLocalResolveHelper }, transpilerConfigLocalResolveHelper, nodeModuleEmitKind, } = createOptions;
    // Load swc compiler
    let swcInstance;
    // Used later in diagnostics; merely needs to be human-readable.
    let swcDepName = 'swc';
    if (typeof swc === 'string') {
        swcDepName = swc;
        swcInstance = require(transpilerConfigLocalResolveHelper(swc, true));
    }
    else if (swc == null) {
        let swcResolved;
        try {
            swcDepName = '@swc/core';
            swcResolved = transpilerConfigLocalResolveHelper(swcDepName, true);
        }
        catch (e) {
            try {
                swcDepName = '@swc/wasm';
                swcResolved = transpilerConfigLocalResolveHelper(swcDepName, true);
            }
            catch (e) {
                throw new Error('swc compiler requires either @swc/core or @swc/wasm to be installed as a dependency. See https://typestrong.org/ts-node/docs/transpilers');
            }
        }
        swcInstance = require(swcResolved);
    }
    else {
        swcInstance = swc;
    }
    // Prepare SWC options derived from typescript compiler options
    const { nonTsxOptions, tsxOptions } = createSwcOptions(config.options, nodeModuleEmitKind, swcInstance, swcDepName);
    const transpile = (input, transpileOptions) => {
        const { fileName } = transpileOptions;
        const swcOptions = fileName.endsWith('.tsx') || fileName.endsWith('.jsx')
            ? tsxOptions
            : nonTsxOptions;
        const { code, map } = swcInstance.transformSync(input, {
            ...swcOptions,
            filename: fileName,
        });
        return { outputText: code, sourceMapText: map };
    };
    return {
        transpile,
    };
}
exports.create = create;
/** @internal */
exports.targetMapping = new Map();
exports.targetMapping.set(/* ts.ScriptTarget.ES3 */ 0, 'es3');
exports.targetMapping.set(/* ts.ScriptTarget.ES5 */ 1, 'es5');
exports.targetMapping.set(/* ts.ScriptTarget.ES2015 */ 2, 'es2015');
exports.targetMapping.set(/* ts.ScriptTarget.ES2016 */ 3, 'es2016');
exports.targetMapping.set(/* ts.ScriptTarget.ES2017 */ 4, 'es2017');
exports.targetMapping.set(/* ts.ScriptTarget.ES2018 */ 5, 'es2018');
exports.targetMapping.set(/* ts.ScriptTarget.ES2019 */ 6, 'es2019');
exports.targetMapping.set(/* ts.ScriptTarget.ES2020 */ 7, 'es2020');
exports.targetMapping.set(/* ts.ScriptTarget.ES2021 */ 8, 'es2021');
exports.targetMapping.set(/* ts.ScriptTarget.ES2022 */ 9, 'es2022');
exports.targetMapping.set(/* ts.ScriptTarget.ESNext */ 99, 'es2022');
/**
 * @internal
 * We use this list to downgrade to a prior target when we probe swc to detect if it supports a particular target
 */
const swcTargets = [
    'es3',
    'es5',
    'es2015',
    'es2016',
    'es2017',
    'es2018',
    'es2019',
    'es2020',
    'es2021',
    'es2022',
];
const ModuleKind = {
    None: 0,
    CommonJS: 1,
    AMD: 2,
    UMD: 3,
    System: 4,
    ES2015: 5,
    ES2020: 6,
    ESNext: 99,
    Node16: 100,
    NodeNext: 199,
};
const JsxEmit = {
    ReactJSX: /* ts.JsxEmit.ReactJSX */ 4,
    ReactJSXDev: /* ts.JsxEmit.ReactJSXDev */ 5,
};
/**
 * Prepare SWC options derived from typescript compiler options.
 * @internal exported for testing
 */
function createSwcOptions(compilerOptions, nodeModuleEmitKind, swcInstance, swcDepName) {
    var _a;
    const { esModuleInterop, sourceMap, importHelpers, experimentalDecorators, emitDecoratorMetadata, target, module, jsx, jsxFactory, jsxFragmentFactory, strict, alwaysStrict, noImplicitUseStrict, } = compilerOptions;
    let swcTarget = (_a = exports.targetMapping.get(target)) !== null && _a !== void 0 ? _a : 'es3';
    // Downgrade to lower target if swc does not support the selected target.
    // Perhaps project has an older version of swc.
    // TODO cache the results of this; slightly faster
    let swcTargetIndex = swcTargets.indexOf(swcTarget);
    for (; swcTargetIndex >= 0; swcTargetIndex--) {
        try {
            swcInstance.transformSync('', {
                jsc: { target: swcTargets[swcTargetIndex] },
            });
            break;
        }
        catch (e) { }
    }
    swcTarget = swcTargets[swcTargetIndex];
    const keepClassNames = target >= /* ts.ScriptTarget.ES2016 */ 3;
    const isNodeModuleKind = module === ModuleKind.Node16 || module === ModuleKind.NodeNext;
    // swc only supports these 4x module options [MUST_UPDATE_FOR_NEW_MODULEKIND]
    const moduleType = module === ModuleKind.CommonJS
        ? 'commonjs'
        : module === ModuleKind.AMD
            ? 'amd'
            : module === ModuleKind.UMD
                ? 'umd'
                : isNodeModuleKind && nodeModuleEmitKind === 'nodecjs'
                    ? 'commonjs'
                    : isNodeModuleKind && nodeModuleEmitKind === 'nodeesm'
                        ? 'es6'
                        : 'es6';
    // In swc:
    //   strictMode means `"use strict"` is *always* emitted for non-ES module, *never* for ES module where it is assumed it can be omitted.
    //   (this assumption is invalid, but that's the way swc behaves)
    // tsc is a bit more complex:
    //   alwaysStrict will force emitting it always unless `import`/`export` syntax is emitted which implies it per the JS spec.
    //   if not alwaysStrict, will emit implicitly whenever module target is non-ES *and* transformed module syntax is emitted.
    // For node, best option is to assume that all scripts are modules (commonjs or esm) and thus should get tsc's implicit strict behavior.
    // Always set strictMode, *unless* alwaysStrict is disabled and noImplicitUseStrict is enabled
    const strictMode =
        // if `alwaysStrict` is disabled, remembering that `strict` defaults `alwaysStrict` to true
        (alwaysStrict === false || (alwaysStrict !== true && strict !== true)) &&
            // if noImplicitUseStrict is enabled
            noImplicitUseStrict === true
            ? false
            : true;
    const jsxRuntime = jsx === JsxEmit.ReactJSX || jsx === JsxEmit.ReactJSXDev
        ? 'automatic'
        : undefined;
    const jsxDevelopment = jsx === JsxEmit.ReactJSXDev ? true : undefined;
    const nonTsxOptions = createVariant(false);
    const tsxOptions = createVariant(true);
    return { nonTsxOptions, tsxOptions };
    function createVariant(isTsx) {
        const swcOptions = {
            sourceMaps: sourceMap,
            // isModule: true,
            module: moduleType
                ? {
                    noInterop: !esModuleInterop,
                    type: moduleType,
                    strictMode,
                    // For NodeNext and Node12, emit as CJS but do not transform dynamic imports
                    ignoreDynamic: nodeModuleEmitKind === 'nodecjs',
                }
                : undefined,
            swcrc: false,
            jsc: {
                externalHelpers: importHelpers,
                parser: {
                    syntax: 'typescript',
                    tsx: isTsx,
                    decorators: experimentalDecorators,
                    dynamicImport: true,
                    importAssertions: true,
                },
                target: swcTarget,
                transform: {
                    decoratorMetadata: emitDecoratorMetadata,
                    legacyDecorator: true,
                    react: {
                        throwIfNamespace: false,
                        development: jsxDevelopment,
                        useBuiltins: false,
                        pragma: jsxFactory,
                        pragmaFrag: jsxFragmentFactory,
                        runtime: jsxRuntime,
                    },
                },
                keepClassNames,
                experimental: {
                    keepImportAssertions: true,
                },
            },
        };
        // Throw a helpful error if swc version is old, for example, if it rejects `ignoreDynamic`
        try {
            swcInstance.transformSync('', swcOptions);
        }
        catch (e) {
            throw new Error(`${swcDepName} threw an error when attempting to validate swc compiler options.\n` +
                'You may be using an old version of swc which does not support the options used by ts-node.\n' +
                'Try upgrading to the latest version of swc.\n' +
                'Error message from swc:\n' +
                (e === null || e === void 0 ? void 0 : e.message));
        }
        return swcOptions;
    }
}
exports.createSwcOptions = createSwcOptions;
//# sourceMappingURL=swc.js.map
1
node_modules/ts-node/dist/transpilers/swc.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
35
node_modules/ts-node/dist/transpilers/types.d.ts
generated
vendored
Normal file
@ -0,0 +1,35 @@
import type * as ts from 'typescript';
import type { Service } from '../index';
/**
 * Third-party transpilers are implemented as a CommonJS module with a
 * named export "create"
 *
 * @category Transpiler
 */
export interface TranspilerModule {
    create: TranspilerFactory;
}
/**
 * Called by ts-node to create a custom transpiler.
 *
 * @category Transpiler
 */
export declare type TranspilerFactory = (options: CreateTranspilerOptions) => Transpiler;
/** @category Transpiler */
export interface CreateTranspilerOptions {
    service: Pick<Service, Extract<'config' | 'options' | 'projectLocalResolveHelper', keyof Service>>;
}
/** @category Transpiler */
export interface Transpiler {
    transpile(input: string, options: TranspileOptions): TranspileOutput;
}
/** @category Transpiler */
export interface TranspileOptions {
    fileName: string;
}
/** @category Transpiler */
export interface TranspileOutput {
    outputText: string;
    diagnostics?: ts.Diagnostic[];
    sourceMapText?: string;
}
3
node_modules/ts-node/dist/transpilers/types.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=types.js.map
1
node_modules/ts-node/dist/transpilers/types.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../src/transpilers/types.ts"],"names":[],"mappings":"","sourcesContent":["import type * as ts from 'typescript';\nimport type { NodeModuleEmitKind, Service } from '../index';\nimport type { ProjectLocalResolveHelper } from '../util';\n\n/**\n * Third-party transpilers are implemented as a CommonJS module with a\n * named export \"create\"\n *\n * @category Transpiler\n */\nexport interface TranspilerModule {\n create: TranspilerFactory;\n}\n/**\n * Called by ts-node to create a custom transpiler.\n *\n * @category Transpiler\n */\nexport type TranspilerFactory = (\n options: CreateTranspilerOptions\n) => Transpiler;\n/** @category Transpiler */\nexport interface CreateTranspilerOptions {\n // TODO this is confusing because its only a partial Service. Rename?\n // Careful: must avoid stripInternal breakage by guarding with Extract<>\n service: Pick<\n Service,\n Extract<'config' | 'options' | 'projectLocalResolveHelper', keyof Service>\n >;\n /**\n * If `\"transpiler\"` option is declared in an \"extends\" tsconfig, this path might be different than\n * the `projectLocalResolveHelper`\n *\n * @internal\n */\n transpilerConfigLocalResolveHelper: ProjectLocalResolveHelper;\n /**\n * When using `module: nodenext` or `module: node12`, there are two possible styles of emit:\n * - CommonJS with dynamic imports preserved (not transformed into `require()` calls)\n * - ECMAScript modules with `import foo = require()` transformed into `require = createRequire(); const foo = require()`\n * @internal\n */\n nodeModuleEmitKind?: NodeModuleEmitKind;\n}\n/** @category Transpiler */\nexport interface Transpiler {\n // TODOs\n // Create spec for returning diagnostics? Currently transpilers are allowed to\n // throw an error but that's it.\n transpile(input: string, options: TranspileOptions): TranspileOutput;\n}\n/** @category Transpiler */\nexport interface TranspileOptions {\n fileName: string;\n}\n/** @category Transpiler */\nexport interface TranspileOutput {\n outputText: string;\n diagnostics?: ts.Diagnostic[];\n sourceMapText?: string;\n}\n"]}
63
node_modules/ts-node/dist/ts-compiler-types.d.ts
generated
vendored
Normal file
@ -0,0 +1,63 @@
import type * as _ts from 'typescript';
/**
 * Common TypeScript interfaces between versions. We endeavour to write ts-node's own code against these types instead
 * of against `import "typescript"`, though we are not yet doing this consistently.
 *
 * Sometimes typescript@next adds an API we need to use. But we build ts-node against typescript@latest.
 * In these cases, we must declare that API explicitly here. Our declarations include the newer typescript@next APIs.
 * Importantly, these re-declarations are *not* TypeScript internals. They are public APIs that only exist in
 * pre-release versions of typescript.
 */
export interface TSCommon {
    version: typeof _ts.version;
    sys: typeof _ts.sys;
    ScriptSnapshot: typeof _ts.ScriptSnapshot;
    displayPartsToString: typeof _ts.displayPartsToString;
    createLanguageService: typeof _ts.createLanguageService;
    getDefaultLibFilePath: typeof _ts.getDefaultLibFilePath;
    getPreEmitDiagnostics: typeof _ts.getPreEmitDiagnostics;
    flattenDiagnosticMessageText: typeof _ts.flattenDiagnosticMessageText;
    transpileModule: typeof _ts.transpileModule;
    ModuleKind: TSCommon.ModuleKindEnum;
    ScriptTarget: typeof _ts.ScriptTarget;
    findConfigFile: typeof _ts.findConfigFile;
    readConfigFile: typeof _ts.readConfigFile;
    parseJsonConfigFileContent: typeof _ts.parseJsonConfigFileContent;
    formatDiagnostics: typeof _ts.formatDiagnostics;
    formatDiagnosticsWithColorAndContext: typeof _ts.formatDiagnosticsWithColorAndContext;
    createDocumentRegistry: typeof _ts.createDocumentRegistry;
    JsxEmit: typeof _ts.JsxEmit;
    createModuleResolutionCache: typeof _ts.createModuleResolutionCache;
    resolveModuleName: typeof _ts.resolveModuleName;
    resolveModuleNameFromCache: typeof _ts.resolveModuleNameFromCache;
    resolveTypeReferenceDirective: typeof _ts.resolveTypeReferenceDirective;
    createIncrementalCompilerHost: typeof _ts.createIncrementalCompilerHost;
    createSourceFile: typeof _ts.createSourceFile;
    getDefaultLibFileName: typeof _ts.getDefaultLibFileName;
    createIncrementalProgram: typeof _ts.createIncrementalProgram;
    createEmitAndSemanticDiagnosticsBuilderProgram: typeof _ts.createEmitAndSemanticDiagnosticsBuilderProgram;
    Extension: typeof _ts.Extension;
    ModuleResolutionKind: typeof _ts.ModuleResolutionKind;
}
export declare namespace TSCommon {
    interface LanguageServiceHost extends _ts.LanguageServiceHost {
    }
    type ModuleResolutionHost = _ts.ModuleResolutionHost;
    type ParsedCommandLine = _ts.ParsedCommandLine;
    type ResolvedModule = _ts.ResolvedModule;
    type ResolvedTypeReferenceDirective = _ts.ResolvedTypeReferenceDirective;
    type CompilerOptions = _ts.CompilerOptions;
    type ResolvedProjectReference = _ts.ResolvedProjectReference;
    type ResolvedModuleWithFailedLookupLocations = _ts.ResolvedModuleWithFailedLookupLocations;
    type FileReference = _ts.FileReference;
    type SourceFile = _ts.SourceFile;
    type ModuleKindEnum = typeof _ts.ModuleKind & {
        Node16: typeof _ts.ModuleKind extends {
            Node16: any;
        } ? typeof _ts.ModuleKind['Node16'] : 100;
    };
    namespace ModuleKind {
        type CommonJS = _ts.ModuleKind.CommonJS;
        type ESNext = _ts.ModuleKind.ESNext;
    }
}
3
node_modules/ts-node/dist/ts-compiler-types.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=ts-compiler-types.js.map
1
node_modules/ts-node/dist/ts-compiler-types.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/ts-node/dist/ts-internals.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
export {};
299
node_modules/ts-node/dist/ts-internals.js
generated
vendored
Normal file
@ -0,0 +1,299 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getPatternFromSpec = exports.createTsInternals = void 0;
const path_1 = require("path");
const util_1 = require("./util");
/** @internal */
exports.createTsInternals = (0, util_1.cachedLookup)(createTsInternalsUncached);
/**
 * Given a reference to the TS compiler, return some TS internal functions that we
 * could not or did not want to grab off the `ts` object.
 * These have been copy-pasted from TS's source and tweaked as necessary.
 *
 * NOTE: This factory returns *only* functions which need a reference to the TS
 * compiler. Other functions do not need a reference to the TS compiler so are
 * exported directly from this file.
 */
function createTsInternalsUncached(_ts) {
    const ts = _ts;
    /**
     * Copied from:
     * https://github.com/microsoft/TypeScript/blob/v4.3.2/src/compiler/commandLineParser.ts#L2821-L2846
     */
    function getExtendsConfigPath(extendedConfig, host, basePath, errors, createDiagnostic) {
        extendedConfig = (0, util_1.normalizeSlashes)(extendedConfig);
        if (isRootedDiskPath(extendedConfig) ||
            startsWith(extendedConfig, './') ||
            startsWith(extendedConfig, '../')) {
            let extendedConfigPath = getNormalizedAbsolutePath(extendedConfig, basePath);
            if (!host.fileExists(extendedConfigPath) &&
                !endsWith(extendedConfigPath, ts.Extension.Json)) {
                extendedConfigPath = `${extendedConfigPath}.json`;
                if (!host.fileExists(extendedConfigPath)) {
                    errors.push(createDiagnostic(ts.Diagnostics.File_0_not_found, extendedConfig));
                    return undefined;
                }
            }
            return extendedConfigPath;
        }
        // If the path isn't a rooted or relative path, resolve like a module
        const resolved = ts.nodeModuleNameResolver(extendedConfig, combinePaths(basePath, 'tsconfig.json'), { moduleResolution: ts.ModuleResolutionKind.NodeJs }, host,
        /*cache*/ undefined,
        /*projectRefs*/ undefined,
        /*lookupConfig*/ true);
        if (resolved.resolvedModule) {
            return resolved.resolvedModule.resolvedFileName;
        }
        errors.push(createDiagnostic(ts.Diagnostics.File_0_not_found, extendedConfig));
        return undefined;
    }
    return { getExtendsConfigPath };
}
// These functions have alternative implementation to avoid copying too much from TS
function isRootedDiskPath(path) {
    return (0, path_1.isAbsolute)(path);
}
function combinePaths(path, ...paths) {
    return (0, util_1.normalizeSlashes)((0, path_1.resolve)(path, ...paths.filter((path) => path)));
}
function getNormalizedAbsolutePath(fileName, currentDirectory) {
    return (0, util_1.normalizeSlashes)(currentDirectory != null
        ? (0, path_1.resolve)(currentDirectory, fileName)
        : (0, path_1.resolve)(fileName));
}
function startsWith(str, prefix) {
    return str.lastIndexOf(prefix, 0) === 0;
}
function endsWith(str, suffix) {
    const expectedPos = str.length - suffix.length;
    return expectedPos >= 0 && str.indexOf(suffix, expectedPos) === expectedPos;
}
// Reserved characters, forces escaping of any non-word (or digit), non-whitespace character.
// It may be inefficient (we could just match (/[-[\]{}()*+?.,\\^$|#\s]/g), but this is future
// proof.
const reservedCharacterPattern = /[^\w\s\/]/g;
/**
 * @internal
 * See also: getRegularExpressionForWildcard, which seems to do almost the same thing
 */
function getPatternFromSpec(spec, basePath) {
    const pattern = spec && getSubPatternFromSpec(spec, basePath, excludeMatcher);
    return pattern && `^(${pattern})${'($|/)'}`;
}
exports.getPatternFromSpec = getPatternFromSpec;
function getSubPatternFromSpec(spec, basePath, { singleAsteriskRegexFragment, doubleAsteriskRegexFragment, replaceWildcardCharacter, }) {
    let subpattern = '';
    let hasWrittenComponent = false;
    const components = getNormalizedPathComponents(spec, basePath);
    const lastComponent = last(components);
    // getNormalizedPathComponents includes the separator for the root component.
    // We need to remove to create our regex correctly.
    components[0] = removeTrailingDirectorySeparator(components[0]);
    if (isImplicitGlob(lastComponent)) {
        components.push('**', '*');
    }
    let optionalCount = 0;
    for (let component of components) {
        if (component === '**') {
            subpattern += doubleAsteriskRegexFragment;
        }
        else {
            if (hasWrittenComponent) {
                subpattern += directorySeparator;
            }
            subpattern += component.replace(reservedCharacterPattern, replaceWildcardCharacter);
        }
        hasWrittenComponent = true;
    }
    while (optionalCount > 0) {
        subpattern += ')?';
        optionalCount--;
    }
    return subpattern;
}
const directoriesMatcher = {
    singleAsteriskRegexFragment: '[^/]*',
    /**
     * Regex for the ** wildcard. Matches any num of subdirectories. When used for including
     * files or directories, does not match subdirectories that start with a . character
     */
    doubleAsteriskRegexFragment: `(/[^/.][^/]*)*?`,
    replaceWildcardCharacter: (match) => replaceWildcardCharacter(match, directoriesMatcher.singleAsteriskRegexFragment),
};
const excludeMatcher = {
    singleAsteriskRegexFragment: '[^/]*',
    doubleAsteriskRegexFragment: '(/.+?)?',
    replaceWildcardCharacter: (match) => replaceWildcardCharacter(match, excludeMatcher.singleAsteriskRegexFragment),
};
function getNormalizedPathComponents(path, currentDirectory) {
    return reducePathComponents(getPathComponents(path, currentDirectory));
}
function getPathComponents(path, currentDirectory = '') {
    path = combinePaths(currentDirectory, path);
    return pathComponents(path, getRootLength(path));
}
function reducePathComponents(components) {
    if (!some(components))
        return [];
    const reduced = [components[0]];
    for (let i = 1; i < components.length; i++) {
        const component = components[i];
        if (!component)
            continue;
        if (component === '.')
            continue;
        if (component === '..') {
            if (reduced.length > 1) {
                if (reduced[reduced.length - 1] !== '..') {
                    reduced.pop();
                    continue;
                }
            }
            else if (reduced[0])
                continue;
        }
        reduced.push(component);
    }
    return reduced;
}
function getRootLength(path) {
    const rootLength = getEncodedRootLength(path);
    return rootLength < 0 ? ~rootLength : rootLength;
}
function getEncodedRootLength(path) {
    if (!path)
        return 0;
    const ch0 = path.charCodeAt(0);
    // POSIX or UNC
    if (ch0 === 47 /* CharacterCodes.slash */ || ch0 === 92 /* CharacterCodes.backslash */) {
        if (path.charCodeAt(1) !== ch0)
            return 1; // POSIX: "/" (or non-normalized "\")
        const p1 = path.indexOf(ch0 === 47 /* CharacterCodes.slash */ ? directorySeparator : altDirectorySeparator, 2);
        if (p1 < 0)
            return path.length; // UNC: "//server" or "\\server"
        return p1 + 1; // UNC: "//server/" or "\\server\"
    }
    // DOS
    if (isVolumeCharacter(ch0) && path.charCodeAt(1) === 58 /* CharacterCodes.colon */) {
        const ch2 = path.charCodeAt(2);
        if (ch2 === 47 /* CharacterCodes.slash */ || ch2 === 92 /* CharacterCodes.backslash */)
            return 3; // DOS: "c:/" or "c:\"
        if (path.length === 2)
            return 2; // DOS: "c:" (but not "c:d")
    }
    // URL
    const schemeEnd = path.indexOf(urlSchemeSeparator);
    if (schemeEnd !== -1) {
        const authorityStart = schemeEnd + urlSchemeSeparator.length;
        const authorityEnd = path.indexOf(directorySeparator, authorityStart);
        if (authorityEnd !== -1) {
            // URL: "file:///", "file://server/", "file://server/path"
            // For local "file" URLs, include the leading DOS volume (if present).
            // Per https://www.ietf.org/rfc/rfc1738.txt, a host of "" or "localhost" is a
            // special case interpreted as "the machine from which the URL is being interpreted".
            const scheme = path.slice(0, schemeEnd);
            const authority = path.slice(authorityStart, authorityEnd);
            if (scheme === 'file' &&
                (authority === '' || authority === 'localhost') &&
                isVolumeCharacter(path.charCodeAt(authorityEnd + 1))) {
                const volumeSeparatorEnd = getFileUrlVolumeSeparatorEnd(path, authorityEnd + 2);
                if (volumeSeparatorEnd !== -1) {
                    if (path.charCodeAt(volumeSeparatorEnd) === 47 /* CharacterCodes.slash */) {
                        // URL: "file:///c:/", "file://localhost/c:/", "file:///c%3a/", "file://localhost/c%3a/"
                        return ~(volumeSeparatorEnd + 1);
                    }
                    if (volumeSeparatorEnd === path.length) {
                        // URL: "file:///c:", "file://localhost/c:", "file:///c$3a", "file://localhost/c%3a"
                        // but not "file:///c:d" or "file:///c%3ad"
                        return ~volumeSeparatorEnd;
                    }
                }
            }
            return ~(authorityEnd + 1); // URL: "file://server/", "http://server/"
        }
        return ~path.length; // URL: "file://server", "http://server"
    }
    // relative
    return 0;
}
function ensureTrailingDirectorySeparator(path) {
    if (!hasTrailingDirectorySeparator(path)) {
        return path + directorySeparator;
    }
    return path;
}
function hasTrailingDirectorySeparator(path) {
    return (path.length > 0 && isAnyDirectorySeparator(path.charCodeAt(path.length - 1)));
}
function isAnyDirectorySeparator(charCode) {
    return (charCode === 47 /* CharacterCodes.slash */ || charCode === 92 /* CharacterCodes.backslash */);
}
function removeTrailingDirectorySeparator(path) {
    if (hasTrailingDirectorySeparator(path)) {
        return path.substr(0, path.length - 1);
    }
    return path;
}
const directorySeparator = '/';
const altDirectorySeparator = '\\';
const urlSchemeSeparator = '://';
function isVolumeCharacter(charCode) {
    return ((charCode >= 97 /* CharacterCodes.a */ && charCode <= 122 /* CharacterCodes.z */) ||
        (charCode >= 65 /* CharacterCodes.A */ && charCode <= 90 /* CharacterCodes.Z */));
}
function getFileUrlVolumeSeparatorEnd(url, start) {
    const ch0 = url.charCodeAt(start);
    if (ch0 === 58 /* CharacterCodes.colon */)
        return start + 1;
    if (ch0 === 37 /* CharacterCodes.percent */ &&
        url.charCodeAt(start + 1) === 51 /* CharacterCodes._3 */) {
        const ch2 = url.charCodeAt(start + 2);
        if (ch2 === 97 /* CharacterCodes.a */ || ch2 === 65 /* CharacterCodes.A */)
            return start + 3;
    }
    return -1;
}
function some(array, predicate) {
    if (array) {
        if (predicate) {
            for (const v of array) {
                if (predicate(v)) {
                    return true;
                }
            }
        }
        else {
            return array.length > 0;
        }
    }
    return false;
}
function pathComponents(path, rootLength) {
    const root = path.substring(0, rootLength);
    const rest = path.substring(rootLength).split(directorySeparator);
    if (rest.length && !lastOrUndefined(rest))
        rest.pop();
    return [root, ...rest];
}
function lastOrUndefined(array) {
    return array.length === 0 ? undefined : array[array.length - 1];
}
function last(array) {
    // Debug.assert(array.length !== 0);
    return array[array.length - 1];
}
function replaceWildcardCharacter(match, singleAsteriskRegexFragment) {
    return match === '*'
        ? singleAsteriskRegexFragment
        : match === '?'
            ? '[^/]'
            : '\\' + match;
}
/**
 * An "includes" path "foo" is implicitly a glob "foo/** /*" (without the space) if its last component has no extension,
 * and does not contain any glob characters itself.
 */
function isImplicitGlob(lastPathComponent) {
    return !/[.*?]/.test(lastPathComponent);
}
//# sourceMappingURL=ts-internals.js.map
1
node_modules/ts-node/dist/ts-internals.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
1
node_modules/ts-node/dist/ts-transpile-module.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
export {};
100
node_modules/ts-node/dist/ts-transpile-module.js
generated
vendored
Normal file
@ -0,0 +1,100 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.createTsTranspileModule = void 0;
/** @internal */
function createTsTranspileModule(ts, transpileOptions) {
    const { createProgram, createSourceFile, getDefaultCompilerOptions, getImpliedNodeFormatForFile, fixupCompilerOptions, transpileOptionValueCompilerOptions, getNewLineCharacter, fileExtensionIs, normalizePath, Debug, toPath, getSetExternalModuleIndicator, getEntries, addRange, hasProperty, getEmitScriptTarget, getDirectoryPath, } = ts;
    const compilerOptionsDiagnostics = [];
    const options = transpileOptions.compilerOptions
        ? fixupCompilerOptions(transpileOptions.compilerOptions, compilerOptionsDiagnostics)
        : {};
    // mix in default options
    const defaultOptions = getDefaultCompilerOptions();
    for (const key in defaultOptions) {
        if (hasProperty(defaultOptions, key) && options[key] === undefined) {
            options[key] = defaultOptions[key];
        }
    }
    for (const option of transpileOptionValueCompilerOptions) {
        options[option.name] = option.transpileOptionValue;
    }
    // transpileModule does not write anything to disk so there is no need to verify that there are no conflicts between input and output paths.
    options.suppressOutputPathCheck = true;
    // Filename can be non-ts file.
    options.allowNonTsExtensions = true;
    const newLine = getNewLineCharacter(options);
    // Create a compilerHost object to allow the compiler to read and write files
    const compilerHost = {
        getSourceFile: (fileName) => fileName === normalizePath(inputFileName) ? sourceFile : undefined,
        writeFile: (name, text) => {
            if (fileExtensionIs(name, '.map')) {
                Debug.assertEqual(sourceMapText, undefined, 'Unexpected multiple source map outputs, file:', name);
                sourceMapText = text;
            }
            else {
                Debug.assertEqual(outputText, undefined, 'Unexpected multiple outputs, file:', name);
                outputText = text;
            }
        },
        getDefaultLibFileName: () => 'lib.d.ts',
        useCaseSensitiveFileNames: () => true,
        getCanonicalFileName: (fileName) => fileName,
        getCurrentDirectory: () => '',
        getNewLine: () => newLine,
        fileExists: (fileName) => fileName === inputFileName || fileName === packageJsonFileName,
        readFile: (fileName) => fileName === packageJsonFileName ? `{"type": "${_packageJsonType}"}` : '',
        directoryExists: () => true,
        getDirectories: () => [],
    };
    let inputFileName;
    let packageJsonFileName;
    let _packageJsonType;
    let sourceFile;
    let outputText;
    let sourceMapText;
    return transpileModule;
    function transpileModule(input, transpileOptions2, packageJsonType = 'commonjs') {
        // if jsx is specified then treat file as .tsx
        inputFileName =
            transpileOptions2.fileName ||
                (transpileOptions.compilerOptions && transpileOptions.compilerOptions.jsx
                    ? 'module.tsx'
                    : 'module.ts');
        packageJsonFileName = getDirectoryPath(inputFileName) + '/package.json';
        _packageJsonType = packageJsonType;
        sourceFile = createSourceFile(inputFileName, input, {
            languageVersion: getEmitScriptTarget(options),
            impliedNodeFormat: getImpliedNodeFormatForFile(toPath(inputFileName, '', compilerHost.getCanonicalFileName),
            /*cache*/ undefined, compilerHost, options),
            setExternalModuleIndicator: getSetExternalModuleIndicator(options),
        });
        if (transpileOptions2.moduleName) {
            sourceFile.moduleName = transpileOptions2.moduleName;
        }
        if (transpileOptions2.renamedDependencies) {
            sourceFile.renamedDependencies = new Map(getEntries(transpileOptions2.renamedDependencies));
        }
        // Output
        outputText = undefined;
        sourceMapText = undefined;
        const program = createProgram([inputFileName], options, compilerHost);
        const diagnostics = compilerOptionsDiagnostics.slice();
        if (transpileOptions.reportDiagnostics) {
            addRange(
            /*to*/ diagnostics,
            /*from*/ program.getSyntacticDiagnostics(sourceFile));
            addRange(/*to*/ diagnostics, /*from*/ program.getOptionsDiagnostics());
        }
        // Emit
        program.emit(
        /*targetSourceFile*/ undefined,
        /*writeFile*/ undefined,
        /*cancellationToken*/ undefined,
        /*emitOnlyDtsFiles*/ undefined, transpileOptions.transformers);
        if (outputText === undefined)
            return Debug.fail('Output generation failed');
        return { outputText, diagnostics, sourceMapText };
    }
}
exports.createTsTranspileModule = createTsTranspileModule;
//# sourceMappingURL=ts-transpile-module.js.map
1
node_modules/ts-node/dist/ts-transpile-module.js.map
generated
vendored
Normal file
File diff suppressed because one or more lines are too long
13
node_modules/ts-node/dist/tsconfig-schema.d.ts
generated
vendored
Normal file
@ -0,0 +1,13 @@
import type { TsConfigOptions } from './index';
/**
 * tsconfig schema which includes "ts-node" options.
 * @allOf [{"$ref": "https://schemastore.azurewebsites.net/schemas/json/tsconfig.json"}]
 */
export interface TsConfigSchema {
    /**
     * ts-node options. See also: https://typestrong.org/ts-node/docs/configuration
     *
     * ts-node offers TypeScript execution and REPL for node.js, with source map support.
     */
    'ts-node': TsConfigOptions;
}
3
node_modules/ts-node/dist/tsconfig-schema.js
generated
vendored
Normal file
@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=tsconfig-schema.js.map
1
node_modules/ts-node/dist/tsconfig-schema.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"tsconfig-schema.js","sourceRoot":"","sources":["../src/tsconfig-schema.ts"],"names":[],"mappings":"","sourcesContent":["import type { TsConfigOptions } from './index';\n\n/*\n * This interface exists solely for generating a JSON schema for tsconfig.json.\n * We do *not* extend the compiler's tsconfig interface. Instead we handle that\n * on a schema level, via \"allOf\", so we pull in the same schema that VSCode\n * already uses.\n */\n/**\n * tsconfig schema which includes \"ts-node\" options.\n * @allOf [{\"$ref\": \"https://schemastore.azurewebsites.net/schemas/json/tsconfig.json\"}]\n */\nexport interface TsConfigSchema {\n /**\n * ts-node options. See also: https://typestrong.org/ts-node/docs/configuration\n *\n * ts-node offers TypeScript execution and REPL for node.js, with source map support.\n */\n 'ts-node': TsConfigOptions;\n}\n"]}
1
node_modules/ts-node/dist/tsconfigs.d.ts
generated
vendored
Normal file
@ -0,0 +1 @@
export {};
36
node_modules/ts-node/dist/tsconfigs.js
generated
vendored
Normal file
@ -0,0 +1,36 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getDefaultTsconfigJsonForNodeVersion = void 0;
const nodeMajor = parseInt(process.versions.node.split('.')[0], 10);
/**
 * return parsed JSON of the bundled @tsconfig/bases config appropriate for the
 * running version of nodejs
 * @internal
 */
function getDefaultTsconfigJsonForNodeVersion(ts) {
    const tsInternal = ts;
    if (nodeMajor >= 16) {
        const config = require('@tsconfig/node16/tsconfig.json');
        if (configCompatible(config))
            return config;
    }
    if (nodeMajor >= 14) {
        const config = require('@tsconfig/node14/tsconfig.json');
        if (configCompatible(config))
            return config;
    }
    if (nodeMajor >= 12) {
        const config = require('@tsconfig/node12/tsconfig.json');
        if (configCompatible(config))
            return config;
    }
    return require('@tsconfig/node10/tsconfig.json');
    // Verify that tsconfig target and lib options are compatible with TypeScript compiler
    function configCompatible(config) {
        return (typeof ts.ScriptTarget[config.compilerOptions.target.toUpperCase()] === 'number' &&
            tsInternal.libs &&
            config.compilerOptions.lib.every((lib) => tsInternal.libs.includes(lib)));
    }
}
exports.getDefaultTsconfigJsonForNodeVersion = getDefaultTsconfigJsonForNodeVersion;
//# sourceMappingURL=tsconfigs.js.map
1
node_modules/ts-node/dist/tsconfigs.js.map
generated
vendored
Normal file
@ -0,0 +1 @@
{"version":3,"file":"tsconfigs.js","sourceRoot":"","sources":["../src/tsconfigs.ts"],"names":[],"mappings":";;;AAEA,MAAM,SAAS,GAAG,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;AACpE;;;;GAIG;AACH,SAAgB,oCAAoC,CAAC,EAAY;IAC/D,MAAM,UAAU,GAAG,EAAuB,CAAC;IAC3C,IAAI,SAAS,IAAI,EAAE,EAAE;QACnB,MAAM,MAAM,GAAG,OAAO,CAAC,gCAAgC,CAAC,CAAC;QACzD,IAAI,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,MAAM,CAAC;KAC7C;IACD,IAAI,SAAS,IAAI,EAAE,EAAE;QACnB,MAAM,MAAM,GAAG,OAAO,CAAC,gCAAgC,CAAC,CAAC;QACzD,IAAI,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,MAAM,CAAC;KAC7C;IACD,IAAI,SAAS,IAAI,EAAE,EAAE;QACnB,MAAM,MAAM,GAAG,OAAO,CAAC,gCAAgC,CAAC,CAAC;QACzD,IAAI,gBAAgB,CAAC,MAAM,CAAC;YAAE,OAAO,MAAM,CAAC;KAC7C;IACD,OAAO,OAAO,CAAC,gCAAgC,CAAC,CAAC;IAEjD,sFAAsF;IACtF,SAAS,gBAAgB,CAAC,MAKzB;QACC,OAAO,CACL,OAAQ,EAAE,CAAC,YAAoB,CAC7B,MAAM,CAAC,eAAe,CAAC,MAAM,CAAC,WAAW,EAAE,CAC5C,KAAK,QAAQ;YACd,UAAU,CAAC,IAAI;YACf,MAAM,CAAC,eAAe,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,GAAG,EAAE,EAAE,CAAC,UAAU,CAAC,IAAK,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAC1E,CAAC;IACJ,CAAC;AACH,CAAC;AA/BD,oFA+BC","sourcesContent":["import type { TSCommon, TSInternal } from './ts-compiler-types';\n\nconst nodeMajor = parseInt(process.versions.node.split('.')[0], 10);\n/**\n * return parsed JSON of the bundled @tsconfig/bases config appropriate for the\n * running version of nodejs\n * @internal\n */\nexport function getDefaultTsconfigJsonForNodeVersion(ts: TSCommon): any {\n const tsInternal = ts as any as TSInternal;\n if (nodeMajor >= 16) {\n const config = require('@tsconfig/node16/tsconfig.json');\n if (configCompatible(config)) return config;\n }\n if (nodeMajor >= 14) {\n const config = require('@tsconfig/node14/tsconfig.json');\n if (configCompatible(config)) return config;\n }\n if (nodeMajor >= 12) {\n const config = require('@tsconfig/node12/tsconfig.json');\n if (configCompatible(config)) return config;\n }\n return require('@tsconfig/node10/tsconfig.json');\n\n // Verify that tsconfig target and lib options are compatible with TypeScript compiler\n function configCompatible(config: {\n compilerOptions: {\n lib: string[];\n target: string;\n };\n }) {\n return (\n typeof (ts.ScriptTarget as any)[\n config.compilerOptions.target.toUpperCase()\n ] === 'number' &&\n tsInternal.libs &&\n config.compilerOptions.lib.every((lib) => tsInternal.libs!.includes(lib))\n );\n }\n}\n"]}
4
node_modules/ts-node/dist/util.d.ts
generated
vendored
Normal file
@ -0,0 +1,4 @@
/**
 * Cached fs operation wrapper.
 */
export declare function cachedLookup<T, R>(fn: (arg: T) => R): (arg: T) => R;
Some files were not shown because too many files have changed in this diff