Update
234  scripts/build.ts
@@ -1,218 +1,19 @@
 #!/usr/bin/env tsx
-import assert from 'node:assert';
-import { readFileSync, promises as fs } from 'node:fs';
-import { resolve, extname, relative } from 'node:path';
+import { promises as fs } from 'node:fs';
+import { resolve, relative } from 'node:path';
 import { isBuiltin } from 'node:module';
 import esbuild from 'esbuild';
-import type { Loader, Plugin } from 'esbuild';
-import * as babel from '@babel/core';
+import type { Plugin } from 'esbuild';
 import { memoize } from 'lodash';
 import { gray, green } from 'picocolors';
-import type { types as t, types } from '@babel/core';
 import { dependencies } from '../dist/package.json';
-import { createMacro, type MacroHandler } from 'babel-plugin-macros';
-import * as polyfill from '../src/polyfill';
 import { buildLocalRules } from '../src/build-local-rules';
-import { execSync } from 'node:child_process';
-
-const polyfills = Object.keys(polyfill);
+import { dts } from './dts';
+import { babelPlugin } from './modifier';
 
 const ENV = (process.env.NODE_ENV ??= 'production');
 const PROD = ENV === 'production';
 
-class HandlerMap {
-  map = new Map<string, MacroHandler>();
-
-  set(names: string | string[], handler: MacroHandler) {
-    names = Array.isArray(names) ? names : [names];
-    const macro = createMacro(handler);
-    for (const name of names) {
-      this.map.set(name, macro);
-    }
-    return this;
-  }
-
-  get keys() {
-    return Array.from(this.map.keys());
-  }
-
-  resolvePath = (module: string) => module;
-  require = (module: string) => this.map.get(module);
-  isMacrosName = (module: string) => this.map.has(module);
-}
-
-const map = new HandlerMap()
-  .set(
-    'object.assign',
-    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('assign'))),
-  )
-  .set(
-    ['object-values', 'object.values'],
-    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('values'))),
-  )
-  .set(
-    'object.fromentries',
-    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('fromEntries'))),
-  )
-  .set(
-    'object.entries',
-    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('entries'))),
-  )
-  .set(
-    'hasown',
-    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('hasOwn'))),
-  )
-  .set(
-    'has',
-    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('hasOwn'))),
-  )
-  .set(
-    'array-includes',
-    proto(t => t.identifier('includes')),
-  )
-  .set(
-    'array.prototype.flatmap',
-    proto(t => t.identifier('flatMap')),
-  )
-  .set(
-    'array.prototype.flat',
-    proto(t => t.identifier('flat')),
-  )
-  .set(
-    'array.prototype.findlastindex',
-    proto(t => t.identifier('findLastIndex')),
-  )
-  .set(
-    'array.prototype.tosorted',
-    proto(t => t.identifier('toSorted')),
-  )
-  .set(
-    'array.prototype.toreversed',
-    proto(t => t.identifier('toReversed')),
-  )
-  .set(
-    'array.prototype.findlast',
-    proto(t => t.identifier('findLast')),
-  )
-  .set(
-    'string.prototype.matchall',
-    proto(t => t.identifier('matchAll')),
-  )
-  .set(
-    'string.prototype.includes',
-    proto(t => t.identifier('includes')),
-  )
-  .set(
-    'object.groupby',
-    replace(t =>
-      t.memberExpression(
-        t.callExpression(t.identifier('require'), [t.stringLiteral('lodash')]),
-        t.identifier('groupBy'),
-      ),
-    ),
-  );
-
-// es-iterator-helpers/Iterator.prototype.*
-const polyfillPath = resolve(__dirname, '../src/polyfill.ts');
-const requirePolyfill = (t: typeof types, name: string) =>
-  t.memberExpression(
-    t.callExpression(t.identifier('require'), [t.stringLiteral(polyfillPath)]),
-    t.identifier(name),
-  );
-map.set(
-  `es-iterator-helpers/Iterator.from`,
-  replace(t => requirePolyfill(t, 'from')),
-);
-for (const name of polyfills) {
-  map.set(
-    `es-iterator-helpers/Iterator.prototype.${name}`,
-    replace(t => requirePolyfill(t, name)),
-  );
-}
-
-map.set(
-  'safe-regex-test',
-  replace(t => requirePolyfill(t, 'safeRegexTest')),
-);
-
-function replace(getReplacement: (types: typeof t) => t.Expression): MacroHandler {
-  return ({ references, babel: { types: t } }) => {
-    references.default.forEach(referencePath => {
-      referencePath.replaceWith(getReplacement(t));
-    });
-  };
-}
-
-function proto(getProperty: (types: typeof t) => t.Expression): MacroHandler {
-  return ({ references, babel: { types: t } }) => {
-    references.default.forEach(referencePath => {
-      const { parent, parentPath } = referencePath;
-      assert(t.isCallExpression(parent));
-      const [callee, ...rest] = parent.arguments;
-      parentPath!.replaceWith(
-        t.callExpression(
-          t.memberExpression(callee as t.Expression, getProperty(t)),
-          rest,
-        ),
-      );
-    });
-  };
-}
-
-export const babelPlugin: Plugin = {
-  name: 'babel',
-  setup(build) {
-    const { keys, ...macroOptions } = map;
-
-    build.onLoad({ filter: /\.[jt]sx?$/ }, args => {
-      const { path } = args;
-      if (path.includes('node_modules/')) {
-        return null;
-      }
-
-      let source = readFileSync(path, 'utf-8')
-        .replaceAll("require('object.hasown/polyfill')()", 'Object.hasOwn')
-        .replaceAll("require('object.fromentries/polyfill')()", 'Object.fromEntries')
-        .replaceAll(
-          "Object.keys(require('prop-types'))",
-          JSON.stringify(Object.keys(require('prop-types'))),
-        );
-
-      if (
-        path.includes('packages/eslint-plugin-import/src/rules/') ||
-        path.includes('packages/eslint-plugin-import/config/')
-      ) {
-        source = source.replace('\nmodule.exports = {', '\nexport default {');
-      }
-
-      const isFlow = source.includes('@flow');
-      const loader = extname(path).slice(1) as Loader;
-
-      if (!isFlow && !keys.some(key => source.includes(key))) {
-        return { contents: source, loader };
-      }
-
-      const res = babel.transformSync(source, {
-        filename: path,
-        babelrc: false,
-        configFile: false,
-        parserOpts: {
-          plugins: [isFlow ? 'flow' : 'typescript'],
-        },
-        plugins: [
-          isFlow && '@babel/plugin-transform-flow-strip-types',
-          ['babel-plugin-macros', macroOptions],
-        ].filter(Boolean),
-      })!;
-
-      return {
-        contents: res.code!,
-        loader,
-      };
-    });
-  },
-};
-
 declare global {
   interface Array<T> {
     filter(
@@ -264,14 +65,14 @@ if (process.env.DEBUG) {
   });
 }
 
-async function bundle(
+function bundle(
   entry: string,
   outfile = entry
     .replace('./packages/', './dist/')
     .replace('src/', '')
     .replace('.ts', '.js'),
 ) {
-  await esbuild.build({
+  return esbuild.build({
     entryPoints: [entry],
     outfile,
     bundle: true,
@@ -281,9 +82,7 @@ async function bundle(
     sourcemap: 'linked',
     plugins,
     define: {},
-    alias: {
-      // esm modules
-    },
+    alias: {},
     external: ['find-cache-dir'],
     banner: {
       js: '/* eslint-disable */',
@@ -317,18 +116,11 @@ async function useText(path: string) {
 }
 
 function bundleType(source: string, output: string) {
-  execSync(
-    [
-      'npx',
-      'dts-bundle-generator',
-      JSON.stringify(source),
-      '-o',
-      JSON.stringify(output),
-      '--project',
-      '"./tsconfig.build.json"',
-      '--no-check',
-    ].join(' '),
-  );
+  return dts({
+    source,
+    dist: output,
+    project: './tsconfig.build.json',
+  });
 }
 
 async function main() {
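With these hunks, bundle() hands the esbuild promise back to its caller instead of awaiting it, and bundleType() produces the .d.ts bundle in-process through the new dts() helper rather than shelling out to npx dts-bundle-generator. A hypothetical caller of the two helpers above (not part of the commit; the entry and output paths are placeholders):

// Hypothetical sketch: drive the refactored helpers for one package.
async function buildOne(entry: string, typesOut: string) {
  await bundle(entry);         // JS bundle via esbuild (promise now returned by bundle)
  bundleType(entry, typesOut); // .d.ts bundle via scripts/dts.ts
}

void buildOne('./packages/eslint-plugin-import/src/index.ts', './dist/index.d.ts');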
36  scripts/dts.ts  (new file)
@@ -0,0 +1,36 @@
#!/usr/bin/env node
import * as ts from 'typescript';
import {
  generateDtsBundle,
  type EntryPointConfig,
} from 'dts-bundle-generator/dist/bundle-generator';

export function dts({
  source,
  dist,
  project,
}: {
  source: string;
  dist: string;
  project: string;
}): void {
  const entry: EntryPointConfig = {
    filePath: source,
    failOnClass: false,
    output: {
      inlineDeclareExternals: false,
      inlineDeclareGlobals: false,
      sortNodes: false,
      noBanner: false,
      respectPreserveConstEnum: false,
      exportReferencedTypes: true,
    },
  };

  const generatedDts = generateDtsBundle([entry], {
    preferredConfigPath: project,
    followSymlinks: true,
  });

  ts.sys.writeFile(dist, generatedDts[0]);
}
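scripts/dts.ts wraps the programmatic API of dts-bundle-generator: generateDtsBundle() returns the bundled declaration text for one entry point and ts.sys.writeFile() writes it to disk. A minimal usage sketch matching the exported signature (the source/dist paths are illustrative, not taken from the commit; './tsconfig.build.json' mirrors the value used in scripts/build.ts):

import { dts } from './dts';

// Bundle the declarations for one entry point into a single .d.ts file.
dts({
  source: './packages/eslint-plugin-import/src/index.ts',
  dist: './dist/index.d.ts',
  project: './tsconfig.build.json',
});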
214  scripts/modifier.ts  (new file)
@@ -0,0 +1,214 @@
#!/usr/bin/env tsx
import assert from 'node:assert';
import { readFileSync, promises as fs } from 'node:fs';
import { resolve, extname, relative } from 'node:path';
import { isBuiltin } from 'node:module';
import esbuild from 'esbuild';
import type { Loader, Plugin } from 'esbuild';
import * as babel from '@babel/core';
import { memoize } from 'lodash';
import { gray, green } from 'picocolors';
import type { types as t, types } from '@babel/core';
import { dependencies } from '../dist/package.json';
import { createMacro, type MacroHandler } from 'babel-plugin-macros';
import * as polyfill from '../src/polyfill';
import { buildLocalRules } from '../src/build-local-rules';
import { dts } from './dts';

const polyfills = Object.keys(polyfill);

const ENV = (process.env.NODE_ENV ??= 'production');
const PROD = ENV === 'production';

class HandlerMap {
  map = new Map<string, MacroHandler>();

  set(names: string | string[], handler: MacroHandler) {
    names = Array.isArray(names) ? names : [names];
    const macro = createMacro(handler);
    for (const name of names) {
      this.map.set(name, macro);
    }
    return this;
  }

  get keys() {
    return Array.from(this.map.keys());
  }

  resolvePath = (module: string) => module;
  require = (module: string) => this.map.get(module);
  isMacrosName = (module: string) => this.map.has(module);
}

const map = new HandlerMap()
  .set(
    'object.assign',
    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('assign'))),
  )
  .set(
    ['object-values', 'object.values'],
    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('values'))),
  )
  .set(
    'object.fromentries',
    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('fromEntries'))),
  )
  .set(
    'object.entries',
    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('entries'))),
  )
  .set(
    'hasown',
    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('hasOwn'))),
  )
  .set(
    'has',
    replace(t => t.memberExpression(t.identifier('Object'), t.identifier('hasOwn'))),
  )
  .set(
    'array-includes',
    proto(t => t.identifier('includes')),
  )
  .set(
    'array.prototype.flatmap',
    proto(t => t.identifier('flatMap')),
  )
  .set(
    'array.prototype.flat',
    proto(t => t.identifier('flat')),
  )
  .set(
    'array.prototype.findlastindex',
    proto(t => t.identifier('findLastIndex')),
  )
  .set(
    'array.prototype.tosorted',
    proto(t => t.identifier('toSorted')),
  )
  .set(
    'array.prototype.toreversed',
    proto(t => t.identifier('toReversed')),
  )
  .set(
    'array.prototype.findlast',
    proto(t => t.identifier('findLast')),
  )
  .set(
    'string.prototype.matchall',
    proto(t => t.identifier('matchAll')),
  )
  .set(
    'string.prototype.includes',
    proto(t => t.identifier('includes')),
  )
  .set(
    'object.groupby',
    replace(t =>
      t.memberExpression(
        t.callExpression(t.identifier('require'), [t.stringLiteral('lodash')]),
        t.identifier('groupBy'),
      ),
    ),
  );

// es-iterator-helpers/Iterator.prototype.*
const polyfillPath = resolve(__dirname, '../src/polyfill.ts');
const requirePolyfill = (t: typeof types, name: string) =>
  t.memberExpression(
    t.callExpression(t.identifier('require'), [t.stringLiteral(polyfillPath)]),
    t.identifier(name),
  );
map.set(
  `es-iterator-helpers/Iterator.from`,
  replace(t => requirePolyfill(t, 'from')),
);
for (const name of polyfills) {
  map.set(
    `es-iterator-helpers/Iterator.prototype.${name}`,
    replace(t => requirePolyfill(t, name)),
  );
}

map.set(
  'safe-regex-test',
  replace(t => requirePolyfill(t, 'safeRegexTest')),
);

function replace(getReplacement: (types: typeof t) => t.Expression): MacroHandler {
  return ({ references, babel: { types: t } }) => {
    references.default.forEach(referencePath => {
      referencePath.replaceWith(getReplacement(t));
    });
  };
}

function proto(getProperty: (types: typeof t) => t.Expression): MacroHandler {
  return ({ references, babel: { types: t } }) => {
    references.default.forEach(referencePath => {
      const { parent, parentPath } = referencePath;
      assert(t.isCallExpression(parent));
      const [callee, ...rest] = parent.arguments;
      parentPath!.replaceWith(
        t.callExpression(
          t.memberExpression(callee as t.Expression, getProperty(t)),
          rest,
        ),
      );
    });
  };
}

export const babelPlugin: Plugin = {
  name: 'babel',
  setup(build) {
    const { keys, ...macroOptions } = map;

    build.onLoad({ filter: /\.[jt]sx?$/ }, args => {
      const { path } = args;
      if (path.includes('node_modules/')) {
        return null;
      }

      let source = readFileSync(path, 'utf-8')
        .replaceAll("require('object.hasown/polyfill')()", 'Object.hasOwn')
        .replaceAll("require('object.fromentries/polyfill')()", 'Object.fromEntries')
        .replaceAll(
          "Object.keys(require('prop-types'))",
          JSON.stringify(Object.keys(require('prop-types'))),
        );

      if (
        path.includes('packages/eslint-plugin-import/src/rules/') ||
        path.includes('packages/eslint-plugin-import/config/')
      ) {
        source = source.replace('\nmodule.exports = {', '\nexport default {');
      }

      const isFlow = source.includes('@flow');
      const loader = extname(path).slice(1) as Loader;

      if (!isFlow && !keys.some(key => source.includes(key))) {
        return { contents: source, loader };
      }

      const res = babel.transformSync(source, {
        filename: path,
        babelrc: false,
        configFile: false,
        parserOpts: {
          plugins: [isFlow ? 'flow' : 'typescript'],
        },
        plugins: [
          isFlow && '@babel/plugin-transform-flow-strip-types',
          ['babel-plugin-macros', macroOptions],
        ].filter(Boolean),
      })!;

      return {
        contents: res.code!,
        loader,
      };
    });
  },
};
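scripts/modifier.ts now owns the macro table and the esbuild babelPlugin that previously lived in build.ts: at load time it runs babel-plugin-macros so that imports of ponyfill packages (object.assign, array-includes, es-iterator-helpers/*, safe-regex-test, …) are rewritten into native built-ins or into the local src/polyfill module. A minimal sketch of wiring the exported plugin into an esbuild call (the entry and output paths are placeholders, not from the commit):

import esbuild from 'esbuild';
import { babelPlugin } from './modifier';

// Placeholder entry/outfile; build.ts derives the real ones from ./packages/*.
esbuild
  .build({
    entryPoints: ['./packages/eslint-plugin-import/src/index.ts'],
    outfile: './dist/index.js',
    bundle: true,
    plugins: [babelPlugin],
  })
  .catch(() => process.exit(1));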