Merge pull request #1977 from ing-bank/feat/providenceNext
providence improvements, alignment with latest browsers and deps, more
This commit is contained in:
commit 86d3db84fa
172 changed files with 14194 additions and 16348 deletions
17
.changeset/six-geckos-relate.md
Normal file
@@ -0,0 +1,17 @@
---
'providence-analytics': minor
---

Many improvements:

- rewritten from babel to swc
- swc traversal tool with babel
- increased performance
- better windows compatibility

BREAKING:

- package fully written as esm
- entrypoints changed:
  - `@providence-analytics/src/cli` => `@providence-analytics/cli.js`
  - `@providence-analytics/analyzers` => `@providence-analytics/analyzers.js`
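To make the breaking entrypoint change concrete, here is a minimal consumer-side sketch (hypothetical code, not part of this diff; it relies on the `exports` map added in `packages-node/providence-analytics/package.json` later in this commit):

```js
// Hypothetical consumer illustrating the renamed ESM entrypoints (not part of this diff).
// Old deep imports (CJS era):
//   require('providence-analytics/src/cli');
//   require('providence-analytics/analyzers');
// New entrypoints are resolved through the package "exports" map; note that importing
// 'providence-analytics/cli.js' executes the CLI bootstrap, so it is only mentioned here.
import * as analyzers from 'providence-analytics/analyzers.js';
import { providence, QueryService, LogService } from 'providence-analytics';

LogService.info(`available analyzer exports: ${Object.keys(analyzers).join(', ')}`);
```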
@@ -19,7 +19,7 @@ module.exports = {
files: [
'**/test-suites/**/*.js',
'**/test/**/*.js',
'**/test-node/**/*.js',
'**/test-node/**/*.{j,mj}s',
'**/demo/**/*.js',
'**/docs/**/*.js',
'**/*.config.js',
@@ -56,6 +56,8 @@ export class CustomCollapsible extends LionCollapsible {
contentNode.style.setProperty('opacity', '1');
contentNode.style.setProperty('padding', '12px 0');
contentNode.style.setProperty('max-height', '0px');
// @ts-ignore
// eslint-disable-next-line no-promise-executor-return
await new Promise(resolve => requestAnimationFrame(() => resolve()));
contentNode.style.setProperty('max-height', expectedHeight);
await this._waitForTransition({ contentNode });

@@ -105,6 +107,8 @@ export class CustomCollapsible extends LionCollapsible {
*/
async __calculateHeight(contentNode) {
contentNode.style.setProperty('max-height', '');
// @ts-ignore
// eslint-disable-next-line no-promise-executor-return
await new Promise(resolve => requestAnimationFrame(() => resolve()));
return this._contentHeight; // Expected height i.e. actual size once collapsed after animation
}
@@ -1,6 +1,5 @@
import { OverlaysManager } from 'overlays';
import { OverlaysManager, OverlaysManager as OverlaysManager2 } from 'overlays';
import { singletonManager } from 'singleton-manager';
import { OverlaysManager as OverlaysManager2 } from './node_modules/page-b/node_modules/overlays/index.js';

let compatibleManager1;
let compatibleManager2;
19182
package-lock.json
generated
File diff suppressed because it is too large
67
package.json
@ -43,70 +43,65 @@
|
|||
"changeset": "^0.2.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "^7.10.1",
|
||||
"@babel/core": "^7.21.3",
|
||||
"@bundled-es-modules/fetch-mock": "^6.5.2",
|
||||
"@changesets/cli": "^2.26.1",
|
||||
"@custom-elements-manifest/analyzer": "^0.5.7",
|
||||
"@open-wc/building-rollup": "^1.2.1",
|
||||
"@open-wc/eslint-config": "^7.0.0",
|
||||
"@open-wc/testing": "^3.1.2",
|
||||
"@open-wc/testing-helpers": "^2.1.2",
|
||||
"@custom-elements-manifest/analyzer": "^0.8.0",
|
||||
"@open-wc/building-rollup": "^1.10.0",
|
||||
"@open-wc/eslint-config": "^10.0.0",
|
||||
"@open-wc/testing": "^3.1.7",
|
||||
"@open-wc/testing-helpers": "^2.2.0",
|
||||
"@rocket/blog": "^0.4.0",
|
||||
"@rocket/cli": "^0.10.1",
|
||||
"@rocket/cli": "^0.10.2",
|
||||
"@rocket/launch": "^0.6.0",
|
||||
"@rocket/search": "^0.5.1",
|
||||
"@types/chai-as-promised": "^7.1.5",
|
||||
"@types/chai-dom": "^0.0.8",
|
||||
"@types/convert-source-map": "^1.5.1",
|
||||
"@types/fs-extra": "^9.0.7",
|
||||
"@types/convert-source-map": "^1.5.2",
|
||||
"@types/fs-extra": "^9.0.13",
|
||||
"@types/glob": "^7.1.3",
|
||||
"@types/istanbul-reports": "^3.0.0",
|
||||
"@types/mocha": "^10.0.0",
|
||||
"@types/prettier": "^2.2.1",
|
||||
"@web/dev-server": "^0.1.8",
|
||||
"@types/istanbul-reports": "^3.0.1",
|
||||
"@types/mocha": "^10.0.1",
|
||||
"@types/prettier": "^2.7.2",
|
||||
"@web/dev-server": "^0.1.37",
|
||||
"@web/dev-server-legacy": "^0.1.7",
|
||||
"@web/test-runner": "^0.13.7",
|
||||
"@web/test-runner-browserstack": "^0.5.0",
|
||||
"@web/test-runner-commands": "^0.6.1",
|
||||
"@web/test-runner-playwright": "^0.8.8",
|
||||
"@webcomponents/scoped-custom-element-registry": "^0.0.5",
|
||||
"@web/test-runner": "^0.15.2",
|
||||
"@web/test-runner-browserstack": "^0.5.1",
|
||||
"@web/test-runner-commands": "^0.6.5",
|
||||
"@web/test-runner-playwright": "^0.9.0",
|
||||
"@webcomponents/scoped-custom-element-registry": "^0.0.8",
|
||||
"@yarnpkg/lockfile": "^1.1.0",
|
||||
"babel-polyfill": "^6.26.0",
|
||||
"bundlesize": "^1.0.0-beta.2",
|
||||
"cem-plugin-vs-code-custom-data-generator": "^1.4.1",
|
||||
"chai": "^4.2.0",
|
||||
"chai-as-promised": "^7.1.1",
|
||||
"chalk": "^4.1.0",
|
||||
"concurrently": "^5.2.0",
|
||||
"cross-env": "^7.0.2",
|
||||
"es6-promisify": "^6.1.1",
|
||||
"eslint": "^8.26.0",
|
||||
"eslint-config-prettier": "^8.3.0",
|
||||
"eslint-plugin-import": "^2.26.0",
|
||||
"eslint-plugin-lit": "^1.6.1",
|
||||
"eslint-plugin-lit-a11y": "^2.2.0",
|
||||
"eslint-plugin-wc": "^1.3.2",
|
||||
"globby": "^13.1.2",
|
||||
"eslint": "^8.37.0",
|
||||
"eslint-config-prettier": "^8.8.0",
|
||||
"eslint-plugin-import": "^2.27.5",
|
||||
"eslint-plugin-lit": "^1.8.2",
|
||||
"eslint-plugin-lit-a11y": "^2.4.0",
|
||||
"eslint-plugin-wc": "^1.4.0",
|
||||
"globby": "^13.1.3",
|
||||
"husky": "^6.0.0",
|
||||
"lint-staged": "^10.0.0",
|
||||
"looks-same": "^7.2.3",
|
||||
"lint-staged": "^10.5.4",
|
||||
"looks-same": "^7.3.0",
|
||||
"markdownlint-cli": "^0.17.0",
|
||||
"mermaid": "^9.3.0",
|
||||
"minimist": "^1.2.6",
|
||||
"mkdirp-promise": "^5.0.1",
|
||||
"mocha": "^10.1.0",
|
||||
"mock-fs": "^5.1.2",
|
||||
"npm-run-all": "^4.1.5",
|
||||
"nyc": "^15.0.0",
|
||||
"playwright": "^1.20.0",
|
||||
"playwright": "^1.32.1",
|
||||
"postinstall-postinstall": "^2.1.0",
|
||||
"prettier": "^2.0.5",
|
||||
"prettier-package-json": "^2.1.3",
|
||||
"remark-html": "^13.0.1",
|
||||
"rimraf": "^2.6.3",
|
||||
"rollup": "^2.0.0",
|
||||
"rollup": "^2.79.1",
|
||||
"semver": "^7.5.2",
|
||||
"sinon": "^7.2.2",
|
||||
"sinon": "^7.5.0",
|
||||
"ssl-root-cas": "^1.3.1",
|
||||
"typescript": "~4.8.4",
|
||||
"whatwg-fetch": "^3.0.0",
|
||||
|
|
@@ -128,7 +123,7 @@
}
},
"overrides": {
"sharp": "^0.31.x"
"sharp": "^0.29.x"
},
"prettier": {
"printWidth": 100,
@@ -1,3 +1,3 @@
providence-output
/providence-output
providence-input-data
/.nyc_output
@@ -1,18 +0,0 @@
<!DOCTYPE html>
<html>

<head>
<title>providence-board</title>
<style>
body {
margin: 8px 32px;
}
</style>
<script type="module" src="./app/p-board.js"></script>
</head>

<body>
<p-board></p-board>
</body>

</html>
@@ -1,8 +0,0 @@
const { LogService } = require('../../src/program/services/LogService.js');

LogService.warn(
'Running via "dashboard/src/server.js" is deprecated. Please run "providence dashboard" instead.',
);

// @ts-ignore
import('./server.mjs');
@@ -10,59 +10,62 @@
"url": "https://github.com/ing-bank/lion.git",
"directory": "packages-node/providence-analytics"
},
"type": "module",
"exports": {
".": "./src/index.js",
"./src/cli": "./src/cli/index.js",
"./cli.js": "./src/cli/index.js",
"./utils.js": "./src/program/utils/index.js",
"./analyzers": "./src/program/analyzers/index.js",
"./analyzers.js": "./src/program/analyzers/index.js",
"./docs/*": "./docs/*"
},
"main": "./src/index.js",
"bin": {
"providence": "./src/cli/index.mjs"
"providence": "./src/cli/index.js"
},
"files": [
"dashboard/src",
"src"
],
"scripts": {
"dashboard": "node ./dashboard/src/server.js --serve-from-package-root",
"match-lion-imports": "npm run providence analyze match-imports --search-target-collection @lion-targets --reference-collection @lion-references",
"providence": "node --max-old-space-size=8192 ./src/cli/index.mjs",
"dashboard": "node ./src/dashboard/server.js --run-server --serve-from-package-root",
"postinstall": "npx patch-package",
"match-lion-imports": "npm run providence -- analyze match-imports --search-target-collection @lion-targets --reference-collection @lion-references --measure-perf --skip-check-match-compatibility",
"providence": "node --max-old-space-size=8192 ./src/cli/index.js",
"publish-docs": "node ../../packages-node/publish-docs/src/cli.js --github-url https://github.com/ing-bank/lion/ --git-root-dir ../../",
"prepublishOnly": "npm run publish-docs",
"test:node": "mocha './test-node/**/*.test.js'",
"test:node:e2e": "mocha './test-node/program/**/*.e2e.js' --timeout 60000",
"test:node:watch": "npm run test:node --watch"
"test:node": "npm run test:node:unit && npm run test:node:e2e",
"test:node:e2e": "mocha './test-node/**/*.e2e.js' --timeout 60000",
"test:node:unit": "mocha './test-node/**/*.test.js'"
},
"dependencies": {
"@babel/core": "^7.10.1",
"@babel/parser": "^7.5.5",
"@babel/plugin-proposal-class-properties": "^7.8.3",
"@babel/core": "^7.21.4",
"@babel/parser": "^7.21.4",
"@babel/plugin-proposal-class-properties": "^7.18.6",
"@babel/plugin-syntax-export-default-from": "^7.18.6",
"@babel/plugin-syntax-import-assertions": "^7.18.6",
"@babel/register": "^7.5.5",
"@babel/traverse": "^7.23.2",
"@babel/types": "^7.9.0",
"@rollup/plugin-node-resolve": "^13.0.6",
"@typescript-eslint/typescript-estree": "^3.0.0",
"anymatch": "^3.1.1",
"chalk": "^4.1.0",
"commander": "^2.20.0",
"deepmerge": "^4.0.0",
"es-dev-server": "^1.57.1",
"es-module-lexer": "^0.3.6",
"glob": "^7.1.6",
"htm": "^3.0.3",
"inquirer": "^7.0.0",
"@babel/plugin-syntax-import-assertions": "^7.20.0",
"@babel/register": "^7.21.0",
"@babel/traverse": "^7.21.4",
"@babel/types": "^7.21.4",
"@rollup/plugin-node-resolve": "^15.0.2",
"@swc/core": "^1.3.46",
"@web/dev-server": "^0.1.38",
"anymatch": "^3.1.3",
"commander": "^2.20.3",
"glob": "^8.1.0",
"inquirer": "^9.1.5",
"is-negated-glob": "^1.0.0",
"lit-element": "~2.4.0",
"mock-require": "^3.0.3",
"ora": "^3.4.0",
"parse5": "^5.1.1",
"lit-element": "~3.3.1",
"parse5": "^7.1.2",
"read-package-tree": "5.3.1",
"semver": "^7.5.2",
"typescript": "~4.8.4"
"semver": "^7.3.8",
"swc-to-babel": "^1.26.0"
},
"devDependencies": {
"@types/chai": "^4.3.4",
"@types/inquirer": "^9.0.3",
"@types/mocha": "^10.0.1",
"@web/dev-server-core": "^0.4.0",
"mock-require": "^3.0.3",
"mock-fs": "^5.2.0"
},
"keywords": [
"analysis",
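Given the updated `bin` and `scripts` entries above, a hedged sketch of driving the CLI programmatically from the package root (analyzer name, flags and target path are illustrative; `providenceConf` is omitted for brevity):

```js
// Hypothetical invocation of the CLI entry added in this commit (not part of this diff).
// Roughly equivalent shell form: node ./src/cli/index.js analyze match-imports -t ./packages/ui
import { cli } from './src/cli/cli.js';

await cli({
  cwd: process.cwd(),
  // commander expects the usual argv layout: [node, script, ...args]
  argv: ['node', 'providence', 'analyze', 'match-imports', '-t', './packages/ui'],
});
```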
@@ -0,0 +1,11 @@
diff --git a/node_modules/@web/dev-server-core/test-helpers.mjs b/node_modules/@web/dev-server-core/test-helpers.mjs
index 1a4d604..9c0d714 100644
--- a/node_modules/@web/dev-server-core/test-helpers.mjs
+++ b/node_modules/@web/dev-server-core/test-helpers.mjs
@@ -1,5 +1,5 @@
// this file is autogenerated with the generate-mjs-dts-entrypoints script
-import cjsEntrypoint from './dist/index.js';
+import cjsEntrypoint from './dist/test-helpers.js';

const {
virtualFilesPlugin,
@@ -1,37 +1,4 @@
import pathLib, { dirname } from 'path';
import fs from 'fs';
import { fileURLToPath } from 'url';

const __dirname = dirname(fileURLToPath(import.meta.url));

// This file is read by dashboard and cli and needs to be present under process.cwd()
// It mainly serves as an example and it allows to run the dashboard locally
// from within this repo.

/**
* @returns {string[]}
*/
function getAllLionScopedPackagePaths() {
const rootPath = pathLib.resolve(__dirname, '../../packages');
const filesAndDirs = fs.readdirSync(rootPath);
const packages = filesAndDirs.filter(f => {
const filePath = pathLib.join(rootPath, f);
if (fs.lstatSync(filePath).isDirectory()) {
let pkgJson;
try {
pkgJson = JSON.parse(fs.readFileSync(pathLib.resolve(filePath, './package.json')));
// eslint-disable-next-line no-empty
} catch (_) {
return false;
}
return pkgJson.name && pkgJson.name.startsWith('@lion/');
}
return false;
});
return packages.map(p => pathLib.join(rootPath, p));
}

const lionScopedPackagePaths = getAllLionScopedPackagePaths();
const lionScopedPackagePaths = ['../../packages/ui'];

export default {
metaConfig: {

@@ -42,9 +9,11 @@ export default {
majorVersion: 1,
// These conditions will be run on overy filePath
categories: {
overlays: localFilePath => {
overlays: (/** @type {string} */ localFilePath) => {
const names = ['dialog', 'tooltip'];
const fromPackages = names.some(p => localFilePath.startsWith(`./packages/${p}`));
const fromPackages = names.some(p =>
localFilePath.startsWith(`./packages/ui/components/${p}`),
);
const fromRoot =
names.some(p => localFilePath.startsWith(`./ui-${p}`)) ||
localFilePath.startsWith('./overlays.js');
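As context for the hunk above: every entry under `categories` is a predicate that is run against a project-local file path. A hedged sketch of such a predicate in isolation (the category name and component folders are illustrative):

```js
// Hypothetical standalone version of a category matcher like the one in providence.conf.js.
const formComponents = ['form-core', 'input', 'select'];

/** @param {string} localFilePath */
export const formsCategory = localFilePath =>
  formComponents.some(p => localFilePath.startsWith(`./packages/ui/components/${p}`));

// formsCategory('./packages/ui/components/input/src/LionInput.js') === true
```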
@@ -1,17 +0,0 @@
#!/usr/bin/env bash

# See https://gist.github.com/myusuf3/7f645819ded92bda6677

if [ -z "$1" ]; then
echo "Please define 'path/to/submodule'";
exit;
fi

# Remove the submodule entry from .git/config
git submodule deinit -f $1

# Remove the submodule directory from the superproject's .git/modules directory
rm -rf .git/modules/$1

# Remove the entry in .gitmodules and remove the submodule directory located at path/to/submodule
git rm -rf $1
@ -1,39 +1,54 @@
|
|||
/* eslint-disable no-shadow */
|
||||
const pathLib = require('path');
|
||||
const child_process = require('child_process'); // eslint-disable-line camelcase
|
||||
const glob = require('glob');
|
||||
const readPackageTree = require('../program/utils/read-package-tree-with-bower-support.js');
|
||||
const { InputDataService } = require('../program/services/InputDataService.js');
|
||||
const { LogService } = require('../program/services/LogService.js');
|
||||
const { aForEach } = require('../program/utils/async-array-utils.js');
|
||||
const { toPosixPath } = require('../program/utils/to-posix-path.js');
|
||||
import pathLib from 'path';
|
||||
import child_process from 'child_process'; // eslint-disable-line camelcase
|
||||
import glob from 'glob';
|
||||
import readPackageTree from '../program/utils/read-package-tree-with-bower-support.js';
|
||||
import { LogService } from '../program/core/LogService.js';
|
||||
import { toPosixPath } from '../program/utils/to-posix-path.js';
|
||||
|
||||
function flatten(arr) {
|
||||
/**
|
||||
* @param {any[]} arr
|
||||
* @returns {any[]}
|
||||
*/
|
||||
export function flatten(arr) {
|
||||
return Array.prototype.concat.apply([], arr);
|
||||
}
|
||||
|
||||
function csToArray(v) {
|
||||
/**
|
||||
* @param {string} v
|
||||
* @returns {string[]}
|
||||
*/
|
||||
export function csToArray(v) {
|
||||
return v.split(',').map(v => v.trim());
|
||||
}
|
||||
|
||||
function extensionsFromCs(v) {
|
||||
/**
|
||||
* @param {string} v like 'js,html'
|
||||
* @returns {string[]} like ['.js', '.html']
|
||||
*/
|
||||
export function extensionsFromCs(v) {
|
||||
return csToArray(v).map(v => `.${v}`);
|
||||
}
|
||||
|
||||
function setQueryMethod(m) {
|
||||
/**
|
||||
*
|
||||
* @param {*} m
|
||||
* @returns
|
||||
*/
|
||||
export function setQueryMethod(m) {
|
||||
const allowedMehods = ['grep', 'ast'];
|
||||
if (allowedMehods.includes(m)) {
|
||||
return m;
|
||||
}
|
||||
// eslint-disable-next-line no-console
|
||||
LogService.error(`Please provide one of the following methods: ${allowedMehods.join(', ')}`);
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {string[]}
|
||||
* @param {string} t
|
||||
* @returns {string[]|undefined}
|
||||
*/
|
||||
function pathsArrayFromCs(t, cwd = process.cwd()) {
|
||||
export function pathsArrayFromCs(t, cwd = process.cwd()) {
|
||||
if (!t) {
|
||||
return undefined;
|
||||
}
|
||||
|
|
@ -57,27 +72,40 @@ function pathsArrayFromCs(t, cwd = process.cwd()) {
|
|||
|
||||
/**
|
||||
* @param {string} name collection name found in eCfg
|
||||
* @param {'search-target'|'reference'} [colType='search-targets'] collection type
|
||||
* @param {object} eCfg external configuration. Usually providence.conf.js
|
||||
* @returns {string[]}
|
||||
* @param {'search-target'|'reference'} collectionType collection type
|
||||
* @param {{searchTargetCollections: {[repo:string]:string[]}; referenceCollections:{[repo:string]:string[]}}} [eCfg] external configuration. Usually providence.conf.js
|
||||
* @param {string} [cwd]
|
||||
* @returns {string[]|undefined}
|
||||
*/
|
||||
function pathsArrayFromCollectionName(name, colType = 'search-target', eCfg, cwd) {
|
||||
export function pathsArrayFromCollectionName(
|
||||
name,
|
||||
collectionType = 'search-target',
|
||||
eCfg = undefined,
|
||||
cwd = process.cwd(),
|
||||
) {
|
||||
let collection;
|
||||
if (colType === 'search-target') {
|
||||
collection = eCfg.searchTargetCollections;
|
||||
} else if (colType === 'reference') {
|
||||
collection = eCfg.referenceCollections;
|
||||
if (collectionType === 'search-target') {
|
||||
collection = eCfg?.searchTargetCollections;
|
||||
} else if (collectionType === 'reference') {
|
||||
collection = eCfg?.referenceCollections;
|
||||
}
|
||||
if (collection && collection[name]) {
|
||||
if (collection?.[name]) {
|
||||
return pathsArrayFromCs(collection[name].join(','), cwd);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function spawnProcess(processArgStr, opts) {
|
||||
/**
|
||||
* @param {string} processArgStr
|
||||
* @param {object} [opts]
|
||||
* @returns {Promise<{ code:number; output:string }>}
|
||||
* @throws {Error}
|
||||
*/
|
||||
export function spawnProcess(processArgStr, opts) {
|
||||
const processArgs = processArgStr.split(' ');
|
||||
// eslint-disable-next-line camelcase
|
||||
const proc = child_process.spawn(processArgs[0], processArgs.slice(1), opts);
|
||||
/** @type {string} */
|
||||
let output;
|
||||
proc.stdout.on('data', data => {
|
||||
output += data;
|
||||
|
|
@ -95,28 +123,30 @@ function spawnProcess(processArgStr, opts) {
|
|||
}
|
||||
|
||||
/**
|
||||
* When providence is called from the root of a repo and no target is provided,
|
||||
* this will provide the default fallback (the project itself)
|
||||
* @param {string} cwd
|
||||
* @returns {string[]}
|
||||
*/
|
||||
function targetDefault() {
|
||||
// eslint-disable-next-line import/no-dynamic-require, global-require
|
||||
const { name } = require(`${process.cwd()}/package.json`);
|
||||
if (name === 'providence') {
|
||||
return InputDataService.targetProjectPaths;
|
||||
}
|
||||
return [toPosixPath(process.cwd())];
|
||||
export function targetDefault(cwd) {
|
||||
return [toPosixPath(cwd)];
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc Returns all sub projects matching condition supplied in matchFn
|
||||
* @param {string[]} searchTargetPaths all search-target project paths
|
||||
* @param {string} matchPattern base for RegExp
|
||||
* @param {string[]} modes
|
||||
* Returns all sub projects matching condition supplied in matchFn
|
||||
* @param {string[]} rootPaths all search-target project paths
|
||||
* @param {string} [matchPattern] base for RegExp
|
||||
* @param {('npm'|'bower')[]} [modes]
|
||||
*/
|
||||
async function appendProjectDependencyPaths(rootPaths, matchPattern, modes = ['npm', 'bower']) {
|
||||
export async function appendProjectDependencyPaths(
|
||||
rootPaths,
|
||||
matchPattern,
|
||||
modes = ['npm', 'bower'],
|
||||
) {
|
||||
let matchFn;
|
||||
if (matchPattern) {
|
||||
if (matchPattern.startsWith('/') && matchPattern.endsWith('/')) {
|
||||
matchFn = (_, d) => {
|
||||
matchFn = (/** @type {any} */ _, /** @type {string} */ d) => {
|
||||
const reString = matchPattern.slice(1, -1);
|
||||
const result = new RegExp(reString).test(d);
|
||||
LogService.debug(`[appendProjectDependencyPaths]: /${reString}/.test(${d} => ${result})`);
|
||||
|
|
@ -128,13 +158,14 @@ async function appendProjectDependencyPaths(rootPaths, matchPattern, modes = ['n
|
|||
);
|
||||
}
|
||||
}
|
||||
/** @type {string[]} */
|
||||
const depProjectPaths = [];
|
||||
await aForEach(rootPaths, async targetPath => {
|
||||
await aForEach(modes, async mode => {
|
||||
for (const targetPath of rootPaths) {
|
||||
for (const mode of modes) {
|
||||
await readPackageTree(
|
||||
targetPath,
|
||||
matchFn,
|
||||
(err, tree) => {
|
||||
(/** @type {string | undefined} */ err, /** @type {{ children: any[]; }} */ tree) => {
|
||||
if (err) {
|
||||
throw new Error(err);
|
||||
}
|
||||
|
|
@ -143,8 +174,8 @@ async function appendProjectDependencyPaths(rootPaths, matchPattern, modes = ['n
|
|||
},
|
||||
mode,
|
||||
);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
// Write all data to {outputPath}/projectDeps.json
|
||||
// const projectDeps = {};
|
||||
// rootPaths.forEach(rootP => {
|
||||
|
|
@ -154,28 +185,32 @@ async function appendProjectDependencyPaths(rootPaths, matchPattern, modes = ['n
|
|||
return depProjectPaths.concat(rootPaths).map(toPosixPath);
|
||||
}
|
||||
|
||||
async function installDeps(searchTargetPaths) {
|
||||
return aForEach(searchTargetPaths, async t => {
|
||||
const spawnConfig = { cwd: t };
|
||||
const extraOptions = { log: true };
|
||||
|
||||
LogService.info(`Installing npm dependencies for ${pathLib.basename(t)}`);
|
||||
/**
|
||||
* Will install all npm and bower deps, so an analysis can be performed on them as well.
|
||||
* Relevant when '--target-dependencies' is supplied.
|
||||
* @param {string[]} searchTargetPaths
|
||||
*/
|
||||
export async function installDeps(searchTargetPaths) {
|
||||
for (const targetPath of searchTargetPaths) {
|
||||
LogService.info(`Installing npm dependencies for ${pathLib.basename(targetPath)}`);
|
||||
try {
|
||||
await spawnProcess('npm i --no-progress', spawnConfig, extraOptions);
|
||||
await spawnProcess('npm i --no-progress', { cwd: targetPath });
|
||||
} catch (e) {
|
||||
// @ts-expect-error
|
||||
LogService.error(e);
|
||||
}
|
||||
|
||||
LogService.info(`Installing bower dependencies for ${pathLib.basename(t)}`);
|
||||
LogService.info(`Installing bower dependencies for ${pathLib.basename(targetPath)}`);
|
||||
try {
|
||||
await spawnProcess(`bower i --production --force-latest`, spawnConfig, extraOptions);
|
||||
await spawnProcess(`bower i --production --force-latest`, { cwd: targetPath });
|
||||
} catch (e) {
|
||||
// @ts-expect-error
|
||||
LogService.error(e);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
export const _cliHelpersModule = {
|
||||
csToArray,
|
||||
extensionsFromCs,
|
||||
setQueryMethod,
|
||||
|
|
|
|||
|
|
@ -1,21 +1,35 @@
|
|||
const child_process = require('child_process'); // eslint-disable-line camelcase
|
||||
const pathLib = require('path');
|
||||
const commander = require('commander');
|
||||
const providenceModule = require('../program/providence.js');
|
||||
const { LogService } = require('../program/services/LogService.js');
|
||||
const { QueryService } = require('../program/services/QueryService.js');
|
||||
const { InputDataService } = require('../program/services/InputDataService.js');
|
||||
const promptModule = require('./prompt-analyzer-menu.js');
|
||||
const cliHelpers = require('./cli-helpers.js');
|
||||
const extendDocsModule = require('./launch-providence-with-extend-docs.js');
|
||||
const { toPosixPath } = require('../program/utils/to-posix-path.js');
|
||||
import child_process from 'child_process'; // eslint-disable-line camelcase
|
||||
import path from 'path';
|
||||
import fs from 'fs';
|
||||
import commander from 'commander';
|
||||
import { LogService } from '../program/core/LogService.js';
|
||||
import { QueryService } from '../program/core/QueryService.js';
|
||||
import { InputDataService } from '../program/core/InputDataService.js';
|
||||
import { toPosixPath } from '../program/utils/to-posix-path.js';
|
||||
import { getCurrentDir } from '../program/utils/get-current-dir.js';
|
||||
import { dashboardServer } from '../dashboard/server.js';
|
||||
import { _providenceModule } from '../program/providence.js';
|
||||
import { _cliHelpersModule } from './cli-helpers.js';
|
||||
import { _extendDocsModule } from './launch-providence-with-extend-docs.js';
|
||||
import { _promptAnalyzerMenuModule } from './prompt-analyzer-menu.js';
|
||||
|
||||
const { extensionsFromCs, setQueryMethod, targetDefault, installDeps, spawnProcess } = cliHelpers;
|
||||
/**
|
||||
* @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../types/index.js').ProvidenceCliConf} ProvidenceCliConf
|
||||
*/
|
||||
|
||||
const { version } = require('../../package.json');
|
||||
const { version } = JSON.parse(
|
||||
fs.readFileSync(path.resolve(getCurrentDir(import.meta.url), '../../package.json'), 'utf8'),
|
||||
);
|
||||
const { extensionsFromCs, setQueryMethod, targetDefault, installDeps } = _cliHelpersModule;
|
||||
|
||||
async function cli({ cwd, providenceConf } = {}) {
|
||||
/**
|
||||
* @param {{cwd?:string; argv?: string[]; providenceConf?: Partial<ProvidenceCliConf>}} cfg
|
||||
*/
|
||||
export async function cli({ cwd = process.cwd(), providenceConf, argv = process.argv }) {
|
||||
/** @type {(value: any) => void} */
|
||||
let resolveCli;
|
||||
/** @type {(reason?: any) => void} */
|
||||
let rejectCli;
|
||||
|
||||
const cliPromise = new Promise((resolve, reject) => {
|
||||
|
|
@ -35,7 +49,14 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
// TODO: change back to "InputDataService.getExternalConfig();" once full package ESM
|
||||
const externalConfig = providenceConf;
|
||||
|
||||
async function getQueryInputData(
|
||||
/**
|
||||
* @param {'search-query'|'feature-query'|'analyzer-query'} searchMode
|
||||
* @param {{regexString: string}} regexSearchOptions
|
||||
* @param {{queryString: string}} featureOptions
|
||||
* @param {{name:AnalyzerName; config:object;promptOptionalConfig:object}} analyzerOptions
|
||||
* @returns
|
||||
*/
|
||||
async function getQueryConfigAndMeta(
|
||||
/* eslint-disable no-shadow */
|
||||
searchMode,
|
||||
regexSearchOptions,
|
||||
|
|
@ -57,11 +78,12 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
} else if (searchMode === 'analyzer-query') {
|
||||
let { name, config } = analyzerOptions;
|
||||
if (!name) {
|
||||
const answers = await promptModule.promptAnalyzerMenu();
|
||||
const answers = await _promptAnalyzerMenuModule.promptAnalyzerMenu();
|
||||
|
||||
name = answers.analyzerName;
|
||||
}
|
||||
if (!config) {
|
||||
const answers = await promptModule.promptAnalyzerConfigMenu(
|
||||
const answers = await _promptAnalyzerMenuModule.promptAnalyzerConfigMenu(
|
||||
name,
|
||||
analyzerOptions.promptOptionalConfig,
|
||||
);
|
||||
|
|
@ -70,7 +92,7 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
// Will get metaConfig from ./providence.conf.js
|
||||
const metaConfig = externalConfig ? externalConfig.metaConfig : {};
|
||||
config = { ...config, metaConfig };
|
||||
queryConfig = QueryService.getQueryConfigFromAnalyzer(name, config);
|
||||
queryConfig = await QueryService.getQueryConfigFromAnalyzer(name, config);
|
||||
queryMethod = 'ast';
|
||||
} else {
|
||||
LogService.error('Please define a feature, analyzer or search');
|
||||
|
|
@ -80,7 +102,7 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
}
|
||||
|
||||
async function launchProvidence() {
|
||||
const { queryConfig, queryMethod } = await getQueryInputData(
|
||||
const { queryConfig, queryMethod } = await getQueryConfigAndMeta(
|
||||
searchMode,
|
||||
regexSearchOptions,
|
||||
featureOptions,
|
||||
|
|
@ -99,7 +121,7 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
*/
|
||||
let totalSearchTargets;
|
||||
if (commander.targetDependencies !== undefined) {
|
||||
totalSearchTargets = await cliHelpers.appendProjectDependencyPaths(
|
||||
totalSearchTargets = await _cliHelpersModule.appendProjectDependencyPaths(
|
||||
searchTargetPaths,
|
||||
commander.targetDependencies,
|
||||
);
|
||||
|
|
@ -112,7 +134,7 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
// we do not test against ourselves...
|
||||
// -
|
||||
|
||||
providenceModule.providence(queryConfig, {
|
||||
_providenceModule.providence(queryConfig, {
|
||||
gatherFilesConfig: {
|
||||
extensions: commander.extensions,
|
||||
allowlistMode: commander.allowlistMode,
|
||||
|
|
@ -130,11 +152,17 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
targetProjectRootPaths: searchTargetPaths,
|
||||
writeLogFile: commander.writeLogFile,
|
||||
skipCheckMatchCompatibility: commander.skipCheckMatchCompatibility,
|
||||
measurePerformance: commander.measurePerf,
|
||||
addSystemPathsInResult: commander.addSystemPaths,
|
||||
fallbackToBabel: commander.fallbackToBabel,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {{update:boolean; deps:boolean;createVersionHistory:boolean}} options
|
||||
*/
|
||||
async function manageSearchTargets(options) {
|
||||
const basePath = pathLib.join(__dirname, '../..');
|
||||
const basePath = path.join(__dirname, '../..');
|
||||
if (options.update) {
|
||||
LogService.info('git submodule update --init --recursive');
|
||||
|
||||
|
|
@ -153,15 +181,6 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
}
|
||||
}
|
||||
|
||||
async function runDashboard() {
|
||||
const pathFromServerRootToDashboard = `${pathLib.relative(
|
||||
process.cwd(),
|
||||
pathLib.resolve(__dirname, '../../dashboard'),
|
||||
)}`;
|
||||
|
||||
spawnProcess(`node ${pathFromServerRootToDashboard}/src/server.mjs`);
|
||||
}
|
||||
|
||||
commander
|
||||
.version(version, '-v, --version')
|
||||
.option('-e, --extensions [extensions]', 'extensions like "js,html"', extensionsFromCs, [
|
||||
|
|
@ -173,37 +192,37 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
'-t, --search-target-paths [targets]',
|
||||
`path(s) to project(s) on which analysis/querying should take place. Requires
|
||||
a list of comma seperated values relative to project root`,
|
||||
v => cliHelpers.pathsArrayFromCs(v, cwd),
|
||||
targetDefault(),
|
||||
v => _cliHelpersModule.pathsArrayFromCs(v, cwd),
|
||||
targetDefault(cwd),
|
||||
)
|
||||
.option(
|
||||
'-r, --reference-paths [references]',
|
||||
`path(s) to project(s) which serve as a reference (applicable for certain analyzers like
|
||||
'match-imports'). Requires a list of comma seperated values relative to
|
||||
project root (like 'node_modules/lion-based-ui, node_modules/lion-based-ui-labs').`,
|
||||
v => cliHelpers.pathsArrayFromCs(v, cwd),
|
||||
v => _cliHelpersModule.pathsArrayFromCs(v, cwd),
|
||||
InputDataService.referenceProjectPaths,
|
||||
)
|
||||
.option('-a, --allowlist [allowlist]', `allowlisted paths, like 'src/**/*, packages/**/*'`, v =>
|
||||
cliHelpers.csToArray(v, cwd),
|
||||
_cliHelpersModule.csToArray(v),
|
||||
)
|
||||
.option(
|
||||
'--allowlist-reference [allowlist-reference]',
|
||||
`allowed paths for reference, like 'src/**/*, packages/**/*'`,
|
||||
v => cliHelpers.csToArray(v, cwd),
|
||||
v => _cliHelpersModule.csToArray(v),
|
||||
)
|
||||
.option(
|
||||
'--search-target-collection [collection-name]',
|
||||
`path(s) to project(s) which serve as a reference (applicable for certain analyzers like
|
||||
'match-imports'). Should be a collection defined in providence.conf.js as paths relative to
|
||||
project root.`,
|
||||
v => cliHelpers.pathsArrayFromCollectionName(v, 'search-target', externalConfig),
|
||||
v => _cliHelpersModule.pathsArrayFromCollectionName(v, 'search-target', externalConfig),
|
||||
)
|
||||
.option(
|
||||
'--reference-collection [collection-name]',
|
||||
`path(s) to project(s) on which analysis/querying should take place. Should be a collection
|
||||
defined in providence.conf.js as paths relative to project root.`,
|
||||
v => cliHelpers.pathsArrayFromCollectionName(v, 'reference', externalConfig),
|
||||
v => _cliHelpersModule.pathsArrayFromCollectionName(v, 'reference', externalConfig),
|
||||
)
|
||||
.option('--write-log-file', `Writes all logs to 'providence.log' file`)
|
||||
.option(
|
||||
|
|
@ -232,6 +251,12 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
.option(
|
||||
'--skip-check-match-compatibility',
|
||||
`skips semver checks, handy for forward compatible libs or libs below v1`,
|
||||
)
|
||||
.option('--measure-perf', 'Logs the completion time in seconds')
|
||||
.option('--add-system-paths', 'Adds system paths to results')
|
||||
.option(
|
||||
'--fallback-to-babel',
|
||||
'Uses babel instead of swc. This will be slower, but guaranteed to be 100% compatible with @babel/generate and @babel/traverse',
|
||||
);
|
||||
|
||||
commander
|
||||
|
|
@ -298,7 +323,7 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
.option(
|
||||
'--output-folder [output-folder]',
|
||||
`This is the file path where the result file "providence-extend-docs-data.json" will be written to`,
|
||||
p => toPosixPath(pathLib.resolve(process.cwd(), p.trim())),
|
||||
p => toPosixPath(path.resolve(process.cwd(), p.trim())),
|
||||
process.cwd(),
|
||||
)
|
||||
.action(options => {
|
||||
|
|
@ -311,7 +336,7 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
process.exit(1);
|
||||
}
|
||||
const prefixCfg = { from: options.prefixFrom, to: options.prefixTo };
|
||||
extendDocsModule
|
||||
_extendDocsModule
|
||||
.launchProvidenceWithExtendDocs({
|
||||
referenceProjectPaths: commander.referencePaths,
|
||||
prefixCfg,
|
||||
|
|
@ -346,12 +371,10 @@ async function cli({ cwd, providenceConf } = {}) {
|
|||
via providence.conf`,
|
||||
)
|
||||
.action(() => {
|
||||
runDashboard();
|
||||
dashboardServer.start();
|
||||
});
|
||||
|
||||
commander.parse(process.argv);
|
||||
commander.parse(argv);
|
||||
|
||||
await cliPromise;
|
||||
}
|
||||
|
||||
module.exports = { cli };
|
||||
|
|
|
|||
|
|
@@ -1,9 +1,9 @@
#!/usr/bin/env node
import { cli } from './cli.js';
import { getProvidenceConf } from '../program/utils/get-providence-conf.mjs';
import { providenceConfUtil } from '../program/utils/providence-conf-util.js';

(async () => {
// We need to provide config to cli, until whole package is rewritten as ESM.
const { providenceConf } = (await getProvidenceConf()) || {};
const { providenceConf } = (await providenceConfUtil.getConf()) || {};
cli({ providenceConf });
})();
@ -1,14 +1,31 @@
|
|||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
const fs = require('fs');
|
||||
const pathLib = require('path');
|
||||
const { performance } = require('perf_hooks');
|
||||
const providenceModule = require('../program/providence.js');
|
||||
const { QueryService } = require('../program/services/QueryService.js');
|
||||
const { InputDataService } = require('../program/services/InputDataService.js');
|
||||
const { LogService } = require('../program/services/LogService.js');
|
||||
const { flatten } = require('./cli-helpers.js');
|
||||
import fs from 'fs';
|
||||
import pathLib from 'path';
|
||||
import { performance } from 'perf_hooks';
|
||||
import { _providenceModule } from '../program/providence.js';
|
||||
import { QueryService } from '../program/core/QueryService.js';
|
||||
import { InputDataService } from '../program/core/InputDataService.js';
|
||||
import { LogService } from '../program/core/LogService.js';
|
||||
import { flatten } from './cli-helpers.js';
|
||||
import MatchPathsAnalyzer from '../program/analyzers/match-paths.js';
|
||||
|
||||
async function getExtendDocsResults({
|
||||
/**
|
||||
* @typedef {import('../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../types/index.js').GatherFilesConfig} GatherFilesConfig
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {{
|
||||
* referenceProjectPaths: PathFromSystemRoot[];
|
||||
* prefixCfg:{from:string;to:string};
|
||||
* extensions:GatherFilesConfig['extensions'];
|
||||
* allowlist?:string[];
|
||||
* allowlistReference?:string[];
|
||||
* cwd:PathFromSystemRoot
|
||||
* }} opts
|
||||
* @returns
|
||||
*/
|
||||
export async function getExtendDocsResults({
|
||||
referenceProjectPaths,
|
||||
prefixCfg,
|
||||
extensions,
|
||||
|
|
@ -18,11 +35,11 @@ async function getExtendDocsResults({
|
|||
}) {
|
||||
const monoPkgs = InputDataService.getMonoRepoPackages(cwd);
|
||||
|
||||
const results = await providenceModule.providence(
|
||||
QueryService.getQueryConfigFromAnalyzer('match-paths', { prefix: prefixCfg }),
|
||||
const results = await _providenceModule.providence(
|
||||
await QueryService.getQueryConfigFromAnalyzer(MatchPathsAnalyzer, { prefix: prefixCfg }),
|
||||
{
|
||||
gatherFilesConfig: {
|
||||
extensions: extensions || ['.js'],
|
||||
extensions: extensions || /** @type {GatherFilesConfig['extensions']} */ (['.js']),
|
||||
allowlist: allowlist || ['!coverage', '!test'],
|
||||
},
|
||||
gatherFilesConfigReference: {
|
||||
|
|
@ -31,7 +48,7 @@ async function getExtendDocsResults({
|
|||
},
|
||||
queryMethod: 'ast',
|
||||
report: false,
|
||||
targetProjectPaths: [pathLib.resolve(cwd)],
|
||||
targetProjectPaths: [cwd],
|
||||
referenceProjectPaths,
|
||||
// For mono repos, a match between root package.json and ref project will not exist.
|
||||
// Disable this check, so it won't be a blocker for extendin docs
|
||||
|
|
@ -45,7 +62,7 @@ async function getExtendDocsResults({
|
|||
|
||||
/**
|
||||
* @param {string} pathStr ./packages/lea-tabs/lea-tabs.js
|
||||
* @param {string[]} pkgs ['packages/lea-tabs', ...]
|
||||
* @param {{path:string;name:string}[]} pkgs ['packages/lea-tabs', ...]
|
||||
*/
|
||||
function replaceToMonoRepoPath(pathStr, pkgs) {
|
||||
let result = pathStr;
|
||||
|
|
@ -82,7 +99,11 @@ async function getExtendDocsResults({
|
|||
return queryOutputs;
|
||||
}
|
||||
|
||||
async function launchProvidenceWithExtendDocs({
|
||||
/**
|
||||
*
|
||||
* @param {*} opts
|
||||
*/
|
||||
export async function launchProvidenceWithExtendDocs({
|
||||
referenceProjectPaths,
|
||||
prefixCfg,
|
||||
outputFolder,
|
||||
|
|
@ -118,7 +139,7 @@ async function launchProvidenceWithExtendDocs({
|
|||
LogService.info(`"extend-docs" completed in ${Math.round((t1 - t0) / 1000)} seconds`);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
export const _extendDocsModule = {
|
||||
launchProvidenceWithExtendDocs,
|
||||
getExtendDocsResults,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,23 +1,31 @@
|
|||
const fs = require('fs');
|
||||
const pathLib = require('path');
|
||||
const inquirer = require('inquirer');
|
||||
const { default: traverse } = require('@babel/traverse');
|
||||
const { InputDataService } = require('../program/services/InputDataService.js');
|
||||
const { AstService } = require('../program/services/AstService.js');
|
||||
const { LogService } = require('../program/services/LogService.js');
|
||||
const JsdocCommentParser = require('../program/utils/jsdoc-comment-parser.js');
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import inquirer from 'inquirer';
|
||||
import traverse from '@babel/traverse';
|
||||
import { InputDataService } from '../program/core/InputDataService.js';
|
||||
import { AstService } from '../program/core/AstService.js';
|
||||
import { LogService } from '../program/core/LogService.js';
|
||||
import JsdocCommentParser from '../program/utils/jsdoc-comment-parser.js';
|
||||
import { getCurrentDir } from '../program/utils/get-current-dir.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('../../types/index.js').TargetDepsObj} TargetDepsObj
|
||||
* @typedef {import('../../types/index.js').TargetOrRefCollectionsObj} TargetOrRefCollectionsObj
|
||||
* @typedef {import('../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
|
||||
*/
|
||||
|
||||
/**
|
||||
* Extracts name, defaultValue, optional, type, desc from JsdocCommentParser.parse method
|
||||
* result
|
||||
* @param {string[]} jsdoc
|
||||
* @returns {{ name:string, defaultValue:string, optional:boolean, type:string, desc:string }}
|
||||
* @param {{tagName:string;tagValue:string}[]} jsdoc
|
||||
* @returns {{ name:string, defaultValue:string, optional:boolean, type:string, desc:string }[]}
|
||||
*/
|
||||
function getPropsFromParsedJsDoc(jsdoc) {
|
||||
const jsdocProps = jsdoc.filter(p => p.tagName === '@property');
|
||||
const options = jsdocProps.map(({ tagValue }) => {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const [_, type, nameOptionalDefault, desc] = tagValue.match(/\{(.*)\}\s*([^\s]*)\s*(.*)/);
|
||||
const [_, type, nameOptionalDefault, desc] = tagValue.match(/\{(.*)\}\s*([^\s]*)\s*(.*)/) || [];
|
||||
let nameDefault = nameOptionalDefault;
|
||||
let optional = false;
|
||||
if (nameOptionalDefault.startsWith('[') && nameOptionalDefault.endsWith(']')) {
|
||||
|
|
@ -30,21 +38,26 @@ function getPropsFromParsedJsDoc(jsdoc) {
|
|||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} file
|
||||
*/
|
||||
function getAnalyzerOptions(file) {
|
||||
const code = fs.readFileSync(file, 'utf8');
|
||||
const ast = AstService.getAst(code, 'babel', { filePath: file });
|
||||
const babelAst = AstService.getAst(code, 'swc-to-babel', { filePath: file });
|
||||
|
||||
let commentNode;
|
||||
traverse(ast, {
|
||||
traverse.default(babelAst, {
|
||||
// eslint-disable-next-line no-shadow
|
||||
VariableDeclaration(path) {
|
||||
if (!path.node.leadingComments) {
|
||||
VariableDeclaration(astPath) {
|
||||
const { node } = astPath;
|
||||
if (!node.leadingComments) {
|
||||
return;
|
||||
}
|
||||
const decls = path.node.declarations || [];
|
||||
decls.forEach(decl => {
|
||||
if (decl && decl.id && decl.id.name === 'cfg') {
|
||||
[commentNode] = path.node.leadingComments;
|
||||
node.declarations.forEach(decl => {
|
||||
// @ts-expect-error
|
||||
if (decl?.id?.name === 'cfg') {
|
||||
// eslint-disable-next-line prefer-destructuring
|
||||
commentNode = node.leadingComments?.[0];
|
||||
}
|
||||
});
|
||||
},
|
||||
|
|
@ -57,20 +70,33 @@ function getAnalyzerOptions(file) {
|
|||
return undefined;
|
||||
}
|
||||
|
||||
function gatherAnalyzers(dir, getConfigOptions) {
|
||||
/**
|
||||
* @param {PathFromSystemRoot} dir
|
||||
* @param {boolean} [shouldGetOptions]
|
||||
*/
|
||||
function gatherAnalyzers(dir, shouldGetOptions) {
|
||||
return InputDataService.gatherFilesFromDir(dir, { depth: 0 }).map(file => {
|
||||
const analyzerObj = { file, name: pathLib.basename(file, '.js') };
|
||||
if (getConfigOptions) {
|
||||
const analyzerObj = { file, name: path.basename(file, '.js') };
|
||||
if (shouldGetOptions) {
|
||||
analyzerObj.options = getAnalyzerOptions(file);
|
||||
}
|
||||
return analyzerObj;
|
||||
});
|
||||
}
|
||||
|
||||
async function promptAnalyzerConfigMenu(
|
||||
/**
|
||||
*
|
||||
* @param {AnalyzerName} analyzerName
|
||||
* @param {*} promptOptionalConfig
|
||||
* @param {PathFromSystemRoot} [dir]
|
||||
* @returns
|
||||
*/
|
||||
export async function promptAnalyzerConfigMenu(
|
||||
analyzerName,
|
||||
promptOptionalConfig,
|
||||
dir = pathLib.resolve(__dirname, '../program/analyzers'),
|
||||
dir = /** @type {PathFromSystemRoot} */ (
|
||||
path.resolve(getCurrentDir(import.meta.url), '../program/analyzers')
|
||||
),
|
||||
) {
|
||||
const menuOptions = gatherAnalyzers(dir, true);
|
||||
const analyzer = menuOptions.find(o => o.name === analyzerName);
|
||||
|
|
@ -112,7 +138,11 @@ async function promptAnalyzerConfigMenu(
|
|||
};
|
||||
}
|
||||
|
||||
async function promptAnalyzerMenu(dir = pathLib.resolve(__dirname, '../program/analyzers')) {
|
||||
export async function promptAnalyzerMenu(
|
||||
dir = /** @type {PathFromSystemRoot} */ (
|
||||
path.resolve(getCurrentDir(import.meta.url), '../program/analyzers')
|
||||
),
|
||||
) {
|
||||
const menuOptions = gatherAnalyzers(dir);
|
||||
const answers = await inquirer.prompt([
|
||||
{
|
||||
|
|
@ -127,7 +157,7 @@ async function promptAnalyzerMenu(dir = pathLib.resolve(__dirname, '../program/a
|
|||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
export const _promptAnalyzerMenuModule = {
|
||||
promptAnalyzerMenu,
|
||||
promptAnalyzerConfigMenu,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -278,7 +278,7 @@ class PBoard extends DecorateMixin(LitElement) {
|
|||
if (!this.__menuData) {
|
||||
return;
|
||||
}
|
||||
await this.__fetchResults();
|
||||
// await this.__fetchResults();
|
||||
|
||||
const elements = Array.from(this._selectionMenuFormNode.elements);
|
||||
const repos = elements.filter(n => n.name === 'repos');
|
||||
|
|
@ -303,7 +303,8 @@ class PBoard extends DecorateMixin(LitElement) {
|
|||
this.__providenceConf,
|
||||
);
|
||||
dataResult[i].type = specifierRes.exportSpecifier.name === '[file]' ? 'file' : 'specifier';
|
||||
dataResult[i].count = specifierRes.matchesPerProject
|
||||
// dedupe, because outputs genarted with older versions might have dedupe problems
|
||||
dataResult[i].count = Array.from(new Set(specifierRes.matchesPerProject))
|
||||
.map(mpp => mpp.files)
|
||||
.flat(Infinity).length;
|
||||
dataResult[i].matchedProjects = specifierRes.matchesPerProject;
|
||||
|
|
@ -435,7 +436,7 @@ class PBoard extends DecorateMixin(LitElement) {
|
|||
|
||||
async __fetchMenuData() {
|
||||
// Derived from providence.conf.js, generated in server.mjs
|
||||
this.__initialMenuData = await fetch('/menu-data').then(response => response.json());
|
||||
this.__initialMenuData = await fetch('/menu-data.json').then(response => response.json());
|
||||
}
|
||||
|
||||
async __fetchProvidenceConf() {
|
||||
|
|
@ -446,7 +447,7 @@ class PBoard extends DecorateMixin(LitElement) {
|
|||
}
|
||||
|
||||
async __fetchResults() {
|
||||
this.__resultFiles = await fetch('/results').then(response => response.json());
|
||||
this.__resultFiles = await fetch('/results.json').then(response => response.json());
|
||||
}
|
||||
}
|
||||
customElements.define('p-board', PBoard);
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
/**
|
||||
* @desc Can be called from a button click handler in order to let the end user download a file
|
||||
* Can be called from a button click handler in order to let the end user download a file
|
||||
* @param {string} filename like 'overview.csv'
|
||||
* @param {string} content for instance a csv file
|
||||
*/
|
||||
16
packages-node/providence-analytics/src/dashboard/index.html
Normal file
16
packages-node/providence-analytics/src/dashboard/index.html
Normal file
|
|
@ -0,0 +1,16 @@
|
|||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>Providence dashboard</title>
|
||||
<style>
|
||||
body {
|
||||
margin: 8px 32px;
|
||||
}
|
||||
</style>
|
||||
<script type="module" src="./app/p-board.js"></script>
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<p-board></p-board>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -1,15 +1,19 @@
|
|||
import fs from 'fs';
|
||||
import pathLib, { dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { createConfig, startServer } from 'es-dev-server';
|
||||
import { ReportService } from '../../src/program/services/ReportService.js';
|
||||
import { getProvidenceConf } from '../../src/program/utils/get-providence-conf.mjs';
|
||||
import pathLib from 'path';
|
||||
import { startDevServer } from '@web/dev-server';
|
||||
import { ReportService } from '../program/core/ReportService.js';
|
||||
import { providenceConfUtil } from '../program/utils/providence-conf-util.js';
|
||||
import { getCurrentDir } from '../program/utils/get-current-dir.js';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
/**
|
||||
* @typedef {import('../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../types/index.js').GatherFilesConfig} GatherFilesConfig
|
||||
* @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
|
||||
*/
|
||||
|
||||
/**
|
||||
* Gets all results found in cache folder with all results
|
||||
* @param {{ supportedAnalyzers: `match-${string}`[], resultsPath: string }} options
|
||||
* @param {{ supportedAnalyzers?: `match-${string}`[], resultsPath?: string }} options
|
||||
*/
|
||||
async function getCachedProvidenceResults({
|
||||
supportedAnalyzers = ['match-imports', 'match-subclasses'],
|
||||
|
|
@ -34,6 +38,7 @@ async function getCachedProvidenceResults({
|
|||
searchTargetDeps = content;
|
||||
} else {
|
||||
const analyzerName = fileName.split('_-_')[0];
|
||||
// @ts-ignore
|
||||
if (!supportedAnalyzers.includes(analyzerName)) {
|
||||
return;
|
||||
}
|
||||
|
|
@ -48,7 +53,7 @@ async function getCachedProvidenceResults({
|
|||
}
|
||||
|
||||
/**
|
||||
* @param {{ providenceConf: object; earchTargetDeps: object; resultFiles: string[]; }}
|
||||
* @param {{ providenceConf: object; providenceConfRaw:string; searchTargetDeps: object; resultFiles: string[]; }}
|
||||
*/
|
||||
function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps, resultFiles }) {
|
||||
/**
|
||||
|
|
@ -66,7 +71,7 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
|
|||
|
||||
/**
|
||||
* @param {object[]} collections
|
||||
* @returns {{[keu as string]: }}
|
||||
* @returns {{[key as string]: }}
|
||||
*/
|
||||
function transformToProjectNames(collections) {
|
||||
const res = {};
|
||||
|
|
@ -74,13 +79,16 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
|
|||
Object.entries(collections).map(([key, val]) => {
|
||||
res[key] = val.map(c => {
|
||||
const pkg = getPackageJson(c);
|
||||
return pkg && pkg.name;
|
||||
return pkg?.name;
|
||||
});
|
||||
});
|
||||
return res;
|
||||
}
|
||||
|
||||
const pathFromServerRootToHere = `/${pathLib.relative(process.cwd(), __dirname)}`;
|
||||
const pathFromServerRootToHere = `/${pathLib.relative(
|
||||
process.cwd(),
|
||||
getCurrentDir(import.meta.url),
|
||||
)}`;
|
||||
|
||||
return [
|
||||
// eslint-disable-next-line consistent-return
|
||||
|
|
@ -94,9 +102,10 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
|
|||
ctx.url = `${pathFromServerRootToHere}/index.html`;
|
||||
return next();
|
||||
}
|
||||
if (ctx.url === '/results') {
|
||||
if (ctx.url === '/results.json') {
|
||||
ctx.type = 'application/json';
|
||||
ctx.body = resultFiles;
|
||||
} else if (ctx.url === '/menu-data') {
|
||||
} else if (ctx.url === '/menu-data.json') {
|
||||
// Gathers all data that are relevant to create a configuration menu
|
||||
// at the top of the dashboard:
|
||||
// - referenceCollections as defined in providence.conf.js
|
||||
|
|
@ -112,16 +121,18 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
|
|||
}
|
||||
|
||||
const menuData = {
|
||||
// N.B. theoratically there can be a mismatch between basename and pkgJson.name,
|
||||
// N.B. theoretically there can be a mismatch between basename and pkgJson.name,
|
||||
// but we assume folder names and pkgJson.names to be similar
|
||||
searchTargetCollections,
|
||||
referenceCollections: transformToProjectNames(providenceConf.referenceCollections),
|
||||
searchTargetDeps,
|
||||
};
|
||||
|
||||
ctx.type = 'application/json';
|
||||
ctx.body = menuData;
|
||||
} else if (ctx.url === '/providence-conf.js') {
|
||||
// Alloes frontend dasbboard app to find categoriesand other configs
|
||||
ctx.type = 'text/javascript';
|
||||
// Allows frontend dasbboard app to find categories and other configs
|
||||
ctx.type = 'application/javascript';
|
||||
ctx.body = providenceConfRaw;
|
||||
} else {
|
||||
await next();
|
||||
|
|
@ -130,8 +141,8 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
|
|||
];
|
||||
}
|
||||
|
||||
(async function main() {
|
||||
const { providenceConf, providenceConfRaw } = await getProvidenceConf();
|
||||
export async function createDashboardServerConfig() {
|
||||
const { providenceConf, providenceConfRaw } = (await providenceConfUtil.getConf()) || {};
|
||||
const { searchTargetDeps, resultFiles } = await getCachedProvidenceResults();
|
||||
|
||||
// Needed for dev purposes (we call it from ./packages-node/providence-analytics/ instead of ./)
|
||||
|
|
@ -139,21 +150,38 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
|
|||
const fromPackageRoot = process.argv.includes('--serve-from-package-root');
|
||||
const moduleRoot = fromPackageRoot ? pathLib.resolve(process.cwd(), '../../') : process.cwd();
|
||||
|
||||
const config = createConfig({
|
||||
port: 8080,
|
||||
appIndex: pathLib.resolve(__dirname, 'index.html'),
|
||||
return {
|
||||
appIndex: pathLib.resolve(getCurrentDir(import.meta.url), 'index.html'),
|
||||
rootDir: moduleRoot,
|
||||
nodeResolve: true,
|
||||
moduleDirs: pathLib.resolve(moduleRoot, 'node_modules'),
|
||||
watch: false,
|
||||
open: true,
|
||||
middlewares: createMiddleWares({
|
||||
middleware: createMiddleWares({
|
||||
providenceConf,
|
||||
providenceConfRaw,
|
||||
searchTargetDeps,
|
||||
resultFiles,
|
||||
}),
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
await startServer(config);
|
||||
/** @type {(value?: any) => void} */
|
||||
let resolveLoaded;
|
||||
export const serverInstanceLoaded = new Promise(resolve => {
|
||||
resolveLoaded = resolve;
|
||||
});
|
||||
|
||||
// Export interface as object, so we can mock it easily inside tests
|
||||
export const dashboardServer = {
|
||||
start: async () => {
|
||||
await startDevServer({ config: await createDashboardServerConfig() });
|
||||
resolveLoaded();
|
||||
},
|
||||
};
|
||||
|
||||
(async () => {
|
||||
if (process.argv.includes('--run-server')) {
|
||||
dashboardServer.start();
|
||||
}
|
||||
})();
|
||||
|
|
@@ -1,7 +1,5 @@
const { providence } = require('./program/providence.js');
const { QueryService } = require('./program/services/QueryService.js');
const { LogService } = require('./program/services/LogService.js');
const { InputDataService } = require('./program/services/InputDataService.js');
const { AstService } = require('./program/services/AstService.js');

module.exports = { providence, QueryService, LogService, InputDataService, AstService };
export { providence } from './program/providence.js';
export { QueryService } from './program/core/QueryService.js';
export { LogService } from './program/core/LogService.js';
export { InputDataService } from './program/core/InputDataService.js';
export { AstService } from './program/core/AstService.js';
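Since the public surface above is now pure ESM, a minimal programmatic-usage sketch (hypothetical, not part of this diff; analyzer name and paths are illustrative, and the optional config argument of `getQueryConfigFromAnalyzer` is omitted):

```js
// Hypothetical consumer of the ESM entrypoint shown above.
import path from 'path';
import { providence, QueryService, LogService } from 'providence-analytics';

const queryConfig = await QueryService.getQueryConfigFromAnalyzer('match-imports');
await providence(queryConfig, {
  targetProjectPaths: [process.cwd()],
  referenceProjectPaths: [path.resolve('./node_modules/@lion/ui')], // illustrative reference
  report: false,
});
LogService.info('analysis completed');
```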
@@ -1,21 +1,28 @@
/* eslint-disable no-shadow, no-param-reassign */
const pathLib = require('path');
const t = require('@babel/types');
const { default: traverse } = require('@babel/traverse');
const { Analyzer } = require('./helpers/Analyzer.js');
const { trackDownIdentifierFromScope } = require('./helpers/track-down-identifier.js');
const { aForEach } = require('../utils/async-array-utils.js');
import path from 'path';
import t from '@babel/types';
// @ts-ignore
import babelTraverse from '@babel/traverse';
import { Analyzer } from '../core/Analyzer.js';
import { trackDownIdentifierFromScope } from './helpers/track-down-identifier--legacy.js';

/** @typedef {import('../types/analyzers').FindClassesAnalyzerOutput} FindClassesAnalyzerOutput */
/** @typedef {import('../types/analyzers').FindClassesAnalyzerOutputEntry} FindClassesAnalyzerOutputEntry */
/** @typedef {import('../types/analyzers').FindClassesConfig} FindClassesConfig */
/**
 * @typedef {import('@babel/types').File} File
 * @typedef {import('@babel/types').ClassMethod} ClassMethod
 * @typedef {import('@babel/traverse').NodePath} NodePath
 * @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
 * @typedef {import('../../../types/index.js').FindClassesAnalyzerResult} FindClassesAnalyzerResult
 * @typedef {import('../../../types/index.js').FindClassesAnalyzerOutputFile} FindClassesAnalyzerOutputFile
 * @typedef {import('../../../types/index.js').FindClassesAnalyzerEntry} FindClassesAnalyzerEntry
 * @typedef {import('../../../types/index.js').FindClassesConfig} FindClassesConfig
 */

/**
 * Finds import specifiers and sources
 * @param {BabelAst} ast
 * @param {string} relativePath the file being currently processed
 * @param {File} babelAst
 * @param {string} fullCurrentFilePath the file being currently processed
 */
async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
async function findMembersPerAstEntry(babelAst, fullCurrentFilePath, projectPath) {
// The transformed entry
const classesFound = [];
/**

@@ -34,6 +41,10 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
return 'public';
}

/**
 * @param {{node:ClassMethod}} cfg
 * @returns
 */
function isStaticProperties({ node }) {
return node.static && node.kind === 'get' && node.key.name === 'properties';
}

@@ -73,15 +84,20 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
// return false;
// }

async function traverseClass(path, { isMixin } = {}) {
/**
 *
 * @param {NodePath} astPath
 * @param {{isMixin?:boolean}} opts
 */
async function traverseClass(astPath, { isMixin = false } = {}) {
const classRes = {};
classRes.name = path.node.id && path.node.id.name;
classRes.name = astPath.node.id && astPath.node.id.name;
classRes.isMixin = Boolean(isMixin);
if (path.node.superClass) {
if (astPath.node.superClass) {
const superClasses = [];

// Add all Identifier names
let parent = path.node.superClass;
let parent = astPath.node.superClass;
while (parent.type === 'CallExpression') {
superClasses.push({ name: parent.callee.name, isMixin: true });
// As long as we are a CallExpression, we will have a parent

@@ -91,37 +107,42 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
superClasses.push({ name: parent.name, isMixin: false });

// For all found superclasses, track down their root location.
// This will either result in a local, relative path in the project,
// or an external path like '@lion/overlays'. In the latter case,
// This will either result in a local, relative astPath in the project,
// or an external astPath like '@lion/overlays'. In the latter case,
// tracking down will halt and should be done when there is access to
// the external repo... (similar to how 'match-imports' analyzer works)
await aForEach(superClasses, async classObj => {

for (const classObj of superClasses) {
// Finds the file that holds the declaration of the import
classObj.rootFile = await trackDownIdentifierFromScope(
path,
astPath,
classObj.name,
fullCurrentFilePath,
projectPath,
);
});
}
classRes.superClasses = superClasses;
}

classRes.members = {};
classRes.members.props = []; // meta: private, public, getter/setter, (found in static get properties)
classRes.members.methods = []; // meta: private, public, getter/setter
path.traverse({
ClassMethod(path) {
// if (isBlacklisted(path)) {
classRes.members = {
// meta: private, public, getter/setter, (found in static get properties)
props: [],
// meta: private, public, getter/setter
methods: [],
};

astPath.traverse({
ClassMethod(astPath) {
// if (isBlacklisted(astPath)) {
// return;
// }
if (isStaticProperties(path)) {
if (isStaticProperties(astPath)) {
let hasFoundTopLvlObjExpr = false;
path.traverse({
ObjectExpression(path) {
astPath.traverse({
ObjectExpression(astPath) {
if (hasFoundTopLvlObjExpr) return;
hasFoundTopLvlObjExpr = true;
path.node.properties.forEach(objectProperty => {
astPath.node.properties.forEach(objectProperty => {
if (!t.isProperty(objectProperty)) {
// we can also have a SpreadElement
return;

@@ -139,19 +160,19 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
}

const methodRes = {};
const { name } = path.node.key;
const { name } = astPath.node.key;
methodRes.name = name;
methodRes.accessType = computeAccessType(name);

if (path.node.kind === 'set' || path.node.kind === 'get') {
if (path.node.static) {
if (astPath.node.kind === 'set' || astPath.node.kind === 'get') {
if (astPath.node.static) {
methodRes.static = true;
}
methodRes.kind = [...(methodRes.kind || []), path.node.kind];
methodRes.kind = [...(methodRes.kind || []), astPath.node.kind];
// Merge getter/setters into one
const found = classRes.members.props.find(p => p.name === name);
if (found) {
found.kind = [...(found.kind || []), path.node.kind];
found.kind = [...(found.kind || []), astPath.node.kind];
} else {
classRes.members.props.push(methodRes);
}

@@ -165,18 +186,19 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
}

const classesToTraverse = [];
traverse(ast, {
ClassDeclaration(path) {
classesToTraverse.push({ path, isMixin: false });

babelTraverse.default(babelAst, {
ClassDeclaration(astPath) {
classesToTraverse.push({ astPath, isMixin: false });
},
ClassExpression(path) {
classesToTraverse.push({ path, isMixin: true });
ClassExpression(astPath) {
classesToTraverse.push({ astPath, isMixin: true });
},
});

await aForEach(classesToTraverse, async klass => {
await traverseClass(klass.path, { isMixin: klass.isMixin });
});
for (const klass of classesToTraverse) {
await traverseClass(klass.astPath, { isMixin: klass.isMixin });
}

return classesFound;
}

@@ -201,25 +223,20 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
// });
// }

class FindClassesAnalyzer extends Analyzer {
constructor() {
super();
this.name = 'find-classes';
}
export default class FindClassesAnalyzer extends Analyzer {
/** @type {AnalyzerName} */
static analyzerName = 'find-classes';

/** @type {'babel'|'swc-to-babel'} */
static requiredAst = 'babel';

/**
 * @desc Will find all public members (properties (incl. getter/setters)/functions) of a class and
 * Will find all public members (properties (incl. getter/setters)/functions) of a class and
 * will make a distinction between private, public and protected methods
 * @param {FindClassesConfig} customConfig
 * @param {Partial<FindClassesConfig>} customConfig
 */
async execute(customConfig = {}) {
/** @type {FindClassesConfig} */
const cfg = {
gatherFilesConfig: {},
targetProjectPath: null,
metaConfig: null,
...customConfig,
};
async execute(customConfig) {
const cfg = customConfig;

/**
 * Prepare

@@ -235,7 +252,7 @@ class FindClassesAnalyzer extends Analyzer {
/** @type {FindClassesAnalyzerOutput} */
const queryOutput = await this._traverse(async (ast, { relativePath }) => {
const projectPath = cfg.targetProjectPath;
const fullPath = pathLib.resolve(projectPath, relativePath);
const fullPath = path.resolve(projectPath, relativePath);
const transformedEntry = await findMembersPerAstEntry(ast, fullPath, projectPath);
return { result: transformedEntry };
});

@@ -247,5 +264,3 @@ class FindClassesAnalyzer extends Analyzer {
return this._finalize(queryOutput, cfg);
}
}

module.exports = FindClassesAnalyzer;
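Throughout this analyzer the Babel `path` parameter is renamed to `astPath`, and the async `aForEach` helper is replaced by a plain `for...of` loop that runs after traversal, since Babel visitors are synchronous. A minimal standalone sketch of that collect-then-await pattern (the `@babel/parser` usage and the `resolveClassInfo` helper are illustrative, not part of this diff):

import { parse } from '@babel/parser';
import babelTraverse from '@babel/traverse';

// Illustrative async step, standing in for traverseClass / trackDownIdentifierFromScope.
async function resolveClassInfo(astPath, isMixin) {
  return { name: astPath.node.id?.name ?? '[anonymous]', isMixin };
}

const babelAst = parse(`class Foo extends HTMLElement {} const FooMixin = class extends Foo {};`);

const classesToTraverse = [];
babelTraverse.default(babelAst, {
  ClassDeclaration(astPath) {
    classesToTraverse.push({ astPath, isMixin: false });
  },
  ClassExpression(astPath) {
    classesToTraverse.push({ astPath, isMixin: true });
  },
});

// Visitors cannot be async, so awaiting happens outside the traversal:
const results = [];
for (const { astPath, isMixin } of classesToTraverse) {
  results.push(await resolveClassInfo(astPath, isMixin));
}
console.log(results); // [{ name: 'Foo', isMixin: false }, { name: '[anonymous]', isMixin: true }]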
@@ -1,9 +1,14 @@
const pathLib = require('path');
const t = require('@babel/types');
const { default: traverse } = require('@babel/traverse');
const { Analyzer } = require('./helpers/Analyzer.js');
const { trackDownIdentifierFromScope } = require('./helpers/track-down-identifier.js');
const { aForEach } = require('../utils/async-array-utils.js');
import path from 'path';
import t from '@babel/types';
import babelTraverse from '@babel/traverse';
import { Analyzer } from '../core/Analyzer.js';
import { trackDownIdentifierFromScope } from './helpers/track-down-identifier--legacy.js';

/**
 * @typedef {import('@babel/types').File} File
 * @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
 * @typedef {import('../../../types/index.js').FindCustomelementsConfig} FindCustomelementsConfig
 */

function cleanup(transformedEntry) {
transformedEntry.forEach(definitionObj => {

@@ -16,35 +21,35 @@ function cleanup(transformedEntry) {
}

async function trackdownRoot(transformedEntry, relativePath, projectPath) {
const fullCurrentFilePath = pathLib.resolve(projectPath, relativePath);
const fullCurrentFilePath = path.resolve(projectPath, relativePath);

await aForEach(transformedEntry, async definitionObj => {
for (const definitionObj of transformedEntry) {
const rootFile = await trackDownIdentifierFromScope(
definitionObj.__tmp.path,
definitionObj.__tmp.astPath,
definitionObj.constructorIdentifier,
fullCurrentFilePath,
projectPath,
);
// eslint-disable-next-line no-param-reassign
definitionObj.rootFile = rootFile;
});
}
return transformedEntry;
}

/**
 * @desc Finds import specifiers and sources
 * @param {BabelAst} ast
 * Finds import specifiers and sources
 * @param {File} babelAst
 */
function findCustomElementsPerAstEntry(ast) {
function findCustomElementsPerAstFile(babelAst) {
const definitions = [];
traverse(ast, {
CallExpression(path) {
babelTraverse.default(babelAst, {
CallExpression(astPath) {
let found = false;
// Doing it like this we detect 'customElements.define()',
// but also 'window.customElements.define()'
path.traverse({
astPath.traverse({
MemberExpression(memberPath) {
if (memberPath.parentPath !== path) {
if (memberPath.parentPath !== astPath) {
return;
}
const { node } = memberPath;

@@ -65,33 +70,34 @@ function findCustomElementsPerAstEntry(ast) {
let tagName;
let constructorIdentifier;

if (t.isLiteral(path.node.arguments[0])) {
tagName = path.node.arguments[0].value;
if (t.isLiteral(astPath.node.arguments[0])) {
tagName = astPath.node.arguments[0].value;
} else {
// No Literal found. For now, we only mark them as '[variable]'
tagName = '[variable]';
}
if (path.node.arguments[1].type === 'Identifier') {
constructorIdentifier = path.node.arguments[1].name;
if (astPath.node.arguments[1].type === 'Identifier') {
constructorIdentifier = astPath.node.arguments[1].name;
} else {
// We assume customElements.define('my-el', class extends HTMLElement {...})
constructorIdentifier = '[inline]';
}
definitions.push({ tagName, constructorIdentifier, __tmp: { path } });
definitions.push({ tagName, constructorIdentifier, __tmp: { astPath } });
}
},
});
return definitions;
}

class FindCustomelementsAnalyzer extends Analyzer {
constructor() {
super();
this.name = 'find-customelements';
}
export default class FindCustomelementsAnalyzer extends Analyzer {
/** @type {AnalyzerName} */
static analyzerName = 'find-customelements';

/** @type {'babel'|'swc-to-babel'} */
static requiredAst = 'swc-to-babel';

/**
 * @desc Finds export specifiers and sources
 * Finds export specifiers and sources
 * @param {FindCustomelementsConfig} customConfig
 */
async execute(customConfig = {}) {

@@ -103,18 +109,18 @@ class FindCustomelementsAnalyzer extends Analyzer {
/**
 * Prepare
 */
const analyzerResult = this._prepare(cfg);
if (analyzerResult) {
return analyzerResult;
const cachedAnalyzerResult = this._prepare(cfg);
if (cachedAnalyzerResult) {
return cachedAnalyzerResult;
}

/**
 * Traverse
 */
const projectPath = cfg.targetProjectPath;
const queryOutput = await this._traverse(async (ast, { relativePath }) => {
let transformedEntry = findCustomElementsPerAstEntry(ast);
transformedEntry = await trackdownRoot(transformedEntry, relativePath, projectPath);
const queryOutput = await this._traverse(async (ast, context) => {
let transformedEntry = findCustomElementsPerAstFile(ast);
transformedEntry = await trackdownRoot(transformedEntry, context.relativePath, projectPath);
transformedEntry = cleanup(transformedEntry);
return { result: transformedEntry };
});

@@ -125,5 +131,3 @@ class FindCustomelementsAnalyzer extends Analyzer {
return this._finalize(queryOutput, cfg);
}
}

module.exports = FindCustomelementsAnalyzer;
@@ -1,29 +1,40 @@
/* eslint-disable no-shadow, no-param-reassign */
const pathLib = require('path');
const { default: traverse } = require('@babel/traverse');
const { Analyzer } = require('./helpers/Analyzer.js');
const { trackDownIdentifier } = require('./helpers/track-down-identifier.js');
const { normalizeSourcePaths } = require('./helpers/normalize-source-paths.js');
const { getReferencedDeclaration } = require('../utils/get-source-code-fragment-of-declaration.js');

const { LogService } = require('../services/LogService.js');
import path from 'path';
import { swcTraverse } from '../utils/swc-traverse.js';
import { getAssertionType } from '../utils/get-assertion-type.js';
import { Analyzer } from '../core/Analyzer.js';
import { trackDownIdentifier } from './helpers/track-down-identifier.js';
import { normalizeSourcePaths } from './helpers/normalize-source-paths.js';
import { getReferencedDeclaration } from '../utils/get-source-code-fragment-of-declaration.js';
import { LogService } from '../core/LogService.js';

/**
 * @typedef {import("@swc/core").Module} SwcAstModule
 * @typedef {import("@swc/core").Node} SwcNode
 * @typedef {import("@swc/core").VariableDeclaration} SwcVariableDeclaration
 * @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
 * @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
 * @typedef {import('../../../types/index.js').FindExportsAnalyzerResult} FindExportsAnalyzerResult
 * @typedef {import('../../../types/index.js').FindExportsAnalyzerEntry} FindExportsAnalyzerEntry
 * @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
 * @typedef {import('../../../types/index.js').SwcScope} SwcScope
 * @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
 * @typedef {import('../../../types/index.js').SwcPath} SwcPath
 * @typedef {import('../../../types/index.js').SwcVisitor} SwcVisitor
 * @typedef {import('./helpers/track-down-identifier.js').RootFile} RootFile
 * @typedef {object} RootFileMapEntry
 * @typedef {string} currentFileSpecifier this is the local name in the file we track from
 * @typedef {RootFile} rootFile contains file(filePath) and specifier
 * @typedef {RootFileMapEntry[]} RootFileMap
 *
 * @typedef {{ exportSpecifiers:string[]; localMap: object; source:string, __tmp: { path:string } }} FindExportsSpecifierObj
 */

/**
 * @param {FindExportsSpecifierObj[]} transformedEntry
 * @param {FindExportsSpecifierObj[]} transformedFile
 */
async function trackdownRoot(transformedEntry, relativePath, projectPath) {
const fullCurrentFilePath = pathLib.resolve(projectPath, relativePath);
for (const specObj of transformedEntry) {
async function trackdownRoot(transformedFile, relativePath, projectPath) {
const fullCurrentFilePath = path.resolve(projectPath, relativePath);
for (const specObj of transformedFile) {
/** @type {RootFileMap} */
const rootFileMap = [];
if (specObj.exportSpecifiers[0] === '[file]') {

@@ -79,144 +90,145 @@ async function trackdownRoot(transformedEntry, relativePath, projectPath) {
}
specObj.rootFileMap = rootFileMap;
}
return transformedEntry;
return transformedFile;
}

function cleanup(transformedEntry) {
transformedEntry.forEach(specObj => {
function cleanup(transformedFile) {
transformedFile.forEach(specObj => {
if (specObj.__tmp) {
delete specObj.__tmp;
}
});
return transformedEntry;
return transformedFile;
}

/**
 * @param {*} node
 * @returns {string[]}
 */
function getExportSpecifiers(node) {
// handles default [export const g = 4];
if (node.declaration) {
if (node.declaration.declarations) {
return [node.declaration.declarations[0].id.name];
return [node.declaration.declarations[0].id.value];
}
if (node.declaration.id) {
return [node.declaration.id.name];
if (node.declaration.identifier) {
return [node.declaration.identifier.value];
}
}

// handles (re)named specifiers [export { x (as y)} from 'y'];
return node.specifiers.map(s => {
let specifier;
return (node.specifiers || []).map(s => {
if (s.exported) {
// { x as y }
specifier = s.exported.name;
} else {
// { x }
specifier = s.local.name;
return s.exported.value === 'default' ? '[default]' : s.exported.value;
}
return specifier;
// { x }
return s.orig.value;
});
}

/**
 * @returns {object[]}
 * @returns {{local:string;exported:string;}|undefined[]}
 */
function getLocalNameSpecifiers(node) {
return node.specifiers
return (node.declaration?.declarations || node.specifiers || [])
.map(s => {
if (s.exported && s.local && s.exported.name !== s.local.name) {
if (s.exported && s.orig && s.exported.value !== s.orig.value) {
return {
// if reserved keyword 'default' is used, translate it into 'providence keyword'
local: s.local.name === 'default' ? '[default]' : s.local.name,
exported: s.exported.name,
local: s.orig.value === 'default' ? '[default]' : s.orig.value,
exported: s.exported.value,
};
}
return undefined;
})
.filter(s => s);
.filter(Boolean);
}

const isImportingSpecifier = pathOrNode =>
pathOrNode.type === 'ImportDefaultSpecifier' || pathOrNode.type === 'ImportSpecifier';

/**
 * @desc Finds import specifiers and sources for a given ast result
 * @param {BabelAst} ast
 * Finds import specifiers and sources for a given ast result
 * @param {SwcAstModule} swcAst
 * @param {FindExportsConfig} config
 */
function findExportsPerAstEntry(ast, { skipFileImports }) {
LogService.debug(`Analyzer "find-exports": started findExportsPerAstEntry method`);
function findExportsPerAstFile(swcAst, { skipFileImports }) {
LogService.debug(`Analyzer "find-exports": started findExportsPerAstFile method`);

// Visit AST...

/** @type {FindExportsSpecifierObj} */
const transformedEntry = [];
/** @type {FindExportsSpecifierObj[]} */
const transformedFile = [];
// Unfortunately, we cannot have async functions in babel traverse.
// Therefore, we store a temp reference to path that we use later for
// async post processing (tracking down original export Identifier)
/** @type {{[key:string]:SwcBinding}} */
let globalScopeBindings;

traverse(ast, {
Program: {
enter(babelPath) {
const body = babelPath.get('body');
if (body.length) {
globalScopeBindings = body[0].scope.bindings;
const exportHandler = (/** @type {SwcPath} */ astPath) => {
const exportSpecifiers = getExportSpecifiers(astPath.node);
const localMap = getLocalNameSpecifiers(astPath.node);
const source = astPath.node.source?.value;
const entry = { exportSpecifiers, localMap, source, __tmp: { astPath } };
const assertionType = getAssertionType(astPath.node);
if (assertionType) {
entry.assertionType = assertionType;
}
},
},
ExportNamedDeclaration(path) {
const exportSpecifiers = getExportSpecifiers(path.node);
const localMap = getLocalNameSpecifiers(path.node);
const source = path.node.source?.value;
const entry = { exportSpecifiers, localMap, source, __tmp: { path } };
if (path.node.assertions?.length) {
entry.assertionType = path.node.assertions[0].value?.value;
}
transformedEntry.push(entry);
},
ExportDefaultDeclaration(defaultExportPath) {
transformedFile.push(entry);
};

const exportDefaultHandler = (/** @type {SwcPath} */ astPath) => {
const exportSpecifiers = ['[default]'];
let source;
if (defaultExportPath.node.declaration?.type !== 'Identifier') {
source = defaultExportPath.node.declaration.name;
} else {
// Is it an inline declaration like "export default class X {};" ?
if (
astPath.node.decl?.type === 'Identifier' ||
astPath.node.expression?.type === 'Identifier'
) {
// It is a reference to an identifier like "export { x } from 'y';"
const importOrDeclPath = getReferencedDeclaration({
referencedIdentifierName: defaultExportPath.node.declaration.name,
referencedIdentifierName: astPath.node.decl?.value || astPath.node.expression.value,
globalScopeBindings,
});
if (isImportingSpecifier(importOrDeclPath)) {
source = importOrDeclPath.parentPath.node.source.value;
}
}
transformedEntry.push({ exportSpecifiers, source, __tmp: { path: defaultExportPath } });
transformedFile.push({ exportSpecifiers, source, __tmp: { astPath } });
};

/** @type {SwcVisitor} */
const visitor = {
Module({ scope }) {
globalScopeBindings = scope.bindings;
},
});
ExportDeclaration: exportHandler,
ExportNamedDeclaration: exportHandler,
ExportDefaultDeclaration: exportDefaultHandler,
ExportDefaultExpression: exportDefaultHandler,
};

swcTraverse(swcAst, visitor, { needsAdvancedPaths: true });

if (!skipFileImports) {
// Always add an entry for just the file 'relativePath'
// (since this also can be imported directly from a search target project)
transformedEntry.push({
transformedFile.push({
exportSpecifiers: ['[file]'],
// source: relativePath,
});
}

return transformedEntry;
return transformedFile;
}

class FindExportsAnalyzer extends Analyzer {
constructor() {
super();
this.name = 'find-exports';
}
export default class FindExportsAnalyzer extends Analyzer {
static analyzerName = /** @type {AnalyzerName} */ ('find-exports');

static requiredAst = /** @type {AnalyzerAst} */ ('swc');

/**
 * @desc Finds export specifiers and sources
 * @param {FindExportsConfig} customConfig
 */
async execute(customConfig = {}) {
/**
 * @typedef FindExportsConfig
 * @property {boolean} [onlyInternalSources=false]

@@ -224,46 +236,23 @@ class FindExportsAnalyzer extends Analyzer {
 * [import {specifier} 'lion-based-ui/foo.js'], and [import 'lion-based-ui/foo.js'] as a result,
 * not list file exports
 */
const cfg = {
get config() {
return {
targetProjectPath: null,
skipFileImports: false,
...customConfig,
...this._customConfig,
};

/**
 * Prepare
 */
const analyzerResult = this._prepare(cfg);
if (analyzerResult) {
return analyzerResult;
}

/**
 * Traverse
 */
const projectPath = cfg.targetProjectPath;
static async analyzeFile(ast, { relativePath, analyzerCfg }) {
const projectPath = analyzerCfg.targetProjectPath;

const traverseEntryFn = async (ast, { relativePath }) => {
let transformedEntry = findExportsPerAstEntry(ast, cfg);
let transformedFile = findExportsPerAstFile(ast, analyzerCfg);

transformedEntry = await normalizeSourcePaths(transformedEntry, relativePath, projectPath);
transformedEntry = await trackdownRoot(transformedEntry, relativePath, projectPath);
transformedEntry = cleanup(transformedEntry);
transformedFile = await normalizeSourcePaths(transformedFile, relativePath, projectPath);
transformedFile = await trackdownRoot(transformedFile, relativePath, projectPath);
transformedFile = cleanup(transformedFile);

return { result: transformedEntry };
};

const queryOutput = await this._traverse({
traverseEntryFn,
filePaths: cfg.targetFilePaths,
projectPath: cfg.targetProjectPath,
});

/**
 * Finalize
 */
return this._finalize(queryOutput, cfg);
return { result: transformedFile };
}
}

module.exports = FindExportsAnalyzer;
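The swc-based handlers above still produce one entry per export statement, shaped like `{ exportSpecifiers, localMap, source }` plus a temporary `__tmp.astPath` used for the later async track-down. A conceptual sketch of the entries such a visitor would collect for a small input file (values illustrative, `__tmp` omitted):

// Input file (conceptually):
//   export { RefComp as MyComp } from './src/RefComp.js';
//   export default class Foo {}
const exampleEntries = [
  {
    exportSpecifiers: ['MyComp'],
    localMap: [{ local: 'RefComp', exported: 'MyComp' }],
    source: './src/RefComp.js',
  },
  { exportSpecifiers: ['[default]'], source: undefined }, // inline default class: no source to follow
  { exportSpecifiers: ['[file]'] }, // appended when skipFileImports is false
];
console.log(exampleEntries);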
@@ -1,120 +1,106 @@
/* eslint-disable no-shadow, no-param-reassign */
const { default: traverse } = require('@babel/traverse');
const { isRelativeSourcePath } = require('../utils/relative-source-path.js');
const { normalizeSourcePaths } = require('./helpers/normalize-source-paths.js');
const { Analyzer } = require('./helpers/Analyzer.js');
const { LogService } = require('../services/LogService.js');
import { isRelativeSourcePath } from '../utils/relative-source-path.js';
import { swcTraverse } from '../utils/swc-traverse.js';
import { getAssertionType } from '../utils/get-assertion-type.js';
import { normalizeSourcePaths } from './helpers/normalize-source-paths.js';
import { Analyzer } from '../core/Analyzer.js';
import { LogService } from '../core/LogService.js';

/**
 * @typedef {import('../types/core').AnalyzerName} AnalyzerName
 * @typedef {import('../types/analyzers').FindImportsAnalyzerResult} FindImportsAnalyzerResult
 * @typedef {import('../types/analyzers').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
 * @typedef {import('../types/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
 * @typedef {import("@swc/core").Module} SwcAstModule
 * @typedef {import("@swc/core").Node} SwcNode
 * @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
 * @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
 * @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
 * @typedef {import('../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
 * @typedef {import('../../../types/index.js').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
 * @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
 */

/**
 * Options that allow to filter 'on a file basis'.
 * We can also filter on the total result
 * @param {SwcNode} node
 */
const /** @type {AnalyzerOptions} */ options = {
/**
 * Only leaves entries with external sources:
 * - keeps: '@open-wc/testing'
 * - drops: '../testing'
 * @param {FindImportsAnalyzerResult} result
 * @param {string} targetSpecifier for instance 'LitElement'
 */
onlyExternalSources(result) {
return result.filter(entry => !isRelativeSourcePath(entry.source));
},
};

function getImportOrReexportsSpecifiers(node) {
// @ts-expect-error
return node.specifiers.map(s => {
if (s.type === 'ImportDefaultSpecifier' || s.type === 'ExportDefaultSpecifier') {
if (
s.type === 'ImportDefaultSpecifier' ||
s.type === 'ExportDefaultSpecifier' ||
(s.type === 'ExportSpecifier' && s.exported?.value === 'default')
) {
return '[default]';
}
if (s.type === 'ImportNamespaceSpecifier' || s.type === 'ExportNamespaceSpecifier') {
return '[*]';
}
if ((s.imported && s.type === 'ImportNamespaceSpecifier') || s.type === 'ImportSpecifier') {
return s.imported.name;
}
if (s.exported && s.type === 'ExportNamespaceSpecifier') {
return s.exported.name;
}
return s.local.name;
const importedValue = s.imported?.value || s.orig?.value || s.exported?.value || s.local?.value;
return importedValue;
});
}

/**
 * Finds import specifiers and sources
 * @param {any} ast
 * @param {SwcAstModule} swcAst
 */
function findImportsPerAstEntry(ast) {
LogService.debug(`Analyzer "find-imports": started findImportsPerAstEntry method`);
function findImportsPerAstFile(swcAst) {
LogService.debug(`Analyzer "find-imports": started findImportsPerAstFile method`);

// https://github.com/babel/babel/blob/672a58660f0b15691c44582f1f3fdcdac0fa0d2f/packages/babel-core/src/transformation/index.ts#L110
// Visit AST...
/** @type {Partial<FindImportsAnalyzerEntry>[]} */
const transformedEntry = [];
traverse(ast, {
ImportDeclaration(path) {
const importSpecifiers = getImportOrReexportsSpecifiers(path.node);
const transformedFile = [];

swcTraverse(swcAst, {
ImportDeclaration({ node }) {
const importSpecifiers = getImportOrReexportsSpecifiers(node);
if (!importSpecifiers.length) {
importSpecifiers.push('[file]'); // apparently, there was just a file import
}
const source = path.node.source.value;
const entry = { importSpecifiers, source };
if (path.node.assertions?.length) {
entry.assertionType = path.node.assertions[0].value?.value;
const source = node.source.value;
const entry = /** @type {Partial<FindImportsAnalyzerEntry>} */ ({ importSpecifiers, source });
const assertionType = getAssertionType(node);
if (assertionType) {
entry.assertionType = getAssertionType(node);
}
transformedEntry.push(entry);
transformedFile.push(entry);
},
ExportNamedDeclaration({ node }) {
if (!node.source) {
return; // we are dealing with a regular export, not a reexport
}
const importSpecifiers = getImportOrReexportsSpecifiers(node);
const source = node.source.value;
const entry = /** @type {Partial<FindImportsAnalyzerEntry>} */ ({ importSpecifiers, source });
const assertionType = getAssertionType(node);
if (assertionType) {
entry.assertionType = assertionType;
}
transformedFile.push(entry);
},
// Dynamic imports
CallExpression(path) {
if (path.node.callee && path.node.callee.type === 'Import') {
CallExpression({ node }) {
if (node.callee?.type !== 'Import') {
return;
}
// TODO: check for specifiers catched via obj destructuring?
// TODO: also check for ['file']
const importSpecifiers = ['[default]'];
let source = path.node.arguments[0].value;
if (!source) {
// TODO: with advanced retrieval, we could possibly get the value
source = '[variable]';
}
transformedEntry.push({ importSpecifiers, source });
}
const dynamicImportExpression = node.arguments[0].expression;
const source =
dynamicImportExpression.type === 'StringLiteral'
? dynamicImportExpression.value
: '[variable]';
transformedFile.push({ importSpecifiers, source });
},
ExportNamedDeclaration(path) {
if (!path.node.source) {
return; // we are dealing with a regular export, not a reexport
}
const importSpecifiers = getImportOrReexportsSpecifiers(path.node);
const source = path.node.source.value;
const entry = { importSpecifiers, source };
if (path.node.assertions?.length) {
entry.assertionType = path.node.assertions[0].value?.value;
}
transformedEntry.push(entry);
},
// ExportAllDeclaration(path) {
// if (!path.node.source) {
// return; // we are dealing with a regular export, not a reexport
// }
// const importSpecifiers = ['[*]'];
// const source = path.node.source.value;
// transformedEntry.push({ importSpecifiers, source });
// },
});

return transformedEntry;
return transformedFile;
}

class FindImportsAnalyzer extends Analyzer {
constructor() {
super();
/** @type {AnalyzerName} */
this.name = 'find-imports';
}
export default class FindImportsSwcAnalyzer extends Analyzer {
static analyzerName = /** @type {AnalyzerName} */ ('find-imports');

static requiredAst = /** @type {AnalyzerAst} */ ('swc');

/**
 * Finds import specifiers and sources

@@ -138,42 +124,36 @@ class FindImportsAnalyzer extends Analyzer {
/**
 * Prepare
 */
const analyzerResult = this._prepare(cfg);
if (analyzerResult) {
return analyzerResult;
const cachedAnalyzerResult = this._prepare(cfg);
if (cachedAnalyzerResult) {
return cachedAnalyzerResult;
}

/**
 * Traverse
 */
const queryOutput = await this._traverse(async (ast, { relativePath }) => {
let transformedEntry = findImportsPerAstEntry(ast);
const queryOutput = await this._traverse(async (swcAst, context) => {
// @ts-expect-error
let transformedFile = findImportsPerAstFile(swcAst);
// Post processing based on configuration...
transformedEntry = await normalizeSourcePaths(
transformedEntry,
relativePath,
transformedFile = await normalizeSourcePaths(
transformedFile,
context.relativePath,
// @ts-expect-error
cfg.targetProjectPath,
);

if (!cfg.keepInternalSources) {
transformedEntry = options.onlyExternalSources(transformedEntry);
// @ts-expect-error
transformedFile = transformedFile.filter(entry => !isRelativeSourcePath(entry.source));
}

return { result: transformedEntry };
return { result: transformedFile };
});

// if (cfg.sortBySpecifier) {
// queryOutput = sortBySpecifier.execute(queryOutput, {
// ...cfg,
// specifiersKey: 'importSpecifiers',
// });
// }

/**
 * Finalize
 */
return this._finalize(queryOutput, cfg);
}
}

module.exports = FindImportsAnalyzer;
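As with find-exports, the per-file output shape stays `{ importSpecifiers, source }` (optionally with `assertionType`); only the traversal mechanism changed to `swcTraverse`. A conceptual sketch of the entries for a small input file (values illustrative):

// Input file (conceptually):
//   import { LitElement } from 'lit';
//   import('@lion/overlays');
//   export { ajax } from '@lion/ajax';
const exampleEntries = [
  { importSpecifiers: ['LitElement'], source: 'lit' },
  { importSpecifiers: ['[default]'], source: '@lion/overlays' }, // dynamic import
  { importSpecifiers: ['ajax'], source: '@lion/ajax' }, // a re-export also counts as an incoming dependency
];
console.log(exampleEntries);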
@@ -1,29 +1,14 @@
const { isRelativeSourcePath } = require('../../utils/relative-source-path.js');
const { LogService } = require('../../services/LogService.js');
const { resolveImportPath } = require('../../utils/resolve-import-path.js');
import { isRelativeSourcePath } from '../../utils/relative-source-path.js';
import { LogService } from '../../core/LogService.js';
import { resolveImportPath } from '../../utils/resolve-import-path.js';
import { toPosixPath } from '../../utils/to-posix-path.js';

/**
 * @typedef {import('../../types/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
 * @typedef {import('../../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
 * @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
 * @typedef {import('../../../../types/index.js').SpecifierSource} SpecifierSource
 */

/**
 * @param {string} importee like '@lion/core/myFile.js'
 * @returns {string} project name ('@lion/core')
 */
function getProjectFromImportee(importee) {
const scopedProject = importee[0] === '@';
// 'external-project/src/file.js' -> ['external-project', 'src', file.js']
let splitSource = importee.split('/');
if (scopedProject) {
// '@external/project'
splitSource = [splitSource.slice(0, 2).join('/'), ...splitSource.slice(2)];
}
// ['external-project', 'src', 'file.js'] -> 'external-project'
const project = splitSource.slice(0, 1).join('/');

return project;
}

/**
 * Gets local path from reference project
 *

@@ -33,28 +18,29 @@ function getProjectFromImportee(importee) {
 * - from: 'reference-project'
 * - to: './index.js' (or other file specified in package.json 'main')
 * @param {object} config
 * @param {string} config.importee 'reference-project/foo.js'
 * @param {string} config.importer '/my/project/importing-file.js'
 * @param {SpecifierSource} config.importee 'reference-project/foo.js'
 * @param {PathFromSystemRoot} config.importer '/my/project/importing-file.js'
 * @param {PathFromSystemRoot} config.importeeProjectPath '/path/to/reference/project'
 * @returns {Promise<PathRelativeFromProjectRoot|null>} './foo.js'
 */
async function fromImportToExportPerspective({ importee, importer }) {
export async function fromImportToExportPerspective({ importee, importer, importeeProjectPath }) {
if (isRelativeSourcePath(importee)) {
LogService.warn('[fromImportToExportPerspective] Please only provide external import paths');
LogService.warn(
`[fromImportToExportPerspective] Please only provide external import paths for ${{
importee,
importer,
importeeProjectPath,
}}`,
);
return null;
}

const absolutePath = await resolveImportPath(importee, importer);
const projectName = getProjectFromImportee(importee);
if (!absolutePath) {
return null;
}

/**
 * - from: '/my/reference/project/packages/foo/index.js'
 * - to: './packages/foo/index.js'
 */
return absolutePath
? /** @type {PathRelativeFromProjectRoot} */ (
absolutePath.replace(new RegExp(`^.*/${projectName}/?(.*)$`), './$1')
)
: null;
return /** @type {PathRelativeFromProjectRoot} */ (
absolutePath.replace(new RegExp(`^${toPosixPath(importeeProjectPath)}/?(.*)$`), './$1')
);
}

module.exports = { fromImportToExportPerspective };
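The rewritten helper no longer derives the project name from the importee string; it strips the explicitly passed (POSIX-normalized) `importeeProjectPath` off the resolved absolute path. A small worked example of that final replace (paths illustrative):

// Paths are illustrative; importeeProjectPath is assumed to be POSIX-normalized already.
const absolutePath = '/my/reference/project/packages/foo/index.js';
const importeeProjectPath = '/my/reference/project';

const relativeFromProjectRoot = absolutePath.replace(
  new RegExp(`^${importeeProjectPath}/?(.*)$`),
  './$1',
);
console.log(relativeFromProjectRoot); // -> './packages/foo/index.js'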
@@ -1,26 +1,24 @@
/* eslint-disable no-param-reassign */
const pathLib = require('path');
const { isRelativeSourcePath } = require('../../utils/relative-source-path.js');
const { resolveImportPath } = require('../../utils/resolve-import-path.js');
const { toPosixPath } = require('../../utils/to-posix-path.js');
const { aMap } = require('../../utils/async-array-utils.js');
import path from 'path';
import { isRelativeSourcePath } from '../../utils/relative-source-path.js';
import { resolveImportPath } from '../../utils/resolve-import-path.js';
import { toPosixPath } from '../../utils/to-posix-path.js';

/**
 * @typedef {import('../../types/core').PathRelative} PathRelative
 * @typedef {import('../../types/core').PathFromSystemRoot} PathFromSystemRoot
 * @typedef {import('../../types/core').QueryOutput} QueryOutput
 * @typedef {import('../../../../types/index.js').PathRelative} PathRelative
 * @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
 * @typedef {import('../../../../types/index.js').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
 */

/**
 *
 * @param {PathFromSystemRoot} currentDirPath
 * @param {PathFromSystemRoot} resolvedPath
 * @returns {PathRelative}
 */
function toLocalPath(currentDirPath, resolvedPath) {
let relativeSourcePath = pathLib.relative(currentDirPath, resolvedPath);
let relativeSourcePath = path.relative(currentDirPath, resolvedPath);
if (!relativeSourcePath.startsWith('.')) {
// correction on top of pathLib.resolve, which resolves local paths like
// correction on top of path.resolve, which resolves local paths like
// (from import perspective) external modules.
// so 'my-local-files.js' -> './my-local-files.js'
relativeSourcePath = `./${relativeSourcePath}`;

@@ -30,38 +28,42 @@ function toLocalPath(currentDirPath, resolvedPath) {

/**
 * Resolves and converts to normalized local/absolute path, based on file-system information.
 * - from: { source: '../../relative/file' }
 * - to: {
 *   fullPath: './absolute/path/from/root/to/relative/file.js',
 *   normalizedPath: '../../relative/file.js'
 * }
 * @param {QueryOutput} queryOutput
 * - from: '../../relative/file'
 * - to: './src/relative/file.js'
 * @param {string} oldSource
 * @param {string} relativePath
 * @param {string} rootPath
 */
async function normalizeSourcePaths(queryOutput, relativePath, rootPath = process.cwd()) {
const currentFilePath = /** @type {PathFromSystemRoot} */ (
pathLib.resolve(rootPath, relativePath)
);
const currentDirPath = /** @type {PathFromSystemRoot} */ (pathLib.dirname(currentFilePath));
return aMap(queryOutput, async specifierResObj => {
if (specifierResObj.source) {
if (isRelativeSourcePath(specifierResObj.source) && relativePath) {
export async function normalizeSourcePath(oldSource, relativePath, rootPath = process.cwd()) {
const currentFilePath = /** @type {PathFromSystemRoot} */ (path.resolve(rootPath, relativePath));
const currentDirPath = /** @type {PathFromSystemRoot} */ (path.dirname(currentFilePath));

if (isRelativeSourcePath(oldSource) && relativePath) {
// This will be a source like '../my/file.js' or './file.js'
const resolvedPath = /** @type {PathFromSystemRoot} */ (
await resolveImportPath(specifierResObj.source, currentFilePath)
await resolveImportPath(oldSource, currentFilePath)
);
specifierResObj.normalizedSource =
resolvedPath && toLocalPath(currentDirPath, resolvedPath);
// specifierResObj.fullSource = resolvedPath && toRelativeSourcePath(resolvedPath, rootPath);
} else {
return resolvedPath && toLocalPath(currentDirPath, resolvedPath);
}
// This will be a source from a project, like 'lion-based-ui/x.js' or '@open-wc/testing/y.js'
specifierResObj.normalizedSource = specifierResObj.source;
// specifierResObj.fullSource = specifierResObj.source;
}
}
return specifierResObj;
});
return oldSource;
}

module.exports = { normalizeSourcePaths };
/**
 * @param {Partial<FindImportsAnalyzerEntry>[]} queryOutput
 * @param {string} relativePath
 * @param {string} rootPath
 */
export async function normalizeSourcePaths(queryOutput, relativePath, rootPath = process.cwd()) {
const normalizedQueryOutput = [];
for (const specifierResObj of queryOutput) {
if (specifierResObj.source) {
const x = await normalizeSourcePath(specifierResObj.source, relativePath, rootPath);
if (x) {
specifierResObj.normalizedSource = x;
}
}
normalizedQueryOutput.push(specifierResObj);
}
return normalizedQueryOutput;
}
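`toLocalPath` keeps its './' correction: `path.relative` returns a bare string like 'helpers/x.js' for siblings/children, which would read as an external (bare) specifier from an import perspective. A standalone illustration of that correction (paths illustrative, POSIX assumed):

import path from 'path';

const currentDirPath = '/repo/src/program/analyzers';
const resolvedPath = '/repo/src/program/analyzers/helpers/normalize-source-paths.js';

let relativeSourcePath = path.relative(currentDirPath, resolvedPath);
// path.relative yields 'helpers/normalize-source-paths.js' (no leading './')
if (!relativeSourcePath.startsWith('.')) {
  relativeSourcePath = `./${relativeSourcePath}`;
}
console.log(relativeSourcePath); // -> './helpers/normalize-source-paths.js'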
@ -0,0 +1,332 @@
|
|||
/* eslint-disable no-shadow */
|
||||
// @ts-nocheck
|
||||
import fs from 'fs';
|
||||
import pathLib from 'path';
|
||||
import babelTraverse from '@babel/traverse';
|
||||
import { isRelativeSourcePath, toRelativeSourcePath } from '../../utils/relative-source-path.js';
|
||||
import { InputDataService } from '../../core/InputDataService.js';
|
||||
import { resolveImportPath } from '../../utils/resolve-import-path.js';
|
||||
import { AstService } from '../../core/AstService.js';
|
||||
import { LogService } from '../../core/LogService.js';
|
||||
import { memoize } from '../../utils/memoize.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('../../../../types/index.js').RootFile} RootFile
|
||||
* @typedef {import('../../../../types/index.js').SpecifierSource} SpecifierSource
|
||||
* @typedef {import('../../../../types/index.js').IdentifierName} IdentifierName
|
||||
* @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('@babel/traverse').NodePath} NodePath
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {string} source
|
||||
* @param {string} projectName
|
||||
*/
|
||||
function isSelfReferencingProject(source, projectName) {
|
||||
return source.startsWith(`${projectName}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} source
|
||||
* @param {string} projectName
|
||||
*/
|
||||
function isExternalProject(source, projectName) {
|
||||
return (
|
||||
!source.startsWith('#') &&
|
||||
!isRelativeSourcePath(source) &&
|
||||
!isSelfReferencingProject(source, projectName)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Other than with import, no binding is created for MyClass by Babel(?)
|
||||
* This means 'path.scope.getBinding('MyClass')' returns undefined
|
||||
* and we have to find a different way to retrieve this value.
|
||||
* @param {NodePath} astPath Babel ast traversal path
|
||||
* @param {IdentifierName} identifierName the name that should be tracked (and that exists inside scope of astPath)
|
||||
*/
|
||||
function getBindingAndSourceReexports(astPath, identifierName) {
|
||||
// Get to root node of file and look for exports like `export { identifierName } from 'src';`
|
||||
let source;
|
||||
let bindingType;
|
||||
let bindingPath;
|
||||
|
||||
let curPath = astPath;
|
||||
while (curPath.parentPath) {
|
||||
curPath = curPath.parentPath;
|
||||
}
|
||||
const rootPath = curPath;
|
||||
rootPath.traverse({
|
||||
ExportSpecifier(astPath) {
|
||||
// eslint-disable-next-line arrow-body-style
|
||||
const found =
|
||||
astPath.node.exported.name === identifierName || astPath.node.local.name === identifierName;
|
||||
if (found) {
|
||||
bindingPath = astPath;
|
||||
bindingType = 'ExportSpecifier';
|
||||
source = astPath.parentPath.node.source
|
||||
? astPath.parentPath.node.source.value
|
||||
: '[current]';
|
||||
astPath.stop();
|
||||
}
|
||||
},
|
||||
});
|
||||
return [source, bindingType, bindingPath];
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves source (like '@lion/core') and importedIdentifierName (like 'lit') from ast for
|
||||
* current file.
|
||||
* We might be an import that was locally renamed.
|
||||
* Since we are traversing, we are interested in the imported name. Or in case of a re-export,
|
||||
* the local name.
|
||||
* @param {NodePath} astPath Babel ast traversal path
|
||||
* @param {string} identifierName the name that should be tracked (and that exists inside scope of astPath)
|
||||
* @returns {{ source:string, importedIdentifierName:string }}
|
||||
*/
|
||||
export function getImportSourceFromAst(astPath, identifierName) {
|
||||
let source;
|
||||
let importedIdentifierName;
|
||||
|
||||
const binding = astPath.scope.getBinding(identifierName);
|
||||
let bindingType = binding?.path.type;
|
||||
let bindingPath = binding?.path;
|
||||
const matchingTypes = ['ImportSpecifier', 'ImportDefaultSpecifier', 'ExportSpecifier'];
|
||||
|
||||
if (bindingType && matchingTypes.includes(bindingType)) {
|
||||
source = binding?.path?.parentPath?.node?.source?.value;
|
||||
} else {
|
||||
// no binding
|
||||
[source, bindingType, bindingPath] = getBindingAndSourceReexports(astPath, identifierName);
|
||||
}
|
||||
|
||||
const shouldLookForDefaultExport = bindingType === 'ImportDefaultSpecifier';
|
||||
if (shouldLookForDefaultExport) {
|
||||
importedIdentifierName = '[default]';
|
||||
} else if (source) {
|
||||
const { node } = bindingPath;
|
||||
importedIdentifierName = (node.imported && node.imported.name) || node.local.name;
|
||||
}
|
||||
|
||||
return { source, importedIdentifierName };
|
||||
}
|
||||
|
||||
/**
|
||||
* @typedef {(source:SpecifierSource,identifierName:IdentifierName,currentFilePath:PathFromSystemRoot,rootPath:PathFromSystemRoot,projectName?: string,depth?:number) => Promise<RootFile>} TrackDownIdentifierFn
|
||||
*/
|
||||
|
||||
/**
|
||||
* Follows the full path of an Identifier until its declaration ('root file') is found.
|
||||
* @example
|
||||
*```js
|
||||
* // 1. Starting point
|
||||
* // target-proj/my-comp-import.js
|
||||
* import { MyComp as TargetComp } from 'ref-proj';
|
||||
*
|
||||
* // 2. Intermediate stop: a re-export
|
||||
* // ref-proj/exportsIndex.js (package.json has main: './exportsIndex.js')
|
||||
* export { RefComp as MyComp } from './src/RefComp.js';
|
||||
*
|
||||
* // 3. End point: our declaration
|
||||
* // ref-proj/src/RefComp.js
|
||||
* export class RefComp extends LitElement {...}
|
||||
*```
|
||||
*
|
||||
* -param {SpecifierSource} source an importSpecifier source, like 'ref-proj' or '../file'
|
||||
* -param {IdentifierName} identifierName imported reference/Identifier name, like 'MyComp'
|
||||
* -param {PathFromSystemRoot} currentFilePath file path, like '/path/to/target-proj/my-comp-import.js'
|
||||
* -param {PathFromSystemRoot} rootPath dir path, like '/path/to/target-proj'
|
||||
* -param {string} [projectName] like 'target-proj' or '@lion/input'
|
||||
* -returns {Promise<RootFile>} file: path of file containing the binding (exported declaration),
|
||||
* like '/path/to/ref-proj/src/RefComp.js'
|
||||
*/
|
||||
/** @type {TrackDownIdentifierFn} */
|
||||
// eslint-disable-next-line import/no-mutable-exports
|
||||
export let trackDownIdentifier;
|
||||
|
||||
/** @type {TrackDownIdentifierFn} */
|
||||
async function trackDownIdentifierFn(
|
||||
source,
|
||||
identifierName,
|
||||
currentFilePath,
|
||||
rootPath,
|
||||
projectName,
|
||||
depth = 0,
|
||||
) {
|
||||
let rootFilePath; // our result path
|
||||
let rootSpecifier; // the name under which it was imported
|
||||
|
||||
if (!projectName) {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
projectName = InputDataService.getPackageJson(rootPath)?.name;
|
||||
}
|
||||
|
||||
if (projectName && isExternalProject(source, projectName)) {
|
||||
// So, it is an external ref like '@lion/core' or '@open-wc/scoped-elements/index.js'
|
||||
// At this moment in time, we don't know if we have file system access to this particular
// project. Therefore, we limit ourselves to tracking down local references.
// In case this helper is used inside an analyzer like 'match-subclasses', the external
// (search-target) project can be accessed and paths can be resolved to local ones,
// just like in 'match-imports' analyzer.
/** @type {RootFile} */
const result = { file: source, specifier: identifierName };
return result;
}

const resolvedSourcePath = await resolveImportPath(source, currentFilePath);

LogService.debug(`[trackDownIdentifier] ${resolvedSourcePath}`);
const allowedJsModuleExtensions = ['.mjs', '.js'];
if (!allowedJsModuleExtensions.includes(pathLib.extname(resolvedSourcePath))) {
// We have an import assertion
return /** @type { RootFile } */ {
file: toRelativeSourcePath(resolvedSourcePath, rootPath),
specifier: '[default]',
};
}
const code = fs.readFileSync(resolvedSourcePath, 'utf8');
const babelAst = AstService.getAst(code, 'swc-to-babel', { filePath: resolvedSourcePath });

const shouldLookForDefaultExport = identifierName === '[default]';

let reexportMatch = false; // named specifier declaration
let exportMatch;
let pendingTrackDownPromise;

babelTraverse.default(babelAst, {
ExportDefaultDeclaration(astPath) {
if (!shouldLookForDefaultExport) {
return;
}

let newSource;
if (astPath.node.declaration.type === 'Identifier') {
newSource = getImportSourceFromAst(astPath, astPath.node.declaration.name).source;
}

if (newSource) {
pendingTrackDownPromise = trackDownIdentifier(
newSource,
'[default]',
resolvedSourcePath,
rootPath,
projectName,
depth + 1,
);
} else {
// We found our file!
rootSpecifier = identifierName;
rootFilePath = toRelativeSourcePath(resolvedSourcePath, rootPath);
}
astPath.stop();
},
ExportNamedDeclaration: {
enter(astPath) {
if (reexportMatch || shouldLookForDefaultExport) {
return;
}

// Are we dealing with a re-export ?
if (astPath.node.specifiers?.length) {
exportMatch = astPath.node.specifiers.find(s => s.exported.name === identifierName);

if (exportMatch) {
const localName = exportMatch.local.name;
let newSource;
if (astPath.node.source) {
/**
* @example
* export { x } from 'y'
*/
newSource = astPath.node.source.value;
} else {
/**
* @example
* import { x } from 'y'
* export { x }
*/
newSource = getImportSourceFromAst(astPath, identifierName).source;

if (!newSource || newSource === '[current]') {
/**
* @example
* const x = 12;
* export { x }
*/
return;
}
}
reexportMatch = true;
pendingTrackDownPromise = trackDownIdentifier(
newSource,
localName,
resolvedSourcePath,
rootPath,
projectName,
depth + 1,
);
astPath.stop();
}
}
},
exit(astPath) {
if (!reexportMatch) {
// We didn't find a re-exported Identifier, that means the reference is declared
// in current file...
rootSpecifier = identifierName;
rootFilePath = toRelativeSourcePath(resolvedSourcePath, rootPath);

if (exportMatch) {
astPath.stop();
}
}
},
},
});

if (pendingTrackDownPromise) {
// We can't handle promises inside Babel traverse, so we do it here...
const resObj = await pendingTrackDownPromise;
rootFilePath = resObj.file;
rootSpecifier = resObj.specifier;
}

return /** @type { RootFile } */ { file: rootFilePath, specifier: rootSpecifier };
}

trackDownIdentifier = memoize(trackDownIdentifierFn);

/**
* @param {NodePath} astPath
* @param {string} identifierNameInScope
* @param {PathFromSystemRoot} fullCurrentFilePath
* @param {PathFromSystemRoot} projectPath
* @param {string} [projectName]
*/
async function trackDownIdentifierFromScopeFn(
astPath,
identifierNameInScope,
fullCurrentFilePath,
projectPath,
projectName,
) {
const sourceObj = getImportSourceFromAst(astPath, identifierNameInScope);

/** @type {RootFile} */
let rootFile;
if (sourceObj.source) {
rootFile = await trackDownIdentifier(
sourceObj.source,
sourceObj.importedIdentifierName,
fullCurrentFilePath,
projectPath,
projectName,
);
} else {
const specifier = sourceObj.importedIdentifierName || identifierNameInScope;
rootFile = { file: '[current]', specifier };
}
return rootFile;
}

export const trackDownIdentifierFromScope = memoize(trackDownIdentifierFromScopeFn);

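Editor's note: a minimal usage sketch of the tracking helper above. The call shape, the `RootFile` result (`{ file, specifier }`) and the example paths follow the parameter docs in this diff; the module path, project layout and identifier are hypothetical.

```js
// Module path is illustrative — file names are not shown in this extract.
import { trackDownIdentifier } from './track-down-identifier.js';

// Hypothetical setup: '/path/to/target-proj/my-comp-import.js' contains
//   `export { MyComp } from './src/MyComp.js';`
// trackDownIdentifier follows that re-export chain until the declaring file is found.
const rootFile = await trackDownIdentifier(
  './src/MyComp.js', // source: an importSpecifier source, like 'ref-proj' or '../file'
  'MyComp', // identifierName: the imported reference/Identifier name
  '/path/to/target-proj/my-comp-import.js', // currentFilePath
  '/path/to/target-proj', // rootPath
);
// Expected RootFile shape: { file: './src/MyComp.js', specifier: 'MyComp' }
console.log(rootFile.file, rootFile.specifier);
```

Because the function is wrapped in `memoize`, repeated calls with the same arguments during one run resolve from cache.
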
@@ -1,23 +1,28 @@
const fs = require('fs');
const pathLib = require('path');
const { default: traverse } = require('@babel/traverse');
const {
isRelativeSourcePath,
toRelativeSourcePath,
} = require('../../utils/relative-source-path.js');
const { AstService } = require('../../services/AstService.js');
const { LogService } = require('../../services/LogService.js');
const { InputDataService } = require('../../services/InputDataService.js');
const { resolveImportPath } = require('../../utils/resolve-import-path.js');
const { memoize } = require('../../utils/memoize.js');
import fs from 'fs';
import path from 'path';
import { swcTraverse } from '../../utils/swc-traverse.js';
import { isRelativeSourcePath, toRelativeSourcePath } from '../../utils/relative-source-path.js';
import { InputDataService } from '../../core/InputDataService.js';
import { resolveImportPath } from '../../utils/resolve-import-path.js';
import { AstService } from '../../core/AstService.js';
import { memoize } from '../../utils/memoize.js';

/**
* @typedef {import('../../types/core').RootFile} RootFile
* @typedef {import('../../types/core').SpecifierSource} SpecifierSource
* @typedef {import('../../types/core').IdentifierName} IdentifierName
* @typedef {import('../../types/core').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../../types/index.js').RootFile} RootFile
* @typedef {import('../../../../types/index.js').SpecifierSource} SpecifierSource
* @typedef {import('../../../../types/index.js').IdentifierName} IdentifierName
* @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../../types/index.js').SwcPath} SwcPath
*/

/**
* @param {string} source
* @param {string} projectName
*/
function isSelfReferencingProject(source, projectName) {
return source.startsWith(`${projectName}`);
}

/**
* @param {string} source
* @param {string} projectName
@@ -26,7 +31,7 @@ function isExternalProject(source, projectName) {
return (
!source.startsWith('#') &&
!isRelativeSourcePath(source) &&
!source.startsWith(`${projectName}/`)
!isSelfReferencingProject(source, projectName)
);
}

@@ -34,30 +39,35 @@ function isExternalProject(source, projectName) {
* Other than with import, no binding is created for MyClass by Babel(?)
* This means 'path.scope.getBinding('MyClass')' returns undefined
* and we have to find a different way to retrieve this value.
* @param {object} astPath Babel ast traversal path
* @param {SwcPath} swcPath Babel ast traversal path
* @param {IdentifierName} identifierName the name that should be tracked (and that exists inside scope of astPath)
*/
function getBindingAndSourceReexports(astPath, identifierName) {
function getBindingAndSourceReexports(swcPath, identifierName) {
// Get to root node of file and look for exports like `export { identifierName } from 'src';`
let source;
let bindingType;
let bindingPath;

let curPath = astPath;
let curPath = swcPath;
while (curPath.parentPath) {
curPath = curPath.parentPath;
}
const rootPath = curPath;
rootPath.traverse({
ExportSpecifier(path) {

swcTraverse(rootPath.node, {
ExportSpecifier(astPath) {
// eslint-disable-next-line arrow-body-style
const found =
path.node.exported.name === identifierName || path.node.local.name === identifierName;
astPath.node.orig?.value === identifierName ||
astPath.node.exported?.value === identifierName ||
astPath.node.local?.value === identifierName;
if (found) {
bindingPath = path;
bindingPath = astPath;
bindingType = 'ExportSpecifier';
source = path.parentPath.node.source ? path.parentPath.node.source.value : '[current]';
path.stop();
source = astPath.parentPath.node.source
? astPath.parentPath.node.source.value
: '[current]';
astPath.stop();
}
},
});
@@ -70,21 +80,21 @@ function getBindingAndSourceReexports(astPath, identifierName) {
* We might be an import that was locally renamed.
* Since we are traversing, we are interested in the imported name. Or in case of a re-export,
* the local name.
* @param {object} astPath Babel ast traversal path
* @param {SwcPath} astPath Babel ast traversal path
* @param {string} identifierName the name that should be tracked (and that exists inside scope of astPath)
* @returns {{ source:string, importedIdentifierName:string }}
*/
function getImportSourceFromAst(astPath, identifierName) {
export function getImportSourceFromAst(astPath, identifierName) {
let source;
let importedIdentifierName;

const binding = astPath.scope.getBinding(identifierName);
let bindingType = binding && binding.path.type;
let bindingPath = binding && binding.path;
const binding = astPath.scope.bindings[identifierName];
let bindingType = binding?.path.type;
let bindingPath = binding?.path;
const matchingTypes = ['ImportSpecifier', 'ImportDefaultSpecifier', 'ExportSpecifier'];

if (binding && matchingTypes.includes(bindingType)) {
source = binding.path.parentPath.node.source.value;
if (bindingType && matchingTypes.includes(bindingType)) {
source = binding?.path?.parentPath?.node?.source?.value;
} else {
// no binding
[source, bindingType, bindingPath] = getBindingAndSourceReexports(astPath, identifierName);
@@ -95,14 +105,18 @@ function getImportSourceFromAst(astPath, identifierName) {
importedIdentifierName = '[default]';
} else if (source) {
const { node } = bindingPath;
importedIdentifierName = (node.imported && node.imported.name) || node.local.name;
importedIdentifierName = node.orig?.value || node.imported?.value || node.local?.value;
}

return { source, importedIdentifierName };
}

/** @type {(source:SpecifierSource,identifierName:IdentifierName,currentFilePath:PathFromSystemRoot,rootPath:PathFromSystemRoot, depth?:number) => Promise<RootFile>} */
let trackDownIdentifier;
/**
* @typedef {(source:SpecifierSource,identifierName:IdentifierName,currentFilePath:PathFromSystemRoot,rootPath:PathFromSystemRoot,projectName?: string,depth?:number) => Promise<RootFile>} TrackDownIdentifierFn
*/

/**
* Follows the full path of an Identifier until its declaration ('root file') is found.
* @example
*```js
* // 1. Starting point
@@ -118,14 +132,19 @@ let trackDownIdentifier;
* export class RefComp extends LitElement {...}
*```
*
* @param {SpecifierSource} source an importSpecifier source, like 'ref-proj' or '../file'
* @param {IdentifierName} identifierName imported reference/Identifier name, like 'MyComp'
* @param {PathFromSystemRoot} currentFilePath file path, like '/path/to/target-proj/my-comp-import.js'
* @param {PathFromSystemRoot} rootPath dir path, like '/path/to/target-proj'
* @param {string} [projectName] like 'target-proj' or '@lion/input'
* @returns {Promise<RootFile>} file: path of file containing the binding (exported declaration),
* -param {SpecifierSource} source an importSpecifier source, like 'ref-proj' or '../file'
* -param {IdentifierName} identifierName imported reference/Identifier name, like 'MyComp'
* -param {PathFromSystemRoot} currentFilePath file path, like '/path/to/target-proj/my-comp-import.js'
* -param {PathFromSystemRoot} rootPath dir path, like '/path/to/target-proj'
* -param {string} [projectName] like 'target-proj' or '@lion/input'
* -returns {Promise<RootFile>} file: path of file containing the binding (exported declaration),
* like '/path/to/ref-proj/src/RefComp.js'
*/
/** @type {TrackDownIdentifierFn} */
// eslint-disable-next-line import/no-mutable-exports
export let trackDownIdentifier;

/** @type {TrackDownIdentifierFn} */
async function trackDownIdentifierFn(
source,
identifierName,
@@ -142,7 +161,7 @@ async function trackDownIdentifierFn(
projectName = InputDataService.getPackageJson(rootPath)?.name;
}

if (isExternalProject(source, projectName)) {
if (projectName && isExternalProject(source, projectName)) {
// So, it is an external ref like '@lion/core' or '@open-wc/scoped-elements/index.js'
// At this moment in time, we don't know if we have file system access to this particular
// project. Therefore, we limit ourselves to tracking down local references.
@@ -154,44 +173,53 @@ async function trackDownIdentifierFn(
return result;
}

/**
* @type {PathFromSystemRoot}
*/
const resolvedSourcePath = await resolveImportPath(source, currentFilePath);

LogService.debug(`[trackDownIdentifier] ${resolvedSourcePath}`);
// if (resolvedSourcePath === null) {
// LogService.error(`[trackDownIdentifier] ${resolvedSourcePath} not found`);

// }
// if (resolvedSourcePath === '[node-builtin]') {
// LogService.error(`[trackDownIdentifier] ${resolvedSourcePath} not found`);
// }

const allowedJsModuleExtensions = ['.mjs', '.js'];
if (!allowedJsModuleExtensions.includes(pathLib.extname(resolvedSourcePath))) {
if (
!allowedJsModuleExtensions.includes(path.extname(/** @type {string} */ (resolvedSourcePath)))
) {
// We have an import assertion
return /** @type { RootFile } */ {
file: toRelativeSourcePath(resolvedSourcePath, rootPath),
file: toRelativeSourcePath(/** @type {string} */ (resolvedSourcePath), rootPath),
specifier: '[default]',
};
}
const code = fs.readFileSync(resolvedSourcePath, 'utf8');
const ast = AstService.getAst(code, 'babel', { filePath: resolvedSourcePath });
const code = fs.readFileSync(/** @type {string} */ (resolvedSourcePath), 'utf8');
const swcAst = AstService._getSwcAst(code);

const shouldLookForDefaultExport = identifierName === '[default]';

let reexportMatch = false; // named specifier declaration
let exportMatch;
let pendingTrackDownPromise;

traverse(ast, {
ExportDefaultDeclaration(path) {
const handleExportDefaultDeclOrExpr = astPath => {
if (!shouldLookForDefaultExport) {
return;
}

let newSource;
if (path.node.declaration.type === 'Identifier') {
newSource = getImportSourceFromAst(path, path.node.declaration.name).source;
if (
astPath.node.expression?.type === 'Identifier' ||
astPath.node.declaration?.type === 'Identifier'
) {
newSource = getImportSourceFromAst(astPath, astPath.node.expression.value).source;
}

if (newSource) {
pendingTrackDownPromise = trackDownIdentifier(
newSource,
'[default]',
resolvedSourcePath,
/** @type {PathFromSystemRoot} */ (resolvedSourcePath),
rootPath,
projectName,
depth + 1,
@@ -199,36 +227,41 @@ async function trackDownIdentifierFn(
} else {
// We found our file!
rootSpecifier = identifierName;
rootFilePath = toRelativeSourcePath(resolvedSourcePath, rootPath);
rootFilePath = toRelativeSourcePath(
/** @type {PathFromSystemRoot} */ (resolvedSourcePath),
rootPath,
);
}
path.stop();
},
ExportNamedDeclaration: {
enter(path) {
astPath.stop();
};
const handleExportDeclOrNamedDecl = {
enter(astPath) {
if (reexportMatch || shouldLookForDefaultExport) {
return;
}

// Are we dealing with a re-export ?
if (path.node.specifiers?.length) {
exportMatch = path.node.specifiers.find(s => s.exported.name === identifierName);
if (astPath.node.specifiers?.length) {
exportMatch = astPath.node.specifiers.find(
s => s.orig?.value === identifierName || s.exported?.value === identifierName,
);

if (exportMatch) {
const localName = exportMatch.local.name;
const localName = exportMatch.orig.value;
let newSource;
if (path.node.source) {
if (astPath.node.source) {
/**
* @example
* export { x } from 'y'
*/
newSource = path.node.source.value;
newSource = astPath.node.source.value;
} else {
/**
* @example
* import { x } from 'y'
* export { x }
*/
newSource = getImportSourceFromAst(path, identifierName).source;
newSource = getImportSourceFromAst(astPath, identifierName).source;

if (!newSource || newSource === '[current]') {
/**
@@ -248,11 +281,11 @@ async function trackDownIdentifierFn(
projectName,
depth + 1,
);
path.stop();
astPath.stop();
}
}
},
exit(path) {
exit(astPath) {
if (!reexportMatch) {
// We didn't find a re-exported Identifier, that means the reference is declared
// in current file...
@@ -260,12 +293,20 @@ async function trackDownIdentifierFn(
rootFilePath = toRelativeSourcePath(resolvedSourcePath, rootPath);

if (exportMatch) {
path.stop();
astPath.stop();
}
}
},
},
});
};

const visitor = {
ExportDefaultDeclaration: handleExportDefaultDeclOrExpr,
ExportDefaultExpression: handleExportDefaultDeclOrExpr,
ExportNamedDeclaration: handleExportDeclOrNamedDecl,
ExportDeclaration: handleExportDeclOrNamedDecl,
};

swcTraverse(swcAst, visitor, { needsAdvancedPaths: true });

if (pendingTrackDownPromise) {
// We can't handle promises inside Babel traverse, so we do it here...
@@ -280,10 +321,10 @@ async function trackDownIdentifierFn(
trackDownIdentifier = memoize(trackDownIdentifierFn);

/**
* @param {BabelPath} astPath
* @param {SwcPath} astPath
* @param {string} identifierNameInScope
* @param {string} fullCurrentFilePath
* @param {string} projectPath
* @param {PathFromSystemRoot} fullCurrentFilePath
* @param {PathFromSystemRoot} projectPath
* @param {string} [projectName]
*/
async function trackDownIdentifierFromScopeFn(
@@ -312,10 +353,4 @@ async function trackDownIdentifierFromScopeFn(
return rootFile;
}

const trackDownIdentifierFromScope = memoize(trackDownIdentifierFromScopeFn);

module.exports = {
trackDownIdentifier,
getImportSourceFromAst,
trackDownIdentifierFromScope,
};
export const trackDownIdentifierFromScope = memoize(trackDownIdentifierFromScopeFn);

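Editor's note: the diff above swaps `@babel/traverse` for the project's own `swcTraverse` util, which takes a plain visitor object keyed by swc node types. A minimal sketch of that calling convention, pieced together from the calls in this file — the sample code string is made up, and note that swc specifier nodes expose `orig`/`exported` with a `.value` where babel used `.name`:

```js
import { swcTraverse } from '../../utils/swc-traverse.js'; // path as used in this file
import { AstService } from '../../core/AstService.js';

const code = `export { MyComp as RenamedComp } from './MyComp.js';`; // made-up input
const swcAst = AstService._getSwcAst(code); // same parse step trackDownIdentifierFn uses

swcTraverse(
  swcAst,
  {
    ExportSpecifier(astPath) {
      // swc nodes: local/original name on `orig.value`, exported name on `exported.value`
      console.log(astPath.node.orig?.value, '->', astPath.node.exported?.value);
      astPath.stop(); // traversal paths mirror babel's stop()
    },
  },
  { needsAdvancedPaths: true }, // option passed the same way as in trackDownIdentifierFn above
);
```
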
@@ -1,5 +1,6 @@
/**
* @typedef {import('../../types/analyzers').FindExportsAnalyzerResult} FindExportsAnalyzerResult
* @typedef {import('../../../../types/index.js').FindExportsAnalyzerResult} FindExportsAnalyzerResult
* @typedef {import('../../../../types/index.js').IterableFindExportsAnalyzerEntry} IterableFindExportsAnalyzerEntry
*/

/**
@@ -35,7 +36,7 @@
*
* @param {FindExportsAnalyzerResult} exportsAnalyzerResult
*/
function transformIntoIterableFindExportsOutput(exportsAnalyzerResult) {
export function transformIntoIterableFindExportsOutput(exportsAnalyzerResult) {
/** @type {IterableFindExportsAnalyzerEntry[]} */
const iterableEntries = [];

@@ -62,6 +63,3 @@ function transformIntoIterableFindExportsOutput(exportsAnalyzerResult) {
}
return iterableEntries;
}
module.exports = {
transformIntoIterableFindExportsOutput,
};

@@ -1,5 +1,6 @@
/**
* @typedef {import('../../types/analyzers').FindImportsAnalyzerResult} FindImportsAnalyzerResult
* @typedef {import('../../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
* @typedef {import('../../../../types/index.js').IterableFindImportsAnalyzerEntry} IterableFindImportsAnalyzerEntry
*/

/**
@@ -32,7 +33,7 @@
*
* @param {FindImportsAnalyzerResult} importsAnalyzerResult
*/
function transformIntoIterableFindImportsOutput(importsAnalyzerResult) {
export function transformIntoIterableFindImportsOutput(importsAnalyzerResult) {
/** @type {IterableFindImportsAnalyzerEntry[]} */
const iterableEntries = [];

@@ -43,20 +44,15 @@ function transformIntoIterableFindImportsOutput(importsAnalyzerResult) {
continue;
}
for (const importSpecifier of importSpecifiers) {
/** @type {IterableFindImportsAnalyzerEntry} */
const resultEntry = {
const resultEntry = /** @type {IterableFindImportsAnalyzerEntry} */ ({
file,
specifier: importSpecifier,
source,
normalizedSource,
};
});
iterableEntries.push(resultEntry);
}
}
}
return iterableEntries;
}

module.exports = {
transformIntoIterableFindImportsOutput,
};

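Editor's note: both helpers above flatten an analyzer result into one entry per file/specifier pair; for find-imports each entry is `{ file, specifier, source, normalizedSource }`, which is what `matchImportsPostprocess` (next file) iterates over. A small consumption sketch — the import path is relative to an analyzer file, as in this diff, and the stubbed result stands in for a real `FindImportsAnalyzer` output:

```js
import { transformIntoIterableFindImportsOutput } from './helpers/transform-into-iterable-find-imports-output.js';

// Stub standing in for a FindImportsAnalyzer result (assumed shape: it only needs a queryOutput array).
const importsAnalyzerResult = /** @type {any} */ ({ queryOutput: [] });

const iterableEntries = transformIntoIterableFindImportsOutput(importsAnalyzerResult);
for (const { file, specifier, source, normalizedSource } of iterableEntries) {
  // one row per import specifier found in `file`
  console.log(`${file}: imports "${specifier}" from ${source} (${normalizedSource})`);
}
```
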
@@ -1,23 +1,10 @@
// A base class for writing Analyzers
const { Analyzer } = require('./helpers/Analyzer.js');
export { Analyzer } from '../core/Analyzer.js';

// Expose analyzers that are requested to be run in external contexts
const FindExportsAnalyzer = require('./find-exports.js');
const FindImportsAnalyzer = require('./find-imports.js');
const MatchImportsAnalyzer = require('./match-paths.js');
export { default as FindExportsAnalyzer } from './find-exports.js';
export { default as FindImportsAnalyzer } from './find-imports.js';
export { default as MatchImportsAnalyzer } from './match-paths.js';

const {
transformIntoIterableFindImportsOutput,
} = require('./helpers/transform-into-iterable-find-imports-output.js');
const {
transformIntoIterableFindExportsOutput,
} = require('./helpers/transform-into-iterable-find-exports-output.js');

module.exports = {
Analyzer,
FindExportsAnalyzer,
FindImportsAnalyzer,
MatchImportsAnalyzer,
transformIntoIterableFindImportsOutput,
transformIntoIterableFindExportsOutput,
};
export { transformIntoIterableFindImportsOutput } from './helpers/transform-into-iterable-find-imports-output.js';
export { transformIntoIterableFindExportsOutput } from './helpers/transform-into-iterable-find-exports-output.js';

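Editor's note: with the index above converted from `module.exports` to named ESM re-exports, consumers switch to `import` syntax. A sketch — the bare specifier is illustrative (check the package's exports map for the exact entrypoint); the result fields come from `ensureAnalyzerResultFormat` further down in this diff:

```js
// Bare specifier is illustrative; a deep/relative import of this index file works the same way.
import { FindImportsAnalyzer } from 'providence-analytics/analyzers.js';

const analyzer = new FindImportsAnalyzer();
// execute() comes from the reworked Analyzer base class (see the Analyzer.js diff below).
const result = await analyzer.execute({ targetProjectPath: '/path/to/target-proj' });
console.log(result.analyzerMeta.name, result.queryOutput.length);
```
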
@@ -1,27 +1,25 @@
/* eslint-disable no-continue */
const pathLib = require('path');
import pathLib from 'path';
/* eslint-disable no-shadow, no-param-reassign */
const FindImportsAnalyzer = require('./find-imports.js');
const FindExportsAnalyzer = require('./find-exports.js');
const { Analyzer } = require('./helpers/Analyzer.js');
const { fromImportToExportPerspective } = require('./helpers/from-import-to-export-perspective.js');
const {
transformIntoIterableFindExportsOutput,
} = require('./helpers/transform-into-iterable-find-exports-output.js');
const {
transformIntoIterableFindImportsOutput,
} = require('./helpers/transform-into-iterable-find-imports-output.js');
import FindImportsAnalyzer from './find-imports.js';
import FindExportsAnalyzer from './find-exports.js';
import { Analyzer } from '../core/Analyzer.js';
import { fromImportToExportPerspective } from './helpers/from-import-to-export-perspective.js';
import { transformIntoIterableFindExportsOutput } from './helpers/transform-into-iterable-find-exports-output.js';
import { transformIntoIterableFindImportsOutput } from './helpers/transform-into-iterable-find-imports-output.js';

/**
* @typedef {import('../types/analyzers').FindImportsAnalyzerResult} FindImportsAnalyzerResult
* @typedef {import('../types/analyzers').FindExportsAnalyzerResult} FindExportsAnalyzerResult
* @typedef {import('../types/analyzers').IterableFindExportsAnalyzerEntry} IterableFindExportsAnalyzerEntry
* @typedef {import('../types/analyzers').IterableFindImportsAnalyzerEntry} IterableFindImportsAnalyzerEntry
* @typedef {import('../types/analyzers').ConciseMatchImportsAnalyzerResult} ConciseMatchImportsAnalyzerResult
* @typedef {import('../types/analyzers').MatchImportsConfig} MatchImportsConfig
* @typedef {import('../types/analyzers').MatchImportsAnalyzerResult} MatchImportsAnalyzerResult
* @typedef {import('../types/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../types/core').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
* @typedef {import('../../../types/index.js').FindExportsAnalyzerResult} FindExportsAnalyzerResult
* @typedef {import('../../../types/index.js').IterableFindExportsAnalyzerEntry} IterableFindExportsAnalyzerEntry
* @typedef {import('../../../types/index.js').IterableFindImportsAnalyzerEntry} IterableFindImportsAnalyzerEntry
* @typedef {import('../../../types/index.js').ConciseMatchImportsAnalyzerResult} ConciseMatchImportsAnalyzerResult
* @typedef {import('../../../types/index.js').MatchImportsConfig} MatchImportsConfig
* @typedef {import('../../../types/index.js').MatchImportsAnalyzerResult} MatchImportsAnalyzerResult
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
*/

/**
@@ -76,6 +74,7 @@ async function matchImportsPostprocess(exportsAnalyzerResult, importsAnalyzerRes
// TODO: What if this info is retrieved from cached importProject/target project?
const importProjectPath = cfg.targetProjectPath;

// TODO: make find-import / export automatically output these, to improve perf...
const iterableFindExportsOutput = transformIntoIterableFindExportsOutput(exportsAnalyzerResult);
const iterableFindImportsOutput = transformIntoIterableFindImportsOutput(importsAnalyzerResult);

@@ -106,7 +105,7 @@ async function matchImportsPostprocess(exportsAnalyzerResult, importsAnalyzerRes
/**
* 2. Are we from the same source?
* A.k.a. is source required by target the same as the one found in target.
* (we know the specifier name is tha same, now we need to check the file as well.)
* (we know the specifier name is the same, now we need to check the file as well.)
*
* Example:
* exportFile './foo.js'
@@ -117,9 +116,13 @@ async function matchImportsPostprocess(exportsAnalyzerResult, importsAnalyzerRes
*/
const fromImportToExport = await fromImportToExportPerspective({
importee: importEntry.normalizedSource,
importer: pathLib.resolve(importProjectPath, importEntry.file),
importer: /** @type {PathFromSystemRoot} */ (
pathLib.resolve(importProjectPath, importEntry.file)
),
importeeProjectPath: cfg.referenceProjectPath,
});
const isFromSameSource = compareImportAndExportPaths(exportEntry.file, fromImportToExport);

if (!isFromSameSource) {
continue;
}
@@ -133,7 +136,10 @@ async function matchImportsPostprocess(exportsAnalyzerResult, importsAnalyzerRes
entry => entry.exportSpecifier && entry.exportSpecifier.id === id,
);
if (resultForCurrentExport) {
// Prevent that we count double import like "import * as all from 'x'" and "import {smth} from 'x'"
if (!resultForCurrentExport.importProjectFiles.includes(importEntry.file)) {
resultForCurrentExport.importProjectFiles.push(importEntry.file);
}
} else {
conciseResultsArray.push({
exportSpecifier: { id, ...(exportEntry.meta ? { meta: exportEntry.meta } : {}) },
@@ -144,22 +150,17 @@ async function matchImportsPostprocess(exportsAnalyzerResult, importsAnalyzerRes
}

const importProject = importsAnalyzerResult.analyzerMeta.targetProject.name;
return /** @type {AnalyzerQueryResult} */ createCompatibleMatchImportsResult(
conciseResultsArray,
importProject,
return /** @type {AnalyzerQueryResult} */ (
createCompatibleMatchImportsResult(conciseResultsArray, importProject)
);
}

class MatchImportsAnalyzer extends Analyzer {
constructor() {
super();
/** @type {AnalyzerName} */
this.name = 'match-imports';
}
export default class MatchImportsAnalyzer extends Analyzer {
static analyzerName = /** @type {AnalyzerName} */ ('match-imports');

static get requiresReference() {
return true;
}
static requiredAst = /** @type {AnalyzerAst} */ ('swc');

static requiresReference = true;

/**
* Based on ExportsAnalyzerResult of reference project(s) (for instance lion-based-ui)
@@ -191,10 +192,9 @@ class MatchImportsAnalyzer extends Analyzer {
/**
* Prepare
*/
const analyzerResult = this._prepare(cfg);

if (analyzerResult) {
return analyzerResult;
const cachedAnalyzerResult = this._prepare(cfg);
if (cachedAnalyzerResult) {
return cachedAnalyzerResult;
}

/**
@@ -207,6 +207,7 @@ class MatchImportsAnalyzer extends Analyzer {
metaConfig: cfg.metaConfig,
targetProjectPath: cfg.referenceProjectPath,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});
}

@@ -217,6 +218,7 @@ class MatchImportsAnalyzer extends Analyzer {
metaConfig: cfg.metaConfig,
targetProjectPath: cfg.targetProjectPath,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});
}

@@ -232,5 +234,3 @@ class MatchImportsAnalyzer extends Analyzer {
return this._finalize(queryOutput, cfg);
}
}

module.exports = MatchImportsAnalyzer;

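Editor's note: `MatchImportsAnalyzer` above moves its identity from constructor assignments to static class fields, which the reworked `Analyzer` base class (see the Analyzer.js diff further down) reads via `this.constructor`. A hypothetical analyzer showing that contract — the class name, analyzer name and returned data are made up:

```js
import { Analyzer } from '../core/Analyzer.js'; // path as used by the analyzers in this diff

export default class FindTodoCommentsAnalyzer extends Analyzer {
  static analyzerName = 'find-todo-comments'; // replaces `this.name = ...` in a constructor
  static requiredAst = 'swc'; // forwarded to QueryService.addAstToProjectsData by _traverse
  static requiresReference = false; // match-* analyzers set this to true

  // Invoked per file by Analyzer._traverse via `traverseEntryFn: this.constructor.analyzeFile`.
  static analyzeFile(ast, { relativePath }) {
    return { result: [{ file: relativePath, todoCount: 0 }] };
  }
}
```
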
@@ -1,15 +1,18 @@
/* eslint-disable no-shadow, no-param-reassign */
const MatchSubclassesAnalyzer = require('./match-subclasses.js');
const FindExportsAnalyzer = require('./find-exports.js');
const FindCustomelementsAnalyzer = require('./find-customelements.js');
const { Analyzer } = require('./helpers/Analyzer.js');
import MatchSubclassesAnalyzer from './match-subclasses.js';
import FindExportsAnalyzer from './find-exports.js';
import FindCustomelementsAnalyzer from './find-customelements.js';
import { Analyzer } from '../core/Analyzer.js';

/** @typedef {import('./types').FindExportsAnalyzerResult} FindExportsAnalyzerResult */
/** @typedef {import('./types').FindCustomelementsAnalyzerResult} FindCustomelementsAnalyzerResult */
/** @typedef {import('./types').MatchSubclassesAnalyzerResult} MatchSubclassesAnalyzerResult */
/** @typedef {import('./types').FindImportsAnalyzerResult} FindImportsAnalyzerResult */
/** @typedef {import('./types').MatchedExportSpecifier} MatchedExportSpecifier */
/** @typedef {import('./types').RootFile} RootFile */
/**
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').FindExportsAnalyzerResult} FindExportsAnalyzerResult
* @typedef {import('../../../types/index.js').FindCustomelementsAnalyzerResult} FindCustomelementsAnalyzerResult
* @typedef {import('../../../types/index.js').MatchSubclassesAnalyzerResult} MatchSubclassesAnalyzerResult
* @typedef {import('../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
* @typedef {import('../../../types/index.js').MatchedExportSpecifier} MatchedExportSpecifier
* @typedef {import('../../../types/index.js').RootFile} RootFile
*/

/**
* For prefix `{ from: 'lion', to: 'wolf' }`
@@ -361,15 +364,11 @@ function matchPathsPostprocess(
* ...
* ]
*/
class MatchPathsAnalyzer extends Analyzer {
constructor() {
super();
this.name = 'match-paths';
}
export default class MatchPathsAnalyzer extends Analyzer {
/** @type {AnalyzerName} */
static analyzerName = 'match-paths';

static get requiresReference() {
return true;
}
static requiresReference = true;

/**
* @param {MatchClasspathsConfig} customConfig
@@ -429,6 +428,7 @@ class MatchPathsAnalyzer extends Analyzer {
gatherFilesConfig: cfg.gatherFilesConfig,
gatherFilesConfigReference: cfg.gatherFilesConfigReference,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});

// [A2]
@@ -438,6 +438,7 @@ class MatchPathsAnalyzer extends Analyzer {
targetProjectPath: cfg.targetProjectPath,
gatherFilesConfig: cfg.gatherFilesConfig,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});

// [A3]
@@ -447,6 +448,7 @@ class MatchPathsAnalyzer extends Analyzer {
targetProjectPath: cfg.referenceProjectPath,
gatherFilesConfig: cfg.gatherFilesConfigReference,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});

/**
@@ -475,6 +477,7 @@ class MatchPathsAnalyzer extends Analyzer {
targetProjectPath: cfg.targetProjectPath,
gatherFilesConfig: cfg.gatherFilesConfig,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});

// [B2]
@@ -484,6 +487,7 @@ class MatchPathsAnalyzer extends Analyzer {
targetProjectPath: cfg.referenceProjectPath,
gatherFilesConfig: cfg.gatherFilesConfigReference,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});
// refFindExportsAnalyzer was already created in A3

@@ -509,5 +513,3 @@ class MatchPathsAnalyzer extends Analyzer {
return this._finalize(queryOutput, cfg);
}
}

module.exports = MatchPathsAnalyzer;

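Editor's note: the repeated `suppressNonCriticalLogs: true` additions above (and in match-imports/match-subclasses) quiet the child analyzers that a match-* analyzer spawns internally, so only the top-level run reports progress; the flag is checked around `LogService.info`/`LogService.success` in the Analyzer.js diff below. An illustrative call with a hypothetical project path:

```js
import FindExportsAnalyzer from './find-exports.js'; // path as used in this file

const findExportsAnalyzer = new FindExportsAnalyzer();
const refResult = await findExportsAnalyzer.execute({
  targetProjectPath: '/path/to/reference-proj', // hypothetical
  suppressNonCriticalLogs: true, // skip the "starting ..."/"finished ..." info lines
});
console.log(refResult.analyzerMeta.name);
```
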
@@ -1,20 +1,21 @@
/* eslint-disable no-continue */
const pathLib = require('path');
import pathLib from 'path';
/* eslint-disable no-shadow, no-param-reassign */
const FindClassesAnalyzer = require('./find-classes.js');
const FindExportsAnalyzer = require('./find-exports.js');
const { Analyzer } = require('./helpers/Analyzer.js');
const { fromImportToExportPerspective } = require('./helpers/from-import-to-export-perspective.js');
import FindClassesAnalyzer from './find-classes.js';
import FindExportsAnalyzer from './find-exports.js';
import { Analyzer } from '../core/Analyzer.js';
import { fromImportToExportPerspective } from './helpers/from-import-to-export-perspective.js';

/**
* @typedef {import('../types/analyzers/find-classes').FindClassesAnalyzerResult} FindClassesAnalyzerResult
* @typedef {import('../types/find-imports').FindImportsAnalyzerResult} FindImportsAnalyzerResult
* @typedef {import('../types/find-exports').FindExportsAnalyzerResult} FindExportsAnalyzerResult
* @typedef {import('../types/find-exports').IterableFindExportsAnalyzerEntry} IterableFindExportsAnalyzerEntry
* @typedef {import('../types/find-imports').IterableFindImportsAnalyzerEntry} IterableFindImportsAnalyzerEntry
* @typedef {import('../types/match-imports').ConciseMatchImportsAnalyzerResult} ConciseMatchImportsAnalyzerResult
* @typedef {import('../types/match-imports').MatchImportsConfig} MatchImportsConfig
* @typedef {import('../types/core/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').FindClassesAnalyzerResult} FindClassesAnalyzerResult
* @typedef {import('../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
* @typedef {import('../../../types/index.js').FindExportsAnalyzerResult} FindExportsAnalyzerResult
* @typedef {import('../../../types/index.js').IterableFindExportsAnalyzerEntry} IterableFindExportsAnalyzerEntry
* @typedef {import('../../../types/index.js').IterableFindImportsAnalyzerEntry} IterableFindImportsAnalyzerEntry
* @typedef {import('../../../types/index.js').ConciseMatchImportsAnalyzerResult} ConciseMatchImportsAnalyzerResult
* @typedef {import('../../../types/index.js').MatchImportsConfig} MatchImportsConfig
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
*/

function getMemberOverrides(
@@ -52,7 +53,7 @@ function getMemberOverrides(
}

/**
* @desc Helper method for matchImportsPostprocess. Modifies its resultsObj
* Helper method for matchImportsPostprocess. Modifies its resultsObj
* @param {object} resultsObj
* @param {string} exportId like 'myExport::./reference-project/my/export.js::my-project'
* @param {Set<string>} filteredList
@@ -67,14 +68,14 @@ function storeResult(resultsObj, exportId, filteredList, meta) {
}

/**
* @param {FindExportsAnalyzerResult} exportsAnalyzerResult
* @param {FindExportsAnalyzerResult} refExportsAnalyzerResult
* @param {FindClassesAnalyzerResult} targetClassesAnalyzerResult
* @param {FindClassesAnalyzerResult} refClassesAResult
* @param {MatchSubclassesConfig} customConfig
* @returns {AnalyzerQueryResult}
*/
async function matchSubclassesPostprocess(
exportsAnalyzerResult,
refExportsAnalyzerResult,
targetClassesAnalyzerResult,
refClassesAResult,
customConfig,
@@ -86,7 +87,7 @@ async function matchSubclassesPostprocess(

/**
* Step 1: a 'flat' data structure
* @desc Create a key value storage map for exports/class matches
* Create a key value storage map for exports/class matches
* - key: `${exportSpecifier}::${normalizedSource}::${project}` from reference project
* - value: an array of import file matches like `${targetProject}::${normalizedSource}::${className}`
* @example
@@ -102,8 +103,8 @@ async function matchSubclassesPostprocess(
*/
const resultsObj = {};

for (const exportEntry of exportsAnalyzerResult.queryOutput) {
const exportsProjectObj = exportsAnalyzerResult.analyzerMeta.targetProject;
for (const exportEntry of refExportsAnalyzerResult.queryOutput) {
const exportsProjectObj = refExportsAnalyzerResult.analyzerMeta.targetProject;
const exportsProjectName = exportsProjectObj.name;

// Look for all specifiers that are exported, like [import {specifier} 'lion-based-ui/foo.js']
@@ -124,9 +125,10 @@ async function matchSubclassesPostprocess(
// TODO: What if this info is retrieved from cached importProject/target project?
const importProjectPath = cfg.targetProjectPath;
for (const { result, file } of targetClassesAnalyzerResult.queryOutput) {
// targetClassesAnalyzerResult.queryOutput.forEach(({ result, file }) =>
const importerFilePath = /** @type {PathFromSystemRoot} */ (
pathLib.resolve(importProjectPath, file)
);
for (const classEntryResult of result) {
// result.forEach(classEntryResult => {
/**
* @example
* Example context (read by 'find-classes'/'find-exports' analyzers)
@@ -165,7 +167,8 @@ async function matchSubclassesPostprocess(
exportEntry.file ===
(await fromImportToExportPerspective({
importee: classMatch.rootFile.file,
importer: pathLib.resolve(importProjectPath, file),
importer: importerFilePath,
importeeProjectPath: cfg.referenceProjectPath,
}));

if (classMatch && isFromSameSource) {
@@ -176,8 +179,14 @@ async function matchSubclassesPostprocess(
exportEntryResult,
exportSpecifier,
);

let projectFileId = `${importProject}::${file}::${classEntryResult.name}`;
if (cfg.addSystemPathsInResult) {
projectFileId += `::${importerFilePath}`;
}

filteredImportsList.add({
projectFileId: `${importProject}::${file}::${classEntryResult.name}`,
projectFileId,
memberOverrides,
});
}
@@ -190,7 +199,7 @@ async function matchSubclassesPostprocess(

/**
* Step 2: a rich data structure
* @desc Transform resultObj from step 1 into an array of objects
* Transform resultObj from step 1 into an array of objects
* @example
* [{
* exportSpecifier: {
@@ -235,13 +244,18 @@ async function matchSubclassesPostprocess(
const matchesPerProject = [];
flatResult.files.forEach(({ projectFileId, memberOverrides }) => {
// eslint-disable-next-line no-shadow
const [project, file, identifier] = projectFileId.split('::');
const [project, file, identifier, filePath] = projectFileId.split('::');
let projectEntry = matchesPerProject.find(m => m.project === project);
if (!projectEntry) {
matchesPerProject.push({ project, files: [] });
projectEntry = matchesPerProject[matchesPerProject.length - 1];
}
projectEntry.files.push({ file, identifier, memberOverrides });
const entry = { file, identifier, memberOverrides };
if (filePath) {
// @ts-ignore
entry.filePath = filePath;
}
projectEntry.files.push(entry);
});

return {
@@ -261,10 +275,9 @@ async function matchSubclassesPostprocess(
// return aResult;
// }

class MatchSubclassesAnalyzer extends Analyzer {
constructor() {
super();
this.name = 'match-subclasses';
export default class MatchSubclassesAnalyzer extends Analyzer {
static get analyzerName() {
return 'match-subclasses';
}

static get requiresReference() {
@@ -272,7 +285,7 @@ class MatchSubclassesAnalyzer extends Analyzer {
}

/**
* @desc Based on ExportsAnalyzerResult of reference project(s) (for instance lion-based-ui)
* Based on ExportsAnalyzerResult of reference project(s) (for instance lion-based-ui)
* and targetClassesAnalyzerResult of search-targets (for instance my-app-using-lion-based-ui),
* an overview is returned of all matching imports and exports.
* @param {MatchSubclassesConfig} customConfig
@@ -309,16 +322,18 @@ class MatchSubclassesAnalyzer extends Analyzer {
*/
const findExportsAnalyzer = new FindExportsAnalyzer();
/** @type {FindExportsAnalyzerResult} */
const exportsAnalyzerResult = await findExportsAnalyzer.execute({
const refExportsAnalyzerResult = await findExportsAnalyzer.execute({
targetProjectPath: cfg.referenceProjectPath,
gatherFilesConfig: cfg.gatherFilesConfigReference,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});
const findClassesAnalyzer = new FindClassesAnalyzer();
/** @type {FindClassesAnalyzerResult} */
const targetClassesAnalyzerResult = await findClassesAnalyzer.execute({
targetProjectPath: cfg.targetProjectPath,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});
const findRefClassesAnalyzer = new FindClassesAnalyzer();
/** @type {FindClassesAnalyzerResult} */
@@ -326,10 +341,11 @@ class MatchSubclassesAnalyzer extends Analyzer {
targetProjectPath: cfg.referenceProjectPath,
gatherFilesConfig: cfg.gatherFilesConfigReference,
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
suppressNonCriticalLogs: true,
});

const queryOutput = await matchSubclassesPostprocess(
exportsAnalyzerResult,
refExportsAnalyzerResult,
targetClassesAnalyzerResult,
refClassesAnalyzerResult,
cfg,
@@ -341,5 +357,3 @@ class MatchSubclassesAnalyzer extends Analyzer {
return this._finalize(queryOutput, cfg);
}
}

module.exports = MatchSubclassesAnalyzer;

@@ -1,5 +1,5 @@
const pathLib = require('path');
const { LogService } = require('../../services/LogService.js');
import pathLib from 'path';
import { LogService } from '../../core/LogService.js';

const /** @type {AnalyzerOptions} */ options = {
filterSpecifier(results, targetSpecifier, specifiersKey) {
@@ -77,7 +77,7 @@ function sortBySpecifier(analyzerResult, customConfig) {
return /** @type {AnalyzerQueryResult} */ resultsBySpecifier;
}

module.exports = {
export default {
name: 'sort-by-specifier',
execute: sortBySpecifier,
compatibleAnalyzers: ['find-imports', 'find-exports'],

@@ -1,37 +1,38 @@
/* eslint-disable no-param-reassign */
const fs = require('fs');
const semver = require('semver');
const pathLib = require('path');
const { LogService } = require('../../services/LogService.js');
const { QueryService } = require('../../services/QueryService.js');
const { ReportService } = require('../../services/ReportService.js');
const { InputDataService } = require('../../services/InputDataService.js');
const { toPosixPath } = require('../../utils/to-posix-path.js');
const { getFilePathRelativeFromRoot } = require('../../utils/get-file-path-relative-from-root.js');
import semver from 'semver';
import pathLib from 'path';
import { LogService } from './LogService.js';
import { QueryService } from './QueryService.js';
import { ReportService } from './ReportService.js';
import { InputDataService } from './InputDataService.js';
import { toPosixPath } from '../utils/to-posix-path.js';
import { getFilePathRelativeFromRoot } from '../utils/get-file-path-relative-from-root.js';

/**
* @typedef {import('../../types/core').AnalyzerName} AnalyzerName
* @typedef {import('../../types/core').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../types/core').QueryOutput} QueryOutput
* @typedef {import('../../types/core').QueryOutputEntry} QueryOutputEntry
* @typedef {import('../../types/core').ProjectInputData} ProjectInputData
* @typedef {import('../../types/core').ProjectInputDataWithMeta} ProjectInputDataWithMeta
* @typedef {import('../../types/core').AnalyzerQueryResult} AnalyzerQueryResult
* @typedef {import('../../types/core').MatchAnalyzerConfig} MatchAnalyzerConfig
*
* @typedef {(ast: object, { relativePath: PathRelative }) => {result: QueryOutputEntry}} TraversEntryFn
* @typedef {import("@swc/core").Module} SwcAstModule
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').QueryOutput} QueryOutput
* @typedef {import('../../../types/index.js').ProjectInputData} ProjectInputData
* @typedef {import('../../../types/index.js').ProjectInputDataWithMeta} ProjectInputDataWithMeta
* @typedef {import('../../../types/index.js').AnalyzerQueryResult} AnalyzerQueryResult
* @typedef {import('../../../types/index.js').MatchAnalyzerConfig} MatchAnalyzerConfig
* @typedef {import('@babel/types').File} File
* @typedef {(ast: File, astContext: {code:string; relativePath:string; projectData: ProjectInputDataWithMeta}) => object} FileAstTraverseFn
*/

/**
* Analyzes one entry: the callback can traverse a given ast for each entry
* @param {ProjectInputDataWithMeta} projectData
* @param {function} astAnalysis
* @param {object} analyzerCfg
*/
async function analyzePerAstEntry(projectData, astAnalysis) {
async function analyzePerAstFile(projectData, astAnalysis, analyzerCfg) {
const entries = [];
for (const { file, ast, context: astContext } of projectData.entries) {
const relativePath = getFilePathRelativeFromRoot(file, projectData.project.path);
const context = { code: astContext.code, relativePath, projectData };
const context = { code: astContext.code, relativePath, projectData, analyzerCfg };
LogService.debug(`${pathLib.resolve(projectData.project.path, file)}`);
const { result, meta } = await astAnalysis(ast, context);
entries.push({ file: relativePath, meta, result });
@@ -64,22 +65,22 @@ function posixify(data) {
}

/**
* @desc This method ensures that the result returned by an analyzer always has a consistent format.
* This method ensures that the result returned by an analyzer always has a consistent format.
* By returning the configuration for the queryOutput, it will be possible to run later queries
* under the same circumstances
* @param {QueryOutput} queryOutput
* @param {object} configuration
* @param {object} cfg
* @param {Analyzer} analyzer
*/
function ensureAnalyzerResultFormat(queryOutput, configuration, analyzer) {
function ensureAnalyzerResultFormat(queryOutput, cfg, analyzer) {
const { targetProjectMeta, identifier, referenceProjectMeta } = analyzer;
const optional = {};
if (targetProjectMeta) {
optional.targetProject = targetProjectMeta;
optional.targetProject = { ...targetProjectMeta };
delete optional.targetProject.path; // get rid of machine specific info
}
if (referenceProjectMeta) {
optional.referenceProject = referenceProjectMeta;
optional.referenceProject = { ...referenceProjectMeta };
delete optional.referenceProject.path; // get rid of machine specific info
}

@@ -87,11 +88,11 @@ function ensureAnalyzerResultFormat(queryOutput, configuration, analyzer) {
const aResult = {
queryOutput,
analyzerMeta: {
name: analyzer.name,
requiredAst: analyzer.requiredAst,
name: analyzer.constructor.analyzerName,
requiredAst: analyzer.constructor.requiredAst,
identifier,
...optional,
configuration,
configuration: cfg,
},
};

@@ -129,28 +130,31 @@ function ensureAnalyzerResultFormat(queryOutput, configuration, analyzer) {
* Before running the analyzer, we need two conditions for a 'compatible match':
* - 1. referenceProject is imported by targetProject at all
* - 2. referenceProject and targetProject have compatible major versions
* @param {PathFromSystemRoot} referencePath
* @param {PathFromSystemRoot} targetPath
* @typedef {(referencePath:PathFromSystemRoot,targetPath:PathFromSystemRoot) => {compatible:boolean; reason?:string}} CheckForMatchCompatibilityFn
* @type {CheckForMatchCompatibilityFn}
*/
function checkForMatchCompatibility(referencePath, targetPath) {
const refFile = pathLib.resolve(referencePath, 'package.json');
const referencePkg = JSON.parse(fs.readFileSync(refFile, 'utf8'));
const targetFile = pathLib.resolve(targetPath, 'package.json');
const targetPkg = JSON.parse(fs.readFileSync(targetFile, 'utf8'));
const checkForMatchCompatibility = (
/** @type {PathFromSystemRoot} */ referencePath,
/** @type {PathFromSystemRoot} */ targetPath,
) => {
// const refFile = pathLib.resolve(referencePath, 'package.json');
const referencePkg = InputDataService.getPackageJson(referencePath);
// const targetFile = pathLib.resolve(targetPath, 'package.json');
const targetPkg = InputDataService.getPackageJson(targetPath);

const allTargetDeps = [
...Object.entries(targetPkg.devDependencies || {}),
...Object.entries(targetPkg.dependencies || {}),
...Object.entries(targetPkg?.devDependencies || {}),
...Object.entries(targetPkg?.dependencies || {}),
];
const importEntry = allTargetDeps.find(([name]) => referencePkg.name === name);
const importEntry = allTargetDeps.find(([name]) => referencePkg?.name === name);
if (!importEntry) {
return { compatible: false, reason: 'no-dependency' };
}
if (!semver.satisfies(referencePkg.version, importEntry[1])) {
if (referencePkg?.version && !semver.satisfies(referencePkg.version, importEntry[1])) {
return { compatible: false, reason: 'no-matched-version' };
}
return { compatible: true };
}
};

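Editor's note: a small sketch of the two conditions the rewritten `checkForMatchCompatibility` above encodes — (1) the reference project must appear in the target's dependencies, (2) the reference's own version must satisfy that range — using `semver.satisfies` the same way. The package data is made up:

```js
import semver from 'semver';

// Made-up stand-ins for InputDataService.getPackageJson(referencePath / targetPath).
const referencePkg = { name: 'reference-project', version: '2.3.0' };
const targetPkg = { dependencies: { 'reference-project': '^2.0.0' } };

const allTargetDeps = [
  ...Object.entries(targetPkg.devDependencies || {}),
  ...Object.entries(targetPkg.dependencies || {}),
];
const importEntry = allTargetDeps.find(([name]) => referencePkg.name === name);
if (!importEntry) {
  console.log({ compatible: false, reason: 'no-dependency' });
} else if (!semver.satisfies(referencePkg.version, importEntry[1])) {
  console.log({ compatible: false, reason: 'no-matched-version' });
} else {
  console.log({ compatible: true }); // '2.3.0' satisfies '^2.0.0'
}
```
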
/**
* If in json format, 'unwind' to be compatible for analysis...
@ -162,29 +166,37 @@ function unwindJsonResult(targetOrReferenceProjectResult) {
|
|||
return { queryOutput, analyzerMeta };
|
||||
}
|
||||
|
||||
class Analyzer {
|
||||
constructor() {
|
||||
this.requiredAst = 'babel';
|
||||
/** @type {AnalyzerName|''} */
|
||||
this.name = '';
|
||||
}
|
||||
export class Analyzer {
|
||||
static requiresReference = false;
|
||||
|
||||
static get requiresReference() {
|
||||
return false;
|
||||
/** @type {AnalyzerAst} */
|
||||
static requiredAst = 'babel';
|
||||
|
||||
/** @type {AnalyzerName} */
|
||||
static analyzerName = '';
|
||||
|
||||
name = /** @type {typeof Analyzer} */ (this.constructor).analyzerName;
|
||||
|
||||
_customConfig = {};
|
||||
|
||||
get config() {
|
||||
return {
|
||||
...this._customConfig,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* In a MatchAnalyzer, two Analyzers (a reference and target) are run.
|
||||
* For instance: a FindExportsAnalyzer and FindImportsAnalyzer are run.
|
||||
* In a MatchAnalyzer, two Analyzers (a reference and targer) are run.
|
||||
* For instance, in a MatchImportsAnalyzer, a FindExportsAnalyzer and FinImportsAnalyzer are run.
|
||||
* Their results can be provided as config params.
|
||||
* When they were stored in json format in the filesystem, 'unwind' them to be compatible for analysis...
|
||||
* @param {MatchAnalyzerConfig} cfg
|
||||
*/
|
||||
static __unwindProvidedResults(cfg) {
|
||||
if (cfg.targetProjectResult && !cfg.targetProjectResult.analyzerMeta) {
|
||||
if (cfg.targetProjectResult && !cfg.targetProjectResult?.analyzerMeta) {
|
||||
cfg.targetProjectResult = unwindJsonResult(cfg.targetProjectResult);
|
||||
}
|
||||
if (cfg.referenceProjectResult && !cfg.referenceProjectResult.analyzerMeta) {
|
||||
if (cfg.referenceProjectResult && !cfg.referenceProjectResult?.analyzerMeta) {
|
||||
cfg.referenceProjectResult = unwindJsonResult(cfg.referenceProjectResult);
|
||||
}
|
||||
}
|
||||
|
|
@ -195,16 +207,16 @@ class Analyzer {
|
|||
*/
|
||||
_prepare(cfg) {
|
||||
LogService.debug(`Analyzer "${this.name}": started _prepare method`);
|
||||
this.constructor.__unwindProvidedResults(cfg);
|
||||
/** @type {typeof Analyzer} */ (this.constructor).__unwindProvidedResults(cfg);
|
||||
|
||||
if (!cfg.targetProjectResult) {
|
||||
this.targetProjectMeta = InputDataService.getProjectMeta(cfg.targetProjectPath, true);
|
||||
this.targetProjectMeta = InputDataService.getProjectMeta(cfg.targetProjectPath);
|
||||
} else {
|
||||
this.targetProjectMeta = cfg.targetProjectResult.analyzerMeta.targetProject;
|
||||
}
|
||||
|
||||
if (cfg.referenceProjectPath && !cfg.referenceProjectResult) {
|
||||
this.referenceProjectMeta = InputDataService.getProjectMeta(cfg.referenceProjectPath, true);
|
||||
this.referenceProjectMeta = InputDataService.getProjectMeta(cfg.referenceProjectPath);
|
||||
} else if (cfg.referenceProjectResult) {
|
||||
this.referenceProjectMeta = cfg.referenceProjectResult.analyzerMeta.targetProject;
|
||||
}
|
||||
|
|
@ -227,6 +239,7 @@ class Analyzer {
|
|||
);
|
||||
|
||||
if (!compatible) {
|
||||
if (!cfg.suppressNonCriticalLogs) {
|
||||
LogService.info(
|
||||
`skipping ${LogService.pad(this.name, 16)} for ${
|
||||
this.identifier
|
||||
|
|
@ -235,6 +248,7 @@ class Analyzer {
|
|||
'',
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
return ensureAnalyzerResultFormat(`[${reason}]`, cfg, this);
|
||||
}
|
||||
}
|
||||
|
|
@ -245,13 +259,16 @@ class Analyzer {
|
|||
const cachedResult = Analyzer._getCachedAnalyzerResult({
|
||||
analyzerName: this.name,
|
||||
identifier: this.identifier,
|
||||
cfg,
|
||||
});
|
||||
|
||||
if (cachedResult) {
|
||||
return cachedResult;
|
||||
}
|
||||
|
||||
if (!cfg.suppressNonCriticalLogs) {
|
||||
LogService.info(`starting ${LogService.pad(this.name, 16)} for ${this.identifier}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get reference and search-target data
|
||||
|
|
@ -282,12 +299,14 @@ class Analyzer {
|
|||
LogService.debug(`Analyzer "${this.name}": started _finalize method`);
|
||||
|
||||
const analyzerResult = ensureAnalyzerResultFormat(queryOutput, cfg, this);
|
||||
if (!cfg.suppressNonCriticalLogs) {
|
||||
LogService.success(`finished ${LogService.pad(this.name, 16)} for ${this.identifier}`);
|
||||
}
|
||||
return analyzerResult;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {function|{traverseEntryFn: function: filePaths:string[]; projectPath: string}} traverseEntryOrConfig
|
||||
* @param {FileAstTraverseFn|{traverseEntryFn: FileAstTraverseFn; filePaths:string[]; projectPath: string}} traverseEntryOrConfig
|
||||
*/
|
||||
async _traverse(traverseEntryOrConfig) {
|
||||
LogService.debug(`Analyzer "${this.name}": started _traverse method`);
|
||||
|
|
@ -322,31 +341,38 @@ class Analyzer {
|
|||
/**
|
||||
* Create ASTs for our inputData
|
||||
*/
|
||||
const astDataProjects = await QueryService.addAstToProjectsData(finalTargetData, 'babel');
|
||||
return analyzePerAstEntry(astDataProjects[0], traverseEntryFn);
|
||||
const astDataProjects = await QueryService.addAstToProjectsData(
|
||||
finalTargetData,
|
||||
this.constructor.requiredAst,
|
||||
);
|
||||
return analyzePerAstFile(astDataProjects[0], traverseEntryFn, this.config);
|
||||
}
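
For context, the reworked `_traverse` accepts either a bare traverse function or a config object, per the typedef above. A minimal, hedged sketch of both call shapes (the analyzer subclass, the traverse callback parameters and the paths are placeholders, not part of this diff):

// Sketch only: names below are hypothetical.
const analyzer = new MyFindSomethingAnalyzer(); // hypothetical Analyzer subclass

// 1. Function shape: a single traverse function, run against the configured target project
await analyzer._traverse(astContext => {
  // collect data per AST file here
});

// 2. Config-object shape, as used by `execute`
await analyzer._traverse({
  traverseEntryFn: MyFindSomethingAnalyzer.analyzeFile, // static per-file analysis fn
  filePaths: ['./src/index.js'], // hypothetical target files
  projectPath: '/path/to/target-project', // hypothetical
});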
|
||||
|
||||
async execute(customConfig = {}) {
|
||||
LogService.debug(`Analyzer "${this.name}": started execute method`);
|
||||
|
||||
const cfg = {
|
||||
targetProjectPath: null,
|
||||
referenceProjectPath: null,
|
||||
...customConfig,
|
||||
};
|
||||
/**
|
||||
* Finds export specifiers and sources
|
||||
* @param {FindExportsConfig} customConfig
|
||||
*/
|
||||
async execute(customConfig) {
|
||||
this._customConfig = customConfig;
|
||||
const cfg = this.config;
|
||||
|
||||
/**
|
||||
* Prepare
|
||||
*/
|
||||
const analyzerResult = this._prepare(cfg);
|
||||
if (analyzerResult) {
|
||||
return analyzerResult;
|
||||
const cachedAnalyzerResult = this._prepare(cfg);
|
||||
if (cachedAnalyzerResult) {
|
||||
return cachedAnalyzerResult;
|
||||
}
|
||||
|
||||
/**
|
||||
* Traverse
|
||||
*/
|
||||
const queryOutput = await this._traverse(() => {});
|
||||
const queryOutput = await this._traverse({
|
||||
// @ts-ignore
|
||||
traverseEntryFn: this.constructor.analyzeFile,
|
||||
filePaths: cfg.targetFilePaths,
|
||||
projectPath: cfg.targetProjectPath,
|
||||
});
|
||||
|
||||
/**
|
||||
* Finalize
|
||||
|
|
@ -355,19 +381,19 @@ class Analyzer {
|
|||
}
|
||||
|
||||
/**
|
||||
* @desc Gets a cached result from ReportService. Since ReportService slightly modifies analyzer
|
||||
* Gets a cached result from ReportService. Since ReportService slightly modifies analyzer
|
||||
* output, we 'unwind' before we return...
|
||||
* @param {object} config
|
||||
* @param {string} config.analyzerName
|
||||
* @param {string} config.identifier
|
||||
* @param {{ analyzerName:AnalyzerName, identifier:string, cfg:AnalyzerConfig}} config
|
||||
* @returns {AnalyzerQueryResult|undefined}
|
||||
*/
|
||||
static _getCachedAnalyzerResult({ analyzerName, identifier }) {
|
||||
static _getCachedAnalyzerResult({ analyzerName, identifier, cfg }) {
|
||||
const cachedResult = ReportService.getCachedResult({ analyzerName, identifier });
|
||||
if (!cachedResult) {
|
||||
return undefined;
|
||||
}
|
||||
if (!cfg.suppressNonCriticalLogs) {
|
||||
LogService.success(`cached version found for ${identifier}`);
|
||||
}
|
||||
|
||||
/** @type {AnalyzerQueryResult} */
|
||||
const result = unwindJsonResult(cachedResult);
|
||||
|
|
@ -375,5 +401,3 @@ class Analyzer {
|
|||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { Analyzer };
|
||||
|
|
@ -0,0 +1,131 @@
|
|||
import babelParser from '@babel/parser';
|
||||
import * as parse5 from 'parse5';
|
||||
import swc from '@swc/core';
|
||||
import { traverseHtml } from '../utils/traverse-html.js';
|
||||
import { LogService } from './LogService.js';
|
||||
import { guardedSwcToBabel } from '../utils/guarded-swc-to-babel.js';
|
||||
|
||||
/**
|
||||
* @typedef {import("@babel/types").File} File
|
||||
* @typedef {import("@swc/core").Module} SwcAstModule
|
||||
* @typedef {import("@babel/parser").ParserOptions} ParserOptions
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
export class AstService {
|
||||
/**
|
||||
* Compiles an array of file paths using Babel.
|
||||
* @param {string} code
|
||||
* @param {ParserOptions} parserOptions
|
||||
* @returns {File}
|
||||
*/
|
||||
static _getBabelAst(code, parserOptions = {}) {
|
||||
const ast = babelParser.parse(code, {
|
||||
sourceType: 'module',
|
||||
plugins: [
|
||||
'importMeta',
|
||||
'dynamicImport',
|
||||
'classProperties',
|
||||
'exportDefaultFrom',
|
||||
'importAssertions',
|
||||
],
|
||||
...parserOptions,
|
||||
});
|
||||
return ast;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a code string with swc and converts the result into a Babel-compatible AST.
|
||||
* @param {string} code
|
||||
* @param {ParserOptions} parserOptions
|
||||
* @returns {File}
|
||||
*/
|
||||
static _getSwcToBabelAst(code, parserOptions = {}) {
|
||||
if (this.fallbackToBabel) {
|
||||
return this._getBabelAst(code, parserOptions);
|
||||
}
|
||||
const ast = swc.parseSync(code, {
|
||||
syntax: 'typescript',
|
||||
// importAssertions: true,
|
||||
...parserOptions,
|
||||
});
|
||||
return guardedSwcToBabel(ast, code);
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a code string with swc.
|
||||
* @param {string} code
|
||||
* @param {ParserOptions} parserOptions
|
||||
* @returns {SwcAstModule}
|
||||
*/
|
||||
static _getSwcAst(code, parserOptions = {}) {
|
||||
const ast = swc.parseSync(code, {
|
||||
syntax: 'typescript',
|
||||
target: 'es2022',
|
||||
...parserOptions,
|
||||
});
|
||||
return ast;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compensates for swc span bug: https://github.com/swc-project/swc/issues/1366#issuecomment-1516539812
|
||||
* @returns {number}
|
||||
*/
|
||||
static _getSwcOffset() {
|
||||
return swc.parseSync('').span.end;
|
||||
}
|
||||
|
||||
/**
|
||||
* Combines all script tags as if it were one js file.
|
||||
* @param {string} htmlCode
|
||||
*/
|
||||
static getScriptsFromHtml(htmlCode) {
|
||||
const ast = parse5.parseFragment(htmlCode);
|
||||
/**
|
||||
* @type {string[]}
|
||||
*/
|
||||
const scripts = [];
|
||||
traverseHtml(ast, {
|
||||
/**
|
||||
* @param {{ node: { childNodes: { value: any; }[]; }; }} path
|
||||
*/
|
||||
script(path) {
|
||||
const code = path.node.childNodes[0] ? path.node.childNodes[0].value : '';
|
||||
scripts.push(code);
|
||||
},
|
||||
});
|
||||
return scripts;
|
||||
}
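
A small illustration of the extraction above (the html snippet is made up):

// Hypothetical input: two inline script tags
const html = `
  <div id="app"></div>
  <script>console.log('a');</script>
  <script type="module">import('./b.js');</script>
`;
const scripts = AstService.getScriptsFromHtml(html);
// roughly: ["console.log('a');", "import('./b.js');"] (modulo surrounding whitespace)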
|
||||
|
||||
/**
|
||||
* Returns the AST for the requested parser ('babel', 'swc-to-babel' or 'swc')
|
||||
* @param { string } code
|
||||
* @param { 'babel'|'swc-to-babel'|'swc'} astType
|
||||
* @param { {filePath?: PathFromSystemRoot} } options
|
||||
* @returns {File|undefined|SwcAstModule}
|
||||
*/
|
||||
// eslint-disable-next-line consistent-return
|
||||
static getAst(code, astType, { filePath } = {}) {
|
||||
// eslint-disable-next-line default-case
|
||||
try {
|
||||
if (astType === 'babel') {
|
||||
return this._getBabelAst(code);
|
||||
}
|
||||
if (astType === 'swc-to-babel') {
|
||||
return this._getSwcToBabelAst(code);
|
||||
}
|
||||
if (astType === 'swc') {
|
||||
return this._getSwcAst(code);
|
||||
}
|
||||
throw new Error(`astType "${astType}" not supported.`);
|
||||
} catch (e) {
|
||||
LogService.error(`Error when parsing "${filePath}":\n${e}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
/**
|
||||
* This option can be used as a last resort when an swc AST, combined with swc-to-babel, is backwards incompatible
|
||||
* (for instance when @babel/generator expects a different ast structure and fails).
|
||||
* Analyzers should use guarded-swc-to-babel util.
|
||||
*/
|
||||
AstService.fallbackToBabel = false;
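
A hedged usage sketch of the dispatcher above (the import path, file path and code are illustrative):

import { AstService } from './program/core/AstService.js'; // path is illustrative

const code = `export const answer = 42;`;

// Default analyzer path: parse with swc and convert to a Babel-compatible AST
const babelCompatibleAst = AstService.getAst(code, 'swc-to-babel', {
  filePath: '/my/project/src/answer.js', // hypothetical
});

// Raw swc Module, for analyzers that traverse swc output directly
const swcAst = AstService.getAst(code, 'swc');

// Last-resort escape hatch described above: skip swc and parse with Babel itself
AstService.fallbackToBabel = true;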
|
||||
|
|
@ -1,50 +1,49 @@
|
|||
/* eslint-disable no-param-reassign */
|
||||
const fs = require('fs');
|
||||
const pathLib = require('path');
|
||||
const child_process = require('child_process'); // eslint-disable-line camelcase
|
||||
const glob = require('glob');
|
||||
const anymatch = require('anymatch');
|
||||
import fs from 'fs';
|
||||
import pathLib from 'path';
|
||||
import child_process from 'child_process'; // eslint-disable-line camelcase
|
||||
import glob from 'glob';
|
||||
import anymatch from 'anymatch';
|
||||
// @ts-expect-error
|
||||
const isNegatedGlob = require('is-negated-glob');
|
||||
const { LogService } = require('./LogService.js');
|
||||
const { AstService } = require('./AstService.js');
|
||||
const { getFilePathRelativeFromRoot } = require('../utils/get-file-path-relative-from-root.js');
|
||||
const { toPosixPath } = require('../utils/to-posix-path.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
|
||||
* @typedef {import('../types/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../types/core').QueryConfig} QueryConfig
|
||||
* @typedef {import('../types/core').QueryResult} QueryResult
|
||||
* @typedef {import('../types/core').FeatureQueryConfig} FeatureQueryConfig
|
||||
* @typedef {import('../types/core').SearchQueryConfig} SearchQueryConfig
|
||||
* @typedef {import('../types/core').AnalyzerQueryConfig} AnalyzerQueryConfig
|
||||
* @typedef {import('../types/core').Feature} Feature
|
||||
* @typedef {import('../types/core').AnalyzerConfig} AnalyzerConfig
|
||||
* @typedef {import('../types/core').Analyzer} Analyzer
|
||||
* @typedef {import('../types/core').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../types/core').GatherFilesConfig} GatherFilesConfig
|
||||
* @typedef {import('../types/core').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
* @typedef {import('../types/core').ProjectInputData} ProjectInputData
|
||||
* @typedef {import('../types/core').ProjectInputDataWithMeta} ProjectInputDataWithMeta
|
||||
* @typedef {import('../types/core').Project} Project
|
||||
* @typedef {import('../types/core').ProjectName} ProjectName
|
||||
*/
|
||||
import isNegatedGlob from 'is-negated-glob';
|
||||
import { LogService } from './LogService.js';
|
||||
import { AstService } from './AstService.js';
|
||||
import { getFilePathRelativeFromRoot } from '../utils/get-file-path-relative-from-root.js';
|
||||
import { toPosixPath } from '../utils/to-posix-path.js';
|
||||
import { memoize } from '../utils/memoize.js';
|
||||
|
||||
// const memoize = fn => fn;
|
||||
|
||||
/**
|
||||
* @typedef {import('../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
* @typedef {import('../../../types/index.js').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').PathRelative} PathRelative
|
||||
* @typedef {import('../../../types/index.js').QueryConfig} QueryConfig
|
||||
* @typedef {import('../../../types/index.js').QueryResult} QueryResult
|
||||
* @typedef {import('../../../types/index.js').FeatureQueryConfig} FeatureQueryConfig
|
||||
* @typedef {import('../../../types/index.js').SearchQueryConfig} SearchQueryConfig
|
||||
* @typedef {import('../../../types/index.js').AnalyzerQueryConfig} AnalyzerQueryConfig
|
||||
* @typedef {import('../../../types/index.js').Feature} Feature
|
||||
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
|
||||
* @typedef {import('../../../types/index.js').Analyzer} Analyzer
|
||||
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../../types/index.js').GatherFilesConfig} GatherFilesConfig
|
||||
* @typedef {import('../../../types/index.js').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
* @typedef {import('../../../types/index.js').ProjectInputData} ProjectInputData
|
||||
* @typedef {import('../../../types/index.js').ProjectInputDataWithMeta} ProjectInputDataWithMeta
|
||||
* @typedef {import('../../../types/index.js').Project} Project
|
||||
* @typedef {import('../../../types/index.js').ProjectName} ProjectName
|
||||
* @typedef {import('../../../types/index.js').PackageJson} PackageJson
|
||||
* @typedef {{path:PathFromSystemRoot; name:ProjectName}} ProjectNameAndPath
|
||||
* @typedef {{name:ProjectName;files:PathRelativeFromProjectRoot[], workspaces:string[]}} PkgJson
|
||||
*/
|
||||
|
||||
// TODO: memoize
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {PkgJson|undefined}
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => PackageJson|undefined} GetPackageJsonFn
|
||||
* @type {GetPackageJsonFn}
|
||||
*/
|
||||
function getPackageJson(rootPath) {
|
||||
const getPackageJson = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
try {
|
||||
const fileContent = fs.readFileSync(`${rootPath}/package.json`, 'utf8');
|
||||
return JSON.parse(fileContent);
|
||||
|
|
@ -58,27 +57,30 @@ function getPackageJson(rootPath) {
|
|||
return undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
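
The helpers above are now wrapped in `memoize`; its implementation is not part of this hunk, but a minimal sketch of the assumed behaviour (cache keyed on the stringified arguments) looks like this:

// Assumption: the real ../utils/memoize.js may differ (cache invalidation, size limits, etc.).
function memoizeSketch(fn) {
  const cache = new Map();
  return (...args) => {
    const key = JSON.stringify(args);
    if (!cache.has(key)) cache.set(key, fn(...args));
    return cache.get(key);
  };
}

// So repeated calls for the same root only hit the filesystem once:
// getPackageJson('/path/to/project'); // reads package.json
// getPackageJson('/path/to/project'); // served from cache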
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => object|undefined} GetLernaJsonFn
|
||||
* @type {GetLernaJsonFn}
|
||||
*/
|
||||
function getLernaJson(rootPath) {
|
||||
const getLernaJson = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
try {
|
||||
const fileContent = fs.readFileSync(`${rootPath}/lerna.json`, 'utf8');
|
||||
return JSON.parse(fileContent);
|
||||
} catch (_) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {PathFromSystemRoot[]|string[]} list
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {ProjectNameAndPath[]}
|
||||
* @typedef {(list:PathFromSystemRoot[]|string[], rootPath:PathFromSystemRoot) => ProjectNameAndPath[]} GetPathsFromGlobListFn
|
||||
* @type {GetPathsFromGlobListFn}
|
||||
*/
|
||||
function getPathsFromGlobList(list, rootPath) {
|
||||
const getPathsFromGlobList = memoize(
|
||||
(
|
||||
/** @type {PathFromSystemRoot[]|string[]} */ list,
|
||||
/** @type {PathFromSystemRoot} */ rootPath,
|
||||
) => {
|
||||
/** @type {string[]} */
|
||||
const results = [];
|
||||
list.forEach(pathOrGlob => {
|
||||
|
|
@ -103,26 +105,27 @@ function getPathsFromGlobList(list, rootPath) {
|
|||
const name = /** @type {ProjectName} */ ((pkgJson && pkgJson.name) || basename);
|
||||
return { name, path: /** @type {PathFromSystemRoot} */ (pkgPath) };
|
||||
});
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {string|undefined}
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => string|undefined} GetGitignoreFileFn
|
||||
* @type {GetGitignoreFileFn}
|
||||
*/
|
||||
function getGitignoreFile(rootPath) {
|
||||
const getGitignoreFile = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
try {
|
||||
return fs.readFileSync(`${rootPath}/.gitignore`, 'utf8');
|
||||
} catch (_) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {string[]}
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => string[]} GetGitIgnorePathsFn
|
||||
* @type {GetGitIgnorePathsFn}
|
||||
*/
|
||||
function getGitIgnorePaths(rootPath) {
|
||||
const fileContent = getGitignoreFile(rootPath);
|
||||
const getGitIgnorePaths = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
const fileContent = /** @type {string} */ (getGitignoreFile(rootPath));
|
||||
if (!fileContent) {
|
||||
return [];
|
||||
}
|
||||
|
|
@ -154,14 +157,14 @@ function getGitIgnorePaths(rootPath) {
|
|||
return entry;
|
||||
});
|
||||
return normalizedEntries;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Gives back all files and folders that need to be added to npm artifact
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {string[]}
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => string[]} GetNpmPackagePathsFn
|
||||
* @type {GetNpmPackagePathsFn}
|
||||
*/
|
||||
function getNpmPackagePaths(rootPath) {
|
||||
const getNpmPackagePaths = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
const pkgJson = getPackageJson(rootPath);
|
||||
if (!pkgJson) {
|
||||
return [];
|
||||
|
|
@ -176,7 +179,7 @@ function getNpmPackagePaths(rootPath) {
|
|||
});
|
||||
}
|
||||
return [];
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @param {any|any[]} v
|
||||
|
|
@ -189,8 +192,12 @@ function ensureArray(v) {
|
|||
/**
|
||||
* @param {string|string[]} patterns
|
||||
* @param {Partial<{keepDirs:boolean;root:string}>} [options]
|
||||
*
|
||||
* @typedef {(patterns:string|string[], opts: {keepDirs?:boolean;root:string}) => string[]} MultiGlobSyncFn
|
||||
* @type {MultiGlobSyncFn}
|
||||
*/
|
||||
function multiGlobSync(patterns, { keepDirs = false, root } = {}) {
|
||||
const multiGlobSync = memoize(
|
||||
(/** @type {string|string[]} */ patterns, { keepDirs = false, root } = {}) => {
|
||||
patterns = ensureArray(patterns);
|
||||
const res = new Set();
|
||||
patterns.forEach(pattern => {
|
||||
|
|
@ -203,12 +210,21 @@ function multiGlobSync(patterns, { keepDirs = false, root } = {}) {
|
|||
});
|
||||
});
|
||||
return Array.from(res);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* @param {string} localPathWithDotSlash
|
||||
* @returns {string}
|
||||
*/
|
||||
function stripDotSlashFromLocalPath(localPathWithDotSlash) {
|
||||
return localPathWithDotSlash.replace(/^\.\//, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} localPathWithoutDotSlash
|
||||
* @returns {string}
|
||||
*/
|
||||
function normalizeLocalPathWithDotSlash(localPathWithoutDotSlash) {
|
||||
if (!localPathWithoutDotSlash.startsWith('.')) {
|
||||
return `./${localPathWithoutDotSlash}`;
|
||||
|
|
@ -217,7 +233,7 @@ function normalizeLocalPathWithDotSlash(localPathWithoutDotSlash) {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param {{val:object|string;nodeResolveMode:string}} opts
|
||||
* @param {{valObjOrStr:object|string;nodeResolveMode:string}} opts
|
||||
* @returns {string|null}
|
||||
*/
|
||||
function getStringOrObjectValOfExportMapEntry({ valObjOrStr, nodeResolveMode }) {
|
||||
|
|
@ -238,12 +254,12 @@ function getStringOrObjectValOfExportMapEntry({ valObjOrStr, nodeResolveMode })
|
|||
*
|
||||
* Also serves as SSOT in many other contexts wrt data locations and gathering
|
||||
*/
|
||||
class InputDataService {
|
||||
export class InputDataService {
|
||||
/**
|
||||
* Create an array of ProjectData
|
||||
* @param {PathFromSystemRoot | ProjectInputData []} projectPaths
|
||||
* @param {(PathFromSystemRoot|ProjectInputData)[]} projectPaths
|
||||
* @param {Partial<GatherFilesConfig>} gatherFilesConfig
|
||||
* @returns {ProjectInputData[]}
|
||||
* @returns {ProjectInputDataWithMeta[]}
|
||||
*/
|
||||
static createDataObject(projectPaths, gatherFilesConfig = {}) {
|
||||
/** @type {ProjectInputData[]} */
|
||||
|
|
@ -295,18 +311,18 @@ class InputDataService {
|
|||
try {
|
||||
const pkgJson = getPackageJson(projectPath);
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
project.mainEntry = this.__normalizeMainEntry(pkgJson.main || './index.js');
|
||||
project.mainEntry = this.__normalizeMainEntry(pkgJson?.main || './index.js');
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
project.name = pkgJson.name;
|
||||
project.name = pkgJson?.name;
|
||||
// TODO: also add meta info whether we are in a monorepo or not.
|
||||
// We do this by checking whether there is a lerna.json on root level.
|
||||
// eslint-disable-next-line no-empty
|
||||
project.version = pkgJson.version;
|
||||
project.version = pkgJson?.version;
|
||||
} catch (e) {
|
||||
LogService.warn(/** @type {string} */ (e));
|
||||
}
|
||||
project.commitHash = this._getCommitHash(projectPath);
|
||||
return /** @type {Project} */ (project);
|
||||
return /** @type {Project} */ (Object.freeze(project));
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -365,7 +381,7 @@ class InputDataService {
|
|||
toPosixPath(projectObj.project.path),
|
||||
);
|
||||
if (pathLib.extname(file) === '.html') {
|
||||
const extractedScripts = AstService.getScriptsFromHtml(code);
|
||||
const extractedScripts = AstService.getScriptsFromHtml(/** @type {string} */ (code));
|
||||
// eslint-disable-next-line no-shadow
|
||||
extractedScripts.forEach((code, i) => {
|
||||
newEntries.push({
|
||||
|
|
@ -408,6 +424,10 @@ class InputDataService {
|
|||
.filter(dirPath => fs.lstatSync(dirPath).isDirectory());
|
||||
}
|
||||
|
||||
static set targetProjectPaths(v) {
|
||||
this.__targetProjectPaths = ensureArray(v);
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {PathFromSystemRoot[]} a list of strings representing all entry paths for projects we want to query
|
||||
*/
|
||||
|
|
@ -432,10 +452,6 @@ class InputDataService {
|
|||
this.__referenceProjectPaths = ensureArray(v);
|
||||
}
|
||||
|
||||
static set targetProjectPaths(v) {
|
||||
this.__targetProjectPaths = ensureArray(v);
|
||||
}
|
||||
|
||||
/**
|
||||
* @type {GatherFilesConfig}
|
||||
*/
|
||||
|
|
@ -606,12 +622,12 @@ class InputDataService {
|
|||
static getMonoRepoPackages(rootPath) {
|
||||
// [1] Look for npm/yarn workspaces
|
||||
const pkgJson = getPackageJson(rootPath);
|
||||
if (pkgJson && pkgJson.workspaces) {
|
||||
if (pkgJson?.workspaces) {
|
||||
return getPathsFromGlobList(pkgJson.workspaces, rootPath);
|
||||
}
|
||||
// [2] Look for lerna packages
|
||||
const lernaJson = getLernaJson(rootPath);
|
||||
if (lernaJson && lernaJson.packages) {
|
||||
if (lernaJson?.packages) {
|
||||
return getPathsFromGlobList(lernaJson.packages, rootPath);
|
||||
}
|
||||
// TODO: support forward compatibility for npm?
|
||||
|
|
@ -619,7 +635,7 @@ class InputDataService {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param {{[key:string]: string|object}} exports
|
||||
* @param {{[key:string]: string|object|null}} exports
|
||||
* @param {object} opts
|
||||
* @param {'default'|'development'|string} [opts.nodeResolveMode='default']
|
||||
* @param {string} opts.packageRootPath
|
||||
|
|
@ -688,8 +704,13 @@ class InputDataService {
|
|||
return exportMapPaths;
|
||||
}
|
||||
}
|
||||
InputDataService.cacheDisabled = false;
|
||||
// TODO: Remove memoizeConfig.isCacheDisabled once the whole of providence uses cacheConfig instead of
|
||||
// memoizeConfig.isCacheDisabled
|
||||
// InputDataService.cacheDisabled = memoizeConfig.isCacheDisabled;
|
||||
|
||||
InputDataService.getProjectMeta = memoize(InputDataService.getProjectMeta);
|
||||
InputDataService.gatherFilesFromDir = memoize(InputDataService.gatherFilesFromDir);
|
||||
InputDataService.getMonoRepoPackages = memoize(InputDataService.getMonoRepoPackages);
|
||||
InputDataService.createDataObject = memoize(InputDataService.createDataObject);
|
||||
|
||||
InputDataService.getPackageJson = getPackageJson;
|
||||
|
||||
module.exports = { InputDataService };
|
||||
|
|
@ -1,14 +1,19 @@
|
|||
const pathLib = require('path');
|
||||
const chalk = require('chalk');
|
||||
const ora = require('ora');
|
||||
const fs = require('fs');
|
||||
|
||||
/**
|
||||
* @typedef {import('ora').Ora} Ora
|
||||
*/
|
||||
import pathLib from 'path';
|
||||
import fs from 'fs';
|
||||
|
||||
const { log } = console;
|
||||
|
||||
const colors = {
|
||||
reset: '\x1b[0m',
|
||||
bright: '\x1b[1m',
|
||||
dim: '\x1b[2m',
|
||||
fgRed: '\x1b[31m',
|
||||
fgGreen: '\x1b[32m',
|
||||
fgYellow: '\x1b[33m',
|
||||
fgGray: '\x1b[90m',
|
||||
fgBlue: '\x1b[34m',
|
||||
};
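
The chalk and ora dependencies are replaced by raw ANSI escape codes; composing them is straightforward (illustrative snippet, not part of the diff):

// Minimal illustration of the escape codes above
console.log('\x1b[32m', 'success', '\x1b[0m', 'analyzer finished'); // green label, default-colored message
console.log('\x1b[33m', 'warning', '\x1b[0m', 'something to look at'); // yellow label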
|
||||
|
||||
/**
|
||||
* @param {string} [title]
|
||||
* @returns {string}
|
||||
|
|
@ -17,10 +22,7 @@ function printTitle(title) {
|
|||
return `${title ? `${title}\n` : ''}`;
|
||||
}
|
||||
|
||||
/** @type {Ora} */
|
||||
let spinner;
|
||||
|
||||
class LogService {
|
||||
export class LogService {
|
||||
/**
|
||||
* @param {string} text
|
||||
* @param {string} [title]
|
||||
|
|
@ -30,7 +32,7 @@ class LogService {
|
|||
return;
|
||||
}
|
||||
|
||||
log(chalk.bgCyanBright.black.bold(` debug${printTitle(title)}`), text);
|
||||
log(colors.bright, ` debug${printTitle(title)}`, colors.reset, text);
|
||||
// @ts-ignore
|
||||
this._logHistory.push(`- debug -${printTitle(title)} ${text}`);
|
||||
}
|
||||
|
|
@ -44,7 +46,7 @@ class LogService {
|
|||
return;
|
||||
}
|
||||
|
||||
log(chalk.bgYellowBright.black.bold(`warning${printTitle(title)}`), text);
|
||||
log(colors.fgYellow, `warning${printTitle(title)}`, colors.reset, text);
|
||||
// @ts-ignore
|
||||
this._logHistory.push(`- warning -${printTitle(title)} ${text}`);
|
||||
}
|
||||
|
|
@ -65,7 +67,7 @@ class LogService {
|
|||
return;
|
||||
}
|
||||
|
||||
log(chalk.bgRedBright.black.bold(` error${printTitle(title)}`), text);
|
||||
log(colors.fgRed, ` error${printTitle(title)}`, colors.reset, text);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -79,7 +81,7 @@ class LogService {
|
|||
return;
|
||||
}
|
||||
|
||||
log(chalk.bgGreen.black.bold(`success${printTitle(title)}`), text);
|
||||
log(colors.fgGreen, `success${printTitle(title)}`, colors.reset, text);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -89,37 +91,10 @@ class LogService {
|
|||
static info(text, title) {
|
||||
// @ts-ignore
|
||||
this._logHistory.push(`- info -${printTitle(title)} ${text}`);
|
||||
|
||||
if (this.allMuted) {
|
||||
return;
|
||||
}
|
||||
|
||||
log(chalk.bgBlue.black.bold(` info${printTitle(title)}`), text);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
static spinnerStart(text) {
|
||||
spinner = ora(text).start();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
static spinnerText(text) {
|
||||
if (!spinner) {
|
||||
this.spinnerStart(text);
|
||||
}
|
||||
spinner.text = text;
|
||||
}
|
||||
|
||||
static spinnerStop() {
|
||||
spinner.stop();
|
||||
}
|
||||
|
||||
static get spinner() {
|
||||
return spinner;
|
||||
log(colors.fgBlue, ` info${printTitle(title)}`, colors.reset, text);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -155,5 +130,3 @@ LogService.throwsOnError = false;
|
|||
|
||||
/** @type {string[]} */
|
||||
LogService._logHistory = [];
|
||||
|
||||
module.exports = { LogService };
|
||||
|
|
@ -1,36 +1,44 @@
|
|||
const deepmerge = require('deepmerge');
|
||||
const child_process = require('child_process'); // eslint-disable-line camelcase
|
||||
const { AstService } = require('./AstService.js');
|
||||
const { LogService } = require('./LogService.js');
|
||||
const { getFilePathRelativeFromRoot } = require('../utils/get-file-path-relative-from-root.js');
|
||||
import child_process from 'child_process'; // eslint-disable-line camelcase
|
||||
import path from 'path';
|
||||
import { AstService } from './AstService.js';
|
||||
import { LogService } from './LogService.js';
|
||||
import { getFilePathRelativeFromRoot } from '../utils/get-file-path-relative-from-root.js';
|
||||
import { getCurrentDir } from '../utils/get-current-dir.js';
|
||||
// import { memoize } from '../utils/memoize.js';
|
||||
|
||||
const memoize = fn => fn;
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
|
||||
* @typedef {import('../types/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../types/core').QueryConfig} QueryConfig
|
||||
* @typedef {import('../types/core').QueryResult} QueryResult
|
||||
* @typedef {import('../types/core').FeatureQueryConfig} FeatureQueryConfig
|
||||
* @typedef {import('../types/core').SearchQueryConfig} SearchQueryConfig
|
||||
* @typedef {import('../types/core').AnalyzerQueryConfig} AnalyzerQueryConfig
|
||||
* @typedef {import('../types/core').Feature} Feature
|
||||
* @typedef {import('../types/core').AnalyzerConfig} AnalyzerConfig
|
||||
* @typedef {import('../types/core').Analyzer} Analyzer
|
||||
* @typedef {import('../types/core').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../types/core').GatherFilesConfig} GatherFilesConfig
|
||||
* @typedef {import('../types/core').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
* @typedef {import('../types/core').ProjectInputData} ProjectInputData
|
||||
* @typedef {import('./Analyzer.js').Analyzer} Analyzer
|
||||
* @typedef {import('../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
* @typedef {import('../../../types/index.js').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').QueryConfig} QueryConfig
|
||||
* @typedef {import('../../../types/index.js').QueryResult} QueryResult
|
||||
* @typedef {import('../../../types/index.js').FeatureQueryConfig} FeatureQueryConfig
|
||||
* @typedef {import('../../../types/index.js').SearchQueryConfig} SearchQueryConfig
|
||||
* @typedef {import('../../../types/index.js').AnalyzerQueryConfig} AnalyzerQueryConfig
|
||||
* @typedef {import('../../../types/index.js').Feature} Feature
|
||||
* @typedef {import('../../../types/index.js').ProjectInputData} ProjectInputData
|
||||
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
|
||||
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../../types/index.js').GatherFilesConfig} GatherFilesConfig
|
||||
* @typedef {import('../../../types/index.js').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
*/
|
||||
|
||||
const astProjectsDataCache = new Map();
|
||||
|
||||
class QueryService {
|
||||
export class QueryService {
|
||||
/**
|
||||
* @param {string} regexString string for 'free' regex searches.
|
||||
* @returns {SearchQueryConfig}
|
||||
*/
|
||||
static getQueryConfigFromRegexSearchString(regexString) {
|
||||
if (typeof regexString !== 'string') {
|
||||
throw new Error('[QueryService.getQueryConfigFromRegexSearchString]: provide a string');
|
||||
}
|
||||
return { type: 'search', regexString };
|
||||
}
|
||||
|
||||
|
|
@ -44,8 +52,13 @@ class QueryService {
|
|||
* @returns {FeatureQueryConfig}
|
||||
*/
|
||||
static getQueryConfigFromFeatureString(queryString) {
|
||||
if (typeof queryString !== 'string') {
|
||||
throw new Error('[QueryService.getQueryConfigFromFeatureString]: provide a string');
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} candidate
|
||||
* Each candidate (tag, attrKey or attrValue) can end with an asterisk.
|
||||
* @param {string} candidate e.g. for my-*[attr*=x*]: 'my-*', 'attr*' or 'x*'
|
||||
* @returns {[string, boolean]}
|
||||
*/
|
||||
function parseContains(candidate) {
|
||||
|
|
@ -59,12 +72,12 @@ class QueryService {
|
|||
let featString;
|
||||
|
||||
// Creates tag ('tg-icon') and featString ('font-icon+size=xs')
|
||||
const match = queryString.match(/(^.*)(\[(.+)\])+/);
|
||||
if (match) {
|
||||
const attrMatch = queryString.match(/(^.*)(\[(.+)\])+/);
|
||||
if (attrMatch) {
|
||||
// eslint-disable-next-line prefer-destructuring
|
||||
tagCandidate = match[1];
|
||||
tagCandidate = attrMatch[1];
|
||||
// eslint-disable-next-line prefer-destructuring
|
||||
featString = match[3];
|
||||
featString = attrMatch[3];
|
||||
} else {
|
||||
tagCandidate = queryString;
|
||||
}
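
To make the split above concrete (the query string is an example, not from this diff):

// Hypothetical feature query: find <my-icon> elements whose "size" attribute starts with "x"
const exampleQueryString = 'my-icon[size=x*]';
const exampleMatch = exampleQueryString.match(/(^.*)(\[(.+)\])+/);
// exampleMatch[1] === 'my-icon'  -> tagCandidate
// exampleMatch[3] === 'size=x*'  -> featString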
|
||||
|
|
@ -94,52 +107,59 @@ class QueryService {
|
|||
}
|
||||
|
||||
/**
|
||||
* RSetrieves the default export found in ./program/analyzers/findImport.js
|
||||
* @param {string|Analyzer} analyzerObjectOrString
|
||||
* @param {AnalyzerConfig} analyzerConfig
|
||||
* @returns {AnalyzerQueryConfig}
|
||||
* Retrieves the default export found in ./program/analyzers/find-import.js
|
||||
* @param {typeof Analyzer} analyzerCtor
|
||||
* @param {AnalyzerConfig} [analyzerConfig]
|
||||
* @returns {Promise<AnalyzerQueryConfig>}
|
||||
*/
|
||||
static getQueryConfigFromAnalyzer(analyzerObjectOrString, analyzerConfig) {
|
||||
static async getQueryConfigFromAnalyzer(analyzerObjectOrString, analyzerConfig) {
|
||||
let analyzer;
|
||||
if (typeof analyzerObjectOrString === 'string') {
|
||||
// Get it from our location(s) of predefined analyzers.
|
||||
// Mainly needed when this method is called via cli
|
||||
try {
|
||||
// eslint-disable-next-line import/no-dynamic-require, global-require
|
||||
analyzer = /** @type {Analyzer} */ (require(`../analyzers/${analyzerObjectOrString}`));
|
||||
const module = /** @type {Analyzer} */ (
|
||||
await import(
|
||||
path.join(
|
||||
'file:///',
|
||||
path.resolve(
|
||||
getCurrentDir(import.meta.url),
|
||||
`../analyzers/${analyzerObjectOrString}.js`,
|
||||
),
|
||||
)
|
||||
)
|
||||
);
|
||||
analyzer = module.default;
|
||||
} catch (e) {
|
||||
LogService.error(e);
|
||||
LogService.error(e.toString());
|
||||
process.exit(1);
|
||||
}
|
||||
} else {
|
||||
// We don't need to import the analyzer, since we already have it
|
||||
analyzer = analyzerObjectOrString;
|
||||
}
|
||||
return {
|
||||
return /** @type {AnalyzerQueryConfig} */ ({
|
||||
type: 'ast-analyzer',
|
||||
analyzerName: /** @type {AnalyzerName} */ (analyzer.name),
|
||||
analyzerName: /** @type {AnalyzerName} */ (analyzer.analyzerName),
|
||||
analyzerConfig,
|
||||
analyzer,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc Search via unix grep
|
||||
* @param {InputData} inputData
|
||||
* @param {QueryConfig} queryConfig
|
||||
* @param {object} [customConfig]
|
||||
* @param {boolean} [customConfig.hasVerboseReporting]
|
||||
* @param {object} [customConfig.gatherFilesConfig]
|
||||
* Search via unix grep
|
||||
* @param {ProjectInputData} inputData
|
||||
* @param {FeatureQueryConfig|SearchQueryConfig} queryConfig
|
||||
* @param {{hasVerboseReporting:boolean;gatherFilesConfig:GatherFilesConfig}} [customConfig]
|
||||
* @returns {Promise<QueryResult>}
|
||||
*/
|
||||
static async grepSearch(inputData, queryConfig, customConfig) {
|
||||
const cfg = deepmerge(
|
||||
{
|
||||
const cfg = {
|
||||
hasVerboseReporting: false,
|
||||
gatherFilesConfig: {},
|
||||
},
|
||||
customConfig,
|
||||
);
|
||||
...customConfig,
|
||||
};
|
||||
|
||||
const results = [];
|
||||
// 1. Analyze the type of query from the QueryConfig (for instance 'feature' or 'search').
|
||||
|
|
@ -190,7 +210,7 @@ class QueryService {
|
|||
}
|
||||
|
||||
/**
|
||||
* Search via ast (typescript compilation)
|
||||
* Perform ast analysis
|
||||
* @param {AnalyzerQueryConfig} analyzerQueryConfig
|
||||
* @param {AnalyzerConfig} [customConfig]
|
||||
* @returns {Promise<AnalyzerQueryResult>}
|
||||
|
|
@ -222,7 +242,7 @@ class QueryService {
|
|||
|
||||
/**
|
||||
* @param {ProjectInputData[]} projectsData
|
||||
* @param {'babel'|'typescript'|'es-module-lexer'} requiredAst
|
||||
* @param {AnalyzerAst} requiredAst
|
||||
*/
|
||||
static async addAstToProjectsData(projectsData, requiredAst) {
|
||||
return projectsData.map(projectData => {
|
||||
|
|
@ -230,12 +250,13 @@ class QueryService {
|
|||
if (cachedData) {
|
||||
return cachedData;
|
||||
}
|
||||
|
||||
const resultEntries = projectData.entries.map(entry => {
|
||||
const ast = AstService.getAst(entry.context.code, requiredAst, { filePath: entry.file });
|
||||
return { ...entry, ast };
|
||||
});
|
||||
const astData = { ...projectData, entries: resultEntries };
|
||||
this._addToProjectsDataCache(projectData.project.path, astData);
|
||||
this._addToProjectsDataCache(`${projectData.project.path}#${requiredAst}`, astData);
|
||||
return astData;
|
||||
});
|
||||
}
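
The cache key now includes the requested AST flavour, so the same project parsed by different parsers no longer collides. Illustrative keys only (the path is made up); the cache itself is the module-level astProjectsDataCache Map:

const exampleProjectPath = '/path/to/target-project'; // hypothetical
const keyForSwcToBabel = `${exampleProjectPath}#swc-to-babel`;
const keyForSwc = `${exampleProjectPath}#swc`;
// Both entries can now live side by side without overwriting each other.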
|
||||
|
|
@ -244,12 +265,12 @@ class QueryService {
|
|||
* We need to make sure we don't run into memory issues (ASTs are huge),
|
||||
* so we only store one project in cache now. This will be a performance benefit for
|
||||
* lion-based-ui-cli, that runs providence consecutively for the same project
|
||||
* TODO: instead of storing one result in cache, use sizeof and a memory ;imit
|
||||
* TODO: instead of storing one result in cache, use sizeof and a memory limit
|
||||
* to allow for more projects
|
||||
* @param {string} path
|
||||
* @param {InputData} astData
|
||||
* @param {string} pathAndRequiredAst
|
||||
* @param {ProjectInputData} astData
|
||||
*/
|
||||
static _addToProjectsDataCache(path, astData) {
|
||||
static _addToProjectsDataCache(pathAndRequiredAst, astData) {
|
||||
if (this.cacheDisabled) {
|
||||
return;
|
||||
}
|
||||
|
|
@ -259,7 +280,7 @@ class QueryService {
|
|||
if (astProjectsDataCache.size >= 2) {
|
||||
astProjectsDataCache.delete(astProjectsDataCache.keys().next().value);
|
||||
}
|
||||
astProjectsDataCache.set(path, astData);
|
||||
astProjectsDataCache.set(pathAndRequiredAst, astData);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -311,14 +332,12 @@ class QueryService {
|
|||
* @returns
|
||||
*/
|
||||
static _performGrep(searchPath, regex, customConfig) {
|
||||
const cfg = deepmerge(
|
||||
{
|
||||
const cfg = {
|
||||
count: false,
|
||||
gatherFilesConfig: {},
|
||||
hasDebugEnabled: false,
|
||||
},
|
||||
customConfig,
|
||||
);
|
||||
...customConfig,
|
||||
};
|
||||
|
||||
const /** @type {string[]} */ ext = cfg.gatherFilesConfig.extensions;
|
||||
const include = ext ? `--include="\\.(${ext.map(e => e.slice(1)).join('|')})" ` : '';
|
||||
|
|
@ -340,5 +359,4 @@ class QueryService {
|
|||
}
|
||||
}
|
||||
QueryService.cacheDisabled = false;
|
||||
|
||||
module.exports = { QueryService };
|
||||
QueryService.addAstToProjectsData = memoize(QueryService.addAstToProjectsData);
|
||||
|
|
@ -1,15 +1,17 @@
|
|||
const fs = require('fs');
|
||||
const pathLib = require('path');
|
||||
const getHash = require('../utils/get-hash.js');
|
||||
import fs from 'fs';
|
||||
import pathLib from 'path';
|
||||
import { getHash } from '../utils/get-hash.js';
|
||||
// import { memoize } from '../utils/memoize.js';
|
||||
const memoize = fn => fn;
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/core').Project} Project
|
||||
* @typedef {import('../types/core').ProjectName} ProjectName
|
||||
* @typedef {import('../types/core').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
* @typedef {import('../types/core').AnalyzerConfig} AnalyzerConfig
|
||||
* @typedef {import('../types/core').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../types/core').QueryResult} QueryResult
|
||||
* @typedef {import('../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../../types/index.js').Project} Project
|
||||
* @typedef {import('../../../types/index.js').ProjectName} ProjectName
|
||||
* @typedef {import('../../../types/index.js').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
|
||||
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../../types/index.js').QueryResult} QueryResult
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
@ -28,7 +30,7 @@ function createResultIdentifier(searchP, cfg, refP) {
|
|||
return `${format(searchP)}${refP ? `_+_${format(refP)}` : ''}__${cfgHash}`;
|
||||
}
|
||||
|
||||
class ReportService {
|
||||
export class ReportService {
|
||||
/**
|
||||
* Prints queryResult report to console
|
||||
* @param {QueryResult} queryResult
|
||||
|
|
@ -60,6 +62,7 @@ class ReportService {
|
|||
}
|
||||
const { name } = queryResult.meta.analyzerMeta;
|
||||
const filePath = this._getResultFileNameAndPath(name, identifier);
|
||||
|
||||
fs.writeFileSync(filePath, output, { flag: 'w' });
|
||||
}
|
||||
|
||||
|
|
@ -125,5 +128,5 @@ class ReportService {
|
|||
fs.writeFileSync(filePath, JSON.stringify(file, null, 2), { flag: 'w' });
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { ReportService };
|
||||
ReportService.createIdentifier = memoize(ReportService.createIdentifier);
|
||||
ReportService.getCachedResult = memoize(ReportService.getCachedResult);
|
||||
|
|
@ -1,12 +1,25 @@
|
|||
const deepmerge = require('deepmerge');
|
||||
const { ReportService } = require('./services/ReportService.js');
|
||||
const { InputDataService } = require('./services/InputDataService.js');
|
||||
const { LogService } = require('./services/LogService.js');
|
||||
const { QueryService } = require('./services/QueryService.js');
|
||||
const { aForEach } = require('./utils/async-array-utils.js');
|
||||
import { performance } from 'perf_hooks';
|
||||
import { ReportService } from './core/ReportService.js';
|
||||
import { InputDataService } from './core/InputDataService.js';
|
||||
import { LogService } from './core/LogService.js';
|
||||
import { QueryService } from './core/QueryService.js';
|
||||
import { AstService } from './core/AstService.js';
|
||||
|
||||
// After handling a combo, we should know which project versions we have, since
|
||||
// the analyzer internally called createDataObject(which provides us the needed meta info).
|
||||
/**
|
||||
* @typedef {import('../../types/index.js').ProvidenceConfig} ProvidenceConfig
|
||||
* @typedef {import('../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../types/index.js').QueryResult} QueryResult
|
||||
* @typedef {import('../../types/index.js').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
* @typedef {import('../../types/index.js').QueryConfig} QueryConfig
|
||||
* @typedef {import('../../types/index.js').AnalyzerQueryConfig} AnalyzerQueryConfig
|
||||
* @typedef {import('../../types/index.js').GatherFilesConfig} GatherFilesConfig
|
||||
*/
|
||||
|
||||
/**
|
||||
* After handling a combo, we should know which project versions we have, since
|
||||
* the analyzer internally called createDataObject(which provides us the needed meta info).
|
||||
* @param {{queryResult: AnalyzerQueryResult; queryConfig: AnalyzerQueryConfig; providenceConfig: ProvidenceConfig}} opts
|
||||
*/
|
||||
function addToSearchTargetDepsFile({ queryResult, queryConfig, providenceConfig }) {
|
||||
const currentSearchTarget = queryConfig.analyzerConfig.targetProjectPath;
|
||||
// eslint-disable-next-line array-callback-return, consistent-return
|
||||
|
|
@ -26,6 +39,10 @@ function addToSearchTargetDepsFile({ queryResult, queryConfig, providenceConfig
|
|||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AnalyzerQueryResult} queryResult
|
||||
* @param {{outputPath:PathFromSystemRoot;report:boolean}} cfg
|
||||
*/
|
||||
function report(queryResult, cfg) {
|
||||
if (cfg.report && !queryResult.meta.analyzerMeta.__fromCache) {
|
||||
const { identifier } = queryResult.meta.analyzerMeta;
|
||||
|
|
@ -35,12 +52,13 @@ function report(queryResult, cfg) {
|
|||
|
||||
/**
|
||||
* Creates unique QueryConfig for analyzer turn
|
||||
* @param {QueryConfig} queryConfig
|
||||
* @param {string} targetProjectPath
|
||||
* @param {string} referenceProjectPath
|
||||
* @param {AnalyzerQueryConfig} queryConfig
|
||||
* @param {PathFromSystemRoot} targetProjectPath
|
||||
* @param {PathFromSystemRoot} referenceProjectPath
|
||||
* @returns {Partial<AnalyzerQueryResult>}
|
||||
*/
|
||||
function getSlicedQueryConfig(queryConfig, targetProjectPath, referenceProjectPath) {
|
||||
return {
|
||||
return /** @type {Partial<AnalyzerQueryResult>} */ ({
|
||||
...queryConfig,
|
||||
...{
|
||||
analyzerConfig: {
|
||||
|
|
@ -51,19 +69,20 @@ function getSlicedQueryConfig(queryConfig, targetProjectPath, referenceProjectPa
|
|||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc definition "projectCombo": referenceProject#version + searchTargetProject#version
|
||||
* @param {QueryConfig} slicedQConfig
|
||||
* @param {cfg} object
|
||||
* Definition "projectCombo": referenceProject#version + searchTargetProject#version
|
||||
* @param {AnalyzerQueryConfig} slicedQConfig
|
||||
* @param {{ gatherFilesConfig:GatherFilesConfig, gatherFilesConfigReference:GatherFilesConfig, skipCheckMatchCompatibility:boolean }} cfg
|
||||
*/
|
||||
async function handleAnalyzerForProjectCombo(slicedQConfig, cfg) {
|
||||
const queryResult = await QueryService.astSearch(slicedQConfig, {
|
||||
gatherFilesConfig: cfg.gatherFilesConfig,
|
||||
gatherFilesConfigReference: cfg.gatherFilesConfigReference,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
addSystemPathsInResult: cfg.addSystemPathsInResult,
|
||||
...slicedQConfig.analyzerConfig,
|
||||
});
|
||||
if (queryResult) {
|
||||
|
|
@ -73,7 +92,7 @@ async function handleAnalyzerForProjectCombo(slicedQConfig, cfg) {
|
|||
}
|
||||
|
||||
/**
|
||||
* @desc Here, we will match all our reference projects (exports) against all our search targets
|
||||
* Here, we will match all our reference projects (exports) against all our search targets
|
||||
* (imports).
|
||||
*
|
||||
* This is an expensive operation. Therefore, we allow caching.
|
||||
|
|
@ -88,16 +107,16 @@ async function handleAnalyzerForProjectCombo(slicedQConfig, cfg) {
|
|||
* All the json outputs can be aggregated in our dashboard and visually presented in
|
||||
* various ways.
|
||||
*
|
||||
* @param {QueryConfig} queryConfig
|
||||
* @param {ProvidenceConfig} cfg
|
||||
* @param {AnalyzerQueryConfig} queryConfig
|
||||
* @param {Partial<ProvidenceConfig>} cfg
|
||||
*/
|
||||
async function handleAnalyzer(queryConfig, cfg) {
|
||||
const queryResults = [];
|
||||
const { referenceProjectPaths, targetProjectPaths } = cfg;
|
||||
|
||||
await aForEach(targetProjectPaths, async searchTargetProject => {
|
||||
for (const searchTargetProject of targetProjectPaths) {
|
||||
if (referenceProjectPaths) {
|
||||
await aForEach(referenceProjectPaths, async ref => {
|
||||
for (const ref of referenceProjectPaths) {
|
||||
// Create a shallow cfg copy with just the current reference folder
|
||||
const slicedQueryConfig = getSlicedQueryConfig(queryConfig, searchTargetProject, ref);
|
||||
const queryResult = await handleAnalyzerForProjectCombo(slicedQueryConfig, cfg);
|
||||
|
|
@ -109,7 +128,7 @@ async function handleAnalyzer(queryConfig, cfg) {
|
|||
providenceConfig: cfg,
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
const slicedQueryConfig = getSlicedQueryConfig(queryConfig, searchTargetProject);
|
||||
const queryResult = await handleAnalyzerForProjectCombo(slicedQueryConfig, cfg);
|
||||
|
|
@ -122,7 +141,7 @@ async function handleAnalyzer(queryConfig, cfg) {
|
|||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
return queryResults;
|
||||
}
|
||||
|
||||
|
|
@ -149,24 +168,16 @@ async function handleRegexSearch(queryConfig, cfg, inputData) {
|
|||
}
|
||||
|
||||
/**
|
||||
* @desc Creates a report with usage metrics, based on a queryConfig.
|
||||
* Creates a report with usage metrics, based on a queryConfig.
|
||||
*
|
||||
* @param {QueryConfig} queryConfig a query configuration object containing analyzerOptions.
|
||||
* @param {object} customConfig
|
||||
* @param {'ast'|'grep'} customConfig.queryMethod whether analyzer should be run or a grep should
|
||||
* be performed
|
||||
* @param {string[]} customConfig.targetProjectPaths search target projects. For instance
|
||||
* ['/path/to/app-a', '/path/to/app-b', ... '/path/to/app-z']
|
||||
* @param {string[]} [customConfig.referenceProjectPaths] reference projects. Needed for 'match
|
||||
* analyzers', having `requiresReference: true`. For instance
|
||||
* ['/path/to/lib1', '/path/to/lib2']
|
||||
* @param {GatherFilesConfig} [customConfig.gatherFilesConfig]
|
||||
* @param {boolean} [customConfig.report]
|
||||
* @param {boolean} [customConfig.debugEnabled]
|
||||
* @param {Partial<ProvidenceConfig>} customConfig
|
||||
* @return {Promise<QueryResult[]>}
|
||||
*/
|
||||
async function providenceMain(queryConfig, customConfig) {
|
||||
const cfg = deepmerge(
|
||||
{
|
||||
export async function providence(queryConfig, customConfig) {
|
||||
const tStart = performance.now();
|
||||
|
||||
const cfg = /** @type {ProvidenceConfig} */ ({
|
||||
queryMethod: 'grep',
|
||||
// This is a merge of all 'main entry projects'
|
||||
// found in search-targets, including their children
|
||||
|
|
@ -181,9 +192,12 @@ async function providenceMain(queryConfig, customConfig) {
|
|||
debugEnabled: false,
|
||||
writeLogFile: false,
|
||||
skipCheckMatchCompatibility: false,
|
||||
},
|
||||
customConfig,
|
||||
);
|
||||
measurePerformance: false,
|
||||
/** Allows to navigate to source file in code editor */
|
||||
addSystemPathsInResult: false,
|
||||
fallbackToBabel: false,
|
||||
...customConfig,
|
||||
});
|
||||
|
||||
if (cfg.debugEnabled) {
|
||||
LogService.debugEnabled = true;
|
||||
|
|
@ -193,6 +207,10 @@ async function providenceMain(queryConfig, customConfig) {
|
|||
InputDataService.referenceProjectPaths = cfg.referenceProjectPaths;
|
||||
}
|
||||
|
||||
if (cfg.fallbackToBabel) {
|
||||
AstService.fallbackToBabel = true;
|
||||
}
|
||||
|
||||
let queryResults;
|
||||
if (queryConfig.type === 'ast-analyzer') {
|
||||
queryResults = await handleAnalyzer(queryConfig, cfg);
|
||||
|
|
@ -215,9 +233,15 @@ async function providenceMain(queryConfig, customConfig) {
|
|||
LogService.writeLogFile();
|
||||
}
|
||||
|
||||
const tEnd = performance.now();
|
||||
|
||||
if (cfg.measurePerformance) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`completed in ${((tEnd - tStart) / 1000).toFixed(2)} seconds`);
|
||||
}
|
||||
return queryResults;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
providence: providenceMain,
|
||||
export const _providenceModule = {
|
||||
providence,
|
||||
};
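
A hedged end-to-end sketch of calling the rewritten entrypoint (import paths, the analyzer name and the project paths are illustrative placeholders):

import { providence } from './src/program/providence.js'; // path is illustrative
import { QueryService } from './src/program/core/QueryService.js'; // path is illustrative

// getQueryConfigFromAnalyzer is async now that analyzers are loaded via dynamic import
const queryConfig = await QueryService.getQueryConfigFromAnalyzer('find-imports'); // hypothetical analyzer name
const queryResults = await providence(queryConfig, {
  targetProjectPaths: ['/path/to/app-a'], // hypothetical search targets
  referenceProjectPaths: ['/path/to/lib-1'], // hypothetical; only needed for match analyzers
  measurePerformance: true, // logs "completed in X seconds"
  fallbackToBabel: false, // set to true as a last resort to parse with Babel instead of swc
});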
|
||||
|
|
|
|||
|
|
@ -1,136 +0,0 @@
|
|||
// @ts-nocheck
|
||||
const {
|
||||
createProgram,
|
||||
getPreEmitDiagnostics,
|
||||
ModuleKind,
|
||||
ModuleResolutionKind,
|
||||
ScriptTarget,
|
||||
} = require('typescript');
|
||||
const babelParser = require('@babel/parser');
|
||||
// @ts-expect-error
|
||||
const esModuleLexer = require('es-module-lexer');
|
||||
const parse5 = require('parse5');
|
||||
const traverseHtml = require('../utils/traverse-html.js');
|
||||
const { LogService } = require('./LogService.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
class AstService {
|
||||
/**
|
||||
* @deprecated for simplicity/maintainability, only allow Babel for js
|
||||
* Compiles an array of file paths using Typescript.
|
||||
* @param {string[]} filePaths
|
||||
* @param {CompilerOptions} options
|
||||
*/
|
||||
static _getTypescriptAst(filePaths, options) {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
filePaths = Array.isArray(filePaths) ? filePaths : [filePaths];
|
||||
|
||||
const defaultOptions = {
|
||||
noEmitOnError: false,
|
||||
allowJs: true,
|
||||
experimentalDecorators: true,
|
||||
target: ScriptTarget.Latest,
|
||||
downlevelIteration: true,
|
||||
module: ModuleKind.ESNext,
|
||||
// module: ModuleKind.CommonJS,
|
||||
// lib: ["esnext", "dom"],
|
||||
strictNullChecks: true,
|
||||
moduleResolution: ModuleResolutionKind.NodeJs,
|
||||
esModuleInterop: true,
|
||||
noEmit: true,
|
||||
allowSyntheticDefaultImports: true,
|
||||
allowUnreachableCode: true,
|
||||
allowUnusedLabels: true,
|
||||
skipLibCheck: true,
|
||||
isolatedModules: true,
|
||||
};
|
||||
|
||||
const program = createProgram(filePaths, options || defaultOptions);
|
||||
const diagnostics = getPreEmitDiagnostics(program);
|
||||
const files = program.getSourceFiles().filter(sf => filePaths.includes(sf.fileName));
|
||||
return { diagnostics, program, files };
|
||||
}
|
||||
|
||||
/**
|
||||
* Compiles an array of file paths using Babel.
|
||||
* @param {string} code
|
||||
*/
|
||||
static _getBabelAst(code) {
|
||||
const ast = babelParser.parse(code, {
|
||||
sourceType: 'module',
|
||||
plugins: [
|
||||
'importMeta',
|
||||
'dynamicImport',
|
||||
'classProperties',
|
||||
'exportDefaultFrom',
|
||||
'importAssertions',
|
||||
],
|
||||
});
|
||||
return ast;
|
||||
}
|
||||
|
||||
/**
|
||||
* Combines all script tags as if it were one js file.
|
||||
* @param {string} htmlCode
|
||||
*/
|
||||
static getScriptsFromHtml(htmlCode) {
|
||||
const ast = parse5.parseFragment(htmlCode);
|
||||
const scripts = [];
|
||||
traverseHtml(ast, {
|
||||
script(path) {
|
||||
const code = path.node.childNodes[0] ? path.node.childNodes[0].value : '';
|
||||
scripts.push(code);
|
||||
},
|
||||
});
|
||||
return scripts;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated for simplicity/maintainability, only allow Babel for js
|
||||
* @param {string} code
|
||||
*/
|
||||
static async _getEsModuleLexerOutput(code) {
|
||||
return esModuleLexer.parse(code);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the desired AST
|
||||
* Why would we support multiple ASTs/parsers?
|
||||
* - 'babel' is our default tool for analysis. It's the most versatile and popular tool, it's
|
||||
* close to the EStree standard (other than Typescript) and a lot of plugins and resources can
|
||||
* be found online. It also allows to parse Typescript and spec proposals.
|
||||
* - 'typescript' (deprecated) is needed for some valuable third party tooling, like web-component-analyzer
|
||||
* - 'es-module-lexer' (deprecated) is needed for the dedicated task of finding module imports; it is way
|
||||
* quicker than a full fledged AST parser
|
||||
* @param { 'babel' } astType
|
||||
* @param { {filePath: PathFromSystemRoot} } [options]
|
||||
*/
|
||||
// eslint-disable-next-line consistent-return
|
||||
static getAst(code, astType, { filePath } = {}) {
|
||||
// eslint-disable-next-line default-case
|
||||
try {
|
||||
// eslint-disable-next-line default-case
|
||||
switch (astType) {
|
||||
case 'babel':
|
||||
return this._getBabelAst(code);
|
||||
case 'typescript':
|
||||
LogService.warn(`
|
||||
Please notice "typescript" support is deprecated.
|
||||
For parsing javascript, "babel" is recommended.`);
|
||||
return this._getTypescriptAst(code);
|
||||
case 'es-module-lexer':
|
||||
LogService.warn(`
|
||||
Please notice "es-module-lexer" support is deprecated.
|
||||
For parsing javascript, "babel" is recommended.`);
|
||||
return this._getEsModuleLexerOutput(code);
|
||||
}
|
||||
} catch (e) {
|
||||
LogService.error(`Error when parsing "${filePath}":/n${e}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { AstService };
|
||||
|
|
@ -1,6 +0,0 @@
|
|||
export * from './find-classes';
|
||||
export * from './find-customelements';
|
||||
export * from './find-exports';
|
||||
export * from './find-imports';
|
||||
export * from './match-imports';
|
||||
export * from './match-subclasses';
|
||||
|
|
@ -1,3 +0,0 @@
|
|||
export * from './core';
|
||||
export * from './Analyzer';
|
||||
export * from './QueryService';
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
/**
|
||||
* Readable way to do an async forEach
|
||||
* Since predictability matters, all array items will be handled in a queue,
|
||||
* one after another
|
||||
* @param {any[]} array
|
||||
* @param {function} callback
|
||||
*/
|
||||
async function aForEach(array, callback) {
|
||||
for (let i = 0; i < array.length; i += 1) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await callback(array[i], i);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Readable way to do an async forEach
|
||||
* If predictability does not matter, this method will traverse array items concurrently,
|
||||
* leading to a better performance
|
||||
* @param {any[]} array
|
||||
* @param {(value:any, index:number) => {}} callback
|
||||
*/
|
||||
async function aForEachNonSequential(array, callback) {
|
||||
return Promise.all(array.map(callback));
|
||||
}
|
||||
/**
|
||||
* Readable way to do an async map
|
||||
* Since predictability is crucial for a map, all array items will be handled in a queue,
|
||||
* one after anotoher
|
||||
* @param {Array<any>} array
|
||||
* @param {(param:any, i:number) => any} callback
|
||||
*/
|
||||
async function aMap(array, callback) {
|
||||
const mappedResults = [];
|
||||
for (let i = 0; i < array.length; i += 1) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const resolvedCb = await callback(array[i], i);
|
||||
mappedResults.push(resolvedCb);
|
||||
}
|
||||
return mappedResults;
|
||||
}
|
||||
|
||||
module.exports = { aForEach, aMap, aForEachNonSequential };
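The difference between the sequential and concurrent helpers above can be summarized in a small sketch; the file name in the require is assumed for illustration:

```js
// Sketch only: the require path is assumed.
const { aForEach, aForEachNonSequential, aMap } = require('./async-array-helpers.js');

const wait = ms => new Promise(resolve => setTimeout(resolve, ms));

(async () => {
  // Sequential: every callback is awaited before the next one starts
  await aForEach([300, 200, 100], async ms => {
    await wait(ms);
    console.log(`sequential: ${ms}`); // logs 300, 200, 100 in input order
  });

  // Concurrent: all callbacks start at once; completion order is not guaranteed
  await aForEachNonSequential([300, 200, 100], async ms => {
    await wait(ms);
    console.log(`concurrent: ${ms}`); // logs 100, 200, 300 here
  });

  // aMap keeps results aligned with the input order
  console.log(await aMap([1, 2, 3], async n => n * 2)); // [2, 4, 6]
})();
```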
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
/**
|
||||
* Swc might have a `with` or `assertions` property
|
||||
* @param {SwcNode} node
|
||||
* @returns {string | undefined}
|
||||
*/
|
||||
export function getAssertionType(node) {
|
||||
if (node.with) {
|
||||
return node.with.properties[0].value?.value;
|
||||
}
|
||||
if (node.assertions) {
|
||||
return node.assertions.properties[0].value?.value;
|
||||
}
|
||||
return undefined;
|
||||
}
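A sketch of the two node shapes this helper handles; the node fragments below are hand-written simplifications of real swc nodes (which carry more fields), and the import path is assumed:

```js
// Sketch only: simplified node fragments, import path assumed.
import { getAssertionType } from './get-assertion-type.js';

// Newer swc: `import data from './data.json' with { type: 'json' }`
const nodeWithWith = { with: { properties: [{ value: { value: 'json' } }] } };

// Older swc: `import data from './data.json' assert { type: 'json' }`
const nodeWithAssertions = { assertions: { properties: [{ value: { value: 'json' } }] } };

console.log(getAssertionType(nodeWithWith)); // 'json'
console.log(getAssertionType(nodeWithAssertions)); // 'json'
console.log(getAssertionType({})); // undefined
```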
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
import { dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
/**
|
||||
* @param {string} importMetaUrl should be import.meta.url
|
||||
*/
|
||||
export function getCurrentDir(importMetaUrl) {
|
||||
return dirname(fileURLToPath(importMetaUrl));
|
||||
}
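In the esm rewrite this helper replaces CommonJS `__dirname`; a usage sketch with an assumed import path and file layout:

```js
// Sketch only: import path and fixture layout assumed.
import path from 'path';
import { getCurrentDir } from './get-current-dir.js';

// Equivalent of __dirname in CommonJS
const currentDir = getCurrentDir(import.meta.url);
const fixturePath = path.join(currentDir, 'fixtures', 'providence.conf.js');
console.log(fixturePath);
```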
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
/**
|
||||
* @typedef {import('../types/core/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../types/core/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
@ -11,8 +11,6 @@
|
|||
* @param {PathFromSystemRoot} projectRoot
|
||||
* @returns {PathRelativeFromProjectRoot}
|
||||
*/
|
||||
function getFilePathRelativeFromRoot(absolutePath, projectRoot) {
|
||||
export function getFilePathRelativeFromRoot(absolutePath, projectRoot) {
|
||||
return /** @type {PathRelativeFromProjectRoot} */ (absolutePath.replace(projectRoot, '.'));
|
||||
}
|
||||
|
||||
module.exports = { getFilePathRelativeFromRoot };
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
* @param {string|object} inputValue
|
||||
* @returns {number}
|
||||
*/
|
||||
function getHash(inputValue) {
|
||||
export function getHash(inputValue) {
|
||||
if (typeof inputValue === 'object') {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
inputValue = JSON.stringify(inputValue);
|
||||
|
|
@ -14,5 +14,3 @@ function getHash(inputValue) {
|
|||
0,
|
||||
);
|
||||
}
|
||||
|
||||
module.exports = getHash;
|
||||
|
|
|
|||
|
|
@ -0,0 +1,188 @@
|
|||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import babelTraversePkg from '@babel/traverse';
|
||||
import { AstService } from '../core/AstService.js';
|
||||
import { trackDownIdentifier } from '../analyzers/helpers/track-down-identifier.js';
|
||||
import { toPosixPath } from './to-posix-path.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('@babel/types').Node} Node
|
||||
* @typedef {import('@babel/traverse').NodePath} NodePath
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {{rootPath:PathFromSystemRoot; localPath:PathRelativeFromProjectRoot}} opts
|
||||
* @returns
|
||||
*/
|
||||
export function getFilePathOrExternalSource({ rootPath, localPath }) {
|
||||
if (!localPath.startsWith('.')) {
|
||||
// We are not resolving external files like '@lion/input-amount/x.js',
|
||||
// but we give a 100% score if from and to are the same here.
|
||||
return localPath;
|
||||
}
|
||||
return toPosixPath(path.resolve(rootPath, localPath));
|
||||
}
|
||||
|
||||
/**
|
||||
* Assume we had:
|
||||
* ```js
|
||||
* const x = 88;
|
||||
* const y = x;
|
||||
* export const myIdentifier = y;
|
||||
* ```
|
||||
* - We started in getSourceCodeFragmentOfDeclaration (looking for 'myIdentifier'), which found VariableDeclarator of export myIdentifier
|
||||
* - getReferencedDeclaration is called with { referencedIdentifierName: 'y', ... }
|
||||
* - now we will look in globalScopeBindings, till we find declaration of 'y'
|
||||
* - Is it a ref? Call ourselves with referencedIdentifierName ('x' in example above)
|
||||
* - is it a non ref declaration? Return the path of the node
|
||||
* @param {{ referencedIdentifierName:string, globalScopeBindings:BabelBinding; }} opts
|
||||
* @returns {NodePath}
|
||||
*/
|
||||
export function getReferencedDeclaration({ referencedIdentifierName, globalScopeBindings }) {
|
||||
const [, refDeclaratorBinding] = Object.entries(globalScopeBindings).find(
|
||||
([key]) => key === referencedIdentifierName,
|
||||
);
|
||||
|
||||
if (
|
||||
refDeclaratorBinding.path.type === 'ImportSpecifier' ||
|
||||
refDeclaratorBinding.path.type === 'ImportDefaultSpecifier'
|
||||
) {
|
||||
return refDeclaratorBinding.path;
|
||||
}
|
||||
|
||||
if (refDeclaratorBinding.path.node.init.type === 'Identifier') {
|
||||
return getReferencedDeclaration({
|
||||
referencedIdentifierName: refDeclaratorBinding.path.node.init.name,
|
||||
globalScopeBindings,
|
||||
});
|
||||
}
|
||||
|
||||
return refDeclaratorBinding.path.get('init');
|
||||
}
|
||||
|
||||
/**
|
||||
* @example
|
||||
* ```js
|
||||
* // ------ input file --------
|
||||
* const x = 88;
|
||||
* const y = x;
|
||||
* export const myIdentifier = y;
|
||||
* // --------------------------
|
||||
*
|
||||
* await getSourceCodeFragmentOfDeclaration(code) // finds "88"
|
||||
* ```
|
||||
*
|
||||
* @param {{ filePath: PathFromSystemRoot; exportedIdentifier: string; projectRootPath: PathFromSystemRoot }} opts
|
||||
* @returns {Promise<{ sourceNodePath: string; sourceFragment: string|null; externalImportSource: string; }>}
|
||||
*/
|
||||
export async function getSourceCodeFragmentOfDeclaration({
|
||||
filePath,
|
||||
exportedIdentifier,
|
||||
projectRootPath,
|
||||
}) {
|
||||
const code = fs.readFileSync(filePath, 'utf8');
|
||||
// TODO: fix swc-to-babel lib to make this compatible with 'swc-to-babel' mode of getAst
|
||||
const babelAst = AstService.getAst(code, 'babel', { filePath });
|
||||
|
||||
/** @type {NodePath} */
|
||||
let finalNodePath;
|
||||
|
||||
babelTraversePkg.default(babelAst, {
|
||||
Program(astPath) {
|
||||
astPath.stop();
|
||||
|
||||
// Situations
|
||||
// - Identifier is part of default export (in this case 'exportedIdentifier' is '[default]' )
|
||||
// - declared right away (for instance a class)
|
||||
// - referenced (possibly recursively) by other declaration
|
||||
// - Identifier is part of a named export
|
||||
// - declared right away
|
||||
// - referenced (possibly recursively) by other declaration
|
||||
|
||||
const globalScopeBindings = astPath.get('body')[0].scope.bindings;
|
||||
|
||||
if (exportedIdentifier === '[default]') {
|
||||
const defaultExportPath = astPath
|
||||
.get('body')
|
||||
.find(child => child.node.type === 'ExportDefaultDeclaration');
|
||||
// @ts-expect-error
|
||||
const isReferenced = defaultExportPath?.node.declaration?.type === 'Identifier';
|
||||
|
||||
if (!isReferenced) {
|
||||
finalNodePath = defaultExportPath.get('declaration');
|
||||
} else {
|
||||
finalNodePath = getReferencedDeclaration({
|
||||
referencedIdentifierName: defaultExportPath.node.declaration.name,
|
||||
globalScopeBindings,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
const variableDeclaratorPath = astPath.scope.getBinding(exportedIdentifier).path;
|
||||
const varDeclNode = variableDeclaratorPath.node;
|
||||
const isReferenced = varDeclNode.init?.type === 'Identifier';
|
||||
const contentPath = varDeclNode.init
|
||||
? variableDeclaratorPath.get('init')
|
||||
: variableDeclaratorPath;
|
||||
|
||||
const name = varDeclNode.init
|
||||
? varDeclNode.init.name
|
||||
: varDeclNode.id?.name || varDeclNode.imported.name;
|
||||
|
||||
if (!isReferenced) {
|
||||
// it must be an exported declaration
|
||||
finalNodePath = contentPath;
|
||||
} else {
|
||||
finalNodePath = getReferencedDeclaration({
|
||||
referencedIdentifierName: name,
|
||||
globalScopeBindings,
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
if (finalNodePath.type === 'ImportSpecifier') {
|
||||
const importDeclNode = finalNodePath.parentPath.node;
|
||||
const source = importDeclNode.source.value;
|
||||
const identifierName = finalNodePath.node.imported.name;
|
||||
const currentFilePath = filePath;
|
||||
|
||||
const rootFile = await trackDownIdentifier(
|
||||
source,
|
||||
identifierName,
|
||||
currentFilePath,
|
||||
projectRootPath,
|
||||
);
|
||||
const filePathOrSrc = getFilePathOrExternalSource({
|
||||
rootPath: projectRootPath,
|
||||
localPath: rootFile.file,
|
||||
});
|
||||
|
||||
// TODO: allow resolving external project file paths
|
||||
if (!filePathOrSrc.startsWith('/')) {
|
||||
// So we have an external project; something like '@lion/input/x.js'
|
||||
return {
|
||||
sourceNodePath: finalNodePath,
|
||||
sourceFragment: null,
|
||||
externalImportSource: filePathOrSrc,
|
||||
};
|
||||
}
|
||||
|
||||
return getSourceCodeFragmentOfDeclaration({
|
||||
filePath: filePathOrSrc,
|
||||
exportedIdentifier: rootFile.specifier,
|
||||
projectRootPath,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
sourceNodePath: finalNodePath,
|
||||
sourceFragment: code.slice(
|
||||
finalNodePath.node?.loc?.start.index,
|
||||
finalNodePath.node?.loc?.end.index,
|
||||
),
|
||||
externalImportSource: null,
|
||||
};
|
||||
}
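Tying the pieces above together, a usage sketch that traces an exported identifier back to its source fragment; the file path, file contents and project root below are assumptions:

```js
// Sketch only: paths and file contents are assumed.
import { getSourceCodeFragmentOfDeclaration } from './get-source-code-fragment-of-declaration.js';

// Given /my/project/src/tokens.js containing:
//   const x = 88;
//   const y = x;
//   export const myIdentifier = y;
const { sourceFragment, externalImportSource } = await getSourceCodeFragmentOfDeclaration({
  filePath: '/my/project/src/tokens.js',
  exportedIdentifier: 'myIdentifier',
  projectRootPath: '/my/project',
});

console.log(sourceFragment); // '88'
console.log(externalImportSource); // null, because the declaration was resolved locally
```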
|
||||
|
|
@ -1,11 +1,23 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const babelTraversePkg = require('@babel/traverse');
|
||||
const { AstService } = require('../services/AstService.js');
|
||||
const { trackDownIdentifier } = require('../analyzers/helpers/track-down-identifier.js');
|
||||
const { toPosixPath } = require('./to-posix-path.js');
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { swcTraverse, getPathFromNode } from './swc-traverse.js';
|
||||
import { AstService } from '../core/AstService.js';
|
||||
import { trackDownIdentifier } from '../analyzers/helpers/track-down-identifier.js';
|
||||
import { toPosixPath } from './to-posix-path.js';
|
||||
|
||||
function getFilePathOrExternalSource({ rootPath, localPath }) {
|
||||
/**
|
||||
* @typedef {import('@swc/core').Node} SwcNode
|
||||
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
|
||||
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {{rootPath:PathFromSystemRoot; localPath:PathRelativeFromProjectRoot}} opts
|
||||
* @returns
|
||||
*/
|
||||
export function getFilePathOrExternalSource({ rootPath, localPath }) {
|
||||
if (!localPath.startsWith('.')) {
|
||||
// We are not resolving external files like '@lion/input-amount/x.js',
|
||||
// but we give a 100% score if from and to are the same here.
|
||||
|
|
@ -22,28 +34,29 @@ function getFilePathOrExternalSource({ rootPath, localPath }) {
|
|||
* export const myIdentifier = y;
|
||||
* ```
|
||||
* - We started in getSourceCodeFragmentOfDeclaration (looking for 'myIdentifier'), which found VariableDeclarator of export myIdentifier
|
||||
* - getReferencedDeclaration is called with { referencedIdentifierName: 'y', ... }
|
||||
* - getReferencedDeclaration is called with { referencedIdentifierName: 'y', globalScopeBindings: {x: SwcBinding; y: SwcBinding} }
|
||||
* - now we will look in globalScopeBindings, till we find declaration of 'y'
|
||||
* - Is it a ref? Call ourselves with referencedIdentifierName ('x' in example above)
|
||||
* - is it a non ref declaration? Return the path of the node
|
||||
* @param {{ referencedIdentifierName:string, globalScopeBindings:BabelBinding; }} opts
|
||||
* @returns {BabelNodePath}
|
||||
* @param {{ referencedIdentifierName:string, globalScopeBindings:{[key:string]:SwcBinding}; }} opts
|
||||
* @returns {SwcPath|null}
|
||||
*/
|
||||
function getReferencedDeclaration({ referencedIdentifierName, globalScopeBindings }) {
|
||||
const [, refDeclaratorBinding] = Object.entries(globalScopeBindings).find(
|
||||
([key]) => key === referencedIdentifierName,
|
||||
);
|
||||
export function getReferencedDeclaration({ referencedIdentifierName, globalScopeBindings }) {
|
||||
// We go from referencedIdentifierName 'y' to binding (VariableDeclarator path) 'y';
|
||||
const refDeclaratorBinding = globalScopeBindings[referencedIdentifierName];
|
||||
|
||||
if (
|
||||
refDeclaratorBinding.path.type === 'ImportSpecifier' ||
|
||||
refDeclaratorBinding.path.type === 'ImportDefaultSpecifier'
|
||||
) {
|
||||
// We provided a referencedIdentifierName that is not in the globalScopeBindings
|
||||
if (!refDeclaratorBinding) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (['ImportSpecifier', 'ImportDefaultSpecifier'].includes(refDeclaratorBinding.path.node.type)) {
|
||||
return refDeclaratorBinding.path;
|
||||
}
|
||||
|
||||
if (refDeclaratorBinding.path.node.init.type === 'Identifier') {
|
||||
if (refDeclaratorBinding.identifier.init.type === 'Identifier') {
|
||||
return getReferencedDeclaration({
|
||||
referencedIdentifierName: refDeclaratorBinding.path.node.init.name,
|
||||
referencedIdentifierName: refDeclaratorBinding.identifier.init.value,
|
||||
globalScopeBindings,
|
||||
});
|
||||
}
|
||||
|
|
@ -52,22 +65,40 @@ function getReferencedDeclaration({ referencedIdentifierName, globalScopeBinding
|
|||
}
|
||||
|
||||
/**
|
||||
* @example
|
||||
* ```js
|
||||
* // ------ input file --------
|
||||
* const x = 88;
|
||||
* const y = x;
|
||||
* export const myIdentifier = y;
|
||||
* // --------------------------
|
||||
*
|
||||
* @param {{ filePath: string; exportedIdentifier: string; }} opts
|
||||
* await getSourceCodeFragmentOfDeclaration(code) // finds "88"
|
||||
* ```
|
||||
*
|
||||
* @param {{ filePath: PathFromSystemRoot; exportedIdentifier: string; projectRootPath: PathFromSystemRoot }} opts
|
||||
* @returns {Promise<{ sourceNodePath: SwcPath; sourceFragment: string|null; externalImportSource: string|null; }>}
|
||||
*/
|
||||
async function getSourceCodeFragmentOfDeclaration({
|
||||
export async function getSourceCodeFragmentOfDeclaration({
|
||||
filePath,
|
||||
exportedIdentifier,
|
||||
projectRootPath,
|
||||
}) {
|
||||
const code = fs.readFileSync(filePath, 'utf-8');
|
||||
const ast = AstService.getAst(code, 'babel');
|
||||
const code = fs.readFileSync(filePath, 'utf8');
|
||||
|
||||
// compensate for swc span bug: https://github.com/swc-project/swc/issues/1366#issuecomment-1516539812
|
||||
const offset = AstService._getSwcOffset();
|
||||
// TODO: fix swc-to-babel lib to make this compatible with 'swc-to-babel' mode of getAst
|
||||
const swcAst = AstService._getSwcAst(code);
|
||||
|
||||
/** @type {SwcPath} */
|
||||
let finalNodePath;
|
||||
|
||||
babelTraversePkg.default(ast, {
|
||||
Program(babelPath) {
|
||||
babelPath.stop();
|
||||
swcTraverse(
|
||||
swcAst,
|
||||
{
|
||||
Module(astPath) {
|
||||
astPath.stop();
|
||||
|
||||
// Situations
|
||||
// - Identifier is part of default export (in this case 'exportedIdentifier' is '[default]' )
|
||||
|
|
@ -77,24 +108,31 @@ async function getSourceCodeFragmentOfDeclaration({
|
|||
// - declared right away
|
||||
// - referenced (possibly recursively) by other declaration
|
||||
|
||||
const globalScopeBindings = babelPath.get('body')[0].scope.bindings;
|
||||
const globalScopeBindings = getPathFromNode(astPath.node.body?.[0])?.scope.bindings;
|
||||
|
||||
if (exportedIdentifier === '[default]') {
|
||||
const defaultExportPath = babelPath
|
||||
.get('body')
|
||||
.find(child => child.node.type === 'ExportDefaultDeclaration');
|
||||
const isReferenced = defaultExportPath.node.declaration?.type === 'Identifier';
|
||||
const defaultExportPath = /** @type {SwcPath} */ (
|
||||
getPathFromNode(
|
||||
astPath.node.body.find((/** @type {{ type: string; }} */ child) =>
|
||||
['ExportDefaultDeclaration', 'ExportDefaultExpression'].includes(child.type),
|
||||
),
|
||||
)
|
||||
);
|
||||
const isReferenced = defaultExportPath?.node.expression?.type === 'Identifier';
|
||||
|
||||
if (!isReferenced) {
|
||||
finalNodePath = defaultExportPath.get('declaration');
|
||||
finalNodePath = defaultExportPath.get('decl') || defaultExportPath.get('expression');
|
||||
} else {
|
||||
finalNodePath = getReferencedDeclaration({
|
||||
referencedIdentifierName: defaultExportPath.node.declaration.name,
|
||||
finalNodePath = /** @type {SwcPath} */ (
|
||||
getReferencedDeclaration({
|
||||
referencedIdentifierName: defaultExportPath.node.expression.value,
|
||||
// @ts-expect-error
|
||||
globalScopeBindings,
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
} else {
|
||||
const variableDeclaratorPath = babelPath.scope.getBinding(exportedIdentifier).path;
|
||||
const variableDeclaratorPath = astPath.scope.bindings[exportedIdentifier].path;
|
||||
const varDeclNode = variableDeclaratorPath.node;
|
||||
const isReferenced = varDeclNode.init?.type === 'Identifier';
|
||||
const contentPath = varDeclNode.init
|
||||
|
|
@ -102,26 +140,34 @@ async function getSourceCodeFragmentOfDeclaration({
|
|||
: variableDeclaratorPath;
|
||||
|
||||
const name = varDeclNode.init
|
||||
? varDeclNode.init.name
|
||||
: varDeclNode.id?.name || varDeclNode.imported.name;
|
||||
? varDeclNode.init.value
|
||||
: varDeclNode.id?.value || varDeclNode.imported?.value || varDeclNode.orig?.value;
|
||||
|
||||
if (!isReferenced) {
|
||||
// it must be an exported declaration
|
||||
finalNodePath = contentPath;
|
||||
} else {
|
||||
finalNodePath = getReferencedDeclaration({
|
||||
finalNodePath = /** @type {SwcPath} */ (
|
||||
getReferencedDeclaration({
|
||||
referencedIdentifierName: name,
|
||||
// @ts-expect-error
|
||||
globalScopeBindings,
|
||||
});
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
},
|
||||
});
|
||||
},
|
||||
{ needsAdvancedPaths: true },
|
||||
);
|
||||
|
||||
// @ts-expect-error
|
||||
if (finalNodePath.type === 'ImportSpecifier') {
|
||||
// @ts-expect-error
|
||||
const importDeclNode = finalNodePath.parentPath.node;
|
||||
const source = importDeclNode.source.value;
|
||||
const identifierName = finalNodePath.node.imported.name;
|
||||
// @ts-expect-error
|
||||
const identifierName = finalNodePath.node.imported?.value || finalNodePath.node.local?.value;
|
||||
const currentFilePath = filePath;
|
||||
|
||||
const rootFile = await trackDownIdentifier(
|
||||
|
|
@ -132,13 +178,14 @@ async function getSourceCodeFragmentOfDeclaration({
|
|||
);
|
||||
const filePathOrSrc = getFilePathOrExternalSource({
|
||||
rootPath: projectRootPath,
|
||||
localPath: rootFile.file,
|
||||
localPath: /** @type {PathRelativeFromProjectRoot} */ (rootFile.file),
|
||||
});
|
||||
|
||||
// TODO: allow resolving external project file paths
|
||||
if (!filePathOrSrc.startsWith('/')) {
|
||||
// So we have external project; smth like '@lion/input/x.js'
|
||||
return {
|
||||
// @ts-expect-error
|
||||
sourceNodePath: finalNodePath,
|
||||
sourceFragment: null,
|
||||
externalImportSource: filePathOrSrc,
|
||||
|
|
@ -146,21 +193,22 @@ async function getSourceCodeFragmentOfDeclaration({
|
|||
}
|
||||
|
||||
return getSourceCodeFragmentOfDeclaration({
|
||||
filePath: filePathOrSrc,
|
||||
filePath: /** @type {PathFromSystemRoot} */ (filePathOrSrc),
|
||||
exportedIdentifier: rootFile.specifier,
|
||||
projectRootPath,
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
// @ts-expect-error
|
||||
sourceNodePath: finalNodePath,
|
||||
sourceFragment: code.slice(finalNodePath.node?.start, finalNodePath.node?.end),
|
||||
sourceFragment: code.slice(
|
||||
// @ts-expect-error
|
||||
finalNodePath.node.span.start - 1 - offset,
|
||||
// @ts-expect-error
|
||||
finalNodePath.node.span.end - 1 - offset,
|
||||
),
|
||||
// sourceFragment: finalNodePath.node?.raw || finalNodePath.node?.value,
|
||||
externalImportSource: null,
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
getSourceCodeFragmentOfDeclaration,
|
||||
getFilePathOrExternalSource,
|
||||
getReferencedDeclaration,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -0,0 +1,23 @@
|
|||
import toBabel from 'swc-to-babel';
|
||||
|
||||
/**
|
||||
* @typedef {import('@babel/types').File} File
|
||||
*/
|
||||
|
||||
/**
|
||||
* Internal wrapper around swc-to-babel...
|
||||
* Allows us to easily switch all swc-based analyzers back to Babel in case
|
||||
* they turn out not to be stable yet (for instance printing a transformed ast with @babel/generator)
|
||||
* Checks first whether it gets a Babel ast provided or not...
|
||||
* @param {*} swcOrBabelAst
|
||||
* @param {string} source
|
||||
* @returns {File}
|
||||
*/
|
||||
export function guardedSwcToBabel(swcOrBabelAst, source) {
|
||||
const isSwcAst = swcOrBabelAst.type === 'Module';
|
||||
if (isSwcAst) {
|
||||
// @ts-ignore
|
||||
return toBabel(swcOrBabelAst, source);
|
||||
}
|
||||
return swcOrBabelAst;
|
||||
}
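A sketch of how the guard behaves for both ast flavors; import paths are assumptions and `parseSync` comes from `@swc/core`:

```js
// Sketch only: import paths assumed.
import { parseSync } from '@swc/core';
import { guardedSwcToBabel } from './guarded-swc-to-babel.js';

const source = `export const answer = 42;`;

// An swc ast (type 'Module') is converted to a Babel File node...
const swcAst = parseSync(source, { syntax: 'ecmascript' });
const babelAst = guardedSwcToBabel(swcAst, source);

// ...while an ast that is already Babel-shaped is passed through untouched
console.log(guardedSwcToBabel(babelAst, source) === babelAst); // true
```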
|
||||
|
|
@ -1,16 +1,8 @@
|
|||
const {
|
||||
export {
|
||||
getSourceCodeFragmentOfDeclaration,
|
||||
getFilePathOrExternalSource,
|
||||
} = require('./get-source-code-fragment-of-declaration.js');
|
||||
const { memoize } = require('./memoize.js');
|
||||
const { toRelativeSourcePath, isRelativeSourcePath } = require('./relative-source-path.js');
|
||||
} from './get-source-code-fragment-of-declaration.js';
|
||||
export { memoize } from './memoize.js';
|
||||
export { toRelativeSourcePath, isRelativeSourcePath } from './relative-source-path.js';
|
||||
|
||||
// TODO: move trackdownIdentifier to utils as well
|
||||
|
||||
module.exports = {
|
||||
memoize,
|
||||
getSourceCodeFragmentOfDeclaration,
|
||||
getFilePathOrExternalSource,
|
||||
toRelativeSourcePath,
|
||||
isRelativeSourcePath,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -36,7 +36,7 @@
|
|||
* console.log(tags);
|
||||
* }
|
||||
*/
|
||||
class JsdocCommentParser {
|
||||
export default class JsdocCommentParser {
|
||||
/**
|
||||
* parse comment to tags.
|
||||
* @param {ASTNode} commentNode - comment node.
|
||||
|
|
@ -122,5 +122,3 @@ class JsdocCommentParser {
|
|||
}, '*\n');
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = JsdocCommentParser;
|
||||
|
|
|
|||
|
|
@ -1,23 +0,0 @@
|
|||
// import htm from 'htm';
|
||||
const htm = require('htm');
|
||||
|
||||
function convertToObj(type, props, ...children) {
|
||||
return { type, props, children };
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc
|
||||
* Used for parsing lit-html templates inside ASTs
|
||||
* @returns {type, props, children}
|
||||
*
|
||||
* @example
|
||||
* litToObj`<h1 .id=${'hello'}>Hello world!</h1>`;
|
||||
* // {
|
||||
* // type: 'h1',
|
||||
* // props: { .id: 'hello' },
|
||||
* // children: ['Hello world!']
|
||||
* // }
|
||||
*/
|
||||
const litToObj = htm.bind(convertToObj);
|
||||
|
||||
module.exports = litToObj;
|
||||
|
|
@ -1,9 +1,17 @@
|
|||
const { InputDataService } = require('../services/InputDataService.js');
|
||||
export const memoizeConfig = {
|
||||
isCacheDisabled: false,
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {object|any[]|string} arg
|
||||
*/
|
||||
function isObject(arg) {
|
||||
return !Array.isArray(arg) && typeof arg === 'object';
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {object|any[]|string} arg
|
||||
*/
|
||||
function createCachableArg(arg) {
|
||||
if (isObject(arg)) {
|
||||
try {
|
||||
|
|
@ -16,10 +24,10 @@ function createCachableArg(arg) {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param {function} functionToMemoize
|
||||
* @param {{ storage:object; serializeObjects: boolean }} [opts]
|
||||
* @type {<T>(functionToMemoize:T, opts?:{ storage?:object; serializeObjects?: boolean }) => T}
|
||||
*/
|
||||
function memoize(functionToMemoize, { storage = {}, serializeObjects = false } = {}) {
|
||||
export function memoize(functionToMemoize, { storage = {}, serializeObjects = false } = {}) {
|
||||
// @ts-ignore
|
||||
// eslint-disable-next-line func-names
|
||||
return function () {
|
||||
// eslint-disable-next-line prefer-rest-params
|
||||
|
|
@ -27,7 +35,7 @@ function memoize(functionToMemoize, { storage = {}, serializeObjects = false } =
|
|||
const cachableArgs = !serializeObjects ? args : args.map(createCachableArg);
|
||||
// Allow disabling of cache for testing purposes
|
||||
// @ts-ignore
|
||||
if (!InputDataService.cacheDisabled && cachableArgs in storage) {
|
||||
if (!memoizeConfig.isCacheDisabled && cachableArgs in storage) {
|
||||
// @ts-ignore
|
||||
return storage[cachableArgs];
|
||||
}
|
||||
|
|
@ -39,7 +47,3 @@ function memoize(functionToMemoize, { storage = {}, serializeObjects = false } =
|
|||
return outcome;
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
memoize,
|
||||
};
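The switch from `InputDataService.cacheDisabled` to the standalone `memoizeConfig` can be illustrated with a small sketch; the import path is assumed:

```js
// Sketch only: import path assumed.
import { memoize, memoizeConfig } from './memoize.js';

let calls = 0;
const square = n => {
  calls += 1;
  return n * n;
};
const memoizedSquare = memoize(square);

memoizedSquare(4); // computed
memoizedSquare(4); // served from cache
console.log(calls); // 1

// Tests can bypass the cache globally, without touching InputDataService
memoizeConfig.isCacheDisabled = true;
memoizedSquare(4); // computed again
console.log(calls); // 2
memoizeConfig.isCacheDisabled = false;
```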
|
||||
|
|
|
|||
|
|
@ -3,9 +3,13 @@ import fs from 'fs';
|
|||
import { pathToFileURL } from 'url';
|
||||
|
||||
/**
|
||||
* @returns {Promise<object|null>}
|
||||
* @typedef {import('../../../types/index.js').ProvidenceCliConf} ProvidenceCliConf
|
||||
*/
|
||||
export async function getProvidenceConf() {
|
||||
|
||||
/**
|
||||
* @returns {Promise<{providenceConf:Partial<ProvidenceCliConf>;providenceConfRaw:string}|null>}
|
||||
*/
|
||||
async function getConf() {
|
||||
const confPathWithoutExtension = `${pathLib.join(process.cwd(), 'providence.conf')}`;
|
||||
let confPathFound;
|
||||
try {
|
||||
|
|
@ -33,6 +37,8 @@ export async function getProvidenceConf() {
|
|||
}
|
||||
|
||||
const providenceConfRaw = fs.readFileSync(confPathFound, 'utf8');
|
||||
|
||||
return { providenceConf, providenceConfRaw };
|
||||
}
|
||||
|
||||
// Wrapped in object for stubbing
|
||||
export const providenceConfUtil = { getConf };
|
||||
|
|
@ -24,13 +24,16 @@
|
|||
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
*/
|
||||
|
||||
const fs = require('fs');
|
||||
import fs from 'fs';
|
||||
/* istanbul ignore next */
|
||||
const promisify = require('util').promisify || require('util-promisify');
|
||||
const { resolve, basename, dirname, join } = require('path');
|
||||
const rpj = promisify(require('read-package-json'));
|
||||
const readdir = promisify(require('readdir-scoped-modules'));
|
||||
const realpath = require('read-package-tree/realpath.js');
|
||||
import { promisify } from 'util';
|
||||
import { basename, dirname, join } from 'path';
|
||||
import rpjSync from 'read-package-json';
|
||||
import readdirSync from 'readdir-scoped-modules';
|
||||
import realpath from 'read-package-tree/realpath.js';
|
||||
|
||||
const rpj = promisify(rpjSync);
|
||||
const readdir = promisify(readdirSync);
|
||||
|
||||
let ID = 0;
|
||||
class Node {
|
||||
|
|
@ -220,4 +223,5 @@ const rpt = (root, filterWith, cb, mode = 'npm') => {
|
|||
|
||||
rpt.Node = Node;
|
||||
rpt.Link = Link;
|
||||
module.exports = rpt;
|
||||
|
||||
export default rpt;
|
||||
|
|
|
|||
|
|
@ -1,26 +1,24 @@
|
|||
const { toPosixPath } = require('./to-posix-path.js');
|
||||
import { toPosixPath } from './to-posix-path.js';
|
||||
|
||||
/**
|
||||
* @desc determines for a source path of an import- or export specifier, whether
|
||||
* Determines for a source path of an import- or export specifier, whether
|
||||
* it is relative (an internal import/export) or absolute (external)
|
||||
* - relative: './helpers', './helpers.js', '../helpers.js'
|
||||
* - not relative: '@open-wc/helpers', 'project-x/helpers'
|
||||
* @param {string} source source path of an import- or export specifier
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isRelativeSourcePath(source) {
|
||||
export function isRelativeSourcePath(source) {
|
||||
return source.startsWith('.');
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc Simple helper to make code a bit more readable.
|
||||
* Simple helper to make code a bit more readable.
|
||||
* - from '/path/to/repo/my/file.js';
|
||||
* - to './my/file.js'
|
||||
* @param {string} fullPath like '/path/to/repo/my/file.js'
|
||||
* @param {string} rootPath like '/path/to/repo'
|
||||
*/
|
||||
function toRelativeSourcePath(fullPath, rootPath) {
|
||||
export function toRelativeSourcePath(fullPath, rootPath) {
|
||||
return toPosixPath(fullPath).replace(toPosixPath(rootPath), '.');
|
||||
}
|
||||
|
||||
module.exports = { isRelativeSourcePath, toRelativeSourcePath };
|
||||
|
|
|
|||
|
|
@ -1,20 +1,21 @@
|
|||
import { isBuiltin } from 'module';
|
||||
import path from 'path';
|
||||
import { nodeResolve } from '@rollup/plugin-node-resolve';
|
||||
import { LogService } from '../core/LogService.js';
|
||||
import { memoize } from './memoize.js';
|
||||
import { toPosixPath } from './to-posix-path.js';
|
||||
|
||||
/**
|
||||
* Solution inspired by es-dev-server:
|
||||
* https://github.com/open-wc/open-wc/blob/master/packages/es-dev-server/src/utils/resolve-module-imports.js
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/core/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../types/core/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../types/core/core').SpecifierSource} SpecifierSource
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../../types/index.js').SpecifierSource} SpecifierSource
|
||||
*/
|
||||
|
||||
const pathLib = require('path');
|
||||
const { nodeResolve } = require('@rollup/plugin-node-resolve');
|
||||
const { LogService } = require('../services/LogService.js');
|
||||
const { memoize } = require('./memoize.js');
|
||||
const { toPosixPath } = require('./to-posix-path.js');
|
||||
|
||||
const fakePluginContext = {
|
||||
meta: {
|
||||
// rollupVersion needed in plugin context => nodeResolvePackageJson.peerDependencies.rollup
|
||||
|
|
@ -29,32 +30,6 @@ const fakePluginContext = {
|
|||
},
|
||||
};
|
||||
|
||||
async function resolveImportPath(importee, importer, opts = {}) {
|
||||
const rollupResolve = nodeResolve({
|
||||
rootDir: pathLib.dirname(importer),
|
||||
// allow resolving polyfills for nodejs libs
|
||||
preferBuiltins: false,
|
||||
// extensions: ['.mjs', '.js', '.json', '.node'],
|
||||
...opts,
|
||||
});
|
||||
|
||||
const preserveSymlinks =
|
||||
(opts && opts.customResolveOptions && opts.customResolveOptions.preserveSymlinks) || false;
|
||||
// @ts-ignore
|
||||
rollupResolve.buildStart.call(fakePluginContext, { preserveSymlinks });
|
||||
|
||||
// @ts-ignore
|
||||
const result = await rollupResolve.resolveId.call(fakePluginContext, importee, importer, {});
|
||||
// @ts-ignore
|
||||
if (!result || !result.id) {
|
||||
// throw new Error(`importee ${importee} not found in filesystem.`);
|
||||
LogService.warn(`importee ${importee} not found in filesystem for importer '${importer}'.`);
|
||||
return null;
|
||||
}
|
||||
// @ts-ignore
|
||||
return toPosixPath(result.id);
|
||||
}
|
||||
|
||||
/**
|
||||
* Based on importee (in a statement "import {x} from '@lion/core'", "@lion/core" is an
|
||||
* importee), which can be a bare module specifier, a filename without extension, or a folder
|
||||
|
|
@ -62,8 +37,41 @@ async function resolveImportPath(importee, importer, opts = {}) {
|
|||
* @param {SpecifierSource} importee source like '@lion/core' or '../helpers/index.js'
|
||||
* @param {PathFromSystemRoot} importer importing file, like '/my/project/importing-file.js'
|
||||
* @param {{customResolveOptions?: {preserveSymlinks:boolean}}} [opts] nodeResolve options
|
||||
* @returns {Promise<PathFromSystemRoot|null>} the resolved file system path, like '/my/project/node_modules/@lion/core/index.js'
|
||||
* @returns {Promise<PathFromSystemRoot|null|'[node-builtin]'>} the resolved file system path, like '/my/project/node_modules/@lion/core/index.js'
|
||||
*/
|
||||
const resolveImportPathMemoized = memoize(resolveImportPath);
|
||||
async function resolveImportPathFn(importee, importer, opts) {
|
||||
if (isBuiltin(importee)) {
|
||||
return '[node-builtin]';
|
||||
}
|
||||
|
||||
module.exports = { resolveImportPath: resolveImportPathMemoized };
|
||||
const rollupResolve = nodeResolve({
|
||||
rootDir: path.dirname(importer),
|
||||
// allow resolving polyfills for nodejs libs
|
||||
preferBuiltins: false,
|
||||
// extensions: ['.mjs', '.js', '.json', '.node'],
|
||||
...(opts || {}),
|
||||
});
|
||||
|
||||
const preserveSymlinks =
|
||||
(opts?.customResolveOptions && opts.customResolveOptions.preserveSymlinks) || false;
|
||||
// @ts-expect-error
|
||||
rollupResolve.buildStart.call(fakePluginContext, { preserveSymlinks });
|
||||
|
||||
// @ts-expect-error
|
||||
const result = await rollupResolve.resolveId.handler.call(
|
||||
fakePluginContext,
|
||||
importee,
|
||||
importer,
|
||||
{},
|
||||
);
|
||||
|
||||
if (!result?.id) {
|
||||
// LogService.warn(
|
||||
// `[resolveImportPath] importee ${importee} not found in filesystem for importer '${importer}'.`,
|
||||
// );
|
||||
return null;
|
||||
}
|
||||
return toPosixPath(result.id);
|
||||
}
|
||||
|
||||
export const resolveImportPath = memoize(resolveImportPathFn);
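A usage sketch of the memoized resolver above; the project layout and the resolved path shown in the comments are assumptions:

```js
// Sketch only: project layout assumed.
import { resolveImportPath } from './resolve-import-path.js';

// A bare module specifier resolves to a file inside node_modules (or null when not found)
const resolved = await resolveImportPath('@lion/ui', '/my/project/src/importing-file.js');
console.log(resolved); // e.g. '/my/project/node_modules/@lion/ui/index.js'

// Node builtins are short-circuited instead of being resolved to a file
console.log(await resolveImportPath('fs', '/my/project/src/importing-file.js')); // '[node-builtin]'
```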
|
||||
|
|
|
|||
|
|
@ -0,0 +1,360 @@
|
|||
/**
|
||||
* @typedef {import('@swc/core').Module} SwcAstModule
|
||||
* @typedef {import('@swc/core').Node} SwcNode
|
||||
* @typedef {import('@swc/core').VariableDeclarator} SwcVariableDeclarator
|
||||
* @typedef {import('@swc/core').Identifier} SwcIdentifierNode
|
||||
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
|
||||
* @typedef {import('../../../types/index.js').SwcScope} SwcScope
|
||||
* @typedef {import('../../../types/index.js').SwcVisitor} SwcVisitor
|
||||
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
|
||||
* @typedef {import('../../../types/index.js').SwcTraversalContext} SwcTraversalContext
|
||||
*/
|
||||
|
||||
/**
|
||||
* Contains all node info, to create paths from
|
||||
* @type {WeakMap<SwcNode,SwcPath>}
|
||||
*/
|
||||
const swcPathCache = new WeakMap();
|
||||
|
||||
const fnTypes = [
|
||||
'FunctionDeclaration',
|
||||
'FunctionExpression',
|
||||
'ArrowFunctionExpression',
|
||||
'ClassMethod',
|
||||
'Constructor',
|
||||
];
|
||||
|
||||
const nonBlockParentTypes = [...fnTypes, 'SwitchStatement', 'ClassDeclaration'];
|
||||
|
||||
/**
|
||||
* @param {SwcPath} swcPath
|
||||
* @param {SwcScope} currentScope
|
||||
* @param {SwcTraversalContext} traversalContext
|
||||
* @returns {SwcScope|null}
|
||||
*/
|
||||
function getNewScope(swcPath, currentScope, traversalContext) {
|
||||
const { node, parent } = swcPath;
|
||||
// const hasNonBlockParent = (/** @type {SwcNode} */ nd) => nonBlockParentTypes.includes(nd.type);
|
||||
const isFn = (/** @type {SwcNode} */ nd) => nd && fnTypes.includes(nd.type);
|
||||
|
||||
const isIsolatedBlockStatement = !isFn(parent) && node.type === 'BlockStatement';
|
||||
|
||||
// Create new scope...
|
||||
if (nonBlockParentTypes.includes(node.type) || isIsolatedBlockStatement) {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
traversalContext.scopeId += 1;
|
||||
return {
|
||||
id: traversalContext.scopeId,
|
||||
parentScope: currentScope,
|
||||
path: swcPath,
|
||||
bindings: {},
|
||||
_pendingRefsWithoutBinding: [],
|
||||
_isIsolatedBlockStatement: isIsolatedBlockStatement,
|
||||
};
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {SwcNode} node
|
||||
*/
|
||||
export function getPathFromNode(node) {
|
||||
return swcPathCache.get(node);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {SwcNode} node
|
||||
* @param {SwcNode|null} parent
|
||||
* @param {Function} stop
|
||||
* @param {SwcScope} [scope]
|
||||
* @returns {SwcPath}
|
||||
*/
|
||||
function createSwcPath(node, parent, stop, scope) {
|
||||
/** @type {SwcPath} */
|
||||
const swcPath = {
|
||||
node,
|
||||
parent,
|
||||
stop,
|
||||
// TODO: "pre-traverse" the missing scope parts instead via getter that adds refs and bindings for current scope
|
||||
scope,
|
||||
parentPath: parent ? getPathFromNode(parent) : null,
|
||||
get(/** @type {string} */ id) {
|
||||
const swcPathForNode = getPathFromNode(node[id]);
|
||||
if (node[id] && !swcPathForNode) {
|
||||
// throw new Error(
|
||||
// `[swcTraverse]: Use {needsAdvancedPaths: true} to find path for node: ${node[name]}`,
|
||||
// );
|
||||
// TODO: "pre-traverse" the missing path parts instead
|
||||
}
|
||||
return swcPathForNode;
|
||||
},
|
||||
get type() {
|
||||
return node.type;
|
||||
},
|
||||
};
|
||||
swcPathCache.set(node, swcPath);
|
||||
return swcPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the node:
|
||||
* - a declaration (like "const a = 1")?
|
||||
* - an import specifier (like "import { a } from 'b'")?
|
||||
* Handy to know if the parents of Identifiers mark a binding
|
||||
* @param {SwcNode} parent
|
||||
* @param {string} identifierValue
|
||||
*/
|
||||
function isBindingNode(parent, identifierValue) {
|
||||
if (parent.type === 'VariableDeclarator') {
|
||||
// @ts-ignore
|
||||
return parent.id.value === identifierValue;
|
||||
}
|
||||
return [
|
||||
'ClassDeclaration',
|
||||
'FunctionDeclaration',
|
||||
'ArrowFunctionExpression',
|
||||
'ImportSpecifier',
|
||||
'ImportDefaultSpecifier',
|
||||
].includes(parent.type);
|
||||
}
|
||||
|
||||
/**
|
||||
* Can the parent node hold a reference to a binding?
* Excludes parents like MemberExpression, KeyValueProperty, ClassMethod etc.,
* whose Identifier children are not references to bindings.
|
||||
* @param {SwcNode} parent
|
||||
*/
|
||||
function isBindingRefNode(parent) {
|
||||
return ![
|
||||
'ClassMethod',
|
||||
'Constructor',
|
||||
'MemberExpression',
|
||||
'KeyValueProperty',
|
||||
'SwitchStatement',
|
||||
'MethodProperty',
|
||||
].includes(parent.type);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {SwcPath} swcPathForIdentifier
|
||||
* @returns {void}
|
||||
*/
|
||||
function addPotentialBindingOrRefToScope(swcPathForIdentifier) {
|
||||
const { node, parent, scope, parentPath } = swcPathForIdentifier;
|
||||
|
||||
if (node.type !== 'Identifier') {
|
||||
return;
|
||||
}
|
||||
|
||||
// const parentPath = getPathFromNode(parent);
|
||||
if (isBindingNode(parent, node.value)) {
|
||||
/** @type {SwcBinding} */
|
||||
const binding = {
|
||||
identifier: parent,
|
||||
// kind: 'var',
|
||||
refs: [],
|
||||
path: swcPathForIdentifier.parentPath,
|
||||
};
|
||||
let scopeBindingBelongsTo = scope;
|
||||
const isVarInIsolatedBlock =
|
||||
scope._isIsolatedBlockStatement &&
|
||||
swcPathForIdentifier.parentPath.parentPath.node.kind === 'var';
|
||||
const hasNonBlockParent = nonBlockParentTypes.includes(parent.type);
|
||||
|
||||
if (isVarInIsolatedBlock || hasNonBlockParent) {
|
||||
scopeBindingBelongsTo = scope.parentScope || scope;
|
||||
}
|
||||
if (scopeBindingBelongsTo._pendingRefsWithoutBinding.includes(parentPath)) {
|
||||
binding.refs.push(parentPath);
|
||||
scopeBindingBelongsTo._pendingRefsWithoutBinding.splice(
|
||||
scopeBindingBelongsTo._pendingRefsWithoutBinding.indexOf(parentPath),
|
||||
1,
|
||||
);
|
||||
}
|
||||
const idName = node.value || node.local?.value || node.orig?.value;
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
scopeBindingBelongsTo.bindings[idName] = binding;
|
||||
|
||||
// Align with Babel... => in example `class Q {}`, Q has binding to root scope and ClassDeclaration scope
|
||||
if (parent.type === 'ClassDeclaration') {
|
||||
scope.bindings[idName] = binding;
|
||||
}
|
||||
}
|
||||
// In other cases, we are dealing with a reference that must be bound to a binding
|
||||
else if (isBindingRefNode(parent)) {
|
||||
const binding = scope.bindings[node.value];
|
||||
if (binding) {
|
||||
binding.refs.push(parentPath);
|
||||
} else {
|
||||
// we are referencing a variable that is not declared in this scope or any parent scope
|
||||
// It might be hoisted, so we might find it later. For now, store it as a pending reference
|
||||
scope._pendingRefsWithoutBinding.push(parentPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Is the node the root of the ast?
|
||||
* in Babel, this is the equivalent of Program
|
||||
* @param {SwcNode} node
|
||||
* @returns {boolean}
|
||||
*/
|
||||
function isRootNode(node) {
|
||||
return node.type === 'Module' || node.type === 'Script';
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {{node: SwcNode; }} node
|
||||
* @param {(data:{child:SwcNode}) => void} callback
|
||||
*/
|
||||
const loopChildren = ({ node }, callback) => {
|
||||
for (const [childKey, childVal] of Object.entries(node)) {
|
||||
if (childKey === 'span') {
|
||||
// eslint-disable-next-line no-continue
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Array.isArray(childVal)) {
|
||||
for (const childValElem of childVal) {
|
||||
callback({ child: childValElem });
|
||||
}
|
||||
} else if (typeof childVal === 'object') {
|
||||
callback({ child: childVal });
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {SwcPath} swcPath
|
||||
* @param {SwcVisitor} visitor
|
||||
* @param {SwcTraversalContext} traversalContext
|
||||
*/
|
||||
function visit(swcPath, visitor, traversalContext) {
|
||||
if (visitor.enter) {
|
||||
// @ts-expect-error
|
||||
visitor.enter(swcPath);
|
||||
}
|
||||
|
||||
if (isRootNode(swcPath.node) && visitor.root) {
|
||||
// @ts-expect-error
|
||||
visitor.root(swcPath);
|
||||
}
|
||||
|
||||
// Later, consider https://github.com/babel/babel/blob/b1e73d6f961065c56427ffa89c130beea8321d3b/packages/babel-traverse/src/traverse-node.ts#L28
|
||||
if (typeof visitor[swcPath.node.type] === 'function') {
|
||||
// @ts-expect-error
|
||||
visitor[swcPath.node.type](swcPath);
|
||||
}
|
||||
// @ts-expect-error
|
||||
else if (visitor[swcPath.node.type]?.enter) {
|
||||
// @ts-expect-error
|
||||
visitor[swcPath.node.type].enter(swcPath);
|
||||
}
|
||||
// @ts-expect-error
|
||||
if (visitor[swcPath.node.type]?.exit) {
|
||||
// Let visitTree know that we should visit on exit
|
||||
// @ts-expect-error
|
||||
traversalContext.visitOnExitFns.push(() => visitor[swcPath.node.type].exit(swcPath));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple traversal for swc ast.
|
||||
* @param {SwcAstModule} swcAst
|
||||
* @param {SwcVisitor} visitor
|
||||
* @param {object} config
|
||||
* @param {boolean} [config.needsAdvancedPaths] needs a full traversal before starting the visitor, which is less performant. Only enable when path.get() is used
|
||||
*/
|
||||
export function swcTraverse(swcAst, visitor, { needsAdvancedPaths = false } = {}) {
|
||||
/**
|
||||
* For performance, the author of a visitor can call this to stop further traversal
|
||||
*/
|
||||
let isStopped = false;
|
||||
const stop = () => {
|
||||
isStopped = true;
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {SwcNode} node
|
||||
* @param {SwcNode|null} parent
|
||||
* @param {SwcScope} scope
|
||||
* @param {boolean} hasPreparedTree
|
||||
* @param {SwcTraversalContext} traversalContext
|
||||
*/
|
||||
const handlePathAndScope = (node, parent, scope, hasPreparedTree, traversalContext) => {
|
||||
if (hasPreparedTree) {
|
||||
const swcPath = /** @type {SwcPath} */ (swcPathCache.get(node));
|
||||
return {
|
||||
swcPath,
|
||||
newOrCurScope: getNewScope(swcPath, scope, traversalContext) || scope,
|
||||
};
|
||||
}
|
||||
// `needsAdvancedPaths` was false
|
||||
const swcPath = createSwcPath(node, parent, stop);
|
||||
// We create scopes ourselves, since paths are not prepared yet...
|
||||
const newOrCurScope = getNewScope(swcPath, scope, traversalContext) || scope;
|
||||
swcPath.scope = newOrCurScope;
|
||||
addPotentialBindingOrRefToScope(swcPath);
|
||||
return { newOrCurScope, swcPath };
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {SwcNode} node
|
||||
* @param {SwcNode|null} parent
|
||||
* @param {SwcScope} scope
|
||||
* @param {SwcTraversalContext} traversalContext
|
||||
* @param {{haltCondition?: (node: SwcNode) => boolean;}} [config]
|
||||
*/
|
||||
const prepareTree = (node, parent, scope, traversalContext, { haltCondition } = {}) => {
|
||||
if (!node?.type) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { newOrCurScope } = handlePathAndScope(node, parent, scope, false, traversalContext);
|
||||
loopChildren({ node }, ({ child }) => {
|
||||
prepareTree(child, node, newOrCurScope, traversalContext, { haltCondition });
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {SwcNode} node
|
||||
* @param {SwcNode|null} parent
|
||||
* @param {SwcScope} scope
|
||||
* @param {{hasPreparedTree?: boolean;}} config
|
||||
* @param {SwcTraversalContext} traversalContext
|
||||
*/
|
||||
const visitTree = (node, parent, scope, config, traversalContext) => {
|
||||
if (!node?.type || isStopped) {
|
||||
return;
|
||||
}
|
||||
|
||||
const { hasPreparedTree = false } = config || {};
|
||||
|
||||
const { swcPath } = handlePathAndScope(node, parent, scope, hasPreparedTree, traversalContext);
|
||||
visit(swcPath, visitor, traversalContext);
|
||||
loopChildren({ node }, ({ child }) => {
|
||||
visitTree(child, node, swcPath.scope, config, traversalContext);
|
||||
});
|
||||
};
|
||||
|
||||
const traversalContext = { visitOnExitFns: [], scopeId: 0 };
|
||||
// https://developer.mozilla.org/en-US/docs/Glossary/Scope
|
||||
/** @type {SwcScope} */
|
||||
const initialScope = {
|
||||
id: traversalContext.scopeId,
|
||||
bindings: {},
|
||||
path: null,
|
||||
_pendingRefsWithoutBinding: [],
|
||||
_isIsolatedBlockStatement: false,
|
||||
};
|
||||
if (needsAdvancedPaths) {
|
||||
// Do one full traversal to prepare advanced path functionality like path.get() and path.scope.bindings
|
||||
// TODO: improve with on the fly, partial tree traversal for best performance
|
||||
prepareTree(swcAst, null, initialScope, traversalContext);
|
||||
}
|
||||
visitTree(swcAst, null, initialScope, { hasPreparedTree: needsAdvancedPaths }, traversalContext);
|
||||
// @ts-ignore
|
||||
traversalContext.visitOnExitFns.reverse().forEach(fn => fn());
|
||||
}
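A usage sketch of the visitor API above, mirroring how Babel traverse is typically used; `parseSync` comes from `@swc/core` and the import path of the util is assumed:

```js
// Sketch only: import path of the util assumed.
import { parseSync } from '@swc/core';
import { swcTraverse } from './swc-traverse.js';

const ast = parseSync(
  `import { LionButton } from '@lion/button';
   class MyButton extends LionButton {}`,
  { syntax: 'ecmascript' },
);

const foundClasses = [];
swcTraverse(ast, {
  // Visitor keys are swc node types, just like Babel visitor keys
  ImportDeclaration(path) {
    console.log(path.node.source.value); // '@lion/button'
  },
  ClassDeclaration(path) {
    foundClasses.push(path.node.identifier.value); // swc Identifiers expose `.value`
  },
});
console.log(foundClasses); // ['MyButton']
```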
|
||||
|
|
@ -1,16 +1,14 @@
|
|||
/**
|
||||
* @typedef {import('../types/core/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot|string} pathStr C:\Example\path/like/this
|
||||
* @returns {PathFromSystemRoot} /Example/path/like/this
|
||||
*/
|
||||
function toPosixPath(pathStr) {
|
||||
export function toPosixPath(pathStr) {
|
||||
if (process.platform === 'win32') {
|
||||
return /** @type {PathFromSystemRoot} */ (pathStr.replace(/^.:/, '').replace(/\\/g, '/'));
|
||||
}
|
||||
return /** @type {PathFromSystemRoot} */ (pathStr);
|
||||
}
|
||||
|
||||
module.exports = { toPosixPath };
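A short sketch of the normalization this helper performs; the require path is assumed:

```js
// Sketch only: require path assumed.
const { toPosixPath } = require('./to-posix-path.js');

// On win32: 'C:\\Example\\path\\like\\this' => '/Example/path/like/this'
// On other platforms the input is returned unchanged
console.log(toPosixPath('/Example/path/like/this')); // '/Example/path/like/this'
```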
|
||||
|
|
|
|||
|
|
@ -1,28 +1,41 @@
|
|||
/**
|
||||
* @param {ASTNode} curNode Node to start from. Will loop over its children
|
||||
* @param {object} processObject Will be executed for every node
|
||||
* @param {ASTNode} [parentNode] parent of curNode
|
||||
* @typedef {import('parse5/dist/tree-adapters/default.js').Node} Node
|
||||
*/
|
||||
function traverseHtml(curNode, processObject) {
|
||||
|
||||
/**
|
||||
* Creates an API similar to Babel traverse for parse5 trees
|
||||
* @param {Parse5AstNode} curNode Node to start from. Will loop over its children
|
||||
* @param {object} processObject Will be executed for every node
|
||||
*/
|
||||
export function traverseHtml(curNode, processObject, config = {}) {
|
||||
function pathify(node) {
|
||||
return {
|
||||
node,
|
||||
traverse(obj) {
|
||||
traverseHtml(obj) {
|
||||
traverseHtml(node, obj);
|
||||
},
|
||||
stop() {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
config.stopped = true;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// let done = processFn(curNode, parentNode);
|
||||
// Match...
|
||||
if (processObject[curNode.nodeName]) {
|
||||
processObject[curNode.nodeName](pathify(curNode));
|
||||
}
|
||||
|
||||
if (curNode.childNodes) {
|
||||
curNode.childNodes.forEach(childNode => {
|
||||
traverseHtml(childNode, processObject, curNode);
|
||||
let { childNodes } = curNode;
|
||||
if (curNode.nodeName === 'template') {
|
||||
childNodes = curNode.content.childNodes;
|
||||
}
|
||||
|
||||
if (!config.stopped && childNodes) {
|
||||
childNodes.forEach(childNode => {
|
||||
if (!config.stopped) {
|
||||
traverseHtml(childNode, processObject, config);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = traverseHtml;
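A usage sketch of the parse5 traversal helper above, including the new `<template>` content handling and `stop()`; import paths are assumptions:

```js
// Sketch only: import paths assumed.
import { parseFragment } from 'parse5';
import { traverseHtml } from './traverse-html.js';

const fragment = parseFragment(`
  <template>
    <my-counter count="1"></my-counter>
  </template>
`);

traverseHtml(fragment, {
  // Keys are parse5 nodeNames; <template> contents are traversed as well
  'my-counter'(path) {
    console.log(path.node.attrs); // [{ name: 'count', value: '1' }]
    path.stop(); // halts traversal of the remaining nodes
  },
});
```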
|
||||
|
|
|
|||
|
|
@ -0,0 +1,25 @@
|
|||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
// @ts-expect-error
|
||||
import mockFs from 'mock-fs';
|
||||
// @ts-expect-error
|
||||
import mockRequire from 'mock-require';
|
||||
|
||||
/**
|
||||
* @param {object} obj
|
||||
*/
|
||||
export function mockFsAndRequire(obj) {
|
||||
mockFs(obj);
|
||||
|
||||
// Object.entries(obj).forEach(([key, value]) => {
|
||||
// if (key.endsWith('.json')) {
|
||||
// mockRequire(key, JSON.parse(value));
|
||||
// } else {
|
||||
// mockRequire(key, value);
|
||||
// }
|
||||
// });
|
||||
}
|
||||
|
||||
mockFsAndRequire.restore = () => {
|
||||
mockFs.restore();
|
||||
mockRequire.stopAll();
|
||||
};
|
||||
|
|
@ -1,57 +1,47 @@
|
|||
const { LogService } = require('../src/program/services/LogService.js');
|
||||
import { LogService } from '../src/program/core/LogService.js';
|
||||
|
||||
const originalWarn = LogService.warn;
|
||||
function suppressWarningLogs() {
|
||||
export function suppressWarningLogs() {
|
||||
LogService.warn = () => {};
|
||||
}
|
||||
function restoreSuppressWarningLogs() {
|
||||
export function restoreSuppressWarningLogs() {
|
||||
LogService.warn = originalWarn;
|
||||
}
|
||||
|
||||
const originalInfo = LogService.info;
|
||||
function suppressInfoLogs() {
|
||||
export function suppressInfoLogs() {
|
||||
LogService.info = () => {};
|
||||
}
|
||||
function restoreSuppressInfoLogs() {
|
||||
export function restoreSuppressInfoLogs() {
|
||||
LogService.info = originalInfo;
|
||||
}
|
||||
|
||||
const originalDebug = LogService.debug;
|
||||
function suppressDebugLogs() {
|
||||
export function suppressDebugLogs() {
|
||||
LogService.debug = () => {};
|
||||
}
|
||||
function restoreSuppressDebugLogs() {
|
||||
export function restoreSuppressDebugLogs() {
|
||||
LogService.debug = originalDebug;
|
||||
}
|
||||
|
||||
const originalSuccess = LogService.success;
|
||||
function suppressSuccessLogs() {
|
||||
export function suppressSuccessLogs() {
|
||||
LogService.success = () => {};
|
||||
}
|
||||
function restoreSuppressSuccessLogs() {
|
||||
export function restoreSuppressSuccessLogs() {
|
||||
LogService.success = originalSuccess;
|
||||
}
|
||||
|
||||
function suppressNonCriticalLogs() {
|
||||
export function suppressNonCriticalLogs() {
|
||||
suppressInfoLogs();
|
||||
suppressWarningLogs();
|
||||
suppressDebugLogs();
|
||||
suppressSuccessLogs();
|
||||
}
|
||||
|
||||
function restoreSuppressNonCriticalLogs() {
|
||||
export function restoreSuppressNonCriticalLogs() {
|
||||
restoreSuppressInfoLogs();
|
||||
restoreSuppressWarningLogs();
|
||||
restoreSuppressDebugLogs();
|
||||
restoreSuppressSuccessLogs();
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
suppressWarningLogs,
|
||||
restoreSuppressWarningLogs,
|
||||
suppressInfoLogs,
|
||||
restoreSuppressInfoLogs,
|
||||
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,24 +1,8 @@
|
|||
const path = require('path');
|
||||
import path from 'path';
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
const mockFs = require('mock-fs');
|
||||
const mockRequire = require('mock-require');
|
||||
import { mockFsAndRequire } from './mock-fs-and-require.js';
|
||||
|
||||
function mock(obj) {
|
||||
mockFs(obj);
|
||||
|
||||
Object.entries(obj).forEach(([key, value]) => {
|
||||
if (key.endsWith('.json')) {
|
||||
mockRequire(key, JSON.parse(value));
|
||||
} else {
|
||||
mockRequire(key, value);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
mock.restore = () => {
|
||||
mockFs.restore();
|
||||
mockRequire.stopAll();
|
||||
};
|
||||
export const mock = mockFsAndRequire;
|
||||
|
||||
/**
|
||||
* Makes sure that, whenever the main program (providence) calls
|
||||
|
|
@ -86,21 +70,21 @@ function getMockObjectForProject(files, cfg = {}, existingMock = {}) {
|
|||
* paths match with the indexes of the files
|
||||
* @param {object} existingMock config for mock-fs, so the previous config is not overridden
|
||||
*/
|
||||
function mockProject(files, cfg = {}, existingMock = {}) {
|
||||
export function mockProject(files, cfg = {}, existingMock = {}) {
|
||||
const obj = getMockObjectForProject(files, cfg, existingMock);
|
||||
mockFs(obj);
|
||||
mockFsAndRequire(obj);
|
||||
return obj;
|
||||
}
|
||||
|
||||
function restoreMockedProjects() {
|
||||
mock.restore();
|
||||
export function restoreMockedProjects() {
|
||||
mockFsAndRequire.restore();
|
||||
}
|
||||
|
||||
function getEntry(queryResult, index = 0) {
|
||||
export function getEntry(queryResult, index = 0) {
|
||||
return queryResult.queryOutput[index];
|
||||
}
|
||||
|
||||
function getEntries(queryResult) {
|
||||
export function getEntries(queryResult) {
|
||||
return queryResult.queryOutput;
|
||||
}
|
||||
|
||||
|
|
@ -129,7 +113,7 @@ function createPackageJson({ filePaths, codeSnippets, projectName, refProjectNam
|
|||
* When a non imported ref dependency or a wrong version of a dev dependency needs to be
|
||||
* tested, please explicitly provide a ./package.json that does so.
|
||||
*/
|
||||
function mockTargetAndReferenceProject(searchTargetProject, referenceProject) {
|
||||
export function mockTargetAndReferenceProject(searchTargetProject, referenceProject) {
|
||||
const targetProjectName = searchTargetProject.name || 'fictional-target-project';
|
||||
const refProjectName = referenceProject.name || 'fictional-ref-project';
|
||||
|
||||
|
|
@ -172,12 +156,3 @@ function mockTargetAndReferenceProject(searchTargetProject, referenceProject) {
|
|||
targetMock,
|
||||
);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
mock,
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
getEntry,
|
||||
getEntries,
|
||||
mockTargetAndReferenceProject,
|
||||
};
|
||||
|
|
|
|||
|
|
@ -1,21 +0,0 @@
|
|||
const { ReportService } = require('../src/program/services/ReportService.js');
|
||||
|
||||
const originalWriteToJson = ReportService.writeToJson;
|
||||
|
||||
function mockWriteToJson(queryResults) {
|
||||
ReportService.writeToJson = queryResult => {
|
||||
queryResults.push(queryResult);
|
||||
};
|
||||
}
|
||||
|
||||
function restoreWriteToJson(queryResults) {
|
||||
ReportService.writeToJson = originalWriteToJson;
|
||||
while (queryResults && queryResults.length) {
|
||||
queryResults.pop();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
};
|
||||
|
|
@ -4,7 +4,7 @@
|
|||
"analyzerMeta": {
|
||||
"name": "find-exports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "exporting-ref-project_1.0.0__309114983",
|
||||
"identifier": "exporting-ref-project_1.0.0__-42206859",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "exporting-ref-project",
|
||||
|
|
@ -13,7 +13,9 @@
|
|||
},
|
||||
"configuration": {
|
||||
"skipFileImports": false,
|
||||
"gatherFilesConfig": {}
|
||||
"gatherFilesConfig": {},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"addSystemPathsInResult": false
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
@ -25,13 +27,13 @@
|
|||
"exportSpecifiers": [
|
||||
"[default]"
|
||||
],
|
||||
"source": "refConstImported",
|
||||
"normalizedSource": "refConstImported",
|
||||
"source": "./ref-src/core.js",
|
||||
"normalizedSource": "./ref-src/core.js",
|
||||
"rootFileMap": [
|
||||
{
|
||||
"currentFileSpecifier": "[default]",
|
||||
"rootFile": {
|
||||
"file": "refConstImported",
|
||||
"file": "./ref-src/core.js",
|
||||
"specifier": "[default]"
|
||||
}
|
||||
}
|
||||
|
|
@ -85,7 +87,6 @@
|
|||
"notImported"
|
||||
],
|
||||
"localMap": [],
|
||||
"source": null,
|
||||
"rootFileMap": [
|
||||
{
|
||||
"currentFileSpecifier": "notImported",
|
||||
|
|
@ -127,7 +128,6 @@
|
|||
"RefClass"
|
||||
],
|
||||
"localMap": [],
|
||||
"source": null,
|
||||
"rootFileMap": [
|
||||
{
|
||||
"currentFileSpecifier": "RefClass",
|
||||
|
|
@ -170,7 +170,6 @@
|
|||
"resolvePathCorrect"
|
||||
],
|
||||
"localMap": [],
|
||||
"source": null,
|
||||
"rootFileMap": [
|
||||
{
|
||||
"currentFileSpecifier": "resolvePathCorrect",
|
||||
|
|
|
|||
7 packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/dep-a/README.md (generated, vendored, new file)
|
|
@ -0,0 +1,7 @@
|
|||
Has an export map format that is deprecated since Node 16:
|
||||
|
||||
```
|
||||
"exports": {
|
||||
"./src/": "./src/"
|
||||
})
|
||||
```
|
||||
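For comparison, the non-deprecated way to expose a folder is a subpath pattern with `*` (this is exactly what the `my-dep-b` fixture below uses); a minimal sketch of the equivalent `exports` field:

```json
{
  "exports": {
    "./src/*": "./src/*"
  }
}
```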
7
packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/dep-a/package.json
generated vendored Normal file
@@ -0,0 +1,7 @@
{
  "name": "dep-a",
  "version": "2.0.0",
  "exports": {
    "./src/": "./src/"
  }
}
0
packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/dep-a/src/src.js
generated vendored Normal file
7
packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/my-dep-b/package.json
generated vendored Normal file
@@ -0,0 +1,7 @@
{
  "name": "my-dep-b",
  "version": "1.0.0",
  "exports": {
    "./src/*": "./src/*"
  }
}
0
packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/my-dep-b/src/src.js
generated vendored Normal file
@@ -0,0 +1,47 @@
import { InputDataService } from '../src/program/core/InputDataService.js';
import { QueryService } from '../src/program/core/QueryService.js';
import { restoreMockedProjects } from './mock-project-helpers.js';
import {
  suppressNonCriticalLogs,
  restoreSuppressNonCriticalLogs,
} from './mock-log-service-helpers.js';
import { memoizeConfig } from '../src/program/utils/memoize.js';

/**
 * @typedef {import('../types/index.js').QueryResult} QueryResult
 */

let hasRunBefore = false;

export function setupAnalyzerTest() {
  if (hasRunBefore) {
    return;
  }

  const originalReferenceProjectPaths = InputDataService.referenceProjectPaths;
  const cacheDisabledQInitialValue = QueryService.cacheDisabled;
  const cacheDisabledIInitialValue = memoizeConfig.isCacheDisabled;

  before(() => {
    QueryService.cacheDisabled = true;
    memoizeConfig.isCacheDisabled = true;
    suppressNonCriticalLogs();
  });

  after(() => {
    QueryService.cacheDisabled = cacheDisabledQInitialValue;
    memoizeConfig.isCacheDisabled = cacheDisabledIInitialValue;
    restoreSuppressNonCriticalLogs();
  });

  beforeEach(() => {
    InputDataService.referenceProjectPaths = [];
  });

  afterEach(() => {
    InputDataService.referenceProjectPaths = originalReferenceProjectPaths;
    restoreMockedProjects();
  });

  hasRunBefore = true;
}
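The analyzer test files further down consume this helper by calling it once at module scope, so the shared hooks are registered a single time per test run; a minimal usage sketch (the describe/it bodies are illustrative):

```js
import { expect } from 'chai';
import { setupAnalyzerTest } from '../../test-helpers/setup-analyzer-test.js';

// Registers the shared mocha hooks (cache disabling, log suppression, mock restoration) once.
setupAnalyzerTest();

describe('find-dummy-analyzer', () => {
  it('runs with caching and non-critical logs disabled', async () => {
    expect(true).to.equal(true);
  });
});
```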
@@ -1,4 +1,10 @@
const { Analyzer } = require('../../src/program/analyzers/helpers/Analyzer.js');
import { Analyzer } from '../../src/program/core/Analyzer.js';

/**
 * @typedef {import('@babel/types').File} File
 * @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
 * @typedef {import('../../types/index.js').QueryOutputEntry} QueryOutputEntry
 */

/**
 * This file outlines the minimum required functionality for an analyzer.
@@ -23,11 +29,10 @@ const options = {
 * corresponds to one file.
 * The contents of this function should be designed in such a way that they
 * can be directly pasted and edited in https://astexplorer.net/
 * @param {BabelAST} ast
 * @returns {TransformedEntry}
 * @param {File} ast
 */
// eslint-disable-next-line no-unused-vars
function myAnalyzerPerAstEntry(ast) {
function getResultPerAstFile(ast) {
  // Visit AST...
  const transformedEntryResult = [];
  // Do the traverse: https://babeljs.io/docs/en/babel-traverse
@@ -36,23 +41,9 @@ function myAnalyzerPerAstEntry(ast) {
  return transformedEntryResult;
}

class MyAnalyzer extends Analyzer {
  constructor() {
    super();
    /**
     * This must match with the name in file-system (will be used for reporting)
     */
    this.name = 'my-analyzer';
    /**
     * The ast format that the execute function expects
     * Compatible with formats supported by AstService.getAst()
     */
    this.requiredAst = 'babel';
    /**
     * Not all analyzers require a reference. Those that do (usually 'match analyzers')
     * must explicitly state so with `requiresReference: true`
     */
  }
export class DummyAnalyzer extends Analyzer {
  /** @type {AnalyzerName} */
  static analyzerName = 'find-dummy-analyzer';

  /**
   * @param {AstDataProject[]} astDataProjects
@@ -80,7 +71,7 @@ class MyAnalyzer extends Analyzer {
   */
  const queryOutput = await this._traverse((ast, astContext) => {
    // Run the traversal per entry
    let transformedEntryResult = myAnalyzerPerAstEntry(ast);
    let transformedEntryResult = getResultPerAstFile(ast);
    const meta = {};

    // (optional): Post processors on TransformedEntry
@@ -91,7 +82,6 @@ class MyAnalyzer extends Analyzer {

    return { result: transformedEntryResult, meta };
  });

  // (optional): Post processors on TransformedQueryResult
  if (cfg.optionB) {
    // Run your QueryResult transformation based on option B
@@ -103,5 +93,3 @@ class MyAnalyzer extends Analyzer {
    return this._finalize(queryOutput, cfg);
  }
}

module.exports = MyAnalyzer;
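To make the template above concrete: a per-file transform of this shape just walks one Babel-compatible AST and returns a plain array. The sketch below is not part of this diff; it assumes `@babel/traverse` is available (as the comment in the template suggests) and collects import sources as an example payload:

```js
import babelTraverseModule from '@babel/traverse';

// Depending on ESM/CJS interop, the callable traverse function may live on `.default`.
const traverse = babelTraverseModule.default || babelTraverseModule;

/**
 * Example per-file transform: collect the source of every import declaration.
 * @param {File} ast a Babel-compatible AST for a single file
 */
function getResultPerAstFile(ast) {
  const transformedEntryResult = [];
  traverse(ast, {
    ImportDeclaration(astPath) {
      transformedEntryResult.push({ importSource: astPath.node.source.value });
    },
  });
  return transformedEntryResult;
}
```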
@ -0,0 +1,415 @@
|
|||
/* eslint-disable no-unused-expressions */
|
||||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
import sinon from 'sinon';
|
||||
import pathLib from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { expect } from 'chai';
|
||||
import { it } from 'mocha';
|
||||
import {
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
mockTargetAndReferenceProject,
|
||||
} from '../../test-helpers/mock-project-helpers.js';
|
||||
import { _providenceModule } from '../../src/program/providence.js';
|
||||
import { _cliHelpersModule } from '../../src/cli/cli-helpers.js';
|
||||
import { toPosixPath } from '../../src/program/utils/to-posix-path.js';
|
||||
import { memoizeConfig } from '../../src/program/utils/memoize.js';
|
||||
import { getExtendDocsResults } from '../../src/cli/launch-providence-with-extend-docs.js';
|
||||
import { AstService } from '../../src/index.js';
|
||||
import { setupAnalyzerTest } from '../../test-helpers/setup-analyzer-test.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('../../types/index.js').QueryResult} QueryResult
|
||||
*/
|
||||
|
||||
const __dirname = pathLib.dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
const { pathsArrayFromCs, pathsArrayFromCollectionName, appendProjectDependencyPaths } =
|
||||
_cliHelpersModule;
|
||||
|
||||
const externalCfgMock = {
|
||||
searchTargetCollections: {
|
||||
'lion-collection': [
|
||||
'./providence-input-data/search-targets/example-project-a',
|
||||
'./providence-input-data/search-targets/example-project-b',
|
||||
// ...etc
|
||||
],
|
||||
},
|
||||
referenceCollections: {
|
||||
'lion-based-ui-collection': [
|
||||
'./providence-input-data/references/lion-based-ui',
|
||||
'./providence-input-data/references/lion-based-ui-labs',
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
setupAnalyzerTest();
|
||||
|
||||
describe('CLI helpers', () => {
|
||||
const rootDir = toPosixPath(pathLib.resolve(__dirname, '../../'));
|
||||
|
||||
describe('pathsArrayFromCs', () => {
|
||||
it('allows absolute paths', async () => {
|
||||
expect(pathsArrayFromCs('/mocked/path/example-project', rootDir)).to.eql([
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
|
||||
it('allows relative paths', async () => {
|
||||
expect(
|
||||
pathsArrayFromCs('./test-helpers/project-mocks/importing-target-project', rootDir),
|
||||
).to.eql([`${rootDir}/test-helpers/project-mocks/importing-target-project`]);
|
||||
expect(
|
||||
pathsArrayFromCs('test-helpers/project-mocks/importing-target-project', rootDir),
|
||||
).to.eql([`${rootDir}/test-helpers/project-mocks/importing-target-project`]);
|
||||
});
|
||||
|
||||
it('allows globs', async () => {
|
||||
expect(pathsArrayFromCs('test-helpers/project-mocks*', rootDir)).to.eql([
|
||||
`${rootDir}/test-helpers/project-mocks`,
|
||||
`${rootDir}/test-helpers/project-mocks-analyzer-outputs`,
|
||||
]);
|
||||
});
|
||||
|
||||
it('allows multiple comma separated paths', async () => {
|
||||
const paths =
|
||||
'test-helpers/project-mocks*, ./test-helpers/project-mocks/importing-target-project,/mocked/path/example-project';
|
||||
expect(pathsArrayFromCs(paths, rootDir)).to.eql([
|
||||
`${rootDir}/test-helpers/project-mocks`,
|
||||
`${rootDir}/test-helpers/project-mocks-analyzer-outputs`,
|
||||
`${rootDir}/test-helpers/project-mocks/importing-target-project`,
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('pathsArrayFromCollectionName', () => {
|
||||
it('gets collections from external target config', async () => {
|
||||
expect(
|
||||
pathsArrayFromCollectionName('lion-collection', 'search-target', externalCfgMock, rootDir),
|
||||
).to.eql(
|
||||
externalCfgMock.searchTargetCollections['lion-collection'].map(p =>
|
||||
toPosixPath(pathLib.join(rootDir, p)),
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
it('gets collections from external reference config', async () => {
|
||||
expect(
|
||||
pathsArrayFromCollectionName(
|
||||
'lion-based-ui-collection',
|
||||
'reference',
|
||||
externalCfgMock,
|
||||
rootDir,
|
||||
),
|
||||
).to.eql(
|
||||
externalCfgMock.referenceCollections['lion-based-ui-collection'].map(p =>
|
||||
toPosixPath(pathLib.join(rootDir, p)),
|
||||
),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('appendProjectDependencyPaths', () => {
|
||||
beforeEach(() => {
|
||||
mockProject(
|
||||
{
|
||||
'./src/OriginalComp.js': `export class OriginalComp {}`,
|
||||
'./src/inbetween.js': `export { OriginalComp as InBetweenComp } from './OriginalComp.js'`,
|
||||
'./index.js': `export { InBetweenComp as MyComp } from './src/inbetween.js'`,
|
||||
'./node_modules/dependency-a/index.js': '',
|
||||
'./bower_components/dependency-b/index.js': '',
|
||||
'./node_modules/my-dependency/index.js': '',
|
||||
},
|
||||
{
|
||||
projectName: 'example-project',
|
||||
projectPath: '/mocked/path/example-project',
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('adds bower and node dependencies', async () => {
|
||||
const result = await appendProjectDependencyPaths(['/mocked/path/example-project']);
|
||||
expect(result).to.eql([
|
||||
'/mocked/path/example-project/node_modules/dependency-a',
|
||||
'/mocked/path/example-project/node_modules/my-dependency',
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
|
||||
it('allows a regex filter', async () => {
|
||||
const result = await appendProjectDependencyPaths(
|
||||
['/mocked/path/example-project'],
|
||||
'/^dependency-/',
|
||||
);
|
||||
expect(result).to.eql([
|
||||
'/mocked/path/example-project/node_modules/dependency-a',
|
||||
// on Windows, it should not add '/mocked/path/example-project/node_modules/my-dependency',
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
|
||||
const result2 = await appendProjectDependencyPaths(['/mocked/path/example-project'], '/b$/');
|
||||
expect(result2).to.eql([
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
|
||||
it('allows to filter out only npm or bower deps', async () => {
|
||||
const result = await appendProjectDependencyPaths(
|
||||
['/mocked/path/example-project'],
|
||||
undefined,
|
||||
['npm'],
|
||||
);
|
||||
expect(result).to.eql([
|
||||
'/mocked/path/example-project/node_modules/dependency-a',
|
||||
'/mocked/path/example-project/node_modules/my-dependency',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
|
||||
const result2 = await appendProjectDependencyPaths(
|
||||
['/mocked/path/example-project'],
|
||||
undefined,
|
||||
['bower'],
|
||||
);
|
||||
expect(result2).to.eql([
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Extend docs', () => {
|
||||
afterEach(() => {
|
||||
restoreMockedProjects();
|
||||
});
|
||||
|
||||
it('rewrites monorepo package paths when analysis is run from monorepo root', async () => {
|
||||
// This fails after InputDataService.addAstToProjectsData is memoized
|
||||
// (it does pass when run in isolation however, as a quick fix we disable memoization cache here...)
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
// Since we use the print method here, we need to force Babel, bc swc-to-babel output is not compatible
|
||||
// with @babel/generate
|
||||
const initialAstServiceFallbackToBabel = AstService.fallbackToBabel;
|
||||
AstService.fallbackToBabel = true;
|
||||
|
||||
const theirProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: 'their-components',
|
||||
version: '1.0.0',
|
||||
}),
|
||||
'./src/TheirButton.js': `export class TheirButton extends HTMLElement {}`,
|
||||
'./src/TheirTooltip.js': `export class TheirTooltip extends HTMLElement {}`,
|
||||
'./their-button.js': `
|
||||
import { TheirButton } from './src/TheirButton.js';
|
||||
|
||||
customElements.define('their-button', TheirButton);
|
||||
`,
|
||||
'./demo.js': `
|
||||
import { TheirTooltip } from './src/TheirTooltip.js';
|
||||
import './their-button.js';
|
||||
`,
|
||||
};
|
||||
|
||||
const myProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: '@my/root',
|
||||
workspaces: ['packages/*', 'another-folder/my-tooltip'],
|
||||
dependencies: {
|
||||
'their-components': '1.0.0',
|
||||
},
|
||||
}),
|
||||
// Package 1: @my/button
|
||||
'./packages/button/package.json': JSON.stringify({
|
||||
name: '@my/button',
|
||||
}),
|
||||
'./packages/button/src/MyButton.js': `
|
||||
import { TheirButton } from 'their-components/src/TheirButton.js';
|
||||
|
||||
export class MyButton extends TheirButton {}
|
||||
`,
|
||||
'./packages/button/src/my-button.js': `
|
||||
import { MyButton } from './MyButton.js';
|
||||
|
||||
customElements.define('my-button', MyButton);
|
||||
`,
|
||||
|
||||
// Package 2: @my/tooltip
|
||||
'./packages/tooltip/package.json': JSON.stringify({
|
||||
name: '@my/tooltip',
|
||||
}),
|
||||
'./packages/tooltip/src/MyTooltip.js': `
|
||||
import { TheirTooltip } from 'their-components/src/TheirTooltip.js';
|
||||
|
||||
export class MyTooltip extends TheirTooltip {}
|
||||
`,
|
||||
};
|
||||
|
||||
const theirProject = {
|
||||
path: '/my-components/node_modules/their-components',
|
||||
name: 'their-components',
|
||||
files: Object.entries(theirProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
const myProject = {
|
||||
path: '/my-components',
|
||||
name: 'my-components',
|
||||
files: Object.entries(myProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
mockTargetAndReferenceProject(theirProject, myProject);
|
||||
|
||||
const result = await getExtendDocsResults({
|
||||
referenceProjectPaths: [theirProject.path],
|
||||
prefixCfg: { from: 'their', to: 'my' },
|
||||
extensions: ['.js'],
|
||||
cwd: '/my-components',
|
||||
});
|
||||
|
||||
expect(result).to.eql([
|
||||
{
|
||||
name: 'TheirButton',
|
||||
variable: {
|
||||
from: 'TheirButton',
|
||||
to: 'MyButton',
|
||||
paths: [
|
||||
{
|
||||
from: './src/TheirButton.js',
|
||||
to: '@my/button/src/MyButton.js', // rewritten from './packages/button/src/MyButton.js',
|
||||
},
|
||||
{
|
||||
from: 'their-components/src/TheirButton.js',
|
||||
to: '@my/button/src/MyButton.js', // rewritten from './packages/button/src/MyButton.js',
|
||||
},
|
||||
],
|
||||
},
|
||||
tag: {
|
||||
from: 'their-button',
|
||||
to: 'my-button',
|
||||
paths: [
|
||||
{
|
||||
from: './their-button.js',
|
||||
to: '@my/button/src/my-button.js', // rewritten from './packages/button/src/MyButton.js',
|
||||
},
|
||||
{
|
||||
from: 'their-components/their-button.js',
|
||||
to: '@my/button/src/my-button.js', // rewritten from './packages/button/src/MyButton.js',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'TheirTooltip',
|
||||
variable: {
|
||||
from: 'TheirTooltip',
|
||||
to: 'MyTooltip',
|
||||
paths: [
|
||||
{
|
||||
from: './src/TheirTooltip.js',
|
||||
to: '@my/tooltip/src/MyTooltip.js', // './packages/tooltip/src/MyTooltip.js',
|
||||
},
|
||||
{
|
||||
from: 'their-components/src/TheirTooltip.js',
|
||||
to: '@my/tooltip/src/MyTooltip.js', // './packages/tooltip/src/MyTooltip.js',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
AstService.fallbackToBabel = initialAstServiceFallbackToBabel;
|
||||
});
|
||||
|
||||
it('does not check for match compatibility (target and reference) in monorepo targets', async () => {
|
||||
// ===== REFERENCE AND TARGET PROJECTS =====
|
||||
|
||||
const theirProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: 'their-components',
|
||||
version: '1.0.0',
|
||||
}),
|
||||
'./src/TheirButton.js': `export class TheirButton extends HTMLElement {}`,
|
||||
};
|
||||
|
||||
// This will be detected as being a monorepo
|
||||
const monoProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: '@mono/root',
|
||||
workspaces: ['packages/*'],
|
||||
dependencies: {
|
||||
'their-components': '1.0.0',
|
||||
},
|
||||
}),
|
||||
// Package: @mono/button
|
||||
'./packages/button/package.json': JSON.stringify({
|
||||
name: '@mono/button',
|
||||
}),
|
||||
};
|
||||
|
||||
// This will be detected as NOT being a monorepo
|
||||
const nonMonoProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: 'non-mono',
|
||||
dependencies: {
|
||||
'their-components': '1.0.0',
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
const theirProject = {
|
||||
path: '/their-components',
|
||||
name: 'their-components',
|
||||
files: Object.entries(theirProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
const monoProject = {
|
||||
path: '/mono-components',
|
||||
name: 'mono-components',
|
||||
files: Object.entries(monoProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
const nonMonoProject = {
|
||||
path: '/non-mono-components',
|
||||
name: 'non-mono-components',
|
||||
files: Object.entries(nonMonoProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
// ===== TESTS =====
|
||||
|
||||
const providenceStub = sinon.stub(_providenceModule, 'providence').returns(
|
||||
new Promise(resolve => {
|
||||
resolve([]);
|
||||
}),
|
||||
);
|
||||
|
||||
// ===== mono =====
|
||||
|
||||
mockTargetAndReferenceProject(theirProject, monoProject);
|
||||
await getExtendDocsResults({
|
||||
referenceProjectPaths: ['/their-components'],
|
||||
prefixCfg: { from: 'their', to: 'my' },
|
||||
extensions: ['.js'],
|
||||
cwd: '/mono-components',
|
||||
});
|
||||
|
||||
expect(providenceStub.args[0][1].skipCheckMatchCompatibility).to.equal(true);
|
||||
providenceStub.resetHistory();
|
||||
restoreMockedProjects();
|
||||
|
||||
// ===== non mono =====
|
||||
|
||||
mockTargetAndReferenceProject(theirProject, nonMonoProject);
|
||||
await getExtendDocsResults({
|
||||
referenceProjectPaths: ['/their-components'],
|
||||
prefixCfg: { from: 'their', to: 'my' },
|
||||
extensions: ['.js'],
|
||||
cwd: '/non-mono-components',
|
||||
});
|
||||
expect(providenceStub.args[0][1].skipCheckMatchCompatibility).to.equal(false);
|
||||
|
||||
providenceStub.restore();
|
||||
});
|
||||
});
|
||||
});
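Taken together, the tests above pin down how the path helpers are meant to be called; a condensed usage sketch (the paths are illustrative):

```js
import { _cliHelpersModule } from '../../src/cli/cli-helpers.js';

const { pathsArrayFromCs, appendProjectDependencyPaths } = _cliHelpersModule;

// Comma-separated input (absolute paths, relative paths and globs), resolved against a cwd.
const targets = pathsArrayFromCs(
  'test-helpers/project-mocks*,/mocked/path/example-project',
  '/path/to/repo-root',
);

// Optionally append npm/bower dependency folders, filtered by a regex string and/or dep type.
const withDeps = await appendProjectDependencyPaths(targets, '/^dep-/', ['npm']);
```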
31
packages-node/providence-analytics/test-node/cli/cli.e2e.js
Normal file
@@ -0,0 +1,31 @@
/* eslint-disable import/no-extraneous-dependencies */
import pathLib from 'path';
import { expect } from 'chai';
import { it } from 'mocha';
import { appendProjectDependencyPaths } from '../../src/cli/cli-helpers.js';
import { toPosixPath } from '../../src/program/utils/to-posix-path.js';
import { getCurrentDir } from '../../src/program/utils/get-current-dir.js';

/**
 * These tests are added on top of unit tests. See:
 * - https://github.com/ing-bank/lion/issues/1565
 * - https://github.com/ing-bank/lion/issues/1564
 */
describe('CLI helpers against filesystem', () => {
  describe('appendProjectDependencyPaths', () => {
    it('allows a regex filter', async () => {
      const targetFilePath = toPosixPath(
        pathLib.resolve(
          getCurrentDir(import.meta.url),
          '../../test-helpers/project-mocks/importing-target-project',
        ),
      );
      const result = await appendProjectDependencyPaths([targetFilePath], '/^dep-/');
      expect(result).to.eql([
        `${targetFilePath}/node_modules/dep-a`,
        // on Windows, it should not add `${targetFilePath}/node_modules/my-dep-b`
        targetFilePath,
      ]);
    });
  });
});
@ -1,36 +1,24 @@
|
|||
const sinon = require('sinon');
|
||||
const pathLib = require('path');
|
||||
const { expect } = require('chai');
|
||||
const commander = require('commander');
|
||||
const {
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
mockTargetAndReferenceProject,
|
||||
} = require('../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../test-helpers/mock-log-service-helpers.js');
|
||||
const { InputDataService } = require('../../src/program/services/InputDataService.js');
|
||||
const { QueryService } = require('../../src/program/services/QueryService.js');
|
||||
const providenceModule = require('../../src/program/providence.js');
|
||||
const extendDocsModule = require('../../src/cli/launch-providence-with-extend-docs.js');
|
||||
const cliHelpersModule = require('../../src/cli/cli-helpers.js');
|
||||
const { cli } = require('../../src/cli/cli.js');
|
||||
const promptAnalyzerModule = require('../../src/cli/prompt-analyzer-menu.js');
|
||||
const { toPosixPath } = require('../../src/program/utils/to-posix-path.js');
|
||||
const { getExtendDocsResults } = require('../../src/cli/launch-providence-with-extend-docs.js');
|
||||
/* eslint-disable no-unused-expressions */
|
||||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
import sinon from 'sinon';
|
||||
import { expect } from 'chai';
|
||||
import { it } from 'mocha';
|
||||
import commander from 'commander';
|
||||
import { mockProject } from '../../test-helpers/mock-project-helpers.js';
|
||||
import { InputDataService } from '../../src/program/core/InputDataService.js';
|
||||
import { QueryService } from '../../src/program/core/QueryService.js';
|
||||
import { _providenceModule } from '../../src/program/providence.js';
|
||||
import { _cliHelpersModule } from '../../src/cli/cli-helpers.js';
|
||||
import { cli } from '../../src/cli/cli.js';
|
||||
import { _promptAnalyzerMenuModule } from '../../src/cli/prompt-analyzer-menu.js';
|
||||
import { memoizeConfig } from '../../src/program/utils/memoize.js';
|
||||
import { _extendDocsModule } from '../../src/cli/launch-providence-with-extend-docs.js';
|
||||
import { dashboardServer } from '../../src/dashboard/server.js';
|
||||
import { setupAnalyzerTest } from '../../test-helpers/setup-analyzer-test.js';
|
||||
|
||||
const { pathsArrayFromCs, pathsArrayFromCollectionName, appendProjectDependencyPaths } =
|
||||
cliHelpersModule;
|
||||
|
||||
const queryResults = [];
|
||||
|
||||
const rootDir = toPosixPath(pathLib.resolve(__dirname, '../../'));
|
||||
/**
|
||||
* @typedef {import('../../types/index.js').QueryResult} QueryResult
|
||||
*/
|
||||
|
||||
const externalCfgMock = {
|
||||
searchTargetCollections: {
|
||||
|
|
@ -48,28 +36,77 @@ const externalCfgMock = {
|
|||
},
|
||||
};
|
||||
|
||||
setupAnalyzerTest();
|
||||
|
||||
/**
|
||||
* @param {string} args
|
||||
* @param {string} cwd
|
||||
*/
|
||||
async function runCli(args, cwd) {
|
||||
process.argv = [
|
||||
const argv = [
|
||||
...process.argv.slice(0, 2),
|
||||
...args.split(' ').map(a => a.replace(/^("|')?(.*)("|')?$/, '$2')),
|
||||
];
|
||||
await cli({ cwd });
|
||||
await cli({ argv, cwd });
|
||||
}
|
||||
|
||||
describe('Providence CLI', () => {
|
||||
const rootDir = '/mocked/path/example-project';
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
let providenceStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let promptCfgStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let iExtConfStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let promptStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let qConfStub;
|
||||
|
||||
before(() => {
|
||||
// Prevent MaxListenersExceededWarning
|
||||
commander.setMaxListeners(100);
|
||||
|
||||
mockWriteToJson(queryResults);
|
||||
suppressNonCriticalLogs();
|
||||
/** @type {sinon.SinonStub} */
|
||||
providenceStub = sinon.stub(_providenceModule, 'providence').returns(Promise.resolve());
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
promptCfgStub = sinon
|
||||
.stub(_promptAnalyzerMenuModule, 'promptAnalyzerConfigMenu')
|
||||
.returns(Promise.resolve({ analyzerConfig: { con: 'fig' } }));
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
iExtConfStub = sinon.stub(InputDataService, 'getExternalConfig').returns(externalCfgMock);
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
promptStub = sinon
|
||||
.stub(_promptAnalyzerMenuModule, 'promptAnalyzerMenu')
|
||||
.returns(Promise.resolve({ analyzerName: 'match-analyzer-mock' }));
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
qConfStub = sinon.stub(QueryService, 'getQueryConfigFromAnalyzer').returns(
|
||||
// @ts-expect-error
|
||||
Promise.resolve({
|
||||
analyzer: {
|
||||
name: 'match-analyzer-mock',
|
||||
requiresReference: true,
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
commander.setMaxListeners(10);
|
||||
|
||||
providenceStub.restore();
|
||||
promptCfgStub.restore();
|
||||
iExtConfStub.restore();
|
||||
promptStub.restore();
|
||||
qConfStub.restore();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
mockProject(
|
||||
{
|
||||
'./src/OriginalComp.js': `export class OriginalComp {}`,
|
||||
|
|
@ -83,43 +120,7 @@ describe('Providence CLI', () => {
|
|||
projectPath: '/mocked/path/example-project',
|
||||
},
|
||||
);
|
||||
|
||||
providenceStub = sinon.stub(providenceModule, 'providence').returns(
|
||||
new Promise(resolve => {
|
||||
resolve();
|
||||
}),
|
||||
);
|
||||
|
||||
promptCfgStub = sinon
|
||||
.stub(promptAnalyzerModule, 'promptAnalyzerConfigMenu')
|
||||
.returns({ analyzerConfig: { con: 'fig' } });
|
||||
|
||||
iExtConfStub = sinon.stub(InputDataService, 'getExternalConfig').returns(externalCfgMock);
|
||||
|
||||
promptStub = sinon
|
||||
.stub(promptAnalyzerModule, 'promptAnalyzerMenu')
|
||||
.returns({ analyzerName: 'mock-analyzer' });
|
||||
|
||||
qConfStub = sinon.stub(QueryService, 'getQueryConfigFromAnalyzer').returns({
|
||||
analyzer: {
|
||||
name: 'mock-analyzer',
|
||||
requiresReference: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
after(() => {
|
||||
commander.setMaxListeners(10);
|
||||
|
||||
restoreSuppressNonCriticalLogs();
|
||||
restoreMockedProjects();
|
||||
restoreWriteToJson();
|
||||
|
||||
providenceStub.restore();
|
||||
promptCfgStub.restore();
|
||||
iExtConfStub.restore();
|
||||
promptStub.restore();
|
||||
qConfStub.restore();
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
|
|
@ -130,38 +131,45 @@ describe('Providence CLI', () => {
|
|||
qConfStub.resetHistory();
|
||||
});
|
||||
|
||||
const analyzeCmd = 'analyze mock-analyzer';
|
||||
const analyzeCmd = 'analyze match-analyzer-mock';
|
||||
|
||||
it('calls providence', async () => {
|
||||
await runCli(`${analyzeCmd} -t /mocked/path/example-project`);
|
||||
await runCli(`${analyzeCmd} -t /mocked/path/example-project`, rootDir);
|
||||
expect(providenceStub.called).to.be.true;
|
||||
});
|
||||
|
||||
it('creates a QueryConfig', async () => {
|
||||
await runCli(`${analyzeCmd} -t /mocked/path/example-project`);
|
||||
await runCli(`${analyzeCmd} -t /mocked/path/example-project`, rootDir);
|
||||
expect(qConfStub.called).to.be.true;
|
||||
expect(qConfStub.args[0][0]).to.equal('mock-analyzer');
|
||||
expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
|
||||
});
|
||||
|
||||
describe('Global options', () => {
|
||||
const anyCmdThatAcceptsGlobalOpts = 'analyze match-analyzer-mock';
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
let pathsArrayFromCollectionStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let pathsArrayFromCsStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let appendProjectDependencyPathsStub;
|
||||
|
||||
before(() => {
|
||||
pathsArrayFromCsStub = sinon
|
||||
.stub(cliHelpersModule, 'pathsArrayFromCs')
|
||||
.stub(_cliHelpersModule, 'pathsArrayFromCs')
|
||||
.returns(['/mocked/path/example-project']);
|
||||
pathsArrayFromCollectionStub = sinon
|
||||
.stub(cliHelpersModule, 'pathsArrayFromCollectionName')
|
||||
.stub(_cliHelpersModule, 'pathsArrayFromCollectionName')
|
||||
.returns(['/mocked/path/example-project']);
|
||||
appendProjectDependencyPathsStub = sinon
|
||||
.stub(cliHelpersModule, 'appendProjectDependencyPaths')
|
||||
.returns([
|
||||
.stub(_cliHelpersModule, 'appendProjectDependencyPaths')
|
||||
.returns(
|
||||
Promise.resolve([
|
||||
'/mocked/path/example-project',
|
||||
'/mocked/path/example-project/node_modules/mock-dep-a',
|
||||
'/mocked/path/example-project/bower_components/mock-dep-b',
|
||||
]);
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
|
|
@ -177,30 +185,33 @@ describe('Providence CLI', () => {
|
|||
});
|
||||
|
||||
it('"-e --extensions"', async () => {
|
||||
await runCli(`${analyzeCmd} -e bla,blu`);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} -e bla,blu`, rootDir);
|
||||
expect(providenceStub.args[0][1].gatherFilesConfig.extensions).to.eql(['.bla', '.blu']);
|
||||
|
||||
providenceStub.resetHistory();
|
||||
|
||||
await runCli(`${analyzeCmd} --extensions bla,blu`);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --extensions bla,blu`, rootDir);
|
||||
expect(providenceStub.args[0][1].gatherFilesConfig.extensions).to.eql(['.bla', '.blu']);
|
||||
});
|
||||
|
||||
it('"-t --search-target-paths"', async () => {
|
||||
await runCli(`${analyzeCmd} -t /mocked/path/example-project`, rootDir);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} -t /mocked/path/example-project`, rootDir);
|
||||
expect(pathsArrayFromCsStub.args[0][0]).to.equal('/mocked/path/example-project');
|
||||
expect(providenceStub.args[0][1].targetProjectPaths).to.eql(['/mocked/path/example-project']);
|
||||
|
||||
pathsArrayFromCsStub.resetHistory();
|
||||
providenceStub.resetHistory();
|
||||
|
||||
await runCli(`${analyzeCmd} --search-target-paths /mocked/path/example-project`, rootDir);
|
||||
await runCli(
|
||||
`${anyCmdThatAcceptsGlobalOpts} --search-target-paths /mocked/path/example-project`,
|
||||
rootDir,
|
||||
);
|
||||
expect(pathsArrayFromCsStub.args[0][0]).to.equal('/mocked/path/example-project');
|
||||
expect(providenceStub.args[0][1].targetProjectPaths).to.eql(['/mocked/path/example-project']);
|
||||
});
|
||||
|
||||
it('"-r --reference-paths"', async () => {
|
||||
await runCli(`${analyzeCmd} -r /mocked/path/example-project`, rootDir);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} -r /mocked/path/example-project`, rootDir);
|
||||
expect(pathsArrayFromCsStub.args[0][0]).to.equal('/mocked/path/example-project');
|
||||
expect(providenceStub.args[0][1].referenceProjectPaths).to.eql([
|
||||
'/mocked/path/example-project',
|
||||
|
|
@ -209,7 +220,10 @@ describe('Providence CLI', () => {
|
|||
pathsArrayFromCsStub.resetHistory();
|
||||
providenceStub.resetHistory();
|
||||
|
||||
await runCli(`${analyzeCmd} --reference-paths /mocked/path/example-project`, rootDir);
|
||||
await runCli(
|
||||
`${anyCmdThatAcceptsGlobalOpts} --reference-paths /mocked/path/example-project`,
|
||||
rootDir,
|
||||
);
|
||||
expect(pathsArrayFromCsStub.args[0][0]).to.equal('/mocked/path/example-project');
|
||||
expect(providenceStub.args[0][1].referenceProjectPaths).to.eql([
|
||||
'/mocked/path/example-project',
|
||||
|
|
@ -217,13 +231,19 @@ describe('Providence CLI', () => {
|
|||
});
|
||||
|
||||
it('"--search-target-collection"', async () => {
|
||||
await runCli(`${analyzeCmd} --search-target-collection lion-collection`, rootDir);
|
||||
await runCli(
|
||||
`${anyCmdThatAcceptsGlobalOpts} --search-target-collection lion-collection`,
|
||||
rootDir,
|
||||
);
|
||||
expect(pathsArrayFromCollectionStub.args[0][0]).to.equal('lion-collection');
|
||||
expect(providenceStub.args[0][1].targetProjectPaths).to.eql(['/mocked/path/example-project']);
|
||||
});
|
||||
|
||||
it('"--reference-collection"', async () => {
|
||||
await runCli(`${analyzeCmd} --reference-collection lion-based-ui-collection`, rootDir);
|
||||
await runCli(
|
||||
`${anyCmdThatAcceptsGlobalOpts} --reference-collection lion-based-ui-collection`,
|
||||
rootDir,
|
||||
);
|
||||
expect(pathsArrayFromCollectionStub.args[0][0]).to.equal('lion-based-ui-collection');
|
||||
expect(providenceStub.args[0][1].referenceProjectPaths).to.eql([
|
||||
'/mocked/path/example-project',
|
||||
|
|
@ -231,7 +251,7 @@ describe('Providence CLI', () => {
|
|||
});
|
||||
|
||||
it('"-a --allowlist"', async () => {
|
||||
await runCli(`${analyzeCmd} -a mocked/**/*,rocked/*`, rootDir);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} -a mocked/**/*,rocked/*`, rootDir);
|
||||
expect(providenceStub.args[0][1].gatherFilesConfig.allowlist).to.eql([
|
||||
'mocked/**/*',
|
||||
'rocked/*',
|
||||
|
|
@ -239,7 +259,7 @@ describe('Providence CLI', () => {
|
|||
|
||||
providenceStub.resetHistory();
|
||||
|
||||
await runCli(`${analyzeCmd} --allowlist mocked/**/*,rocked/*`, rootDir);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --allowlist mocked/**/*,rocked/*`, rootDir);
|
||||
expect(providenceStub.args[0][1].gatherFilesConfig.allowlist).to.eql([
|
||||
'mocked/**/*',
|
||||
'rocked/*',
|
||||
|
|
@ -247,46 +267,49 @@ describe('Providence CLI', () => {
|
|||
});
|
||||
|
||||
it('"--allowlist-reference"', async () => {
|
||||
await runCli(`${analyzeCmd} --allowlist-reference mocked/**/*,rocked/*`, rootDir);
|
||||
await runCli(
|
||||
`${anyCmdThatAcceptsGlobalOpts} --allowlist-reference mocked/**/*,rocked/*`,
|
||||
rootDir,
|
||||
);
|
||||
expect(providenceStub.args[0][1].gatherFilesConfigReference.allowlist).to.eql([
|
||||
'mocked/**/*',
|
||||
'rocked/*',
|
||||
]);
|
||||
});
|
||||
|
||||
it('--allowlist-mode', async () => {
|
||||
await runCli(`${analyzeCmd} --allowlist-mode git`, rootDir);
|
||||
it('"--allowlist-mode"', async () => {
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --allowlist-mode git`, rootDir);
|
||||
expect(providenceStub.args[0][1].gatherFilesConfig.allowlistMode).to.equal('git');
|
||||
});
|
||||
|
||||
it('--allowlist-mode-reference', async () => {
|
||||
await runCli(`${analyzeCmd} --allowlist-mode-reference npm`, rootDir);
|
||||
it('"--allowlist-mode-reference"', async () => {
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --allowlist-mode-reference npm`, rootDir);
|
||||
expect(providenceStub.args[0][1].gatherFilesConfigReference.allowlistMode).to.equal('npm');
|
||||
});
|
||||
|
||||
it('"-D --debug"', async () => {
|
||||
await runCli(`${analyzeCmd} -D`, rootDir);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} -D`, rootDir);
|
||||
expect(providenceStub.args[0][1].debugEnabled).to.equal(true);
|
||||
|
||||
providenceStub.resetHistory();
|
||||
|
||||
await runCli(`${analyzeCmd} --debug`, rootDir);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --debug`, rootDir);
|
||||
expect(providenceStub.args[0][1].debugEnabled).to.equal(true);
|
||||
});
|
||||
|
||||
it('--write-log-file"', async () => {
|
||||
await runCli(`${analyzeCmd} --write-log-file`, rootDir);
|
||||
it('"--write-log-file"', async () => {
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --write-log-file`, rootDir);
|
||||
expect(providenceStub.args[0][1].writeLogFile).to.equal(true);
|
||||
});
|
||||
|
||||
it('--target-dependencies"', async () => {
|
||||
await runCli(`${analyzeCmd}`, rootDir);
|
||||
it('"--target-dependencies"', async () => {
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts}`, rootDir);
|
||||
expect(appendProjectDependencyPathsStub.called).to.be.false;
|
||||
|
||||
appendProjectDependencyPathsStub.resetHistory();
|
||||
providenceStub.resetHistory();
|
||||
|
||||
await runCli(`${analyzeCmd} --target-dependencies`, rootDir);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --target-dependencies`, rootDir);
|
||||
expect(appendProjectDependencyPathsStub.called).to.be.true;
|
||||
expect(providenceStub.args[0][1].targetProjectPaths).to.eql([
|
||||
'/mocked/path/example-project',
|
||||
|
|
@ -295,15 +318,20 @@ describe('Providence CLI', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
it('--target-dependencies /^with-regex/"', async () => {
|
||||
await runCli(`${analyzeCmd} --target-dependencies /^mock-/`, rootDir);
|
||||
it('"--target-dependencies /^with-regex/"', async () => {
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --target-dependencies /^mock-/`, rootDir);
|
||||
expect(appendProjectDependencyPathsStub.args[0][1]).to.equal('/^mock-/');
|
||||
});
|
||||
|
||||
it('"--skip-check-match-compatibility"', async () => {
|
||||
await runCli(`${analyzeCmd} --skip-check-match-compatibility`, rootDir);
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --skip-check-match-compatibility`, rootDir);
|
||||
expect(providenceStub.args[0][1].skipCheckMatchCompatibility).to.equal(true);
|
||||
});
|
||||
|
||||
it('"--fallback-to-babel"', async () => {
|
||||
await runCli(`${anyCmdThatAcceptsGlobalOpts} --fallback-to-babel`, rootDir);
|
||||
expect(providenceStub.args[0][1].fallbackToBabel).to.equal(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Commands', () => {
|
||||
|
|
@ -325,19 +353,19 @@ describe('Providence CLI', () => {
|
|||
});
|
||||
|
||||
it('"-c --config"', async () => {
|
||||
await runCli(`analyze mock-analyzer -c {"a":"2"}`, rootDir);
|
||||
expect(qConfStub.args[0][0]).to.equal('mock-analyzer');
|
||||
await runCli(`analyze match-analyzer-mock -c {"a":"2"}`, rootDir);
|
||||
expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
|
||||
expect(qConfStub.args[0][1]).to.eql({ a: '2', metaConfig: {} });
|
||||
|
||||
qConfStub.resetHistory();
|
||||
|
||||
await runCli(`analyze mock-analyzer --config {"a":"2"}`, rootDir);
|
||||
expect(qConfStub.args[0][0]).to.equal('mock-analyzer');
|
||||
await runCli(`analyze match-analyzer-mock --config {"a":"2"}`, rootDir);
|
||||
expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
|
||||
expect(qConfStub.args[0][1]).to.eql({ a: '2', metaConfig: {} });
|
||||
});
|
||||
|
||||
it('calls "promptAnalyzerConfigMenu" without config given', async () => {
|
||||
await runCli(`analyze mock-analyzer`, rootDir);
|
||||
await runCli(`analyze match-analyzer-mock`, rootDir);
|
||||
expect(promptCfgStub.called).to.be.true;
|
||||
});
|
||||
});
|
||||
|
|
@ -348,12 +376,22 @@ describe('Providence CLI', () => {
|
|||
|
||||
describe('Manage', () => {});
|
||||
|
||||
describe('Dashboard', () => {
|
||||
/** @type {sinon.SinonStub} */
|
||||
const startStub = sinon.stub(dashboardServer, 'start');
|
||||
it('spawns a dashboard', async () => {
|
||||
runCli(`dashboard`, rootDir);
|
||||
expect(startStub.called).to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('Extend docs', () => {
|
||||
/** @type {sinon.SinonStub} */
|
||||
let extendDocsStub;
|
||||
|
||||
before(() => {
|
||||
extendDocsStub = sinon
|
||||
.stub(extendDocsModule, 'launchProvidenceWithExtendDocs')
|
||||
.stub(_extendDocsModule, 'launchProvidenceWithExtendDocs')
|
||||
.returns(Promise.resolve());
|
||||
});
|
||||
|
||||
|
|
@ -389,361 +427,10 @@ describe('Providence CLI', () => {
|
|||
extensions: ['.bla'],
|
||||
allowlist: ['al'],
|
||||
allowlistReference: ['alr'],
|
||||
cwd: undefined,
|
||||
cwd: '/mocked/path/example-project',
|
||||
skipCheckMatchCompatibility: true,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('CLI helpers', () => {
|
||||
describe('pathsArrayFromCs', () => {
|
||||
it('allows absolute paths', async () => {
|
||||
expect(pathsArrayFromCs('/mocked/path/example-project', rootDir)).to.eql([
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
|
||||
it('allows relative paths', async () => {
|
||||
expect(
|
||||
pathsArrayFromCs('./test-helpers/project-mocks/importing-target-project', rootDir),
|
||||
).to.eql([`${rootDir}/test-helpers/project-mocks/importing-target-project`]);
|
||||
expect(
|
||||
pathsArrayFromCs('test-helpers/project-mocks/importing-target-project', rootDir),
|
||||
).to.eql([`${rootDir}/test-helpers/project-mocks/importing-target-project`]);
|
||||
});
|
||||
|
||||
it('allows globs', async () => {
|
||||
expect(pathsArrayFromCs('test-helpers/project-mocks*', rootDir)).to.eql([
|
||||
`${rootDir}/test-helpers/project-mocks`,
|
||||
`${rootDir}/test-helpers/project-mocks-analyzer-outputs`,
|
||||
]);
|
||||
});
|
||||
|
||||
it('allows multiple comma separated paths', async () => {
|
||||
const paths =
|
||||
'test-helpers/project-mocks*, ./test-helpers/project-mocks/importing-target-project,/mocked/path/example-project';
|
||||
expect(pathsArrayFromCs(paths, rootDir)).to.eql([
|
||||
`${rootDir}/test-helpers/project-mocks`,
|
||||
`${rootDir}/test-helpers/project-mocks-analyzer-outputs`,
|
||||
`${rootDir}/test-helpers/project-mocks/importing-target-project`,
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('pathsArrayFromCollectionName', () => {
|
||||
it('gets collections from external target config', async () => {
|
||||
expect(
|
||||
pathsArrayFromCollectionName('lion-collection', 'search-target', externalCfgMock, rootDir),
|
||||
).to.eql(
|
||||
externalCfgMock.searchTargetCollections['lion-collection'].map(p =>
|
||||
toPosixPath(pathLib.join(rootDir, p)),
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
it('gets collections from external reference config', async () => {
|
||||
expect(
|
||||
pathsArrayFromCollectionName(
|
||||
'lion-based-ui-collection',
|
||||
'reference',
|
||||
externalCfgMock,
|
||||
rootDir,
|
||||
),
|
||||
).to.eql(
|
||||
externalCfgMock.referenceCollections['lion-based-ui-collection'].map(p =>
|
||||
toPosixPath(pathLib.join(rootDir, p)),
|
||||
),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('appendProjectDependencyPaths', () => {
|
||||
before(() => {
|
||||
mockWriteToJson(queryResults);
|
||||
suppressNonCriticalLogs();
|
||||
|
||||
mockProject(
|
||||
{
|
||||
'./src/OriginalComp.js': `export class OriginalComp {}`,
|
||||
'./src/inbetween.js': `export { OriginalComp as InBetweenComp } from './OriginalComp.js'`,
|
||||
'./index.js': `export { InBetweenComp as MyComp } from './src/inbetween.js'`,
|
||||
'./node_modules/dependency-a/index.js': '',
|
||||
'./bower_components/dependency-b/index.js': '',
|
||||
},
|
||||
{
|
||||
projectName: 'example-project',
|
||||
projectPath: '/mocked/path/example-project',
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
it('adds bower and node dependencies', async () => {
|
||||
const result = await appendProjectDependencyPaths(['/mocked/path/example-project']);
|
||||
expect(result).to.eql([
|
||||
'/mocked/path/example-project/node_modules/dependency-a',
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
|
||||
it('allows a regex filter', async () => {
|
||||
const result = await appendProjectDependencyPaths(
|
||||
['/mocked/path/example-project'],
|
||||
'/^dependency-/',
|
||||
);
|
||||
expect(result).to.eql([
|
||||
'/mocked/path/example-project/node_modules/dependency-a',
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
|
||||
const result2 = await appendProjectDependencyPaths(['/mocked/path/example-project'], '/b$/');
|
||||
expect(result2).to.eql([
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
|
||||
it('allows to filter out only npm or bower deps', async () => {
|
||||
const result = await appendProjectDependencyPaths(['/mocked/path/example-project'], null, [
|
||||
'npm',
|
||||
]);
|
||||
expect(result).to.eql([
|
||||
'/mocked/path/example-project/node_modules/dependency-a',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
|
||||
const result2 = await appendProjectDependencyPaths(['/mocked/path/example-project'], null, [
|
||||
'bower',
|
||||
]);
|
||||
expect(result2).to.eql([
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Extend docs', () => {
|
||||
afterEach(() => {
|
||||
restoreMockedProjects();
|
||||
});
|
||||
it('rewrites monorepo package paths when analysis is run from monorepo root', async () => {
|
||||
const theirProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: 'their-components',
|
||||
version: '1.0.0',
|
||||
}),
|
||||
'./src/TheirButton.js': `export class TheirButton extends HTMLElement {}`,
|
||||
'./src/TheirTooltip.js': `export class TheirTooltip extends HTMLElement {}`,
|
||||
'./their-button.js': `
|
||||
import { TheirButton } from './src/TheirButton.js';
|
||||
|
||||
customElements.define('their-button', TheirButton);
|
||||
`,
|
||||
'./demo.js': `
|
||||
import { TheirTooltip } from './src/TheirTooltip.js';
|
||||
import './their-button.js';
|
||||
`,
|
||||
};
|
||||
|
||||
const myProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: '@my/root',
|
||||
workspaces: ['packages/*', 'another-folder/my-tooltip'],
|
||||
dependencies: {
|
||||
'their-components': '1.0.0',
|
||||
},
|
||||
}),
|
||||
// Package 1: @my/button
|
||||
'./packages/button/package.json': JSON.stringify({
|
||||
name: '@my/button',
|
||||
}),
|
||||
'./packages/button/src/MyButton.js': `
|
||||
import { TheirButton } from 'their-components/src/TheirButton.js';
|
||||
|
||||
export class MyButton extends TheirButton {}
|
||||
`,
|
||||
'./packages/button/src/my-button.js': `
|
||||
import { MyButton } from './MyButton.js';
|
||||
|
||||
customElements.define('my-button', MyButton);
|
||||
`,
|
||||
|
||||
// Package 2: @my/tooltip
|
||||
'./packages/tooltip/package.json': JSON.stringify({
|
||||
name: '@my/tooltip',
|
||||
}),
|
||||
'./packages/tooltip/src/MyTooltip.js': `
|
||||
import { TheirTooltip } from 'their-components/src/TheirTooltip.js';
|
||||
|
||||
export class MyTooltip extends TheirTooltip {}
|
||||
`,
|
||||
};
|
||||
|
||||
const theirProject = {
|
||||
path: '/my-components/node_modules/their-components',
|
||||
name: 'their-components',
|
||||
files: Object.entries(theirProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
const myProject = {
|
||||
path: '/my-components',
|
||||
name: 'my-components',
|
||||
files: Object.entries(myProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
mockTargetAndReferenceProject(theirProject, myProject);
|
||||
|
||||
const result = await getExtendDocsResults({
|
||||
referenceProjectPaths: [theirProject.path],
|
||||
prefixCfg: { from: 'their', to: 'my' },
|
||||
extensions: ['.js'],
|
||||
cwd: '/my-components',
|
||||
});
|
||||
|
||||
expect(result).to.eql([
|
||||
{
|
||||
name: 'TheirButton',
|
||||
variable: {
|
||||
from: 'TheirButton',
|
||||
to: 'MyButton',
|
||||
paths: [
|
||||
{
|
||||
from: './src/TheirButton.js',
|
||||
to: '@my/button/src/MyButton.js', // rewritten from './packages/button/src/MyButton.js',
|
||||
},
|
||||
{
|
||||
from: 'their-components/src/TheirButton.js',
|
||||
to: '@my/button/src/MyButton.js', // rewritten from './packages/button/src/MyButton.js',
|
||||
},
|
||||
],
|
||||
},
|
||||
tag: {
|
||||
from: 'their-button',
|
||||
to: 'my-button',
|
||||
paths: [
|
||||
{
|
||||
from: './their-button.js',
|
||||
to: '@my/button/src/my-button.js', // rewritten from './packages/button/src/MyButton.js',
|
||||
},
|
||||
{
|
||||
from: 'their-components/their-button.js',
|
||||
to: '@my/button/src/my-button.js', // rewritten from './packages/button/src/MyButton.js',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
name: 'TheirTooltip',
|
||||
variable: {
|
||||
from: 'TheirTooltip',
|
||||
to: 'MyTooltip',
|
||||
paths: [
|
||||
{
|
||||
from: './src/TheirTooltip.js',
|
||||
to: '@my/tooltip/src/MyTooltip.js', // './packages/tooltip/src/MyTooltip.js',
|
||||
},
|
||||
{
|
||||
from: 'their-components/src/TheirTooltip.js',
|
||||
to: '@my/tooltip/src/MyTooltip.js', // './packages/tooltip/src/MyTooltip.js',
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('does not check for match compatibility (target and reference) in monorepo targets', async () => {
|
||||
// ===== REFERENCE AND TARGET PROJECTS =====
|
||||
|
||||
const theirProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: 'their-components',
|
||||
version: '1.0.0',
|
||||
}),
|
||||
'./src/TheirButton.js': `export class TheirButton extends HTMLElement {}`,
|
||||
};
|
||||
|
||||
// This will be detected as being a monorepo
|
||||
const monoProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: '@mono/root',
|
||||
workspaces: ['packages/*'],
|
||||
dependencies: {
|
||||
'their-components': '1.0.0',
|
||||
},
|
||||
}),
|
||||
// Package: @mono/button
|
||||
'./packages/button/package.json': JSON.stringify({
|
||||
name: '@mono/button',
|
||||
}),
|
||||
};
|
||||
|
||||
// This will be detected as NOT being a monorepo
|
||||
const nonMonoProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: 'non-mono',
|
||||
dependencies: {
|
||||
'their-components': '1.0.0',
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
const theirProject = {
|
||||
path: '/their-components',
|
||||
name: 'their-components',
|
||||
files: Object.entries(theirProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
const monoProject = {
|
||||
path: '/mono-components',
|
||||
name: 'mono-components',
|
||||
files: Object.entries(monoProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
const nonMonoProject = {
|
||||
path: '/non-mono-components',
|
||||
name: 'non-mono-components',
|
||||
files: Object.entries(nonMonoProjectFiles).map(([file, code]) => ({ file, code })),
|
||||
};
|
||||
|
||||
// ===== TESTS =====
|
||||
|
||||
const providenceStub = sinon.stub(providenceModule, 'providence').returns(
|
||||
new Promise(resolve => {
|
||||
resolve([]);
|
||||
}),
|
||||
);
|
||||
|
||||
// ===== mono =====
|
||||
|
||||
mockTargetAndReferenceProject(theirProject, monoProject);
|
||||
await getExtendDocsResults({
|
||||
referenceProjectPaths: ['/their-components'],
|
||||
prefixCfg: { from: 'their', to: 'my' },
|
||||
extensions: ['.js'],
|
||||
cwd: '/mono-components',
|
||||
});
|
||||
|
||||
expect(providenceStub.args[0][1].skipCheckMatchCompatibility).to.equal(true);
|
||||
providenceStub.resetHistory();
|
||||
restoreMockedProjects();
|
||||
|
||||
// ===== non mono =====
|
||||
|
||||
mockTargetAndReferenceProject(theirProject, nonMonoProject);
|
||||
await getExtendDocsResults({
|
||||
referenceProjectPaths: ['/their-components'],
|
||||
prefixCfg: { from: 'their', to: 'my' },
|
||||
extensions: ['.js'],
|
||||
cwd: '/non-mono-components',
|
||||
});
|
||||
expect(providenceStub.args[0][1].skipCheckMatchCompatibility).to.equal(false);
|
||||
|
||||
providenceStub.restore();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
|||
|
|
@ -0,0 +1,121 @@
|
|||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
import fs from 'fs';
|
||||
import pathLib from 'path';
import fs from 'fs';
import sinon from 'sinon';
import { fileURLToPath, pathToFileURL } from 'url';
import { expect } from 'chai';
import { it } from 'mocha';
import fetch from 'node-fetch';
import { createTestServer } from '@web/dev-server-core/test-helpers';
import { createDashboardServerConfig } from '../../src/dashboard/server.js';
import { ReportService } from '../../src/program/core/ReportService.js';
import { providenceConfUtil } from '../../src/program/utils/providence-conf-util.js';

/**
 * @typedef {import('@web/dev-server-core').DevServer} DevServer
 */

const __dirname = pathLib.dirname(fileURLToPath(import.meta.url));
const { outputPath: reportServiceOutputPathOriginal } = ReportService;
const fixturesPath = pathLib.join(__dirname, 'fixtures');
const mockedResponsesPath = pathLib.join(__dirname, 'fixtures/dashboard-responses');
const mockedOutputPath = pathLib.join(__dirname, 'fixtures/providence-output');

/**
 * @param {string} url
 */
async function getConf(url) {
  const { href } = pathToFileURL(url);
  const { default: providenceConf } = await import(href);
  const providenceConfRaw = fs.readFileSync(url, 'utf8');
  return { providenceConf, providenceConfRaw };
}

describe('Dashboard Server', () => {
  /** @type {string} */
  let host;
  /** @type {DevServer} */
  let server;
  /** @type {sinon.SinonStub} */
  let providenceConfStub;

  before(() => {
    // N.B. don't use mock-fs, since it doesn't correctly handle dynamic imports and fs.promises
    ReportService.outputPath = mockedOutputPath;
  });

  after(() => {
    ReportService.outputPath = reportServiceOutputPathOriginal;
  });

  describe('Happy flow', () => {
    beforeEach(async () => {
      const conf = await getConf(`${fixturesPath}/providence.conf.mjs`);
      providenceConfStub = sinon.stub(providenceConfUtil, 'getConf').resolves(conf);
      ({ host, server } = await createTestServer(await createDashboardServerConfig()));
    });

    afterEach(() => {
      providenceConfStub.restore();
      server.stop();
    });

    describe('Index', () => {
      it(`returns an index on '/'`, async () => {
        const response = await fetch(`${host}/src/dashboard`);
        const responseText = await response.text();
        expect(response.status).to.equal(200);
        expect(responseText).to.include('<title>Providence dashboard</title>');
      });
    });

    describe('App assets', () => {
      it(`returns (static) js assets via app/*`, async () => {
        const response = await fetch(`${host}/src/dashboard/app/p-board.js`);
        expect(response.status).to.equal(200);
      });
    });

    describe('Menu data', () => {
      it(`returns json object based on output`, async () => {
        const response = await fetch(`${host}/menu-data.json`);
        expect(response.status).to.equal(200);
        const responseJSON = await response.json();
        const expectedResult = fs.readFileSync(`${mockedResponsesPath}/menu-data.json`, 'utf8');
        expect(responseJSON).to.eql(JSON.parse(expectedResult));
      });
    });

    describe('Results', () => {
      it(`returns json object based on output`, async () => {
        const response = await fetch(`${host}/results.json`);
        expect(response.status).to.equal(200);
        const responseJson = await response.json();
        const expectedResult = fs.readFileSync(`${mockedResponsesPath}/results.json`, 'utf8');
        expect(responseJson).to.eql(JSON.parse(expectedResult));
      });
    });

    describe('Config file "providence.conf.(m)js"', () => {
      it(`returns providence.conf.mjs found in cwd`, async () => {
        const response = await fetch(`${host}/providence-conf.js`);
        expect(response.status).to.equal(200);
        const responseText = await response.text();
        const { providenceConfRaw } = await getConf(`${fixturesPath}/providence.conf.mjs`);
        expect(responseText).to.equal(providenceConfRaw);
      });

      // Since we cannot mock dynamic imports: skip for now...
      it.skip(`returns providence.conf.js found in cwd`, async () => {});
    });
  });

  describe('Unhappy flow', () => {
    // Since we cannot mock dynamic imports: skip for now...
    describe.skip('Config file "providence.conf.(m)js"', () => {
      it(`throws when no providence.conf.(m)js found`, async () => {});

      it(`throws when providence.conf.(m)js is not an esm module`, async () => {});
    });
  });
});
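
Below is a minimal standalone sketch (not part of this diff) of the flow the test above exercises, reusing only the entry points it already imports; the relative import paths and the presence of a providence.conf.mjs in the working directory are assumptions for illustration.

import fetch from 'node-fetch';
import { createTestServer } from '@web/dev-server-core/test-helpers';
import { createDashboardServerConfig } from '../../src/dashboard/server.js';

// Start the dashboard server and read the aggregated menu data it serves as json.
const { host, server } = await createTestServer(await createDashboardServerConfig());
const menuData = await (await fetch(`${host}/menu-data.json`)).json();
console.log(menuData.searchTargetDeps);
server.stop();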

@@ -0,0 +1,12 @@
{
  "searchTargetCollections": {
    "@lion-targets": ["@lion/ui"]
  },
  "referenceCollections": {
    "@lion-references": ["@lion/ui"]
  },
  "searchTargetDeps": {
    "@lion/input#0.15.7": ["@lion/input#0.15.7"],
    "@lion/listbox#0.10.7": ["@lion/listbox#0.10.7"]
  }
}
|
||||
|
|
@ -0,0 +1,634 @@
|
|||
{
|
||||
"match-imports": [
|
||||
{
|
||||
"fileName": "match-imports_-_%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__1410239906.json",
|
||||
"content": {
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-imports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__1410239906",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/input",
|
||||
"version": "0.15.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"targetProjectResult": null,
|
||||
"referenceProjectResult": null,
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "LionField::./index.js::@lion/form-core",
|
||||
"name": "LionField",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./src/LionInput.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "NativeTextFieldMixin::./index.js::@lion/form-core",
|
||||
"name": "NativeTextFieldMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./src/LionInput.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "Validator::./index.js::@lion/form-core",
|
||||
"name": "Validator",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./test/lion-input.test.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileName": "match-imports_-_%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-1773728033.json",
|
||||
"content": {
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-imports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-1773728033",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/listbox",
|
||||
"version": "0.10.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"targetProjectResult": null,
|
||||
"referenceProjectResult": null,
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FocusMixin::./index.js::@lion/form-core",
|
||||
"name": "FocusMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormControlMixin::./index.js::@lion/form-core",
|
||||
"name": "FormControlMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "InteractionStateMixin::./index.js::@lion/form-core",
|
||||
"name": "InteractionStateMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegisteringMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegisteringMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOption.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegistrarMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegistrarMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegistrarPortalMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegistrarPortalMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOptions.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ValidateMixin::./index.js::@lion/form-core",
|
||||
"name": "ValidateMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ChoiceGroupMixin::./index.js::@lion/form-core",
|
||||
"name": "ChoiceGroupMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ChoiceInputMixin::./index.js::@lion/form-core",
|
||||
"name": "ChoiceInputMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOption.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"match-subclasses": [
|
||||
{
|
||||
"fileName": "match-subclasses_-_%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__-1212823364.json",
|
||||
"content": {
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-subclasses",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__-1212823364",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/input",
|
||||
"version": "0.15.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "LionField",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "LionField::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionInput.js",
|
||||
"identifier": "LionInput"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "NativeTextFieldMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "NativeTextFieldMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionInput.js",
|
||||
"identifier": "LionInput"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "Validator",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "Validator::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./test/lion-input.test.js",
|
||||
"identifier": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileName": "match-subclasses_-_%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-222436449.json",
|
||||
"content": {
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-subclasses",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-222436449",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/listbox",
|
||||
"version": "0.10.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FocusMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FocusMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormControlMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormControlMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "InteractionStateMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "InteractionStateMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegisteringMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegisteringMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOption.js",
|
||||
"identifier": "LionOption"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegistrarMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegistrarMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegistrarPortalMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegistrarPortalMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOptions.js",
|
||||
"identifier": "LionOptions"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ValidateMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ValidateMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ChoiceGroupMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ChoiceGroupMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ChoiceInputMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ChoiceInputMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOption.js",
|
||||
"identifier": "LionOption"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,98 @@
|
|||
{
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-imports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__1410239906",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/input",
|
||||
"version": "0.15.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"targetProjectResult": null,
|
||||
"referenceProjectResult": null,
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "LionField::./index.js::@lion/form-core",
|
||||
"name": "LionField",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./src/LionInput.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "NativeTextFieldMixin::./index.js::@lion/form-core",
|
||||
"name": "NativeTextFieldMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./src/LionInput.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "Validator::./index.js::@lion/form-core",
|
||||
"name": "Validator",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./test/lion-input.test.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,194 @@
|
|||
{
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-imports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-1773728033",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/listbox",
|
||||
"version": "0.10.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"targetProjectResult": null,
|
||||
"referenceProjectResult": null,
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FocusMixin::./index.js::@lion/form-core",
|
||||
"name": "FocusMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormControlMixin::./index.js::@lion/form-core",
|
||||
"name": "FormControlMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "InteractionStateMixin::./index.js::@lion/form-core",
|
||||
"name": "InteractionStateMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegisteringMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegisteringMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOption.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegistrarMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegistrarMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegistrarPortalMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegistrarPortalMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOptions.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ValidateMixin::./index.js::@lion/form-core",
|
||||
"name": "ValidateMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ChoiceGroupMixin::./index.js::@lion/form-core",
|
||||
"name": "ChoiceGroupMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ChoiceInputMixin::./index.js::@lion/form-core",
|
||||
"name": "ChoiceInputMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOption.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
{
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-subclasses",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__-1212823364",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/input",
|
||||
"version": "0.15.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "LionField",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "LionField::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionInput.js",
|
||||
"identifier": "LionInput"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "NativeTextFieldMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "NativeTextFieldMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionInput.js",
|
||||
"identifier": "LionInput"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "Validator",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "Validator::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./test/lion-input.test.js",
|
||||
"identifier": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,219 @@
|
|||
{
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-subclasses",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-222436449",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/listbox",
|
||||
"version": "0.10.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FocusMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FocusMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormControlMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormControlMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "InteractionStateMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "InteractionStateMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegisteringMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegisteringMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOption.js",
|
||||
"identifier": "LionOption"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegistrarMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegistrarMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegistrarPortalMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegistrarPortalMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOptions.js",
|
||||
"identifier": "LionOptions"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ValidateMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ValidateMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ChoiceGroupMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ChoiceGroupMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ChoiceInputMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ChoiceInputMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOption.js",
|
||||
"identifier": "LionOption"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@@ -0,0 +1,8 @@
{
  "@lion/input#0.15.7": [
    "@lion/input#0.15.7"
  ],
  "@lion/listbox#0.10.7": [
    "@lion/listbox#0.10.7"
  ]
}
@@ -0,0 +1,37 @@
export default {
  metaConfig: {
    categoryConfig: [
      {
        // This is the name found in package.json
        project: '@lion/overlays',
        majorVersion: 1,
        // These conditions will be run on every filePath
        categories: {
          overlays: localFilePath => {
            const names = ['dialog', 'tooltip'];
            const fromPackages = names.some(p =>
              localFilePath.startsWith(`./packages/ui/components/${p}`),
            );
            const fromRoot =
              names.some(p => localFilePath.startsWith(`./ui-${p}`)) ||
              localFilePath.startsWith('./overlays.js');
            return fromPackages || fromRoot;
          },
          // etc...
        },
      },
    ],
  },
  // By predefining groups, we can do a query for programs/collections...
  // Select via " providence analyze --search-target-collection 'exampleCollection' "
  searchTargetCollections: {
    '@lion-targets': ['../../packages/ui'],
    // ...
  },
  referenceCollections: {
    // Usually the references are different from the targets.
    // In this demo file, we test @lion usage amongst itself.
    // Select via " providence analyze --reference-collection 'exampleCollection' "
    '@lion-references': ['../../packages/ui'],
  },
};
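
The categoryConfig above assigns file paths to categories through plain predicate functions that are run on every filePath. A minimal sketch (not part of this diff) of exercising such a predicate in isolation follows; the import path and sample file paths are illustrative assumptions.

import providenceConf from './providence.conf.mjs';

const [{ categories }] = providenceConf.metaConfig.categoryConfig;
// The predicate receives a project-local file path and returns a boolean.
console.log(categories.overlays('./packages/ui/components/dialog/src/LionDialog.js')); // true
console.log(categories.overlays('./packages/ui/components/input/src/LionInput.js')); // false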
|
||||
|
|
@ -1,18 +1,41 @@
|
|||
const pathLib = require('path');
|
||||
const { expect } = require('chai');
|
||||
const { providence } = require('../../../../src/program/providence.js');
|
||||
const { QueryService } = require('../../../../src/program/services/QueryService.js');
|
||||
const { ReportService } = require('../../../../src/program/services/ReportService.js');
|
||||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
import pathLib, { dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import fs from 'fs';
|
||||
import { expect } from 'chai';
|
||||
import { it } from 'mocha';
|
||||
import { providence } from '../../../../src/program/providence.js';
|
||||
import { QueryService } from '../../../../src/program/core/QueryService.js';
|
||||
import { ReportService } from '../../../../src/program/core/ReportService.js';
|
||||
import { memoizeConfig } from '../../../../src/program/utils/memoize.js';
|
||||
import { setupAnalyzerTest } from '../../../../test-helpers/setup-analyzer-test.js';
|
||||
import {
|
||||
FindExportsAnalyzer,
|
||||
FindImportsAnalyzer,
|
||||
MatchImportsAnalyzer,
|
||||
} from '../../../../src/program/analyzers/index.js';
|
||||
import MatchSubclassesAnalyzer from '../../../../src/program/analyzers/match-subclasses.js';
|
||||
import MatchPathsAnalyzer from '../../../../src/program/analyzers/match-paths.js';
|
||||
import FindCustomelementsAnalyzer from '../../../../src/program/analyzers/find-customelements.js';
|
||||
import FindClassesAnalyzer from '../../../../src/program/analyzers/find-classes.js';
|
||||
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../../../test-helpers/mock-report-service-helpers.js');
|
||||
/**
|
||||
* @typedef {import('../../../../types/index.js').ProvidenceConfig} ProvidenceConfig
|
||||
* @typedef {import('../../../../types/index.js').QueryResult} QueryResult
|
||||
*/
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
setupAnalyzerTest();
|
||||
|
||||
describe('Analyzers file-system integration', () => {
|
||||
/**
|
||||
* Flag to enable mode that generates e2e mocks.
|
||||
* We 'abuse' this test file for that purpose for ease of maintenance
|
||||
* @type {boolean}
|
||||
*/
|
||||
const generateE2eMode = process.argv.includes('--generate-e2e-mode');
|
||||
|
||||
const queryResults = [];
|
||||
const targetPath = pathLib.resolve(
|
||||
__dirname,
|
||||
'../../../../test-helpers/project-mocks/importing-target-project',
|
||||
|
|
@ -25,9 +48,13 @@ describe('Analyzers file-system integration', () => {
|
|||
const originalGetResultFileNameAndPath = ReportService._getResultFileNameAndPath;
|
||||
const originalOutputPath = ReportService.outputPath;
|
||||
|
||||
const memoizeCacheDisabledInitial = memoizeConfig.isCacheDisabled;
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
|
||||
after(() => {
|
||||
ReportService._getResultFileNameAndPath = originalGetResultFileNameAndPath;
|
||||
ReportService.outputPath = originalOutputPath;
|
||||
memoizeConfig.isCacheDisabled = memoizeCacheDisabledInitial;
|
||||
});
|
||||
|
||||
if (generateE2eMode) {
|
||||
|
|
@ -35,81 +62,84 @@ describe('Analyzers file-system integration', () => {
|
|||
__dirname,
|
||||
'../../../../test-helpers/project-mocks-analyzer-outputs',
|
||||
);
|
||||
// @ts-expect-error
|
||||
// eslint-disable-next-line func-names
|
||||
ReportService._getResultFileNameAndPath = function (name) {
|
||||
return pathLib.join(this.outputPath, `${name}.json`);
|
||||
};
|
||||
} else {
|
||||
ReportService.outputPath = __dirname; // prevents cache to fail the test
|
||||
|
||||
beforeEach(() => {
|
||||
mockWriteToJson(queryResults);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
restoreWriteToJson(queryResults);
|
||||
});
|
||||
}
|
||||
const analyzers = [
|
||||
{
|
||||
analyzerName: 'find-customelements',
|
||||
providenceConfig: {
|
||||
targetProjectPaths: [targetPath],
|
||||
},
|
||||
ctor: FindCustomelementsAnalyzer,
|
||||
},
|
||||
{
|
||||
analyzerName: 'find-imports',
|
||||
providenceConfig: {
|
||||
targetProjectPaths: [targetPath],
|
||||
},
|
||||
ctor: FindImportsAnalyzer,
|
||||
},
|
||||
{
|
||||
analyzerName: 'find-exports',
|
||||
providenceConfig: {
|
||||
targetProjectPaths: [referencePath],
|
||||
},
|
||||
ctor: FindExportsAnalyzer,
|
||||
},
|
||||
{
|
||||
analyzerName: 'find-classes',
|
||||
providenceConfig: {
|
||||
targetProjectPaths: [targetPath],
|
||||
},
|
||||
ctor: FindClassesAnalyzer,
|
||||
},
|
||||
{
|
||||
analyzerName: 'match-imports',
|
||||
providenceConfig: {
|
||||
targetProjectPaths: [targetPath],
|
||||
referenceProjectPaths: [referencePath],
|
||||
},
|
||||
ctor: MatchImportsAnalyzer,
|
||||
},
|
||||
{
|
||||
analyzerName: 'match-subclasses',
|
||||
providenceConfig: {
|
||||
targetProjectPaths: [targetPath],
|
||||
referenceProjectPaths: [referencePath],
|
||||
},
|
||||
ctor: MatchSubclassesAnalyzer,
|
||||
},
|
||||
{
|
||||
analyzerName: 'match-paths',
|
||||
providenceConfig: {
|
||||
targetProjectPaths: [targetPath],
|
||||
referenceProjectPaths: [referencePath],
|
||||
},
|
||||
ctor: MatchPathsAnalyzer,
|
||||
},
|
||||
];
|
||||
|
||||
for (const { analyzerName, providenceConfig } of analyzers) {
|
||||
it(`"${analyzerName}" analyzer`, async () => {
|
||||
const findExportsQueryConfig = QueryService.getQueryConfigFromAnalyzer(analyzerName);
|
||||
await providence(findExportsQueryConfig, providenceConfig);
|
||||
for (const { ctor, providenceConfig } of analyzers) {
|
||||
it(`"${ctor.analyzerName}" analyzer`, async () => {
|
||||
const findExportsQueryConfig = await QueryService.getQueryConfigFromAnalyzer(ctor);
|
||||
const queryResults = await providence(
|
||||
findExportsQueryConfig,
|
||||
/** @type {ProvidenceConfig} */ (providenceConfig),
|
||||
);
|
||||
if (generateE2eMode) {
|
||||
console.info(
|
||||
'Successfully created mocks. Do not forget to rerun tests now without "--generate-e2e-mode"',
|
||||
);
|
||||
return;
|
||||
}
|
||||
// eslint-disable-next-line import/no-dynamic-require, global-require
|
||||
const expectedOutput = require(`../../../../test-helpers/project-mocks-analyzer-outputs/${analyzerName}.json`);
|
||||
const expectedOutput = JSON.parse(
|
||||
fs.readFileSync(
|
||||
pathLib.resolve(
|
||||
__dirname,
|
||||
`../../../../test-helpers/project-mocks-analyzer-outputs/${ctor.analyzerName}.json`,
|
||||
),
|
||||
'utf8',
|
||||
),
|
||||
);
|
||||
const { queryOutput } = JSON.parse(JSON.stringify(queryResults[0]));
|
||||
expect(queryOutput).not.to.eql([]);
|
||||
expect(queryOutput).to.eql(expectedOutput.queryOutput);
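
For reference, a minimal sketch (not part of this diff) of running a single analyzer programmatically with the same APIs the updated test imports; the import paths and project paths are placeholders.

import { providence } from './src/program/providence.js';
import { QueryService } from './src/program/core/QueryService.js';
import { MatchImportsAnalyzer } from './src/program/analyzers/index.js';

const queryConfig = await QueryService.getQueryConfigFromAnalyzer(MatchImportsAnalyzer);
const queryResults = await providence(queryConfig, {
  targetProjectPaths: ['/path/to/target-project'],
  referenceProjectPaths: ['/path/to/reference-project'],
});
// Each query result exposes its analyzer output under queryOutput, as asserted above.
console.log(queryResults[0].queryOutput);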