fix(providence): type fixes, windows fixes
parent c1d66f1e04
commit c745761ce7
94 changed files with 3103 additions and 1233 deletions
@@ -1,3 +1,3 @@
providence-output
/providence-output
providence-input-data
/.nyc_output
@@ -278,7 +278,7 @@ class PBoard extends DecorateMixin(LitElement) {
if (!this.__menuData) {
return;
}
await this.__fetchResults();
// await this.__fetchResults();

const elements = Array.from(this._selectionMenuFormNode.elements);
const repos = elements.filter(n => n.name === 'repos');
@@ -303,7 +303,8 @@ class PBoard extends DecorateMixin(LitElement) {
this.__providenceConf,
);
dataResult[i].type = specifierRes.exportSpecifier.name === '[file]' ? 'file' : 'specifier';
dataResult[i].count = specifierRes.matchesPerProject
// dedupe, because outputs genarted with older versions might have dedupe problems
dataResult[i].count = Array.from(new Set(specifierRes.matchesPerProject))
.map(mpp => mpp.files)
.flat(Infinity).length;
dataResult[i].matchedProjects = specifierRes.matchesPerProject;
@@ -435,7 +436,7 @@ class PBoard extends DecorateMixin(LitElement) {

async __fetchMenuData() {
// Derived from providence.conf.js, generated in server.mjs
this.__initialMenuData = await fetch('/menu-data').then(response => response.json());
this.__initialMenuData = await fetch('/menu-data.json').then(response => response.json());
}

async __fetchProvidenceConf() {
@@ -446,7 +447,7 @@ class PBoard extends DecorateMixin(LitElement) {
}

async __fetchResults() {
this.__resultFiles = await fetch('/results').then(response => response.json());
this.__resultFiles = await fetch('/results.json').then(response => response.json());
}
}
customElements.define('p-board', PBoard);
packages-node/providence-analytics/dashboard/index.html (new file, 16 additions)
@@ -0,0 +1,16 @@
<!DOCTYPE html>
<html>
<head>
<title>Providence dashboard</title>
<style>
body {
margin: 8px 32px;
}
</style>
<script type="module" src="./app/p-board.js"></script>
</head>

<body>
<p-board></p-board>
</body>
</html>
@@ -1,9 +1,9 @@
import fs from 'fs';
import pathLib, { dirname } from 'path';
import { fileURLToPath } from 'url';
import { createConfig, startServer } from 'es-dev-server';
import { ReportService } from '../../src/program/services/ReportService.js';
import { getProvidenceConf } from '../../src/program/utils/get-providence-conf.mjs';
import { startDevServer } from '@web/dev-server';
import { ReportService } from '../src/program/core/ReportService.js';
import { providenceConfUtil } from '../src/program/utils/providence-conf-util.mjs';

const __dirname = dirname(fileURLToPath(import.meta.url));

@@ -66,7 +66,7 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps

/**
* @param {object[]} collections
* @returns {{[keu as string]: }}
* @returns {{[key as string]: }}
*/
function transformToProjectNames(collections) {
const res = {};
@@ -74,7 +74,7 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
Object.entries(collections).map(([key, val]) => {
res[key] = val.map(c => {
const pkg = getPackageJson(c);
return pkg && pkg.name;
return pkg?.name;
});
});
return res;
@@ -94,9 +94,10 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
ctx.url = `${pathFromServerRootToHere}/index.html`;
return next();
}
if (ctx.url === '/results') {
if (ctx.url === '/results.json') {
ctx.type = 'application/json';
ctx.body = resultFiles;
} else if (ctx.url === '/menu-data') {
} else if (ctx.url === '/menu-data.json') {
// Gathers all data that are relevant to create a configuration menu
// at the top of the dashboard:
// - referenceCollections as defined in providence.conf.js
@@ -112,16 +113,18 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
}

const menuData = {
// N.B. theoratically there can be a mismatch between basename and pkgJson.name,
// N.B. theoretically there can be a mismatch between basename and pkgJson.name,
// but we assume folder names and pkgJson.names to be similar
searchTargetCollections,
referenceCollections: transformToProjectNames(providenceConf.referenceCollections),
searchTargetDeps,
};

ctx.type = 'application/json';
ctx.body = menuData;
} else if (ctx.url === '/providence-conf.js') {
// Alloes frontend dasbboard app to find categoriesand other configs
ctx.type = 'text/javascript';
// Allows frontend dasbboard app to find categories and other configs
ctx.type = 'application/javascript';
ctx.body = providenceConfRaw;
} else {
await next();
@@ -130,8 +133,8 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
];
}

(async function main() {
const { providenceConf, providenceConfRaw } = await getProvidenceConf();
export async function createDashboardServerConfig() {
const { providenceConf, providenceConfRaw } = (await providenceConfUtil.getConf()) || {};
const { searchTargetDeps, resultFiles } = await getCachedProvidenceResults();

// Needed for dev purposes (we call it from ./packages-node/providence-analytics/ instead of ./)
@@ -139,21 +142,37 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
const fromPackageRoot = process.argv.includes('--serve-from-package-root');
const moduleRoot = fromPackageRoot ? pathLib.resolve(process.cwd(), '../../') : process.cwd();

const config = createConfig({
port: 8080,
return {
appIndex: pathLib.resolve(__dirname, 'index.html'),
rootDir: moduleRoot,
nodeResolve: true,
moduleDirs: pathLib.resolve(moduleRoot, 'node_modules'),
watch: false,
open: true,
middlewares: createMiddleWares({
middleware: createMiddleWares({
providenceConf,
providenceConfRaw,
searchTargetDeps,
resultFiles,
}),
});
};
}

await startServer(config);
let resolveLoaded;
export const serverInstanceLoaded = new Promise(resolve => {
resolveLoaded = resolve;
});

// Export interface as object, so we can mock it easily inside tests
export const dashboardServer = {
start: async () => {
await startDevServer({ config: await createDashboardServerConfig() });
resolveLoaded();
},
};

(async () => {
if (process.argv.includes('--run-server')) {
dashboardServer.start();
}
})();
@@ -1,18 +0,0 @@
<!DOCTYPE html>
<html>

<head>
<title>providence-board</title>
<style>
body {
margin: 8px 32px;
}
</style>
<script type="module" src="./app/p-board.js"></script>
</head>

<body>
<p-board></p-board>
</body>

</html>
@@ -1,8 +0,0 @@
const { LogService } = require('../../src/program/services/LogService.js');

LogService.warn(
'Running via "dashboard/src/server.js" is deprecated. Please run "providence dashboard" instead.',
);

// @ts-ignore
import('./server.mjs');
@@ -26,14 +26,15 @@
"src"
],
"scripts": {
"dashboard": "node ./dashboard/src/server.js --serve-from-package-root",
"match-lion-imports": "npm run providence analyze match-imports --search-target-collection @lion-targets --reference-collection @lion-references",
"dashboard": "node ./dashboard/server.mjs --run-server --serve-from-package-root",
"postinstall": "npx patch-package",
"match-lion-imports": "npm run providence -- analyze match-subclasses --search-target-collection @lion-targets --reference-collection @lion-references --measure-perf --add-system-paths",
"providence": "node --max-old-space-size=8192 ./src/cli/index.mjs",
"publish-docs": "node ../../packages-node/publish-docs/src/cli.js --github-url https://github.com/ing-bank/lion/ --git-root-dir ../../",
"prepublishOnly": "npm run publish-docs",
"test:node": "mocha './test-node/**/*.test.js'",
"test:node:e2e": "mocha './test-node/program/**/*.e2e.js' --timeout 60000",
"test:node:watch": "npm run test:node --watch"
"test:node": "npm run test:node:unit && npm run test:node:e2e",
"test:node:e2e": "mocha './test-node/**/*.e2e.{j,mj}s' --timeout 60000",
"test:node:unit": "mocha './test-node/**/*.test.{j,mj}s'"
},
"dependencies": {
"@babel/core": "^7.10.1",
@@ -44,26 +45,25 @@
"@babel/register": "^7.5.5",
"@babel/traverse": "^7.23.2",
"@babel/types": "^7.9.0",
"@rollup/plugin-node-resolve": "^13.0.6",
"@typescript-eslint/typescript-estree": "^3.0.0",
"@rollup/plugin-node-resolve": "^15.0.0",
"@web/dev-server": "^0.1.28",
"anymatch": "^3.1.1",
"chalk": "^4.1.0",
"commander": "^2.20.0",
"deepmerge": "^4.0.0",
"es-dev-server": "^1.57.1",
"es-module-lexer": "^0.3.6",
"glob": "^7.1.6",
"htm": "^3.0.3",
"inquirer": "^7.0.0",
"is-negated-glob": "^1.0.0",
"lit-element": "~2.4.0",
"mock-require": "^3.0.3",
"ora": "^3.4.0",
"parse5": "^5.1.1",
"read-package-tree": "5.3.1",
"semver": "^7.5.2",
"typescript": "~4.8.4"
},
"devDependencies": {
"@web/dev-server-core": "^0.3.19",
"mock-require": "^3.0.3"
},
"keywords": [
"analysis",
"impact",
@@ -77,5 +77,8 @@
],
"publishConfig": {
"access": "public"
},
"imports": {
"#types": "./src/program/types"
}
}
@@ -0,0 +1,11 @@
diff --git a/node_modules/@web/dev-server-core/test-helpers.mjs b/node_modules/@web/dev-server-core/test-helpers.mjs
index 1a4d604..9c0d714 100644
--- a/node_modules/@web/dev-server-core/test-helpers.mjs
+++ b/node_modules/@web/dev-server-core/test-helpers.mjs
@@ -1,5 +1,5 @@
// this file is autogenerated with the generate-mjs-dts-entrypoints script
-import cjsEntrypoint from './dist/index.js';
+import cjsEntrypoint from './dist/test-helpers.js';

const {
virtualFilesPlugin,
@@ -1,37 +1,4 @@
import pathLib, { dirname } from 'path';
import fs from 'fs';
import { fileURLToPath } from 'url';

const __dirname = dirname(fileURLToPath(import.meta.url));

// This file is read by dashboard and cli and needs to be present under process.cwd()
// It mainly serves as an example and it allows to run the dashboard locally
// from within this repo.

/**
* @returns {string[]}
*/
function getAllLionScopedPackagePaths() {
const rootPath = pathLib.resolve(__dirname, '../../packages');
const filesAndDirs = fs.readdirSync(rootPath);
const packages = filesAndDirs.filter(f => {
const filePath = pathLib.join(rootPath, f);
if (fs.lstatSync(filePath).isDirectory()) {
let pkgJson;
try {
pkgJson = JSON.parse(fs.readFileSync(pathLib.resolve(filePath, './package.json')));
// eslint-disable-next-line no-empty
} catch (_) {
return false;
}
return pkgJson.name && pkgJson.name.startsWith('@lion/');
}
return false;
});
return packages.map(p => pathLib.join(rootPath, p));
}

const lionScopedPackagePaths = getAllLionScopedPackagePaths();
const lionScopedPackagePaths = ['../../packages/ui'];

export default {
metaConfig: {
@@ -44,7 +11,9 @@ export default {
categories: {
overlays: localFilePath => {
const names = ['dialog', 'tooltip'];
const fromPackages = names.some(p => localFilePath.startsWith(`./packages/${p}`));
const fromPackages = names.some(p =>
localFilePath.startsWith(`./packages/ui/components/${p}`),
);
const fromRoot =
names.some(p => localFilePath.startsWith(`./ui-${p}`)) ||
localFilePath.startsWith('./overlays.js');
@@ -65,6 +34,6 @@ export default {
// Usually the references are different from the targets.
// In this demo file, we test @lion usage amongst itself
// Select via " providence analyze --reference-collection 'exampleCollection' "
'@lion-references': lionScopedPackagePaths,
'@lion-references': ['../../packages/ui/'],
},
};
@@ -3,19 +3,29 @@ const pathLib = require('path');
const child_process = require('child_process'); // eslint-disable-line camelcase
const glob = require('glob');
const readPackageTree = require('../program/utils/read-package-tree-with-bower-support.js');
const { InputDataService } = require('../program/services/InputDataService.js');
const { LogService } = require('../program/services/LogService.js');
const { aForEach } = require('../program/utils/async-array-utils.js');
const { LogService } = require('../program/core/LogService.js');
const { toPosixPath } = require('../program/utils/to-posix-path.js');

/**
* @param {any[]} arr
* @returns {any[]}
*/
function flatten(arr) {
return Array.prototype.concat.apply([], arr);
}

/**
* @param {string} v
* @returns {string[]}
*/
function csToArray(v) {
return v.split(',').map(v => v.trim());
}

/**
* @param {string} v like 'js,html'
* @returns {string[]} like ['.js', '.html']
*/
function extensionsFromCs(v) {
return csToArray(v).map(v => `.${v}`);
}
@@ -25,13 +35,13 @@ function setQueryMethod(m) {
if (allowedMehods.includes(m)) {
return m;
}
// eslint-disable-next-line no-console
LogService.error(`Please provide one of the following methods: ${allowedMehods.join(', ')}`);
return undefined;
}

/**
* @returns {string[]}
* @param {string} t
* @returns {string[]|undefined}
*/
function pathsArrayFromCs(t, cwd = process.cwd()) {
if (!t) {
@@ -57,27 +67,40 @@ function pathsArrayFromCs(t, cwd = process.cwd()) {

/**
* @param {string} name collection name found in eCfg
* @param {'search-target'|'reference'} [colType='search-targets'] collection type
* @param {object} eCfg external configuration. Usually providence.conf.js
* @returns {string[]}
* @param {'search-target'|'reference'} collectionType collection type
* @param {{searchTargetCollections: {[repo:string]:string[]}; referenceCollections:{[repo:string]:string[]}}} [eCfg] external configuration. Usually providence.conf.js
* @param {string} [cwd]
* @returns {string[]|undefined}
*/
function pathsArrayFromCollectionName(name, colType = 'search-target', eCfg, cwd) {
function pathsArrayFromCollectionName(
name,
collectionType = 'search-target',
eCfg,
cwd = process.cwd(),
) {
let collection;
if (colType === 'search-target') {
collection = eCfg.searchTargetCollections;
} else if (colType === 'reference') {
collection = eCfg.referenceCollections;
if (collectionType === 'search-target') {
collection = eCfg?.searchTargetCollections;
} else if (collectionType === 'reference') {
collection = eCfg?.referenceCollections;
}
if (collection && collection[name]) {
if (collection?.[name]) {
return pathsArrayFromCs(collection[name].join(','), cwd);
}
return undefined;
}

/**
* @param {string} processArgStr
* @param {object} [opts]
* @returns {Promise<{ code:string; number:string }>}
* @throws {Error}
*/
function spawnProcess(processArgStr, opts) {
const processArgs = processArgStr.split(' ');
// eslint-disable-next-line camelcase
const proc = child_process.spawn(processArgs[0], processArgs.slice(1), opts);
/** @type {string} */
let output;
proc.stdout.on('data', data => {
output += data;
@@ -95,28 +118,26 @@ function spawnProcess(processArgStr, opts) {
}

/**
* When providence is called from the root of a repo and no target is provided,
* this will provide the default fallback (the project itself)
* @param {string} cwd
* @returns {string[]}
*/
function targetDefault() {
// eslint-disable-next-line import/no-dynamic-require, global-require
const { name } = require(`${process.cwd()}/package.json`);
if (name === 'providence') {
return InputDataService.targetProjectPaths;
}
return [toPosixPath(process.cwd())];
function targetDefault(cwd) {
return [toPosixPath(cwd)];
}

/**
* @desc Returns all sub projects matching condition supplied in matchFn
* @param {string[]} searchTargetPaths all search-target project paths
* @param {string} matchPattern base for RegExp
* @param {string[]} modes
* Returns all sub projects matching condition supplied in matchFn
* @param {string[]} rootPaths all search-target project paths
* @param {string} [matchPattern] base for RegExp
* @param {('npm'|'bower')[]} [modes]
*/
async function appendProjectDependencyPaths(rootPaths, matchPattern, modes = ['npm', 'bower']) {
let matchFn;
if (matchPattern) {
if (matchPattern.startsWith('/') && matchPattern.endsWith('/')) {
matchFn = (_, d) => {
matchFn = (/** @type {any} */ _, /** @type {string} */ d) => {
const reString = matchPattern.slice(1, -1);
const result = new RegExp(reString).test(d);
LogService.debug(`[appendProjectDependencyPaths]: /${reString}/.test(${d} => ${result})`);
@@ -128,13 +149,14 @@ async function appendProjectDependencyPaths(rootPaths, matchPattern, modes = ['n
);
}
}
/** @type {string[]} */
const depProjectPaths = [];
await aForEach(rootPaths, async targetPath => {
await aForEach(modes, async mode => {
for (const targetPath of rootPaths) {
for (const mode of modes) {
await readPackageTree(
targetPath,
matchFn,
(err, tree) => {
(/** @type {string | undefined} */ err, /** @type {{ children: any[]; }} */ tree) => {
if (err) {
throw new Error(err);
}
@@ -143,8 +165,8 @@ async function appendProjectDependencyPaths(rootPaths, matchPattern, modes = ['n
},
mode,
);
});
});
}
}
// Write all data to {outputPath}/projectDeps.json
// const projectDeps = {};
// rootPaths.forEach(rootP => {
@@ -154,25 +176,27 @@ async function appendProjectDependencyPaths(rootPaths, matchPattern, modes = ['n
return depProjectPaths.concat(rootPaths).map(toPosixPath);
}

/**
* Will install all npm and bower deps, so an analysis can be performed on them as well.
* Relevant when '--target-dependencies' is supplied.
* @param {string[]} searchTargetPaths
*/
async function installDeps(searchTargetPaths) {
return aForEach(searchTargetPaths, async t => {
const spawnConfig = { cwd: t };
const extraOptions = { log: true };

LogService.info(`Installing npm dependencies for ${pathLib.basename(t)}`);
for (const targetPath of searchTargetPaths) {
LogService.info(`Installing npm dependencies for ${pathLib.basename(targetPath)}`);
try {
await spawnProcess('npm i --no-progress', spawnConfig, extraOptions);
await spawnProcess('npm i --no-progress', { cwd: targetPath });
} catch (e) {
LogService.error(e);
}

LogService.info(`Installing bower dependencies for ${pathLib.basename(t)}`);
LogService.info(`Installing bower dependencies for ${pathLib.basename(targetPath)}`);
try {
await spawnProcess(`bower i --production --force-latest`, spawnConfig, extraOptions);
await spawnProcess(`bower i --production --force-latest`, { cwd: targetPath });
} catch (e) {
LogService.error(e);
}
});
}
}

module.exports = {
@@ -1,21 +1,30 @@
const child_process = require('child_process'); // eslint-disable-line camelcase
const pathLib = require('path');
const commander = require('commander');
const providenceModule = require('../program/providence.js');
const { LogService } = require('../program/services/LogService.js');
const { QueryService } = require('../program/services/QueryService.js');
const { InputDataService } = require('../program/services/InputDataService.js');
const promptModule = require('./prompt-analyzer-menu.js');
const cliHelpers = require('./cli-helpers.js');
const extendDocsModule = require('./launch-providence-with-extend-docs.js');
const { toPosixPath } = require('../program/utils/to-posix-path.js');
import child_process from 'child_process'; // eslint-disable-line camelcase
import pathLib from 'path';
import fs from 'fs';
import commander from 'commander';
import providenceModule from '../program/providence.js';
import { LogService } from '../program/core/LogService.js';
import { QueryService } from '../program/core/QueryService.js';
import { InputDataService } from '../program/core/InputDataService.js';
import promptModule from './prompt-analyzer-menu.js';
import cliHelpers from './cli-helpers.js';
import extendDocsModule from './launch-providence-with-extend-docs.js';
import { toPosixPath } from '../program/utils/to-posix-path.js';
import { getCurrentDir } from '../program/utils/get-current-dir.mjs';
import { dashboardServer } from '../../dashboard/server.mjs';

const { extensionsFromCs, setQueryMethod, targetDefault, installDeps, spawnProcess } = cliHelpers;
const { version } = JSON.parse(
fs.readFileSync(pathLib.resolve(getCurrentDir(import.meta.url), '../../package.json'), 'utf8'),
);
const { extensionsFromCs, setQueryMethod, targetDefault, installDeps } = cliHelpers;

const { version } = require('../../package.json');

async function cli({ cwd, providenceConf } = {}) {
/**
* @param {{cwd?:string; argv: string[]; providenceConf?: object}} cfg
*/
export async function cli({ cwd = process.cwd(), providenceConf, argv = process.argv }) {
/** @type {(value: any) => void} */
let resolveCli;
/** @type {(reason?: any) => void} */
let rejectCli;

const cliPromise = new Promise((resolve, reject) => {
@@ -35,7 +44,7 @@ async function cli({ cwd, providenceConf } = {}) {
// TODO: change back to "InputDataService.getExternalConfig();" once full package ESM
const externalConfig = providenceConf;

async function getQueryInputData(
async function getQueryConfigAndMeta(
/* eslint-disable no-shadow */
searchMode,
regexSearchOptions,
@@ -80,7 +89,7 @@ async function cli({ cwd, providenceConf } = {}) {
}

async function launchProvidence() {
const { queryConfig, queryMethod } = await getQueryInputData(
const { queryConfig, queryMethod } = await getQueryConfigAndMeta(
searchMode,
regexSearchOptions,
featureOptions,
@@ -130,6 +139,8 @@ async function cli({ cwd, providenceConf } = {}) {
targetProjectRootPaths: searchTargetPaths,
writeLogFile: commander.writeLogFile,
skipCheckMatchCompatibility: commander.skipCheckMatchCompatibility,
measurePerformance: commander.measurePerf,
addSystemPathsInResult: commander.addSystemPaths,
});
}

@@ -153,15 +164,6 @@ async function cli({ cwd, providenceConf } = {}) {
}
}

async function runDashboard() {
const pathFromServerRootToDashboard = `${pathLib.relative(
process.cwd(),
pathLib.resolve(__dirname, '../../dashboard'),
)}`;

spawnProcess(`node ${pathFromServerRootToDashboard}/src/server.mjs`);
}

commander
.version(version, '-v, --version')
.option('-e, --extensions [extensions]', 'extensions like "js,html"', extensionsFromCs, [
@@ -174,7 +176,7 @@ async function cli({ cwd, providenceConf } = {}) {
`path(s) to project(s) on which analysis/querying should take place. Requires
a list of comma seperated values relative to project root`,
v => cliHelpers.pathsArrayFromCs(v, cwd),
targetDefault(),
targetDefault(cwd),
)
.option(
'-r, --reference-paths [references]',
@@ -185,12 +187,12 @@ async function cli({ cwd, providenceConf } = {}) {
InputDataService.referenceProjectPaths,
)
.option('-a, --allowlist [allowlist]', `allowlisted paths, like 'src/**/*, packages/**/*'`, v =>
cliHelpers.csToArray(v, cwd),
cliHelpers.csToArray(v),
)
.option(
'--allowlist-reference [allowlist-reference]',
`allowed paths for reference, like 'src/**/*, packages/**/*'`,
v => cliHelpers.csToArray(v, cwd),
v => cliHelpers.csToArray(v),
)
.option(
'--search-target-collection [collection-name]',
@@ -232,7 +234,9 @@ async function cli({ cwd, providenceConf } = {}) {
.option(
'--skip-check-match-compatibility',
`skips semver checks, handy for forward compatible libs or libs below v1`,
);
)
.option('--measure-perf', 'Logs the completion time in seconds')
.option('--add-system-paths', 'Adds system paths to results');

commander
.command('search <regex>')
@@ -346,12 +350,10 @@ async function cli({ cwd, providenceConf } = {}) {
via providence.conf`,
)
.action(() => {
runDashboard();
dashboardServer.start();
});

commander.parse(process.argv);
commander.parse(argv);

await cliPromise;
}

module.exports = { cli };
@@ -1,9 +1,9 @@
#!/usr/bin/env node
import { cli } from './cli.js';
import { getProvidenceConf } from '../program/utils/get-providence-conf.mjs';
import { cli } from './cli.mjs';
import { providenceConfUtil } from '../program/utils/providence-conf-util.mjs';

(async () => {
// We need to provide config to cli, until whole package is rewritten as ESM.
const { providenceConf } = (await getProvidenceConf()) || {};
const { providenceConf } = (await providenceConfUtil.getConf()) || {};
cli({ providenceConf });
})();
||||
|
|
|
|||
|
|
@ -3,11 +3,27 @@ const fs = require('fs');
|
|||
const pathLib = require('path');
|
||||
const { performance } = require('perf_hooks');
|
||||
const providenceModule = require('../program/providence.js');
|
||||
const { QueryService } = require('../program/services/QueryService.js');
|
||||
const { InputDataService } = require('../program/services/InputDataService.js');
|
||||
const { LogService } = require('../program/services/LogService.js');
|
||||
const { QueryService } = require('../program/core/QueryService.js');
|
||||
const { InputDataService } = require('../program/core/InputDataService.js');
|
||||
const { LogService } = require('../program/core/LogService.js');
|
||||
const { flatten } = require('./cli-helpers.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../program/types').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../program/types').GatherFilesConfig} GatherFilesConfig
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {{
|
||||
* referenceProjectPaths: PathFromSystemRoot[];
|
||||
* prefixCfg:{from:string;to:string};
|
||||
* extensions:GatherFilesConfig['extensions'];
|
||||
* allowlist?:string[];
|
||||
* allowlistReference?:string[];
|
||||
* cwd:PathFromSystemRoot
|
||||
* }} opts
|
||||
* @returns
|
||||
*/
|
||||
async function getExtendDocsResults({
|
||||
referenceProjectPaths,
|
||||
prefixCfg,
|
||||
|
|
@ -22,7 +38,7 @@ async function getExtendDocsResults({
|
|||
QueryService.getQueryConfigFromAnalyzer('match-paths', { prefix: prefixCfg }),
|
||||
{
|
||||
gatherFilesConfig: {
|
||||
extensions: extensions || ['.js'],
|
||||
extensions: extensions || /** @type {GatherFilesConfig['extensions']} */ (['.js']),
|
||||
allowlist: allowlist || ['!coverage', '!test'],
|
||||
},
|
||||
gatherFilesConfigReference: {
|
||||
|
|
@ -31,7 +47,7 @@ async function getExtendDocsResults({
|
|||
},
|
||||
queryMethod: 'ast',
|
||||
report: false,
|
||||
targetProjectPaths: [pathLib.resolve(cwd)],
|
||||
targetProjectPaths: [cwd],
|
||||
referenceProjectPaths,
|
||||
// For mono repos, a match between root package.json and ref project will not exist.
|
||||
// Disable this check, so it won't be a blocker for extendin docs
|
||||
|
|
@ -45,7 +61,7 @@ async function getExtendDocsResults({
|
|||
|
||||
/**
|
||||
* @param {string} pathStr ./packages/lea-tabs/lea-tabs.js
|
||||
* @param {string[]} pkgs ['packages/lea-tabs', ...]
|
||||
* @param {{path:string;name:string}[]} pkgs ['packages/lea-tabs', ...]
|
||||
*/
|
||||
function replaceToMonoRepoPath(pathStr, pkgs) {
|
||||
let result = pathStr;
|
||||
|
|
|
|||
|
|
@ -2,9 +2,9 @@ const fs = require('fs');
|
|||
const pathLib = require('path');
|
||||
const inquirer = require('inquirer');
|
||||
const { default: traverse } = require('@babel/traverse');
|
||||
const { InputDataService } = require('../program/services/InputDataService.js');
|
||||
const { AstService } = require('../program/services/AstService.js');
|
||||
const { LogService } = require('../program/services/LogService.js');
|
||||
const { InputDataService } = require('../program/core/InputDataService.js');
|
||||
const { AstService } = require('../program/core/AstService.js');
|
||||
const { LogService } = require('../program/core/LogService.js');
|
||||
const JsdocCommentParser = require('../program/utils/jsdoc-comment-parser.js');
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
const { providence } = require('./program/providence.js');
|
||||
const { QueryService } = require('./program/services/QueryService.js');
|
||||
const { LogService } = require('./program/services/LogService.js');
|
||||
const { InputDataService } = require('./program/services/InputDataService.js');
|
||||
const { AstService } = require('./program/services/AstService.js');
|
||||
const { QueryService } = require('./program/core/QueryService.js');
|
||||
const { LogService } = require('./program/core/LogService.js');
|
||||
const { InputDataService } = require('./program/core/InputDataService.js');
|
||||
const { AstService } = require('./program/core/AstService.js');
|
||||
|
||||
module.exports = { providence, QueryService, LogService, InputDataService, AstService };
|
||||
|
|
|
|||
|
|
@ -2,18 +2,21 @@
|
|||
const pathLib = require('path');
|
||||
const t = require('@babel/types');
|
||||
const { default: traverse } = require('@babel/traverse');
|
||||
const { Analyzer } = require('./helpers/Analyzer.js');
|
||||
const { Analyzer } = require('../core/Analyzer.js');
|
||||
const { trackDownIdentifierFromScope } = require('./helpers/track-down-identifier.js');
|
||||
const { aForEach } = require('../utils/async-array-utils.js');
|
||||
|
||||
/** @typedef {import('../types/analyzers').FindClassesAnalyzerOutput} FindClassesAnalyzerOutput */
|
||||
/** @typedef {import('../types/analyzers').FindClassesAnalyzerOutputEntry} FindClassesAnalyzerOutputEntry */
|
||||
/** @typedef {import('../types/analyzers').FindClassesConfig} FindClassesConfig */
|
||||
/**
|
||||
* @typedef {import('@babel/types').File} File
|
||||
* @typedef {import('@babel/types').ClassMethod} ClassMethod
|
||||
* @typedef {import('../types/analyzers').FindClassesAnalyzerOutput} FindClassesAnalyzerOutput
|
||||
* @typedef {import('../types/analyzers').FindClassesAnalyzerOutputEntry} FindClassesAnalyzerOutputEntry
|
||||
* @typedef {import('../types/analyzers').FindClassesConfig} FindClassesConfig
|
||||
*/
|
||||
|
||||
/**
|
||||
* Finds import specifiers and sources
|
||||
* @param {BabelAst} ast
|
||||
* @param {string} relativePath the file being currently processed
|
||||
* @param {File} ast
|
||||
* @param {string} fullCurrentFilePath the file being currently processed
|
||||
*/
|
||||
async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
|
||||
// The transformed entry
|
||||
|
|
@ -34,6 +37,10 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
|
|||
return 'public';
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {{node:ClassMethod}} cfg
|
||||
* @returns
|
||||
*/
|
||||
function isStaticProperties({ node }) {
|
||||
return node.static && node.kind === 'get' && node.key.name === 'properties';
|
||||
}
|
||||
|
|
@ -73,7 +80,12 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
|
|||
// return false;
|
||||
// }
|
||||
|
||||
async function traverseClass(path, { isMixin } = {}) {
|
||||
/**
|
||||
*
|
||||
* @param {*} path
|
||||
* @param {{isMixin?:boolean}} param1
|
||||
*/
|
||||
async function traverseClass(path, { isMixin = false } = {}) {
|
||||
const classRes = {};
|
||||
classRes.name = path.node.id && path.node.id.name;
|
||||
classRes.isMixin = Boolean(isMixin);
|
||||
|
|
@ -95,7 +107,8 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
|
|||
// or an external path like '@lion/overlays'. In the latter case,
|
||||
// tracking down will halt and should be done when there is access to
|
||||
// the external repo... (similar to how 'match-imports' analyzer works)
|
||||
await aForEach(superClasses, async classObj => {
|
||||
|
||||
for (const classObj of superClasses) {
|
||||
// Finds the file that holds the declaration of the import
|
||||
classObj.rootFile = await trackDownIdentifierFromScope(
|
||||
path,
|
||||
|
|
@ -103,13 +116,17 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
|
|||
fullCurrentFilePath,
|
||||
projectPath,
|
||||
);
|
||||
});
|
||||
}
|
||||
classRes.superClasses = superClasses;
|
||||
}
|
||||
|
||||
classRes.members = {};
|
||||
classRes.members.props = []; // meta: private, public, getter/setter, (found in static get properties)
|
||||
classRes.members.methods = []; // meta: private, public, getter/setter
|
||||
classRes.members = {
|
||||
// meta: private, public, getter/setter, (found in static get properties)
|
||||
props: [],
|
||||
// meta: private, public, getter/setter
|
||||
methods: [],
|
||||
};
|
||||
|
||||
path.traverse({
|
||||
ClassMethod(path) {
|
||||
// if (isBlacklisted(path)) {
|
||||
|
|
@ -174,9 +191,9 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
|
|||
},
|
||||
});
|
||||
|
||||
await aForEach(classesToTraverse, async klass => {
|
||||
for (const klass of classesToTraverse) {
|
||||
await traverseClass(klass.path, { isMixin: klass.isMixin });
|
||||
});
|
||||
}
|
||||
|
||||
return classesFound;
|
||||
}
|
||||
|
|
@ -202,9 +219,8 @@ async function findMembersPerAstEntry(ast, fullCurrentFilePath, projectPath) {
|
|||
// }
|
||||
|
||||
class FindClassesAnalyzer extends Analyzer {
|
||||
constructor() {
|
||||
super();
|
||||
this.name = 'find-classes';
|
||||
static get analyzerName() {
|
||||
return 'find-classes';
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -1,9 +1,8 @@
|
|||
const pathLib = require('path');
|
||||
const t = require('@babel/types');
|
||||
const { default: traverse } = require('@babel/traverse');
|
||||
const { Analyzer } = require('./helpers/Analyzer.js');
|
||||
const { Analyzer } = require('../core/Analyzer.js');
|
||||
const { trackDownIdentifierFromScope } = require('./helpers/track-down-identifier.js');
|
||||
const { aForEach } = require('../utils/async-array-utils.js');
|
||||
|
||||
function cleanup(transformedEntry) {
|
||||
transformedEntry.forEach(definitionObj => {
|
||||
|
|
@ -18,7 +17,7 @@ function cleanup(transformedEntry) {
|
|||
async function trackdownRoot(transformedEntry, relativePath, projectPath) {
|
||||
const fullCurrentFilePath = pathLib.resolve(projectPath, relativePath);
|
||||
|
||||
await aForEach(transformedEntry, async definitionObj => {
|
||||
for (const definitionObj of transformedEntry) {
|
||||
const rootFile = await trackDownIdentifierFromScope(
|
||||
definitionObj.__tmp.path,
|
||||
definitionObj.constructorIdentifier,
|
||||
|
|
@ -27,7 +26,7 @@ async function trackdownRoot(transformedEntry, relativePath, projectPath) {
|
|||
);
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
definitionObj.rootFile = rootFile;
|
||||
});
|
||||
}
|
||||
return transformedEntry;
|
||||
}
|
||||
|
||||
|
|
@ -85,13 +84,12 @@ function findCustomElementsPerAstEntry(ast) {
|
|||
}
|
||||
|
||||
class FindCustomelementsAnalyzer extends Analyzer {
|
||||
constructor() {
|
||||
super();
|
||||
this.name = 'find-customelements';
|
||||
static get analyzerName() {
|
||||
return 'find-customelements';
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc Finds export specifiers and sources
|
||||
* Finds export specifiers and sources
|
||||
* @param {FindCustomelementsConfig} customConfig
|
||||
*/
|
||||
async execute(customConfig = {}) {
|
||||
|
|
|
|||
|
|
@ -1,12 +1,12 @@
|
|||
/* eslint-disable no-shadow, no-param-reassign */
|
||||
const pathLib = require('path');
|
||||
const { default: traverse } = require('@babel/traverse');
|
||||
const { Analyzer } = require('./helpers/Analyzer.js');
|
||||
const { Analyzer } = require('../core/Analyzer.js');
|
||||
const { trackDownIdentifier } = require('./helpers/track-down-identifier.js');
|
||||
const { normalizeSourcePaths } = require('./helpers/normalize-source-paths.js');
|
||||
const { getReferencedDeclaration } = require('../utils/get-source-code-fragment-of-declaration.js');
|
||||
|
||||
const { LogService } = require('../services/LogService.js');
|
||||
const { LogService } = require('../core/LogService.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('./helpers/track-down-identifier.js').RootFile} RootFile
|
||||
|
|
@ -141,8 +141,8 @@ const isImportingSpecifier = pathOrNode =>
|
|||
pathOrNode.type === 'ImportDefaultSpecifier' || pathOrNode.type === 'ImportSpecifier';
|
||||
|
||||
/**
|
||||
* @desc Finds import specifiers and sources for a given ast result
|
||||
* @param {BabelAst} ast
|
||||
* Finds import specifiers and sources for a given ast result
|
||||
* @param {File} ast
|
||||
* @param {FindExportsConfig} config
|
||||
*/
|
||||
function findExportsPerAstEntry(ast, { skipFileImports }) {
|
||||
|
|
@ -207,13 +207,12 @@ function findExportsPerAstEntry(ast, { skipFileImports }) {
|
|||
}
|
||||
|
||||
class FindExportsAnalyzer extends Analyzer {
|
||||
constructor() {
|
||||
super();
|
||||
this.name = 'find-exports';
|
||||
static get analyzerName() {
|
||||
return 'find-exports';
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc Finds export specifiers and sources
|
||||
* Finds export specifiers and sources
|
||||
* @param {FindExportsConfig} customConfig
|
||||
*/
|
||||
async execute(customConfig = {}) {
|
||||
|
|
|
|||
|
|
@ -2,10 +2,12 @@
|
|||
const { default: traverse } = require('@babel/traverse');
|
||||
const { isRelativeSourcePath } = require('../utils/relative-source-path.js');
|
||||
const { normalizeSourcePaths } = require('./helpers/normalize-source-paths.js');
|
||||
const { Analyzer } = require('./helpers/Analyzer.js');
|
||||
const { LogService } = require('../services/LogService.js');
|
||||
const { Analyzer } = require('../core/Analyzer.js');
|
||||
const { LogService } = require('../core/LogService.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('@babel/types').File} File
|
||||
* @typedef {import('@babel/types').Node} Node *
|
||||
* @typedef {import('../types/core').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
|
||||
|
|
@ -16,12 +18,12 @@ const { LogService } = require('../services/LogService.js');
|
|||
* Options that allow to filter 'on a file basis'.
|
||||
* We can also filter on the total result
|
||||
*/
|
||||
const /** @type {AnalyzerOptions} */ options = {
|
||||
const /** @type {AnalyzerConfig} */ options = {
|
||||
/**
|
||||
* Only leaves entries with external sources:
|
||||
* - keeps: '@open-wc/testing'
|
||||
* - drops: '../testing'
|
||||
* @param {FindImportsAnalyzerResult} result
|
||||
* @param {FindImportsAnalyzerQueryOutput} result
|
||||
* @param {string} targetSpecifier for instance 'LitElement'
|
||||
*/
|
||||
onlyExternalSources(result) {
|
||||
|
|
@ -29,6 +31,9 @@ const /** @type {AnalyzerOptions} */ options = {
|
|||
},
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {Node} node
|
||||
*/
|
||||
function getImportOrReexportsSpecifiers(node) {
|
||||
return node.specifiers.map(s => {
|
||||
if (s.type === 'ImportDefaultSpecifier' || s.type === 'ExportDefaultSpecifier') {
|
||||
|
|
@ -49,11 +54,12 @@ function getImportOrReexportsSpecifiers(node) {
|
|||
|
||||
/**
|
||||
* Finds import specifiers and sources
|
||||
* @param {any} ast
|
||||
* @param {File} ast
|
||||
*/
|
||||
function findImportsPerAstEntry(ast) {
|
||||
LogService.debug(`Analyzer "find-imports": started findImportsPerAstEntry method`);
|
||||
|
||||
// https://github.com/babel/babel/blob/672a58660f0b15691c44582f1f3fdcdac0fa0d2f/packages/babel-core/src/transformation/index.ts#L110
|
||||
// Visit AST...
|
||||
/** @type {Partial<FindImportsAnalyzerEntry>[]} */
|
||||
const transformedEntry = [];
|
||||
|
|
@ -110,10 +116,9 @@ function findImportsPerAstEntry(ast) {
|
|||
}
|
||||
|
||||
class FindImportsAnalyzer extends Analyzer {
|
||||
constructor() {
|
||||
super();
|
||||
/** @type {AnalyzerName} */
|
||||
this.name = 'find-imports';
|
||||
static get analyzerName() {
|
||||
return 'find-imports';
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -1,29 +1,14 @@
|
|||
const { isRelativeSourcePath } = require('../../utils/relative-source-path.js');
|
||||
const { LogService } = require('../../services/LogService.js');
|
||||
const { LogService } = require('../../core/LogService.js');
|
||||
const { resolveImportPath } = require('../../utils/resolve-import-path.js');
|
||||
const { toPosixPath } = require('../../utils/to-posix-path.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../../types/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../types/core').SpecifierSource} SpecifierSource
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {string} importee like '@lion/core/myFile.js'
|
||||
* @returns {string} project name ('@lion/core')
|
||||
*/
|
||||
function getProjectFromImportee(importee) {
|
||||
const scopedProject = importee[0] === '@';
|
||||
// 'external-project/src/file.js' -> ['external-project', 'src', file.js']
|
||||
let splitSource = importee.split('/');
|
||||
if (scopedProject) {
|
||||
// '@external/project'
|
||||
splitSource = [splitSource.slice(0, 2).join('/'), ...splitSource.slice(2)];
|
||||
}
|
||||
// ['external-project', 'src', 'file.js'] -> 'external-project'
|
||||
const project = splitSource.slice(0, 1).join('/');
|
||||
|
||||
return project;
|
||||
}
|
||||
|
||||
/**
|
||||
* Gets local path from reference project
|
||||
*
|
||||
|
|
@ -33,28 +18,25 @@ function getProjectFromImportee(importee) {
|
|||
* - from: 'reference-project'
|
||||
* - to: './index.js' (or other file specified in package.json 'main')
|
||||
* @param {object} config
|
||||
* @param {string} config.importee 'reference-project/foo.js'
|
||||
* @param {string} config.importer '/my/project/importing-file.js'
|
||||
* @param {SpecifierSource} config.importee 'reference-project/foo.js'
|
||||
* @param {PathFromSystemRoot} config.importer '/my/project/importing-file.js'
|
||||
* @param {PathFromSystemRoot} config.importeeProjectPath '/path/to/reference/project'
|
||||
* @returns {Promise<PathRelativeFromProjectRoot|null>} './foo.js'
|
||||
*/
|
||||
async function fromImportToExportPerspective({ importee, importer }) {
|
||||
async function fromImportToExportPerspective({ importee, importer, importeeProjectPath }) {
|
||||
if (isRelativeSourcePath(importee)) {
|
||||
LogService.warn('[fromImportToExportPerspective] Please only provide external import paths');
|
||||
return null;
|
||||
}
|
||||
|
||||
const absolutePath = await resolveImportPath(importee, importer);
|
||||
const projectName = getProjectFromImportee(importee);
|
||||
if (!absolutePath) {
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* - from: '/my/reference/project/packages/foo/index.js'
|
||||
* - to: './packages/foo/index.js'
|
||||
*/
|
||||
return absolutePath
|
||||
? /** @type {PathRelativeFromProjectRoot} */ (
|
||||
absolutePath.replace(new RegExp(`^.*/${projectName}/?(.*)$`), './$1')
|
||||
)
|
||||
: null;
|
||||
return /** @type {PathRelativeFromProjectRoot} */ (
|
||||
absolutePath.replace(new RegExp(`^${toPosixPath(importeeProjectPath)}/?(.*)$`), './$1')
|
||||
);
|
||||
}
|
||||
|
||||
module.exports = { fromImportToExportPerspective };
|
||||
|
|
|
|||
|
|
@ -3,7 +3,6 @@ const pathLib = require('path');
|
|||
const { isRelativeSourcePath } = require('../../utils/relative-source-path.js');
|
||||
const { resolveImportPath } = require('../../utils/resolve-import-path.js');
|
||||
const { toPosixPath } = require('../../utils/to-posix-path.js');
|
||||
const { aMap } = require('../../utils/async-array-utils.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../../types/core').PathRelative} PathRelative
|
||||
|
|
@ -44,7 +43,9 @@ async function normalizeSourcePaths(queryOutput, relativePath, rootPath = proces
|
|||
pathLib.resolve(rootPath, relativePath)
|
||||
);
|
||||
const currentDirPath = /** @type {PathFromSystemRoot} */ (pathLib.dirname(currentFilePath));
|
||||
return aMap(queryOutput, async specifierResObj => {
|
||||
|
||||
const normalizedQueryOutput = [];
|
||||
for (const specifierResObj of queryOutput) {
|
||||
if (specifierResObj.source) {
|
||||
if (isRelativeSourcePath(specifierResObj.source) && relativePath) {
|
||||
// This will be a source like '../my/file.js' or './file.js'
|
||||
|
|
@ -60,8 +61,9 @@ async function normalizeSourcePaths(queryOutput, relativePath, rootPath = proces
|
|||
// specifierResObj.fullSource = specifierResObj.source;
|
||||
}
|
||||
}
|
||||
return specifierResObj;
|
||||
});
|
||||
normalizedQueryOutput.push(specifierResObj);
|
||||
}
|
||||
return normalizedQueryOutput;
|
||||
}
|
||||
|
||||
module.exports = { normalizeSourcePaths };
|
||||
|
|
|
|||
|
|
@ -5,10 +5,10 @@ const {
|
|||
isRelativeSourcePath,
|
||||
toRelativeSourcePath,
|
||||
} = require('../../utils/relative-source-path.js');
|
||||
const { AstService } = require('../../services/AstService.js');
|
||||
const { LogService } = require('../../services/LogService.js');
|
||||
const { InputDataService } = require('../../services/InputDataService.js');
|
||||
const { InputDataService } = require('../../core/InputDataService.js');
|
||||
const { resolveImportPath } = require('../../utils/resolve-import-path.js');
|
||||
const { AstService } = require('../../core/AstService.js');
|
||||
const { LogService } = require('../../core/LogService.js');
|
||||
const { memoize } = require('../../utils/memoize.js');
|
||||
|
||||
/**
|
||||
|
|
@ -18,6 +18,14 @@ const { memoize } = require('../../utils/memoize.js');
|
|||
* @typedef {import('../../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {string} source
|
||||
* @param {string} projectName
|
||||
*/
|
||||
function isSelfReferencingProject(source, projectName) {
|
||||
return source.startsWith(`${projectName}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} source
|
||||
* @param {string} projectName
|
||||
|
|
@ -26,7 +34,7 @@ function isExternalProject(source, projectName) {
|
|||
return (
|
||||
!source.startsWith('#') &&
|
||||
!isRelativeSourcePath(source) &&
|
||||
!source.startsWith(`${projectName}/`)
|
||||
!isSelfReferencingProject(source, projectName)
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
// A base class for writing Analyzers
|
||||
const { Analyzer } = require('./helpers/Analyzer.js');
|
||||
const { Analyzer } = require('../core/Analyzer.js');
|
||||
|
||||
// Expose analyzers that are requested to be run in external contexts
|
||||
const FindExportsAnalyzer = require('./find-exports.js');
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ const pathLib = require('path');
|
|||
/* eslint-disable no-shadow, no-param-reassign */
|
||||
const FindImportsAnalyzer = require('./find-imports.js');
|
||||
const FindExportsAnalyzer = require('./find-exports.js');
|
||||
const { Analyzer } = require('./helpers/Analyzer.js');
|
||||
const { Analyzer } = require('../core/Analyzer.js');
|
||||
const { fromImportToExportPerspective } = require('./helpers/from-import-to-export-perspective.js');
|
||||
const {
|
||||
transformIntoIterableFindExportsOutput,
|
||||
|
|
@ -21,6 +21,7 @@ const {
|
|||
* @typedef {import('../types/analyzers').MatchImportsConfig} MatchImportsConfig
|
||||
* @typedef {import('../types/analyzers').MatchImportsAnalyzerResult} MatchImportsAnalyzerResult
|
||||
* @typedef {import('../types/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../types/core').AnalyzerName} AnalyzerName
|
||||
*/
|
||||
|
||||
|
|
@ -117,9 +118,13 @@ async function matchImportsPostprocess(exportsAnalyzerResult, importsAnalyzerRes
|
|||
*/
|
||||
const fromImportToExport = await fromImportToExportPerspective({
|
||||
importee: importEntry.normalizedSource,
|
||||
importer: pathLib.resolve(importProjectPath, importEntry.file),
|
||||
importer: /** @type {PathFromSystemRoot} */ (
|
||||
pathLib.resolve(importProjectPath, importEntry.file)
|
||||
),
|
||||
importeeProjectPath: cfg.referenceProjectPath,
|
||||
});
|
||||
const isFromSameSource = compareImportAndExportPaths(exportEntry.file, fromImportToExport);
|
||||
|
||||
if (!isFromSameSource) {
|
||||
continue;
|
||||
}
|
||||
|
|
@ -133,7 +138,10 @@ async function matchImportsPostprocess(exportsAnalyzerResult, importsAnalyzerRes
|
|||
entry => entry.exportSpecifier && entry.exportSpecifier.id === id,
|
||||
);
|
||||
if (resultForCurrentExport) {
|
||||
// Prevent that we count double import like "import * as all from 'x'" and "import {smth} from 'x'"
|
||||
if (!resultForCurrentExport.importProjectFiles.includes(importEntry.file)) {
|
||||
resultForCurrentExport.importProjectFiles.push(importEntry.file);
|
||||
}
|
||||
} else {
|
||||
conciseResultsArray.push({
|
||||
exportSpecifier: { id, ...(exportEntry.meta ? { meta: exportEntry.meta } : {}) },
|
||||
|
|
@ -151,10 +159,8 @@ async function matchImportsPostprocess(exportsAnalyzerResult, importsAnalyzerRes
|
|||
}
|
||||
|
||||
class MatchImportsAnalyzer extends Analyzer {
|
||||
constructor() {
|
||||
super();
|
||||
/** @type {AnalyzerName} */
|
||||
this.name = 'match-imports';
|
||||
static get analyzerName() {
|
||||
return 'match-imports';
|
||||
}
|
||||
|
||||
static get requiresReference() {
|
||||
|
|
@ -207,6 +213,7 @@ class MatchImportsAnalyzer extends Analyzer {
|
|||
metaConfig: cfg.metaConfig,
|
||||
targetProjectPath: cfg.referenceProjectPath,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
}
|
||||
|
||||
|
|
@ -217,6 +224,7 @@ class MatchImportsAnalyzer extends Analyzer {
|
|||
metaConfig: cfg.metaConfig,
|
||||
targetProjectPath: cfg.targetProjectPath,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -2,14 +2,14 @@
|
|||
const MatchSubclassesAnalyzer = require('./match-subclasses.js');
|
||||
const FindExportsAnalyzer = require('./find-exports.js');
|
||||
const FindCustomelementsAnalyzer = require('./find-customelements.js');
|
||||
const { Analyzer } = require('./helpers/Analyzer.js');
|
||||
const { Analyzer } = require('../core/Analyzer.js');
|
||||
|
||||
/** @typedef {import('./types').FindExportsAnalyzerResult} FindExportsAnalyzerResult */
|
||||
/** @typedef {import('./types').FindCustomelementsAnalyzerResult} FindCustomelementsAnalyzerResult */
|
||||
/** @typedef {import('./types').MatchSubclassesAnalyzerResult} MatchSubclassesAnalyzerResult */
|
||||
/** @typedef {import('./types').FindImportsAnalyzerResult} FindImportsAnalyzerResult */
|
||||
/** @typedef {import('./types').MatchedExportSpecifier} MatchedExportSpecifier */
|
||||
/** @typedef {import('./types').RootFile} RootFile */
|
||||
/** @typedef {import('../types/core').FindExportsAnalyzerResult} FindExportsAnalyzerResult */
|
||||
/** @typedef {import('../types/core').FindCustomelementsAnalyzerResult} FindCustomelementsAnalyzerResult */
|
||||
/** @typedef {import('../types/core').MatchSubclassesAnalyzerResult} MatchSubclassesAnalyzerResult */
|
||||
/** @typedef {import('../types/core').FindImportsAnalyzerResult} FindImportsAnalyzerResult */
|
||||
/** @typedef {import('../types/core').MatchedExportSpecifier} MatchedExportSpecifier */
|
||||
/** @typedef {import('../types/core').RootFile} RootFile */
|
||||
|
||||
/**
|
||||
* For prefix `{ from: 'lion', to: 'wolf' }`
|
||||
|
|
@ -362,9 +362,8 @@ function matchPathsPostprocess(
|
|||
* ]
|
||||
*/
|
||||
class MatchPathsAnalyzer extends Analyzer {
|
||||
constructor() {
|
||||
super();
|
||||
this.name = 'match-paths';
|
||||
static get analyzerName() {
|
||||
return 'match-paths';
|
||||
}
|
||||
|
||||
static get requiresReference() {
|
||||
|
|
@ -429,6 +428,7 @@ class MatchPathsAnalyzer extends Analyzer {
|
|||
gatherFilesConfig: cfg.gatherFilesConfig,
|
||||
gatherFilesConfigReference: cfg.gatherFilesConfigReference,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
|
||||
// [A2]
|
||||
|
|
@ -438,6 +438,7 @@ class MatchPathsAnalyzer extends Analyzer {
|
|||
targetProjectPath: cfg.targetProjectPath,
|
||||
gatherFilesConfig: cfg.gatherFilesConfig,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
|
||||
// [A3]
|
||||
|
|
@ -447,6 +448,7 @@ class MatchPathsAnalyzer extends Analyzer {
|
|||
targetProjectPath: cfg.referenceProjectPath,
|
||||
gatherFilesConfig: cfg.gatherFilesConfigReference,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
|
||||
/**
|
||||
|
|
@ -475,6 +477,7 @@ class MatchPathsAnalyzer extends Analyzer {
|
|||
targetProjectPath: cfg.targetProjectPath,
|
||||
gatherFilesConfig: cfg.gatherFilesConfig,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
|
||||
// [B2]
|
||||
|
|
@ -484,6 +487,7 @@ class MatchPathsAnalyzer extends Analyzer {
|
|||
targetProjectPath: cfg.referenceProjectPath,
|
||||
gatherFilesConfig: cfg.gatherFilesConfigReference,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
// refFindExportsAnalyzer was already created in A3
|
||||
|
||||
|
|
|
|||
|
|
@ -3,18 +3,19 @@ const pathLib = require('path');
|
|||
/* eslint-disable no-shadow, no-param-reassign */
|
||||
const FindClassesAnalyzer = require('./find-classes.js');
|
||||
const FindExportsAnalyzer = require('./find-exports.js');
|
||||
const { Analyzer } = require('./helpers/Analyzer.js');
|
||||
const { Analyzer } = require('../core/Analyzer.js');
|
||||
const { fromImportToExportPerspective } = require('./helpers/from-import-to-export-perspective.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/analyzers/find-classes').FindClassesAnalyzerResult} FindClassesAnalyzerResult
|
||||
* @typedef {import('../types/find-imports').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
* @typedef {import('../types/find-exports').FindExportsAnalyzerResult} FindExportsAnalyzerResult
|
||||
* @typedef {import('../types/find-exports').IterableFindExportsAnalyzerEntry} IterableFindExportsAnalyzerEntry
|
||||
* @typedef {import('../types/find-imports').IterableFindImportsAnalyzerEntry} IterableFindImportsAnalyzerEntry
|
||||
* @typedef {import('../types/match-imports').ConciseMatchImportsAnalyzerResult} ConciseMatchImportsAnalyzerResult
|
||||
* @typedef {import('../types/match-imports').MatchImportsConfig} MatchImportsConfig
|
||||
* @typedef {import('../types/core/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../types/analyzers').FindClassesAnalyzerResult} FindClassesAnalyzerResult
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
* @typedef {import('../types/analyzers').FindExportsAnalyzerResult} FindExportsAnalyzerResult
|
||||
* @typedef {import('../types/analyzers').IterableFindExportsAnalyzerEntry} IterableFindExportsAnalyzerEntry
|
||||
* @typedef {import('../types/analyzers').IterableFindImportsAnalyzerEntry} IterableFindImportsAnalyzerEntry
|
||||
* @typedef {import('../types/analyzers').ConciseMatchImportsAnalyzerResult} ConciseMatchImportsAnalyzerResult
|
||||
* @typedef {import('../types/analyzers').MatchImportsConfig} MatchImportsConfig
|
||||
* @typedef {import('../types/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
function getMemberOverrides(
|
||||
|
|
@ -52,7 +53,7 @@ function getMemberOverrides(
|
|||
}
|
||||
|
||||
/**
|
||||
* @desc Helper method for matchImportsPostprocess. Modifies its resultsObj
|
||||
* Helper method for matchImportsPostprocess. Modifies its resultsObj
|
||||
* @param {object} resultsObj
|
||||
* @param {string} exportId like 'myExport::./reference-project/my/export.js::my-project'
|
||||
* @param {Set<string>} filteredList
|
||||
|
|
@ -67,14 +68,14 @@ function storeResult(resultsObj, exportId, filteredList, meta) {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param {FindExportsAnalyzerResult} exportsAnalyzerResult
|
||||
* @param {FindExportsAnalyzerResult} refExportsAnalyzerResult
|
||||
* @param {FindClassesAnalyzerResult} targetClassesAnalyzerResult
|
||||
* @param {FindClassesAnalyzerResult} refClassesAResult
|
||||
* @param {MatchSubclassesConfig} customConfig
|
||||
* @returns {AnalyzerQueryResult}
|
||||
*/
|
||||
async function matchSubclassesPostprocess(
|
||||
exportsAnalyzerResult,
|
||||
refExportsAnalyzerResult,
|
||||
targetClassesAnalyzerResult,
|
||||
refClassesAResult,
|
||||
customConfig,
|
||||
|
|
@ -102,8 +103,8 @@ async function matchSubclassesPostprocess(
|
|||
*/
|
||||
const resultsObj = {};
|
||||
|
||||
for (const exportEntry of exportsAnalyzerResult.queryOutput) {
|
||||
const exportsProjectObj = exportsAnalyzerResult.analyzerMeta.targetProject;
|
||||
for (const exportEntry of refExportsAnalyzerResult.queryOutput) {
|
||||
const exportsProjectObj = refExportsAnalyzerResult.analyzerMeta.targetProject;
|
||||
const exportsProjectName = exportsProjectObj.name;
|
||||
|
||||
// Look for all specifiers that are exported, like [import {specifier} 'lion-based-ui/foo.js']
|
||||
|
|
@ -124,9 +125,10 @@ async function matchSubclassesPostprocess(
|
|||
// TODO: What if this info is retrieved from cached importProject/target project?
|
||||
const importProjectPath = cfg.targetProjectPath;
|
||||
for (const { result, file } of targetClassesAnalyzerResult.queryOutput) {
|
||||
// targetClassesAnalyzerResult.queryOutput.forEach(({ result, file }) =>
|
||||
const importerFilePath = /** @type {PathFromSystemRoot} */ (
|
||||
pathLib.resolve(importProjectPath, file)
|
||||
);
|
||||
for (const classEntryResult of result) {
|
||||
// result.forEach(classEntryResult => {
|
||||
/**
|
||||
* @example
|
||||
* Example context (read by 'find-classes'/'find-exports' analyzers)
|
||||
|
|
@ -165,7 +167,8 @@ async function matchSubclassesPostprocess(
|
|||
exportEntry.file ===
|
||||
(await fromImportToExportPerspective({
|
||||
importee: classMatch.rootFile.file,
|
||||
importer: pathLib.resolve(importProjectPath, file),
|
||||
importer: importerFilePath,
|
||||
importeeProjectPath: cfg.referenceProjectPath,
|
||||
}));
|
||||
|
||||
if (classMatch && isFromSameSource) {
|
||||
|
|
@ -176,8 +179,14 @@ async function matchSubclassesPostprocess(
|
|||
exportEntryResult,
|
||||
exportSpecifier,
|
||||
);
|
||||
|
||||
let projectFileId = `${importProject}::${file}::${classEntryResult.name}`;
|
||||
if (cfg.addSystemPathsInResult) {
|
||||
projectFileId += `::${importerFilePath}`;
|
||||
}
|
||||
|
||||
filteredImportsList.add({
|
||||
projectFileId: `${importProject}::${file}::${classEntryResult.name}`,
|
||||
projectFileId,
|
||||
memberOverrides,
|
||||
});
|
||||
}
|
||||
|
|
@ -235,13 +244,18 @@ async function matchSubclassesPostprocess(
|
|||
const matchesPerProject = [];
|
||||
flatResult.files.forEach(({ projectFileId, memberOverrides }) => {
|
||||
// eslint-disable-next-line no-shadow
|
||||
const [project, file, identifier] = projectFileId.split('::');
|
||||
const [project, file, identifier, filePath] = projectFileId.split('::');
|
||||
let projectEntry = matchesPerProject.find(m => m.project === project);
|
||||
if (!projectEntry) {
|
||||
matchesPerProject.push({ project, files: [] });
|
||||
projectEntry = matchesPerProject[matchesPerProject.length - 1];
|
||||
}
|
||||
projectEntry.files.push({ file, identifier, memberOverrides });
|
||||
const entry = { file, identifier, memberOverrides };
|
||||
if (filePath) {
|
||||
// @ts-ignore
|
||||
entry.filePath = filePath;
|
||||
}
|
||||
projectEntry.files.push(entry);
|
||||
});
|
||||
|
||||
return {
|
||||
|
|
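The two hunks above extend the '::'-delimited projectFileId with an optional fourth segment when addSystemPathsInResult is set, and split it back apart when grouping matches per project. A small illustrative sketch of that id format; the helper names are made up:

// Illustrative sketch of the '::'-delimited id handled above; helper names are assumptions.
function buildProjectFileId({ project, file, identifier, filePath }) {
  let id = `${project}::${file}::${identifier}`;
  if (filePath) {
    id += `::${filePath}`; // only added when addSystemPathsInResult is enabled
  }
  return id;
}

function parseProjectFileId(projectFileId) {
  const [project, file, identifier, filePath] = projectFileId.split('::');
  return filePath ? { project, file, identifier, filePath } : { project, file, identifier };
}

// parseProjectFileId('my-app::./src/a.js::MyClass::/abs/checkout/my-app/src/a.js')
// => { project: 'my-app', file: './src/a.js', identifier: 'MyClass',
//      filePath: '/abs/checkout/my-app/src/a.js' }
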
@ -262,9 +276,8 @@ async function matchSubclassesPostprocess(
|
|||
// }
|
||||
|
||||
class MatchSubclassesAnalyzer extends Analyzer {
|
||||
constructor() {
|
||||
super();
|
||||
this.name = 'match-subclasses';
|
||||
static get analyzerName() {
|
||||
return 'match-subclasses';
|
||||
}
|
||||
|
||||
static get requiresReference() {
|
||||
|
|
@ -309,16 +322,18 @@ class MatchSubclassesAnalyzer extends Analyzer {
|
|||
*/
|
||||
const findExportsAnalyzer = new FindExportsAnalyzer();
|
||||
/** @type {FindExportsAnalyzerResult} */
|
||||
const exportsAnalyzerResult = await findExportsAnalyzer.execute({
|
||||
const refExportsAnalyzerResult = await findExportsAnalyzer.execute({
|
||||
targetProjectPath: cfg.referenceProjectPath,
|
||||
gatherFilesConfig: cfg.gatherFilesConfigReference,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
const findClassesAnalyzer = new FindClassesAnalyzer();
|
||||
/** @type {FindClassesAnalyzerResult} */
|
||||
const targetClassesAnalyzerResult = await findClassesAnalyzer.execute({
|
||||
targetProjectPath: cfg.targetProjectPath,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
const findRefClassesAnalyzer = new FindClassesAnalyzer();
|
||||
/** @type {FindClassesAnalyzerResult} */
|
||||
|
|
@ -326,10 +341,11 @@ class MatchSubclassesAnalyzer extends Analyzer {
|
|||
targetProjectPath: cfg.referenceProjectPath,
|
||||
gatherFilesConfig: cfg.gatherFilesConfigReference,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
suppressNonCriticalLogs: true,
|
||||
});
|
||||
|
||||
const queryOutput = await matchSubclassesPostprocess(
|
||||
exportsAnalyzerResult,
|
||||
refExportsAnalyzerResult,
|
||||
targetClassesAnalyzerResult,
|
||||
refClassesAnalyzerResult,
|
||||
cfg,
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
const pathLib = require('path');
|
||||
const { LogService } = require('../../services/LogService.js');
|
||||
const { LogService } = require('../../core/LogService.js');
|
||||
|
||||
const /** @type {AnalyzerOptions} */ options = {
|
||||
filterSpecifier(results, targetSpecifier, specifiersKey) {
|
||||
|
|
|
|||
|
|
@ -1,25 +1,22 @@
|
|||
/* eslint-disable no-param-reassign */
|
||||
const fs = require('fs');
|
||||
const semver = require('semver');
|
||||
const pathLib = require('path');
|
||||
const { LogService } = require('../../services/LogService.js');
|
||||
const { QueryService } = require('../../services/QueryService.js');
|
||||
const { ReportService } = require('../../services/ReportService.js');
|
||||
const { InputDataService } = require('../../services/InputDataService.js');
|
||||
const { toPosixPath } = require('../../utils/to-posix-path.js');
|
||||
const { getFilePathRelativeFromRoot } = require('../../utils/get-file-path-relative-from-root.js');
|
||||
const { LogService } = require('./LogService.js');
|
||||
const { QueryService } = require('./QueryService.js');
|
||||
const { ReportService } = require('./ReportService.js');
|
||||
const { InputDataService } = require('./InputDataService.js');
|
||||
const { toPosixPath } = require('../utils/to-posix-path.js');
|
||||
const { memoize } = require('../utils/memoize.js');
|
||||
const { getFilePathRelativeFromRoot } = require('../utils/get-file-path-relative-from-root.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../../types/core').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../types/core').QueryOutput} QueryOutput
|
||||
* @typedef {import('../../types/core').QueryOutputEntry} QueryOutputEntry
|
||||
* @typedef {import('../../types/core').ProjectInputData} ProjectInputData
|
||||
* @typedef {import('../../types/core').ProjectInputDataWithMeta} ProjectInputDataWithMeta
|
||||
* @typedef {import('../../types/core').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
* @typedef {import('../../types/core').MatchAnalyzerConfig} MatchAnalyzerConfig
|
||||
*
|
||||
* @typedef {(ast: object, { relativePath: PathRelative }) => {result: QueryOutputEntry}} TraversEntryFn
|
||||
* @typedef {import('../types/core').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../types/core').QueryOutput} QueryOutput
|
||||
* @typedef {import('../types/core').ProjectInputData} ProjectInputData
|
||||
* @typedef {import('../types/core').ProjectInputDataWithMeta} ProjectInputDataWithMeta
|
||||
* @typedef {import('../types/core').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
* @typedef {import('../types/core').MatchAnalyzerConfig} MatchAnalyzerConfig
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
@ -27,7 +24,7 @@ const { getFilePathRelativeFromRoot } = require('../../utils/get-file-path-relat
|
|||
* @param {ProjectInputDataWithMeta} projectData
|
||||
* @param {function} astAnalysis
|
||||
*/
|
||||
async function analyzePerAstEntry(projectData, astAnalysis) {
|
||||
async function analyzePerAstFile(projectData, astAnalysis) {
|
||||
const entries = [];
|
||||
for (const { file, ast, context: astContext } of projectData.entries) {
|
||||
const relativePath = getFilePathRelativeFromRoot(file, projectData.project.path);
|
||||
|
|
@ -64,22 +61,22 @@ function posixify(data) {
|
|||
}
|
||||
|
||||
/**
|
||||
* @desc This method ensures that the result returned by an analyzer always has a consistent format.
|
||||
* This method ensures that the result returned by an analyzer always has a consistent format.
|
||||
* By returning the configuration for the queryOutput, it will be possible to run later queries
|
||||
* under the same circumstances
|
||||
* @param {QueryOutput} queryOutput
|
||||
* @param {object} configuration
|
||||
* @param {object} cfg
|
||||
* @param {Analyzer} analyzer
|
||||
*/
|
||||
function ensureAnalyzerResultFormat(queryOutput, configuration, analyzer) {
|
||||
function ensureAnalyzerResultFormat(queryOutput, cfg, analyzer) {
|
||||
const { targetProjectMeta, identifier, referenceProjectMeta } = analyzer;
|
||||
const optional = {};
|
||||
if (targetProjectMeta) {
|
||||
optional.targetProject = targetProjectMeta;
|
||||
optional.targetProject = { ...targetProjectMeta };
|
||||
delete optional.targetProject.path; // get rid of machine specific info
|
||||
}
|
||||
if (referenceProjectMeta) {
|
||||
optional.referenceProject = referenceProjectMeta;
|
||||
optional.referenceProject = { ...referenceProjectMeta };
|
||||
delete optional.referenceProject.path; // get rid of machine specific info
|
||||
}
|
||||
|
||||
|
|
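The change above copies the project meta and then drops its path, so the reported (and cached) analyzer result no longer carries machine-specific absolute paths and the original meta object is not mutated. The same step in isolation:

// Sketch of the copy-then-delete step above: strip machine-specific info without mutating
// the meta object that other code may still hold a reference to.
function toCacheSafeProjectMeta(projectMeta) {
  const publicMeta = { ...projectMeta };
  delete publicMeta.path; // e.g. '/Users/someone/dev/my-app' differs per machine
  return publicMeta;
}

// toCacheSafeProjectMeta({ name: 'my-app', version: '1.0.0', path: '/abs/my-app' })
// => { name: 'my-app', version: '1.0.0' }
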
@ -91,7 +88,7 @@ function ensureAnalyzerResultFormat(queryOutput, configuration, analyzer) {
|
|||
requiredAst: analyzer.requiredAst,
|
||||
identifier,
|
||||
...optional,
|
||||
configuration,
|
||||
configuration: cfg,
|
||||
},
|
||||
};
|
||||
|
||||
|
|
@ -129,14 +126,18 @@ function ensureAnalyzerResultFormat(queryOutput, configuration, analyzer) {
|
|||
* Before running the analyzer, we need two conditions for a 'compatible match':
|
||||
* - 1. referenceProject is imported by targetProject at all
|
||||
* - 2. referenceProject and targetProject have compatible major versions
|
||||
* @param {PathFromSystemRoot} referencePath
|
||||
* @param {PathFromSystemRoot} targetPath
|
||||
* @typedef {(referencePath:PathFromSystemRoot,targetPath:PathFromSystemRoot) => {compatible:boolean}} CheckForMatchCompatibilityFn
|
||||
* @type {CheckForMatchCompatibilityFn}
|
||||
*/
|
||||
function checkForMatchCompatibility(referencePath, targetPath) {
|
||||
const refFile = pathLib.resolve(referencePath, 'package.json');
|
||||
const referencePkg = JSON.parse(fs.readFileSync(refFile, 'utf8'));
|
||||
const targetFile = pathLib.resolve(targetPath, 'package.json');
|
||||
const targetPkg = JSON.parse(fs.readFileSync(targetFile, 'utf8'));
|
||||
const checkForMatchCompatibility = memoize(
|
||||
(
|
||||
/** @type {PathFromSystemRoot} */ referencePath,
|
||||
/** @type {PathFromSystemRoot} */ targetPath,
|
||||
) => {
|
||||
// const refFile = pathLib.resolve(referencePath, 'package.json');
|
||||
const referencePkg = InputDataService.getPackageJson(referencePath);
|
||||
// const targetFile = pathLib.resolve(targetPath, 'package.json');
|
||||
const targetPkg = InputDataService.getPackageJson(targetPath);
|
||||
|
||||
const allTargetDeps = [
|
||||
...Object.entries(targetPkg.devDependencies || {}),
|
||||
|
|
@ -150,7 +151,8 @@ function checkForMatchCompatibility(referencePath, targetPath) {
|
|||
return { compatible: false, reason: 'no-matched-version' };
|
||||
}
|
||||
return { compatible: true };
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* If in json format, 'unwind' to be compatible for analysis...
|
||||
|
|
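The memoized checkForMatchCompatibility above encodes the two conditions listed in its comment: the target must depend on the reference at all, and the depended-upon range must match the reference's version. A rough standalone sketch under those assumptions; the real version matching and package.json lookup (InputDataService.getPackageJson) may differ, and semver is already required by this file:

// Rough sketch of the two compatibility conditions above, using plain package.json objects.
const semver = require('semver');

function sketchCheckCompatibility(referencePkg, targetPkg) {
  const allTargetDeps = {
    ...(targetPkg.devDependencies || {}),
    ...(targetPkg.dependencies || {}),
  };
  const requestedRange = allTargetDeps[referencePkg.name];
  if (!requestedRange) {
    return { compatible: false, reason: 'no-dependency' };
  }
  if (!semver.satisfies(referencePkg.version, requestedRange)) {
    return { compatible: false, reason: 'no-matched-version' };
  }
  return { compatible: true };
}

// sketchCheckCompatibility(
//   { name: '@lion/button', version: '0.8.0' },
//   { dependencies: { '@lion/button': '^0.8.0' } },
// ); // => { compatible: true }
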
@ -163,19 +165,18 @@ function unwindJsonResult(targetOrReferenceProjectResult) {
|
|||
}
|
||||
|
||||
class Analyzer {
|
||||
constructor() {
|
||||
this.requiredAst = 'babel';
|
||||
/** @type {AnalyzerName|''} */
|
||||
this.name = '';
|
||||
}
|
||||
static requiresReference = false;
|
||||
|
||||
static get requiresReference() {
|
||||
return false;
|
||||
}
|
||||
/** @type {AnalyzerName|''} */
|
||||
static analyzerName = '';
|
||||
|
||||
name = /** @type {typeof Analyzer} */ (this.constructor).analyzerName;
|
||||
|
||||
requiredAst = 'babel';
|
||||
|
||||
/**
|
||||
* In a MatchAnalyzer, two Analyzers (a reference and target) are run.
|
||||
* For instance: a FindExportsAnalyzer and FindImportsAnalyzer are run.
|
||||
* In a MatchAnalyzer, two Analyzers (a reference and targer) are run.
|
||||
* For instance, in a MatchImportsAnalyzer, a FindExportsAnalyzer and FindImportsAnalyzer are run.
|
||||
* Their results can be provided as config params.
|
||||
* When they were stored in json format in the filesystem, 'unwind' them to be compatible for analysis...
|
||||
* @param {MatchAnalyzerConfig} cfg
|
||||
|
|
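The refactor above replaces the constructor-assigned name with a static analyzerName, and derives the instance name from this.constructor. A condensed sketch of how a subclass plugs into that pattern (simplified base class, not the full Analyzer):

// Condensed sketch of the class-field pattern introduced above (simplified Analyzer base).
class Analyzer {
  static requiresReference = false;
  /** @type {string} */
  static analyzerName = '';
  name = /** @type {typeof Analyzer} */ (this.constructor).analyzerName;
  requiredAst = 'babel';
}

class MatchPathsAnalyzer extends Analyzer {
  static get analyzerName() {
    return 'match-paths';
  }

  static get requiresReference() {
    return true;
  }
}

// new MatchPathsAnalyzer().name === 'match-paths'
// MatchPathsAnalyzer.requiresReference === true
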
@ -198,13 +199,13 @@ class Analyzer {
|
|||
this.constructor.__unwindProvidedResults(cfg);
|
||||
|
||||
if (!cfg.targetProjectResult) {
|
||||
this.targetProjectMeta = InputDataService.getProjectMeta(cfg.targetProjectPath, true);
|
||||
this.targetProjectMeta = InputDataService.getProjectMeta(cfg.targetProjectPath);
|
||||
} else {
|
||||
this.targetProjectMeta = cfg.targetProjectResult.analyzerMeta.targetProject;
|
||||
}
|
||||
|
||||
if (cfg.referenceProjectPath && !cfg.referenceProjectResult) {
|
||||
this.referenceProjectMeta = InputDataService.getProjectMeta(cfg.referenceProjectPath, true);
|
||||
this.referenceProjectMeta = InputDataService.getProjectMeta(cfg.referenceProjectPath);
|
||||
} else if (cfg.referenceProjectResult) {
|
||||
this.referenceProjectMeta = cfg.referenceProjectResult.analyzerMeta.targetProject;
|
||||
}
|
||||
|
|
@ -227,6 +228,7 @@ class Analyzer {
|
|||
);
|
||||
|
||||
if (!compatible) {
|
||||
if (!cfg.suppressNonCriticalLogs) {
|
||||
LogService.info(
|
||||
`skipping ${LogService.pad(this.name, 16)} for ${
|
||||
this.identifier
|
||||
|
|
@ -235,6 +237,7 @@ class Analyzer {
|
|||
'',
|
||||
)}`,
|
||||
);
|
||||
}
|
||||
return ensureAnalyzerResultFormat(`[${reason}]`, cfg, this);
|
||||
}
|
||||
}
|
||||
|
|
@ -245,13 +248,16 @@ class Analyzer {
|
|||
const cachedResult = Analyzer._getCachedAnalyzerResult({
|
||||
analyzerName: this.name,
|
||||
identifier: this.identifier,
|
||||
cfg,
|
||||
});
|
||||
|
||||
if (cachedResult) {
|
||||
return cachedResult;
|
||||
}
|
||||
|
||||
if (!cfg.suppressNonCriticalLogs) {
|
||||
LogService.info(`starting ${LogService.pad(this.name, 16)} for ${this.identifier}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get reference and search-target data
|
||||
|
|
@ -282,12 +288,14 @@ class Analyzer {
|
|||
LogService.debug(`Analyzer "${this.name}": started _finalize method`);
|
||||
|
||||
const analyzerResult = ensureAnalyzerResultFormat(queryOutput, cfg, this);
|
||||
if (!cfg.suppressNonCriticalLogs) {
|
||||
LogService.success(`finished ${LogService.pad(this.name, 16)} for ${this.identifier}`);
|
||||
}
|
||||
return analyzerResult;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {function|{traverseEntryFn: function: filePaths:string[]; projectPath: string}} traverseEntryOrConfig
|
||||
* @param {function|{traverseEntryFn: function; filePaths:string[]; projectPath: string}} traverseEntryOrConfig
|
||||
*/
|
||||
async _traverse(traverseEntryOrConfig) {
|
||||
LogService.debug(`Analyzer "${this.name}": started _traverse method`);
|
||||
|
|
@ -323,7 +331,7 @@ class Analyzer {
|
|||
* Create ASTs for our inputData
|
||||
*/
|
||||
const astDataProjects = await QueryService.addAstToProjectsData(finalTargetData, 'babel');
|
||||
return analyzePerAstEntry(astDataProjects[0], traverseEntryFn);
|
||||
return analyzePerAstFile(astDataProjects[0], traverseEntryFn);
|
||||
}
|
||||
|
||||
async execute(customConfig = {}) {
|
||||
|
|
@ -332,6 +340,7 @@ class Analyzer {
|
|||
const cfg = {
|
||||
targetProjectPath: null,
|
||||
referenceProjectPath: null,
|
||||
suppressNonCriticalLogs: false,
|
||||
...customConfig,
|
||||
};
|
||||
|
||||
|
|
@ -355,19 +364,19 @@ class Analyzer {
|
|||
}
|
||||
|
||||
/**
|
||||
* @desc Gets a cached result from ReportService. Since ReportService slightly modifies analyzer
|
||||
* Gets a cached result from ReportService. Since ReportService slightly modifies analyzer
|
||||
* output, we 'unwind' before we return...
|
||||
* @param {object} config
|
||||
* @param {string} config.analyzerName
|
||||
* @param {string} config.identifier
|
||||
* @param {{ analyzerName:AnalyzerName, identifier:string, cfg:AnalyzerConfig}} config
|
||||
* @returns {AnalyzerQueryResult|undefined}
|
||||
*/
|
||||
static _getCachedAnalyzerResult({ analyzerName, identifier }) {
|
||||
static _getCachedAnalyzerResult({ analyzerName, identifier, cfg }) {
|
||||
const cachedResult = ReportService.getCachedResult({ analyzerName, identifier });
|
||||
if (!cachedResult) {
|
||||
return undefined;
|
||||
}
|
||||
if (!cfg.suppressNonCriticalLogs) {
|
||||
LogService.success(`cached version found for ${identifier}`);
|
||||
}
|
||||
|
||||
/** @type {AnalyzerQueryResult} */
|
||||
const result = unwindJsonResult(cachedResult);
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
const babelParser = require('@babel/parser');
|
||||
const parse5 = require('parse5');
|
||||
const traverseHtml = require('../utils/traverse-html.js');
|
||||
const { LogService } = require('./LogService.js');
|
||||
|
||||
/**
|
||||
* @typedef {import("@babel/types").File} File
|
||||
* @typedef {import("@babel/parser").ParserOptions} ParserOptions
|
||||
* @typedef {import('../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
class AstService {
|
||||
/**
|
||||
* Compiles an array of file paths using Babel.
|
||||
* @param {string} code
|
||||
* @param {ParserOptions} parserOptions
|
||||
* @returns {File}
|
||||
*/
|
||||
static _getBabelAst(code, parserOptions = {}) {
|
||||
const ast = babelParser.parse(code, {
|
||||
sourceType: 'module',
|
||||
plugins: [
|
||||
'importMeta',
|
||||
'dynamicImport',
|
||||
'classProperties',
|
||||
'exportDefaultFrom',
|
||||
'importAssertions',
|
||||
],
|
||||
...parserOptions,
|
||||
});
|
||||
return ast;
|
||||
}
|
||||
|
||||
/**
|
||||
* Combines all script tags as if it were one js file.
|
||||
* @param {string} htmlCode
|
||||
*/
|
||||
static getScriptsFromHtml(htmlCode) {
|
||||
const ast = parse5.parseFragment(htmlCode);
|
||||
/**
|
||||
* @type {string[]}
|
||||
*/
|
||||
const scripts = [];
|
||||
traverseHtml(ast, {
|
||||
/**
|
||||
* @param {{ node: { childNodes: { value: any; }[]; }; }} path
|
||||
*/
|
||||
script(path) {
|
||||
const code = path.node.childNodes[0] ? path.node.childNodes[0].value : '';
|
||||
scripts.push(code);
|
||||
},
|
||||
});
|
||||
return scripts;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the Babel AST
|
||||
* @param { string } code
|
||||
* @param { 'babel' } astType
|
||||
* @param { {filePath?: PathFromSystemRoot} } options
|
||||
* @returns {File|undefined}
|
||||
*/
|
||||
// eslint-disable-next-line consistent-return
|
||||
static getAst(code, astType, { filePath } = {}) {
|
||||
// eslint-disable-next-line default-case
|
||||
try {
|
||||
return this._getBabelAst(code);
|
||||
} catch (e) {
|
||||
LogService.error(`Error when parsing "${filePath}":\n${e}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { AstService };
|
||||
|
|
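A hypothetical usage of the new AstService above: getScriptsFromHtml flattens the script tags of an HTML file so their contents can be fed to the Babel-based getAst like any other module. The require path and sample values are assumptions, not part of the commit:

// Hypothetical usage of the AstService shown above; the require path is an assumption.
const { AstService } = require('./AstService.js');

const html = `
  <p>demo</p>
  <script type="module">import { LionButton } from '@lion/button';</script>
  <script>console.log('second script');</script>
`;

const scripts = AstService.getScriptsFromHtml(html);
// roughly: ["import { LionButton } from '@lion/button';", "console.log('second script');"]

const asts = scripts.map(code =>
  AstService.getAst(code, 'babel', { filePath: '/virtual/index.html' }),
);
// each entry is a Babel File node, or undefined if parsing failed (the error is logged)
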
@ -10,11 +10,13 @@ const { LogService } = require('./LogService.js');
|
|||
const { AstService } = require('./AstService.js');
|
||||
const { getFilePathRelativeFromRoot } = require('../utils/get-file-path-relative-from-root.js');
|
||||
const { toPosixPath } = require('../utils/to-posix-path.js');
|
||||
const { memoize } = require('../utils/memoize.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
|
||||
* @typedef {import('../types/core').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../types/core').PathRelative} PathRelative
|
||||
* @typedef {import('../types/core').QueryConfig} QueryConfig
|
||||
* @typedef {import('../types/core').QueryResult} QueryResult
|
||||
* @typedef {import('../types/core').FeatureQueryConfig} FeatureQueryConfig
|
||||
|
|
@ -31,20 +33,15 @@ const { toPosixPath } = require('../utils/to-posix-path.js');
|
|||
* @typedef {import('../types/core').ProjectInputDataWithMeta} ProjectInputDataWithMeta
|
||||
* @typedef {import('../types/core').Project} Project
|
||||
* @typedef {import('../types/core').ProjectName} ProjectName
|
||||
*/
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/core').PackageJson} PackageJson
|
||||
* @typedef {{path:PathFromSystemRoot; name:ProjectName}} ProjectNameAndPath
|
||||
* @typedef {{name:ProjectName;files:PathRelativeFromProjectRoot[], workspaces:string[]}} PkgJson
|
||||
*/
|
||||
|
||||
// TODO: memoize
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {PkgJson|undefined}
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => PackageJson|undefined} GetPackageJsonFn
|
||||
* @type {GetPackageJsonFn}
|
||||
*/
|
||||
function getPackageJson(rootPath) {
|
||||
const getPackageJson = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
try {
|
||||
const fileContent = fs.readFileSync(`${rootPath}/package.json`, 'utf8');
|
||||
return JSON.parse(fileContent);
|
||||
|
|
@ -58,27 +55,30 @@ function getPackageJson(rootPath) {
|
|||
return undefined;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => object|undefined} GetLernaJsonFn
|
||||
* @type {GetLernaJsonFn}
|
||||
*/
|
||||
function getLernaJson(rootPath) {
|
||||
const getLernaJson = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
try {
|
||||
const fileContent = fs.readFileSync(`${rootPath}/lerna.json`, 'utf8');
|
||||
return JSON.parse(fileContent);
|
||||
} catch (_) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {PathFromSystemRoot[]|string[]} list
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {ProjectNameAndPath[]}
|
||||
* @typedef {(list:PathFromSystemRoot[]|string[], rootPath:PathFromSystemRoot) => ProjectNameAndPath[]} GetPathsFromGlobListFn
|
||||
* @type {GetPathsFromGlobListFn}
|
||||
*/
|
||||
function getPathsFromGlobList(list, rootPath) {
|
||||
const getPathsFromGlobList = memoize(
|
||||
(
|
||||
/** @type {PathFromSystemRoot[]|string[]} */ list,
|
||||
/** @type {PathFromSystemRoot} */ rootPath,
|
||||
) => {
|
||||
/** @type {string[]} */
|
||||
const results = [];
|
||||
list.forEach(pathOrGlob => {
|
||||
|
|
@ -103,26 +103,27 @@ function getPathsFromGlobList(list, rootPath) {
|
|||
const name = /** @type {ProjectName} */ ((pkgJson && pkgJson.name) || basename);
|
||||
return { name, path: /** @type {PathFromSystemRoot} */ (pkgPath) };
|
||||
});
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {string|undefined}
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => string|undefined} GetGitignoreFileFn
|
||||
* @type {GetGitignoreFileFn}
|
||||
*/
|
||||
function getGitignoreFile(rootPath) {
|
||||
const getGitignoreFile = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
try {
|
||||
return fs.readFileSync(`${rootPath}/.gitignore`, 'utf8');
|
||||
} catch (_) {
|
||||
return undefined;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {string[]}
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => string[]} GetGitIgnorePathsFn
|
||||
* @type {GetGitIgnorePathsFn}
|
||||
*/
|
||||
function getGitIgnorePaths(rootPath) {
|
||||
const fileContent = getGitignoreFile(rootPath);
|
||||
const getGitIgnorePaths = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
const fileContent = /** @type {string} */ (getGitignoreFile(rootPath));
|
||||
if (!fileContent) {
|
||||
return [];
|
||||
}
|
||||
|
|
@ -154,14 +155,14 @@ function getGitIgnorePaths(rootPath) {
|
|||
return entry;
|
||||
});
|
||||
return normalizedEntries;
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* Gives back all files and folders that need to be added to npm artifact
|
||||
* @param {PathFromSystemRoot} rootPath
|
||||
* @returns {string[]}
|
||||
* @typedef {(rootPath:PathFromSystemRoot) => string[]} GetNpmPackagePathsFn
|
||||
* @type {GetNpmPackagePathsFn}
|
||||
*/
|
||||
function getNpmPackagePaths(rootPath) {
|
||||
const getNpmPackagePaths = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
|
||||
const pkgJson = getPackageJson(rootPath);
|
||||
if (!pkgJson) {
|
||||
return [];
|
||||
|
|
@ -176,7 +177,7 @@ function getNpmPackagePaths(rootPath) {
|
|||
});
|
||||
}
|
||||
return [];
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* @param {any|any[]} v
|
||||
|
|
@ -189,8 +190,12 @@ function ensureArray(v) {
|
|||
/**
|
||||
* @param {string|string[]} patterns
|
||||
* @param {Partial<{keepDirs:boolean;root:string}>} [options]
|
||||
*
|
||||
* @typedef {(patterns:string|string[], opts: {keepDirs?:boolean;root:string}) => string[]} MultiGlobSyncFn
|
||||
* @type {MultiGlobSyncFn}
|
||||
*/
|
||||
function multiGlobSync(patterns, { keepDirs = false, root } = {}) {
|
||||
const multiGlobSync = memoize(
|
||||
(/** @type {string|string[]} */ patterns, { keepDirs = false, root } = {}) => {
|
||||
patterns = ensureArray(patterns);
|
||||
const res = new Set();
|
||||
patterns.forEach(pattern => {
|
||||
|
|
@ -203,8 +208,13 @@ function multiGlobSync(patterns, { keepDirs = false, root } = {}) {
|
|||
});
|
||||
});
|
||||
return Array.from(res);
|
||||
}
|
||||
},
|
||||
);
|
||||
|
||||
/**
|
||||
* @param {string} localPathWithDotSlash
|
||||
* @returns {string}
|
||||
*/
|
||||
function stripDotSlashFromLocalPath(localPathWithDotSlash) {
|
||||
return localPathWithDotSlash.replace(/^\.\//, '');
|
||||
}
|
||||
|
|
@ -241,9 +251,9 @@ function getStringOrObjectValOfExportMapEntry({ valObjOrStr, nodeResolveMode })
|
|||
class InputDataService {
|
||||
/**
|
||||
* Create an array of ProjectData
|
||||
* @param {PathFromSystemRoot | ProjectInputData []} projectPaths
|
||||
* @param {(PathFromSystemRoot|ProjectInputData)[]} projectPaths
|
||||
* @param {Partial<GatherFilesConfig>} gatherFilesConfig
|
||||
* @returns {ProjectInputData[]}
|
||||
* @returns {ProjectInputDataWithMeta[]}
|
||||
*/
|
||||
static createDataObject(projectPaths, gatherFilesConfig = {}) {
|
||||
/** @type {ProjectInputData[]} */
|
||||
|
|
@ -306,7 +316,7 @@ class InputDataService {
|
|||
LogService.warn(/** @type {string} */ (e));
|
||||
}
|
||||
project.commitHash = this._getCommitHash(projectPath);
|
||||
return /** @type {Project} */ (project);
|
||||
return /** @type {Project} */ (Object.freeze(project));
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -365,7 +375,7 @@ class InputDataService {
|
|||
toPosixPath(projectObj.project.path),
|
||||
);
|
||||
if (pathLib.extname(file) === '.html') {
|
||||
const extractedScripts = AstService.getScriptsFromHtml(code);
|
||||
const extractedScripts = AstService.getScriptsFromHtml(/** @type {string} */ (code));
|
||||
// eslint-disable-next-line no-shadow
|
||||
extractedScripts.forEach((code, i) => {
|
||||
newEntries.push({
|
||||
|
|
@ -619,7 +629,7 @@ class InputDataService {
|
|||
}
|
||||
|
||||
/**
|
||||
* @param {{[key:string]: string|object}} exports
|
||||
* @param {{[key:string]: string|object|null}} exports
|
||||
* @param {object} opts
|
||||
* @param {'default'|'development'|string} [opts.nodeResolveMode='default']
|
||||
* @param {string} opts.packageRootPath
|
||||
|
|
@ -688,7 +698,14 @@ class InputDataService {
|
|||
return exportMapPaths;
|
||||
}
|
||||
}
|
||||
InputDataService.cacheDisabled = false;
|
||||
// TODO: Remove memoizeConfig.isCacheDisabled this once whole providence uses cacheConfig instead of
|
||||
// memoizeConfig.isCacheDisabled
|
||||
// InputDataService.cacheDisabled = memoizeConfig.isCacheDisabled;
|
||||
|
||||
InputDataService.getProjectMeta = memoize(InputDataService.getProjectMeta);
|
||||
InputDataService.gatherFilesFromDir = memoize(InputDataService.gatherFilesFromDir);
|
||||
InputDataService.getMonoRepoPackages = memoize(InputDataService.getMonoRepoPackages);
|
||||
InputDataService.createDataObject = memoize(InputDataService.createDataObject);
|
||||
|
||||
InputDataService.getPackageJson = getPackageJson;
|
||||
|
||||
|
|
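Several InputDataService helpers above are now wrapped in memoize from utils/memoize.js. A minimal sketch that fits the MemoizeFunction<T> type added later in this commit; the real util may behave differently (for instance, honouring a cache-disable switch):

// Minimal memoize sketch compatible with MemoizeFunction<T>; the actual implementation in
// src/program/utils/memoize.js may differ (e.g. a global cache-disable switch).
function memoize(fn, storage = {}) {
  return function memoized(...args) {
    const key = JSON.stringify(args);
    if (!(key in storage)) {
      storage[key] = fn.apply(this, args);
    }
    return storage[key];
  };
}

// Usage mirroring the assignments above:
// InputDataService.getProjectMeta = memoize(InputDataService.getProjectMeta);
module.exports = { memoize };
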
@ -1,12 +1,7 @@
|
|||
const pathLib = require('path');
|
||||
const chalk = require('chalk');
|
||||
const ora = require('ora');
|
||||
const fs = require('fs');
|
||||
|
||||
/**
|
||||
* @typedef {import('ora').Ora} Ora
|
||||
*/
|
||||
|
||||
const { log } = console;
|
||||
|
||||
/**
|
||||
|
|
@ -17,9 +12,6 @@ function printTitle(title) {
|
|||
return `${title ? `${title}\n` : ''}`;
|
||||
}
|
||||
|
||||
/** @type {Ora} */
|
||||
let spinner;
|
||||
|
||||
class LogService {
|
||||
/**
|
||||
* @param {string} text
|
||||
|
|
@ -89,39 +81,12 @@ class LogService {
|
|||
static info(text, title) {
|
||||
// @ts-ignore
|
||||
this._logHistory.push(`- info -${printTitle(title)} ${text}`);
|
||||
|
||||
if (this.allMuted) {
|
||||
return;
|
||||
}
|
||||
|
||||
log(chalk.bgBlue.black.bold(` info${printTitle(title)}`), text);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
static spinnerStart(text) {
|
||||
spinner = ora(text).start();
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
*/
|
||||
static spinnerText(text) {
|
||||
if (!spinner) {
|
||||
this.spinnerStart(text);
|
||||
}
|
||||
spinner.text = text;
|
||||
}
|
||||
|
||||
static spinnerStop() {
|
||||
spinner.stop();
|
||||
}
|
||||
|
||||
static get spinner() {
|
||||
return spinner;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} text
|
||||
* @param {number} minChars
|
||||
|
|
@ -3,6 +3,7 @@ const child_process = require('child_process'); // eslint-disable-line camelcase
|
|||
const { AstService } = require('./AstService.js');
|
||||
const { LogService } = require('./LogService.js');
|
||||
const { getFilePathRelativeFromRoot } = require('../utils/get-file-path-relative-from-root.js');
|
||||
const { memoize } = require('../utils/memoize.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/analyzers').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
|
|
@ -31,6 +32,9 @@ class QueryService {
|
|||
* @returns {SearchQueryConfig}
|
||||
*/
|
||||
static getQueryConfigFromRegexSearchString(regexString) {
|
||||
if (typeof regexString !== 'string') {
|
||||
throw new Error('[QueryService.getQueryConfigFromRegexSearchString]: provide a string');
|
||||
}
|
||||
return { type: 'search', regexString };
|
||||
}
|
||||
|
||||
|
|
@ -44,8 +48,13 @@ class QueryService {
|
|||
* @returns {FeatureQueryConfig}
|
||||
*/
|
||||
static getQueryConfigFromFeatureString(queryString) {
|
||||
if (typeof queryString !== 'string') {
|
||||
throw new Error('[QueryService.getQueryConfigFromFeatureString]: provide a string');
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {string} candidate
|
||||
* Each candidate (tag, attrKey or attrValue) can end with asterisk.
|
||||
* @param {string} candidate for my-*[attr*=x*] 'my-*', 'attr*' or 'x*'
|
||||
* @returns {[string, boolean]}
|
||||
*/
|
||||
function parseContains(candidate) {
|
||||
|
|
@ -59,12 +68,12 @@ class QueryService {
|
|||
let featString;
|
||||
|
||||
// Creates tag ('tg-icon') and featString ('font-icon+size=xs')
|
||||
const match = queryString.match(/(^.*)(\[(.+)\])+/);
|
||||
if (match) {
|
||||
const attrMatch = queryString.match(/(^.*)(\[(.+)\])+/);
|
||||
if (attrMatch) {
|
||||
// eslint-disable-next-line prefer-destructuring
|
||||
tagCandidate = match[1];
|
||||
tagCandidate = attrMatch[1];
|
||||
// eslint-disable-next-line prefer-destructuring
|
||||
featString = match[3];
|
||||
featString = attrMatch[3];
|
||||
} else {
|
||||
tagCandidate = queryString;
|
||||
}
|
||||
|
|
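The renamed attrMatch above splits a feature query such as 'tg-icon[font-icon+size=xs]' into a tag candidate and a feature string before any wildcard handling. A small isolated sketch of just that split (the '*' handling of the real parser is left out):

// Isolated sketch of the tag/feature split above; wildcard handling omitted.
function splitFeatureQuery(queryString) {
  const attrMatch = queryString.match(/(^.*)(\[(.+)\])+/);
  if (attrMatch) {
    return { tagCandidate: attrMatch[1], featString: attrMatch[3] };
  }
  return { tagCandidate: queryString, featString: undefined };
}

// splitFeatureQuery('tg-icon[font-icon+size=xs]')
// => { tagCandidate: 'tg-icon', featString: 'font-icon+size=xs' }
// splitFeatureQuery('my-button')
// => { tagCandidate: 'my-button', featString: undefined }
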
@ -94,9 +103,9 @@ class QueryService {
|
|||
}
|
||||
|
||||
/**
|
||||
* RSetrieves the default export found in ./program/analyzers/findImport.js
|
||||
* @param {string|Analyzer} analyzerObjectOrString
|
||||
* @param {AnalyzerConfig} analyzerConfig
|
||||
* Retrieves the default export found in ./program/analyzers/find-import.js
|
||||
* @param {string|typeof Analyzer} analyzerObjectOrString
|
||||
* @param {AnalyzerConfig} [analyzerConfig]
|
||||
* @returns {AnalyzerQueryConfig}
|
||||
*/
|
||||
static getQueryConfigFromAnalyzer(analyzerObjectOrString, analyzerConfig) {
|
||||
|
|
@ -108,28 +117,26 @@ class QueryService {
|
|||
// eslint-disable-next-line import/no-dynamic-require, global-require
|
||||
analyzer = /** @type {Analyzer} */ (require(`../analyzers/${analyzerObjectOrString}`));
|
||||
} catch (e) {
|
||||
LogService.error(e);
|
||||
LogService.error(e.toString());
|
||||
process.exit(1);
|
||||
}
|
||||
} else {
|
||||
// We don't need to import the analyzer, since we already have it
|
||||
analyzer = analyzerObjectOrString;
|
||||
}
|
||||
return {
|
||||
return /** @type {AnalyzerQueryConfig} */ ({
|
||||
type: 'ast-analyzer',
|
||||
analyzerName: /** @type {AnalyzerName} */ (analyzer.name),
|
||||
analyzerName: /** @type {AnalyzerName} */ (analyzer.analyzerName),
|
||||
analyzerConfig,
|
||||
analyzer,
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc Search via unix grep
|
||||
* Search via unix grep
|
||||
* @param {InputData} inputData
|
||||
* @param {QueryConfig} queryConfig
|
||||
* @param {object} [customConfig]
|
||||
* @param {boolean} [customConfig.hasVerboseReporting]
|
||||
* @param {object} [customConfig.gatherFilesConfig]
|
||||
* @param {FeatureQueryConfig|SearchQueryConfig} queryConfig
|
||||
* @param {{hasVerboseReporting:boolean;gatherFilesConfig:GatherFilesConfig}} [customConfig]
|
||||
* @returns {Promise<QueryResult>}
|
||||
*/
|
||||
static async grepSearch(inputData, queryConfig, customConfig) {
|
||||
|
|
@ -190,7 +197,7 @@ class QueryService {
|
|||
}
|
||||
|
||||
/**
|
||||
* Search via ast (typescript compilation)
|
||||
* Perform ast analysis
|
||||
* @param {AnalyzerQueryConfig} analyzerQueryConfig
|
||||
* @param {AnalyzerConfig} [customConfig]
|
||||
* @returns {Promise<AnalyzerQueryResult>}
|
||||
|
|
@ -341,4 +348,6 @@ class QueryService {
|
|||
}
|
||||
QueryService.cacheDisabled = false;
|
||||
|
||||
QueryService.addAstToProjectsData = memoize(QueryService.addAstToProjectsData);
|
||||
|
||||
module.exports = { QueryService };
|
||||
|
|
@ -1,6 +1,8 @@
|
|||
const fs = require('fs');
|
||||
const pathLib = require('path');
|
||||
const getHash = require('../utils/get-hash.js');
|
||||
const { memoize } = require('../utils/memoize.js');
|
||||
// const memoize = fn => fn;
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/core').Project} Project
|
||||
|
|
@ -60,6 +62,7 @@ class ReportService {
|
|||
}
|
||||
const { name } = queryResult.meta.analyzerMeta;
|
||||
const filePath = this._getResultFileNameAndPath(name, identifier);
|
||||
|
||||
fs.writeFileSync(filePath, output, { flag: 'w' });
|
||||
}
|
||||
|
||||
|
|
@ -125,5 +128,7 @@ class ReportService {
|
|||
fs.writeFileSync(filePath, JSON.stringify(file, null, 2), { flag: 'w' });
|
||||
}
|
||||
}
|
||||
ReportService.createIdentifier = memoize(ReportService.createIdentifier);
|
||||
ReportService.getCachedResult = memoize(ReportService.getCachedResult);
|
||||
|
||||
module.exports = { ReportService };
|
||||
|
|
@ -1,12 +1,25 @@
|
|||
const { performance } = require('perf_hooks');
|
||||
const deepmerge = require('deepmerge');
|
||||
const { ReportService } = require('./services/ReportService.js');
|
||||
const { InputDataService } = require('./services/InputDataService.js');
|
||||
const { LogService } = require('./services/LogService.js');
|
||||
const { QueryService } = require('./services/QueryService.js');
|
||||
const { aForEach } = require('./utils/async-array-utils.js');
|
||||
const { ReportService } = require('./core/ReportService.js');
|
||||
const { InputDataService } = require('./core/InputDataService.js');
|
||||
const { LogService } = require('./core/LogService.js');
|
||||
const { QueryService } = require('./core/QueryService.js');
|
||||
|
||||
// After handling a combo, we should know which project versions we have, since
|
||||
// the analyzer internally called createDataObject(which provides us the needed meta info).
|
||||
/**
|
||||
* @typedef {import('./types/core').ProvidenceConfig} ProvidenceConfig
|
||||
* @typedef {import('./types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('./types/core').QueryResult} QueryResult
|
||||
* @typedef {import('./types/core').AnalyzerQueryResult} AnalyzerQueryResult
|
||||
* @typedef {import('./types/core').QueryConfig} QueryConfig
|
||||
* @typedef {import('./types/core').AnalyzerQueryConfig} AnalyzerQueryConfig
|
||||
* @typedef {import('./types/core').GatherFilesConfig} GatherFilesConfig
|
||||
*/
|
||||
|
||||
/**
|
||||
* After handling a combo, we should know which project versions we have, since
|
||||
* the analyzer internally called createDataObject(which provides us the needed meta info).
|
||||
* @param {{queryResult: AnalyzerQueryResult; queryConfig: AnalyzerQueryConfig; providenceConfig: ProvidenceConfig}} opts
|
||||
*/
|
||||
function addToSearchTargetDepsFile({ queryResult, queryConfig, providenceConfig }) {
|
||||
const currentSearchTarget = queryConfig.analyzerConfig.targetProjectPath;
|
||||
// eslint-disable-next-line array-callback-return, consistent-return
|
||||
|
|
@ -26,6 +39,10 @@ function addToSearchTargetDepsFile({ queryResult, queryConfig, providenceConfig
|
|||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {AnalyzerQueryResult} queryResult
|
||||
* @param {{outputPath:PathFromSystemRoot;report:boolean}} cfg
|
||||
*/
|
||||
function report(queryResult, cfg) {
|
||||
if (cfg.report && !queryResult.meta.analyzerMeta.__fromCache) {
|
||||
const { identifier } = queryResult.meta.analyzerMeta;
|
||||
|
|
@ -35,12 +52,13 @@ function report(queryResult, cfg) {
|
|||
|
||||
/**
|
||||
* Creates unique QueryConfig for analyzer turn
|
||||
* @param {QueryConfig} queryConfig
|
||||
* @param {string} targetProjectPath
|
||||
* @param {string} referenceProjectPath
|
||||
* @param {AnalyzerQueryConfig} queryConfig
|
||||
* @param {PathFromSystemRoot} targetProjectPath
|
||||
* @param {PathFromSystemRoot} referenceProjectPath
|
||||
* @returns {Partial<AnalyzerQueryResult>}
|
||||
*/
|
||||
function getSlicedQueryConfig(queryConfig, targetProjectPath, referenceProjectPath) {
|
||||
return {
|
||||
return /** @type {Partial<AnalyzerQueryResult>} */ ({
|
||||
...queryConfig,
|
||||
...{
|
||||
analyzerConfig: {
|
||||
|
|
@ -51,19 +69,20 @@ function getSlicedQueryConfig(queryConfig, targetProjectPath, referenceProjectPa
|
|||
},
|
||||
},
|
||||
},
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc definition "projectCombo": referenceProject#version + searchTargetProject#version
|
||||
* @param {QueryConfig} slicedQConfig
|
||||
* @param {cfg} object
|
||||
* Definition "projectCombo": referenceProject#version + searchTargetProject#version
|
||||
* @param {AnalyzerQueryConfig} slicedQConfig
|
||||
* @param {{ gatherFilesConfig:GatherFilesConfig, gatherFilesConfigReference:GatherFilesConfig, skipCheckMatchCompatibility:boolean }} cfg
|
||||
*/
|
||||
async function handleAnalyzerForProjectCombo(slicedQConfig, cfg) {
|
||||
const queryResult = await QueryService.astSearch(slicedQConfig, {
|
||||
gatherFilesConfig: cfg.gatherFilesConfig,
|
||||
gatherFilesConfigReference: cfg.gatherFilesConfigReference,
|
||||
skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
|
||||
addSystemPathsInResult: cfg.addSystemPathsInResult,
|
||||
...slicedQConfig.analyzerConfig,
|
||||
});
|
||||
if (queryResult) {
|
||||
|
|
@ -73,7 +92,7 @@ async function handleAnalyzerForProjectCombo(slicedQConfig, cfg) {
|
|||
}
|
||||
|
||||
/**
|
||||
* @desc Here, we will match all our reference projects (exports) against all our search targets
|
||||
* Here, we will match all our reference projects (exports) against all our search targets
|
||||
* (imports).
|
||||
*
|
||||
* This is an expensive operation. Therefore, we allow caching.
|
||||
|
|
@ -88,16 +107,16 @@ async function handleAnalyzerForProjectCombo(slicedQConfig, cfg) {
|
|||
* All the json outputs can be aggregated in our dashboard and visually presented in
|
||||
* various ways.
|
||||
*
|
||||
* @param {QueryConfig} queryConfig
|
||||
* @param {ProvidenceConfig} cfg
|
||||
* @param {AnalyzerQueryConfig} queryConfig
|
||||
* @param {Partial<ProvidenceConfig>} cfg
|
||||
*/
|
||||
async function handleAnalyzer(queryConfig, cfg) {
|
||||
const queryResults = [];
|
||||
const { referenceProjectPaths, targetProjectPaths } = cfg;
|
||||
|
||||
await aForEach(targetProjectPaths, async searchTargetProject => {
|
||||
for (const searchTargetProject of targetProjectPaths) {
|
||||
if (referenceProjectPaths) {
|
||||
await aForEach(referenceProjectPaths, async ref => {
|
||||
for (const ref of referenceProjectPaths) {
|
||||
// Create shallow cfg copy with just current reference folder
|
||||
const slicedQueryConfig = getSlicedQueryConfig(queryConfig, searchTargetProject, ref);
|
||||
const queryResult = await handleAnalyzerForProjectCombo(slicedQueryConfig, cfg);
|
||||
|
|
@ -109,7 +128,7 @@ async function handleAnalyzer(queryConfig, cfg) {
|
|||
providenceConfig: cfg,
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
const slicedQueryConfig = getSlicedQueryConfig(queryConfig, searchTargetProject);
|
||||
const queryResult = await handleAnalyzerForProjectCombo(slicedQueryConfig, cfg);
|
||||
|
|
@ -122,7 +141,7 @@ async function handleAnalyzer(queryConfig, cfg) {
|
|||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
return queryResults;
|
||||
}
|
||||
|
||||
|
|
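The loop rewrite above (together with the removal of async-array-utils.js further down) swaps the custom aForEach helper for a plain for...of with await, keeping the same strictly sequential behaviour. A sketch of the equivalence with placeholder names:

// Sketch of the equivalence behind the loop rewrite above; handleOne is a placeholder.
async function handleAllSequentially(targetProjectPaths, handleOne) {
  const results = [];
  for (const target of targetProjectPaths) {
    // eslint-disable-next-line no-await-in-loop
    results.push(await handleOne(target)); // one project after another, in order
  }
  return results;
}

// handleAllSequentially(['/path/to/app-a', '/path/to/app-b'], async p => p.length);
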
@ -149,23 +168,16 @@ async function handleRegexSearch(queryConfig, cfg, inputData) {
|
|||
}
|
||||
|
||||
/**
|
||||
* @desc Creates a report with usage metrics, based on a queryConfig.
|
||||
* Creates a report with usage metrics, based on a queryConfig.
|
||||
*
|
||||
* @param {QueryConfig} queryConfig a query configuration object containing analyzerOptions.
|
||||
* @param {object} customConfig
|
||||
* @param {'ast'|'grep'} customConfig.queryMethod whether analyzer should be run or a grep should
|
||||
* be performed
|
||||
* @param {string[]} customConfig.targetProjectPaths search target projects. For instance
|
||||
* ['/path/to/app-a', '/path/to/app-b', ... '/path/to/app-z']
|
||||
* @param {string[]} [customConfig.referenceProjectPaths] reference projects. Needed for 'match
|
||||
* analyzers', having `requiresReference: true`. For instance
|
||||
* ['/path/to/lib1', '/path/to/lib2']
|
||||
* @param {GatherFilesConfig} [customConfig.gatherFilesConfig]
|
||||
* @param {boolean} [customConfig.report]
|
||||
* @param {boolean} [customConfig.debugEnabled]
|
||||
* @param {Partial<ProvidenceConfig>} customConfig
|
||||
*/
|
||||
async function providenceMain(queryConfig, customConfig) {
|
||||
const cfg = deepmerge(
|
||||
const tStart = performance.now();
|
||||
|
||||
const cfg = /** @type {ProvidenceConfig} */ (
|
||||
deepmerge(
|
||||
{
|
||||
queryMethod: 'grep',
|
||||
// This is a merge of all 'main entry projects'
|
||||
|
|
@ -181,8 +193,12 @@ async function providenceMain(queryConfig, customConfig) {
|
|||
debugEnabled: false,
|
||||
writeLogFile: false,
|
||||
skipCheckMatchCompatibility: false,
|
||||
measurePerformance: false,
|
||||
/** Allows to navigate to source file in code editor */
|
||||
addSystemPathsInResult: false,
|
||||
},
|
||||
customConfig,
|
||||
)
|
||||
);
|
||||
|
||||
if (cfg.debugEnabled) {
|
||||
|
|
@ -215,6 +231,12 @@ async function providenceMain(queryConfig, customConfig) {
|
|||
LogService.writeLogFile();
|
||||
}
|
||||
|
||||
const tEnd = performance.now();
|
||||
|
||||
if (cfg.measurePerformance) {
|
||||
// eslint-disable-next-line no-console
|
||||
console.log(`completed in ${((tEnd - tStart) / 1000).toFixed(2)} seconds`);
|
||||
}
|
||||
return queryResults;
|
||||
}
|
||||
|
||||
|
|
|
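The new measurePerformance flag above brackets the whole providence run with performance.now() timestamps. The same perf_hooks pattern in isolation, wrapped into a reusable helper purely for illustration:

// The perf_hooks timing pattern used above, as a standalone helper.
const { performance } = require('perf_hooks');

async function timed(fn, { measurePerformance = false } = {}) {
  const tStart = performance.now();
  const result = await fn();
  const tEnd = performance.now();
  if (measurePerformance) {
    // eslint-disable-next-line no-console
    console.log(`completed in ${((tEnd - tStart) / 1000).toFixed(2)} seconds`);
  }
  return result;
}

// timed(() => Promise.resolve(42), { measurePerformance: true });
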
|||
|
|
@ -1,136 +0,0 @@
|
|||
// @ts-nocheck
|
||||
const {
|
||||
createProgram,
|
||||
getPreEmitDiagnostics,
|
||||
ModuleKind,
|
||||
ModuleResolutionKind,
|
||||
ScriptTarget,
|
||||
} = require('typescript');
|
||||
const babelParser = require('@babel/parser');
|
||||
// @ts-expect-error
|
||||
const esModuleLexer = require('es-module-lexer');
|
||||
const parse5 = require('parse5');
|
||||
const traverseHtml = require('../utils/traverse-html.js');
|
||||
const { LogService } = require('./LogService.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
class AstService {
|
||||
/**
|
||||
* @deprecated for simplicity/maintainability, only allow Babel for js
|
||||
* Compiles an array of file paths using Typescript.
|
||||
* @param {string[]} filePaths
|
||||
* @param {CompilerOptions} options
|
||||
*/
|
||||
static _getTypescriptAst(filePaths, options) {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
filePaths = Array.isArray(filePaths) ? filePaths : [filePaths];
|
||||
|
||||
const defaultOptions = {
|
||||
noEmitOnError: false,
|
||||
allowJs: true,
|
||||
experimentalDecorators: true,
|
||||
target: ScriptTarget.Latest,
|
||||
downlevelIteration: true,
|
||||
module: ModuleKind.ESNext,
|
||||
// module: ModuleKind.CommonJS,
|
||||
// lib: ["esnext", "dom"],
|
||||
strictNullChecks: true,
|
||||
moduleResolution: ModuleResolutionKind.NodeJs,
|
||||
esModuleInterop: true,
|
||||
noEmit: true,
|
||||
allowSyntheticDefaultImports: true,
|
||||
allowUnreachableCode: true,
|
||||
allowUnusedLabels: true,
|
||||
skipLibCheck: true,
|
||||
isolatedModules: true,
|
||||
};
|
||||
|
||||
const program = createProgram(filePaths, options || defaultOptions);
|
||||
const diagnostics = getPreEmitDiagnostics(program);
|
||||
const files = program.getSourceFiles().filter(sf => filePaths.includes(sf.fileName));
|
||||
return { diagnostics, program, files };
|
||||
}
|
||||
|
||||
/**
|
||||
* Compiles an array of file paths using Babel.
|
||||
* @param {string} code
|
||||
*/
|
||||
static _getBabelAst(code) {
|
||||
const ast = babelParser.parse(code, {
|
||||
sourceType: 'module',
|
||||
plugins: [
|
||||
'importMeta',
|
||||
'dynamicImport',
|
||||
'classProperties',
|
||||
'exportDefaultFrom',
|
||||
'importAssertions',
|
||||
],
|
||||
});
|
||||
return ast;
|
||||
}
|
||||
|
||||
/**
|
||||
* Combines all script tags as if it were one js file.
|
||||
* @param {string} htmlCode
|
||||
*/
|
||||
static getScriptsFromHtml(htmlCode) {
|
||||
const ast = parse5.parseFragment(htmlCode);
|
||||
const scripts = [];
|
||||
traverseHtml(ast, {
|
||||
script(path) {
|
||||
const code = path.node.childNodes[0] ? path.node.childNodes[0].value : '';
|
||||
scripts.push(code);
|
||||
},
|
||||
});
|
||||
return scripts;
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated for simplicity/maintainability, only allow Babel for js
|
||||
* @param {string} code
|
||||
*/
|
||||
static async _getEsModuleLexerOutput(code) {
|
||||
return esModuleLexer.parse(code);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the desired AST
|
||||
* Why would we support multiple ASTs/parsers?
|
||||
* - 'babel' is our default tool for analysis. It's the most versatile and popular tool, it's
|
||||
* close to the EStree standard (other than Typescript) and a lot of plugins and resources can
|
||||
* be found online. It also allows to parse Typescript and spec proposals.
|
||||
* - 'typescript' (deprecated) is needed for some valuable third party tooling, like web-component-analyzer
|
||||
* - 'es-module-lexer' (deprecated) is needed for the dedicated task of finding module imports; it is way
|
||||
* quicker than a full fledged AST parser
|
||||
* @param { 'babel' } astType
|
||||
* @param { {filePath: PathFromSystemRoot} } [options]
|
||||
*/
|
||||
// eslint-disable-next-line consistent-return
|
||||
static getAst(code, astType, { filePath } = {}) {
|
||||
// eslint-disable-next-line default-case
|
||||
try {
|
||||
// eslint-disable-next-line default-case
|
||||
switch (astType) {
|
||||
case 'babel':
|
||||
return this._getBabelAst(code);
|
||||
case 'typescript':
|
||||
LogService.warn(`
|
||||
Please notice "typescript" support is deprecated.
|
||||
For parsing javascript, "babel" is recommended.`);
|
||||
return this._getTypescriptAst(code);
|
||||
case 'es-module-lexer':
|
||||
LogService.warn(`
|
||||
Please notice "es-module-lexer" support is deprecated.
|
||||
For parsing javascript, "babel" is recommended.`);
|
||||
return this._getEsModuleLexerOutput(code);
|
||||
}
|
||||
} catch (e) {
|
||||
LogService.error(`Error when parsing "${filePath}":/n${e}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = { AstService };
|
||||
|
|
@ -8,6 +8,7 @@ import {
|
|||
Project,
|
||||
GatherFilesConfig,
|
||||
SpecifierName,
|
||||
QueryOutput,
|
||||
} from './index';
|
||||
|
||||
/**
|
||||
|
|
@ -27,21 +28,23 @@ export interface Meta {
|
|||
export interface AnalyzerMeta {
|
||||
name: AnalyzerName;
|
||||
requiredAst: RequiredAst;
|
||||
/** a unique hash based on target, reference and configuration */
|
||||
/* a unique hash based on target, reference and configuration */
|
||||
identifier: ImportOrExportId;
|
||||
/** target project meta object */
|
||||
/* target project meta object */
|
||||
targetProject: Project;
|
||||
/** reference project meta object */
|
||||
/* reference project meta object */
|
||||
referenceProject?: Project;
|
||||
/** the configuration used for this particular analyzer run */
|
||||
/* the configuration used for this particular analyzer run */
|
||||
configuration: object;
|
||||
/* whether it was cached in file system or not */
|
||||
__fromCache?: boolean;
|
||||
}
|
||||
|
||||
export interface AnalyzerQueryResult extends QueryResult {
|
||||
/** meta info object */
|
||||
meta: Meta;
|
||||
/** array of AST traversal output, per project file */
|
||||
queryOutput: any[];
|
||||
queryOutput: QueryOutput;
|
||||
}
|
||||
|
||||
export interface FindAnalyzerQueryResult extends AnalyzerQueryResult {
|
||||
|
|
@ -58,7 +61,9 @@ export interface FindAnalyzerOutputFile {
|
|||
export interface AnalyzerConfig {
|
||||
/** search target project path */
|
||||
targetProjectPath: PathFromSystemRoot;
|
||||
gatherFilesConfig: GatherFilesConfig;
|
||||
gatherFilesConfig?: GatherFilesConfig;
|
||||
gatherFilesConfigReference?: GatherFilesConfig;
|
||||
skipCheckMatchCompatibility?: boolean;
|
||||
}
|
||||
|
||||
export interface MatchAnalyzerConfig extends AnalyzerConfig {
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
import { AnalyzerName, Feature, AnalyzerConfig, PathRelativeFromProjectRoot } from './index';
|
||||
import { Analyzer } from '../../analyzers/helpers/Analyzer';
|
||||
export { Analyzer } from '../../analyzers/helpers/Analyzer';
|
||||
import { Analyzer } from '../../core/Analyzer';
|
||||
export { Analyzer } from '../../core/Analyzer';
|
||||
|
||||
/**
|
||||
* Type of the query. Currently only "ast-analyzer" supported
|
||||
|
|
@ -38,7 +38,7 @@ export interface QueryOutputEntry {
|
|||
file: PathRelativeFromProjectRoot;
|
||||
}
|
||||
|
||||
export type QueryOutput = QueryOutputEntry[];
|
||||
export type QueryOutput = QueryOutputEntry[] | '[no-dependency]' | '[no-matched-version]';
|
||||
|
||||
export interface QueryResult {
|
||||
queryOutput: QueryOutput;
|
||||
|
|
|
|||
|
|
@ -1,3 +1,5 @@
|
|||
import { File } from '@babel/types';
|
||||
|
||||
/**
|
||||
* The name of a variable in a local context. Examples:
|
||||
* - 'b': (`import {a as b } from 'c';`)
|
||||
|
|
@ -140,6 +142,11 @@ export interface ProjectInputDataWithMeta {
|
|||
project: Project;
|
||||
entries: { file: PathRelativeFromProjectRoot; context: { code: string } }[];
|
||||
}
|
||||
|
||||
export interface ProjectInputDataWithAstMeta extends ProjectInputDataWithMeta {
|
||||
entries: { file: PathRelativeFromProjectRoot; ast: File; context: { code: string } }[];
|
||||
}
|
||||
|
||||
/**
|
||||
* See: https://www.npmjs.com/package/anymatch
|
||||
* Allows negations as well. See: https://www.npmjs.com/package/is-negated-glob
|
||||
|
|
@ -149,3 +156,33 @@ export interface ProjectInputDataWithMeta {
|
|||
* - 'scripts/vendor/react.js'
|
||||
*/
|
||||
export type AnyMatchString = string;
|
||||
|
||||
export type ProvidenceConfig = {
|
||||
/* Whether analyzer should be run or a grep should be performed */
|
||||
queryMethod: 'ast' | 'grep';
|
||||
/* Search target projects. For instance ['/path/to/app-a', '/path/to/app-b', ... '/path/to/app-z'] */
|
||||
targetProjectPaths: PathFromSystemRoot[];
|
||||
/* Reference projects. Needed for 'match analyzers', having `requiresReference: true`. For instance ['/path/to/lib1', '/path/to/lib2'] */
|
||||
referenceProjectPaths: PathFromSystemRoot[];
|
||||
/* When targetProjectPaths are dependencies of other projects (their 'roots') */
|
||||
targetProjectRootPaths: PathFromSystemRoot[];
|
||||
gatherFilesConfig: GatherFilesConfig;
|
||||
gatherFilesConfigReference: GatherFilesConfig;
|
||||
report: boolean;
|
||||
debugEnabled: boolean;
|
||||
measurePerformance: boolean;
|
||||
writeLogFile: boolean;
|
||||
skipCheckMatchCompatibility: boolean;
|
||||
};
|
||||
|
||||
/**
|
||||
* Representation of package.json with only those keys relevant for Providence
|
||||
*/
|
||||
export type PackageJson = {
|
||||
name: string;
|
||||
version: string;
|
||||
files?: PathRelativeFromProjectRoot[];
|
||||
dependencies?: { [dependency: string]: string };
|
||||
devDependencies?: { [dependency: string]: string };
|
||||
workspaces?: string[];
|
||||
};
|
||||
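For reference, a hedged sketch of a partial config object that satisfies the `ProvidenceConfig` shape above. The paths, project names and the type import path are illustrative assumptions:

```js
/** @type {Partial<import('./src/program/types/core').ProvidenceConfig>} */
export default {
  queryMethod: 'ast',
  targetProjectPaths: ['/path/to/app-a', '/path/to/app-b'],
  referenceProjectPaths: ['/path/to/lib1'],
  gatherFilesConfig: { extensions: ['.js', '.html'] },
  report: true,
  debugEnabled: false,
};
```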
|
|
|
|||
3
packages-node/providence-analytics/src/program/types/index.d.ts
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
export * from './core';
|
||||
export * from './analyzers';
|
||||
export * from './utils';
|
||||
1
packages-node/providence-analytics/src/program/types/utils/memoize.d.ts
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
export type MemoizeFunction<T> = (fn: T, storage?: object) => T;
|
||||
|
|
@ -1,41 +0,0 @@
|
|||
/**
|
||||
* Readable way to do an async forEach
|
||||
* Since predictability matters, all array items will be handled in a queue,
|
||||
* one after another
|
||||
* @param {any[]} array
|
||||
* @param {function} callback
|
||||
*/
|
||||
async function aForEach(array, callback) {
|
||||
for (let i = 0; i < array.length; i += 1) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
await callback(array[i], i);
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Readable way to do an async forEach
|
||||
* If predictability does not matter, this method will traverse array items concurrently,
|
||||
* leading to a better performance
|
||||
* @param {any[]} array
|
||||
* @param {(value:any, index:number) => {}} callback
|
||||
*/
|
||||
async function aForEachNonSequential(array, callback) {
|
||||
return Promise.all(array.map(callback));
|
||||
}
|
||||
/**
|
||||
* Readable way to do an async map
|
||||
* Since predictability is crucial for a map, all array items will be handled in a queue,
|
||||
* one after anotoher
|
||||
* @param {Array<any>} array
|
||||
* @param {(param:any, i:number) => any} callback
|
||||
*/
|
||||
async function aMap(array, callback) {
|
||||
const mappedResults = [];
|
||||
for (let i = 0; i < array.length; i += 1) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const resolvedCb = await callback(array[i], i);
|
||||
mappedResults.push(resolvedCb);
|
||||
}
|
||||
return mappedResults;
|
||||
}
|
||||
|
||||
module.exports = { aForEach, aMap, aForEachNonSequential };
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
import { dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
|
||||
/**
|
||||
* @param {string} importMetaUrl should be import.meta.url
|
||||
*/
|
||||
export function getCurrentDir(importMetaUrl) {
|
||||
return dirname(fileURLToPath(importMetaUrl));
|
||||
}
|
||||
|
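A one-line usage sketch of this helper in an ES module; the `fixtures` subfolder is an illustrative assumption:

```js
import pathLib from 'path';
import { getCurrentDir } from './get-current-dir.mjs';

// ESM stand-in for the CommonJS __dirname
const fixturesDir = pathLib.join(getCurrentDir(import.meta.url), 'fixtures');
```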
|
@ -1,7 +1,7 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const babelTraversePkg = require('@babel/traverse');
|
||||
const { AstService } = require('../services/AstService.js');
|
||||
const { AstService } = require('../core/AstService.js');
|
||||
const { trackDownIdentifier } = require('../analyzers/helpers/track-down-identifier.js');
|
||||
const { toPosixPath } = require('./to-posix-path.js');
|
||||
|
||||
|
|
|
|||
|
|
@ -1,23 +0,0 @@
|
|||
// import htm from 'htm';
|
||||
const htm = require('htm');
|
||||
|
||||
function convertToObj(type, props, ...children) {
|
||||
return { type, props, children };
|
||||
}
|
||||
|
||||
/**
|
||||
* @desc
|
||||
* Used for parsing lit-html templates inside ASTs
|
||||
* @returns {type, props, children}
|
||||
*
|
||||
* @example
|
||||
* litToObj`<h1 .id=${'hello'}>Hello world!</h1>`;
|
||||
* // {
|
||||
* // type: 'h1',
|
||||
* // props: { .id: 'hello' },
|
||||
* // children: ['Hello world!']
|
||||
* // }
|
||||
*/
|
||||
const litToObj = htm.bind(convertToObj);
|
||||
|
||||
module.exports = litToObj;
|
||||
|
|
@ -1,9 +1,17 @@
|
|||
const { InputDataService } = require('../services/InputDataService.js');
|
||||
const memoizeConfig = {
|
||||
isCacheDisabled: false,
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {object|any[]|string} arg
|
||||
*/
|
||||
function isObject(arg) {
|
||||
return !Array.isArray(arg) && typeof arg === 'object';
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {object|any[]|string} arg
|
||||
*/
|
||||
function createCachableArg(arg) {
|
||||
if (isObject(arg)) {
|
||||
try {
|
||||
|
|
@ -17,7 +25,7 @@ function createCachableArg(arg) {
|
|||
|
||||
/**
|
||||
* @param {function} functionToMemoize
|
||||
* @param {{ storage:object; serializeObjects: boolean }} [opts]
|
||||
* @param {{ storage?:object; serializeObjects?: boolean }} opts
|
||||
*/
|
||||
function memoize(functionToMemoize, { storage = {}, serializeObjects = false } = {}) {
|
||||
// eslint-disable-next-line func-names
|
||||
|
|
@ -27,7 +35,7 @@ function memoize(functionToMemoize, { storage = {}, serializeObjects = false } =
|
|||
const cachableArgs = !serializeObjects ? args : args.map(createCachableArg);
|
||||
// Allow disabling of cache for testing purposes
|
||||
// @ts-ignore
|
||||
if (!InputDataService.cacheDisabled && cachableArgs in storage) {
|
||||
if (!memoizeConfig.isCacheDisabled && cachableArgs in storage) {
|
||||
// @ts-ignore
|
||||
return storage[cachableArgs];
|
||||
}
|
||||
|
|
@ -42,4 +50,5 @@ function memoize(functionToMemoize, { storage = {}, serializeObjects = false } =
|
|||
|
||||
module.exports = {
|
||||
memoize,
|
||||
memoizeConfig,
|
||||
};
|
||||
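A usage sketch of `memoize` and the new `memoizeConfig` switch; the wrapped function below is hypothetical:

```js
const { memoize, memoizeConfig } = require('./memoize.js');

// Hypothetical expensive lookup, cached per (serialized) argument combination.
const resolveCached = memoize(
  function resolveSpecifier(specifier, opts) {
    /* ...expensive work... */
    return `${opts.basePath}/${specifier}`;
  },
  { serializeObjects: true },
);

resolveCached('@lion/ui', { basePath: '/repo' }); // computed
resolveCached('@lion/ui', { basePath: '/repo' }); // returned from storage

// Tests can now bypass the cache without reaching into InputDataService:
memoizeConfig.isCacheDisabled = true;
```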
|
|
|
|||
|
|
@ -5,7 +5,7 @@ import { pathToFileURL } from 'url';
|
|||
/**
|
||||
* @returns {Promise<object|null>}
|
||||
*/
|
||||
export async function getProvidenceConf() {
|
||||
async function getConf() {
|
||||
const confPathWithoutExtension = `${pathLib.join(process.cwd(), 'providence.conf')}`;
|
||||
let confPathFound;
|
||||
try {
|
||||
|
|
@ -33,6 +33,8 @@ export async function getProvidenceConf() {
|
|||
}
|
||||
|
||||
const providenceConfRaw = fs.readFileSync(confPathFound, 'utf8');
|
||||
|
||||
return { providenceConf, providenceConfRaw };
|
||||
}
|
||||
|
||||
// Wrapped in object for stubbing
|
||||
export const providenceConfUtil = { getConf };
|
||||
|
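The `// Wrapped in object for stubbing` comment is the point of this refactor: a bare named export of `getConf` cannot be replaced from another ES module, but a method on an exported object can. A hedged sketch of the pattern in a test, mirroring the dashboard server spec further down (relative paths are illustrative):

```js
import sinon from 'sinon';
import { providenceConfUtil } from '../src/program/utils/providence-conf-util.mjs';

const getConfStub = sinon
  .stub(providenceConfUtil, 'getConf')
  .resolves({ providenceConf: {}, providenceConfRaw: '' });

// ...exercise code that calls providenceConfUtil.getConf()...

getConfStub.restore();
```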
|
@ -1,7 +1,7 @@
|
|||
const { toPosixPath } = require('./to-posix-path.js');
|
||||
|
||||
/**
|
||||
* @desc determines for a source path of an import- or export specifier, whether
|
||||
* Determines for a source path of an import- or export specifier, whether
|
||||
* it is relative (an internal import/export) or absolute (external)
|
||||
* - relative: './helpers', './helpers.js', '../helpers.js'
|
||||
* - not relative: '@open-wc/helpers', 'project-x/helpers'
|
||||
|
|
@ -13,7 +13,7 @@ function isRelativeSourcePath(source) {
|
|||
}
|
||||
|
||||
/**
|
||||
* @desc Simple helper to make code a bit more readable.
|
||||
* Simple helper to make code a bit more readable.
|
||||
* - from '/path/to/repo/my/file.js';
|
||||
* - to './my/file.js'
|
||||
* @param {string} fullPath like '/path/to/repo/my/file.js'
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@
|
|||
|
||||
const pathLib = require('path');
|
||||
const { nodeResolve } = require('@rollup/plugin-node-resolve');
|
||||
const { LogService } = require('../services/LogService.js');
|
||||
const { LogService } = require('../core/LogService.js');
|
||||
const { memoize } = require('./memoize.js');
|
||||
const { toPosixPath } = require('./to-posix-path.js');
|
||||
|
||||
|
|
@ -29,13 +29,13 @@ const fakePluginContext = {
|
|||
},
|
||||
};
|
||||
|
||||
async function resolveImportPath(importee, importer, opts = {}) {
|
||||
async function resolveImportPath(importee, importer, opts) {
|
||||
const rollupResolve = nodeResolve({
|
||||
rootDir: pathLib.dirname(importer),
|
||||
// allow resolving polyfills for nodejs libs
|
||||
preferBuiltins: false,
|
||||
// extensions: ['.mjs', '.js', '.json', '.node'],
|
||||
...opts,
|
||||
...(opts || {}),
|
||||
});
|
||||
|
||||
const preserveSymlinks =
|
||||
|
|
@ -44,10 +44,14 @@ async function resolveImportPath(importee, importer, opts = {}) {
|
|||
rollupResolve.buildStart.call(fakePluginContext, { preserveSymlinks });
|
||||
|
||||
// @ts-ignore
|
||||
const result = await rollupResolve.resolveId.call(fakePluginContext, importee, importer, {});
|
||||
const result = await rollupResolve.resolveId.handler.call(
|
||||
fakePluginContext,
|
||||
importee,
|
||||
importer,
|
||||
{},
|
||||
);
|
||||
// @ts-ignore
|
||||
if (!result || !result.id) {
|
||||
// throw new Error(`importee ${importee} not found in filesystem.`);
|
||||
if (!result?.id) {
|
||||
LogService.warn(`importee ${importee} not found in filesystem for importer '${importer}'.`);
|
||||
return null;
|
||||
}
|
||||
|
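A hedged usage sketch; the export name and return shape are assumptions based on this hunk (only the `null`-on-miss behaviour is shown above):

```js
const { resolveImportPath } = require('./resolve-import-path.js'); // export shape assumed

(async () => {
  // Resolve a bare specifier relative to the importing file (paths are illustrative).
  const resolved = await resolveImportPath('@lion/ui', '/repo/packages/app/src/main.js');
  if (resolved === null) {
    // Not found on disk: the updated code warns via LogService instead of throwing.
  }
})();
```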
|
|
|||
|
|
@ -0,0 +1,29 @@
|
|||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
// @ts-ignore
|
||||
const mockFs = require('mock-fs');
|
||||
// @ts-ignore
|
||||
const mockRequire = require('mock-require');
|
||||
|
||||
/**
|
||||
* @param {object} obj
|
||||
*/
|
||||
function mockFsAndRequire(obj) {
|
||||
mockFs(obj);
|
||||
|
||||
// Object.entries(obj).forEach(([key, value]) => {
|
||||
// if (key.endsWith('.json')) {
|
||||
// mockRequire(key, JSON.parse(value));
|
||||
// } else {
|
||||
// mockRequire(key, value);
|
||||
// }
|
||||
// });
|
||||
}
|
||||
|
||||
mockFsAndRequire.restore = () => {
|
||||
mockFs.restore();
|
||||
mockRequire.stopAll();
|
||||
};
|
||||
|
||||
module.exports = {
|
||||
mockFsAndRequire,
|
||||
};
|
||||
|
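An illustrative setup inside a mocha spec using the new helper; the mocked file contents are made up:

```js
const { mockFsAndRequire } = require('../../test-helpers/mock-fs-and-require.js');

beforeEach(() => {
  mockFsAndRequire({
    '/mocked/path/example-project/package.json':
      '{ "name": "example-project", "version": "1.0.0" }',
    '/mocked/path/example-project/index.js': 'export default 1;',
  });
});

afterEach(() => {
  mockFsAndRequire.restore();
});
```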
|
@ -1,4 +1,4 @@
|
|||
const { LogService } = require('../src/program/services/LogService.js');
|
||||
const { LogService } = require('../src/program/core/LogService.js');
|
||||
|
||||
const originalWarn = LogService.warn;
|
||||
function suppressWarningLogs() {
|
||||
|
|
|
|||
|
|
@ -1,24 +1,8 @@
|
|||
const path = require('path');
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
const mockFs = require('mock-fs');
|
||||
const mockRequire = require('mock-require');
|
||||
const { mockFsAndRequire: mock } = require('./mock-fs-and-require.js');
|
||||
|
||||
function mock(obj) {
|
||||
mockFs(obj);
|
||||
|
||||
Object.entries(obj).forEach(([key, value]) => {
|
||||
if (key.endsWith('.json')) {
|
||||
mockRequire(key, JSON.parse(value));
|
||||
} else {
|
||||
mockRequire(key, value);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
mock.restore = () => {
|
||||
mockFs.restore();
|
||||
mockRequire.stopAll();
|
||||
};
|
||||
|
||||
/**
|
||||
* Makes sure that, whenever the main program (providence) calls
|
||||
|
|
@ -88,7 +72,7 @@ function getMockObjectForProject(files, cfg = {}, existingMock = {}) {
|
|||
*/
|
||||
function mockProject(files, cfg = {}, existingMock = {}) {
|
||||
const obj = getMockObjectForProject(files, cfg, existingMock);
|
||||
mockFs(obj);
|
||||
mock(obj);
|
||||
return obj;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -1,16 +1,26 @@
|
|||
const { ReportService } = require('../src/program/services/ReportService.js');
|
||||
const { ReportService } = require('../src/program/core/ReportService.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../src/program/types/core').QueryResult} QueryResult
|
||||
*/
|
||||
|
||||
const originalWriteToJson = ReportService.writeToJson;
|
||||
|
||||
/**
|
||||
* @param {QueryResult[]} queryResults
|
||||
*/
|
||||
function mockWriteToJson(queryResults) {
|
||||
ReportService.writeToJson = queryResult => {
|
||||
queryResults.push(queryResult);
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {QueryResult[]} [queryResults]
|
||||
*/
|
||||
function restoreWriteToJson(queryResults) {
|
||||
ReportService.writeToJson = originalWriteToJson;
|
||||
while (queryResults && queryResults.length) {
|
||||
while (queryResults?.length) {
|
||||
queryResults.pop();
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@
|
|||
"analyzerMeta": {
|
||||
"name": "find-exports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "exporting-ref-project_1.0.0__309114983",
|
||||
"identifier": "exporting-ref-project_1.0.0__-42206859",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "exporting-ref-project",
|
||||
|
|
@ -13,7 +13,9 @@
|
|||
},
|
||||
"configuration": {
|
||||
"skipFileImports": false,
|
||||
"gatherFilesConfig": {}
|
||||
"gatherFilesConfig": {},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"addSystemPathsInResult": false
|
||||
}
|
||||
}
|
||||
},
|
||||
|
|
@ -25,13 +27,13 @@
|
|||
"exportSpecifiers": [
|
||||
"[default]"
|
||||
],
|
||||
"source": "refConstImported",
|
||||
"normalizedSource": "refConstImported",
|
||||
"source": "./ref-src/core.js",
|
||||
"normalizedSource": "./ref-src/core.js",
|
||||
"rootFileMap": [
|
||||
{
|
||||
"currentFileSpecifier": "[default]",
|
||||
"rootFile": {
|
||||
"file": "refConstImported",
|
||||
"file": "./ref-src/core.js",
|
||||
"specifier": "[default]"
|
||||
}
|
||||
}
|
||||
|
|
@ -85,7 +87,6 @@
|
|||
"notImported"
|
||||
],
|
||||
"localMap": [],
|
||||
"source": null,
|
||||
"rootFileMap": [
|
||||
{
|
||||
"currentFileSpecifier": "notImported",
|
||||
|
|
@ -127,7 +128,6 @@
|
|||
"RefClass"
|
||||
],
|
||||
"localMap": [],
|
||||
"source": null,
|
||||
"rootFileMap": [
|
||||
{
|
||||
"currentFileSpecifier": "RefClass",
|
||||
|
|
@ -170,7 +170,6 @@
|
|||
"resolvePathCorrect"
|
||||
],
|
||||
"localMap": [],
|
||||
"source": null,
|
||||
"rootFileMap": [
|
||||
{
|
||||
"currentFileSpecifier": "resolvePathCorrect",
|
||||
|
|
|
|||
6
packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/dep-a/README.md
generated
vendored
Normal file
|
|
@ -0,0 +1,6 @@
|
|||
Has an export maps format that is deprecated as of Node 16:
|
||||
```
|
||||
"exports": {
|
||||
"./src/": "./src/"
|
||||
}
|
||||
```
|
||||
7
packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/dep-a/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"name": "dep-a",
|
||||
"version": "2.0.0",
|
||||
"exports": {
|
||||
"./src/": "./src/"
|
||||
}
|
||||
}
|
||||
0
packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/dep-a/src/src.js
generated
vendored
Normal file
7
packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/my-dep-b/package.json
generated
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
{
|
||||
"name": "my-dep-b",
|
||||
"version": "1.0.0",
|
||||
"exports": {
|
||||
"./src/*": "./src/*"
|
||||
}
|
||||
}
|
||||
0
packages-node/providence-analytics/test-helpers/project-mocks/importing-target-project/node_modules/my-dep-b/src/src.js
generated
vendored
Normal file
|
|
@ -0,0 +1,50 @@
|
|||
const { InputDataService } = require('../src/program/core/InputDataService.js');
|
||||
const { QueryService } = require('../src/program/core/QueryService.js');
|
||||
const { restoreMockedProjects } = require('./mock-project-helpers.js');
|
||||
const { mockWriteToJson, restoreWriteToJson } = require('./mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('./mock-log-service-helpers.js');
|
||||
const { memoizeConfig } = require('../src/program/utils/memoize.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../src/program/types/core').QueryResult} QueryResult
|
||||
* @returns {QueryResult[]}
|
||||
*/
|
||||
|
||||
function setupAnalyzerTest() {
|
||||
/** @type {QueryResult[]} */
|
||||
const queryResults = [];
|
||||
|
||||
const originalReferenceProjectPaths = InputDataService.referenceProjectPaths;
|
||||
const cacheDisabledQInitialValue = QueryService.cacheDisabled;
|
||||
const cacheDisabledIInitialValue = memoizeConfig.isCacheDisabled;
|
||||
|
||||
before(() => {
|
||||
QueryService.cacheDisabled = true;
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
suppressNonCriticalLogs();
|
||||
});
|
||||
|
||||
after(() => {
|
||||
QueryService.cacheDisabled = cacheDisabledQInitialValue;
|
||||
memoizeConfig.isCacheDisabled = cacheDisabledIInitialValue;
|
||||
restoreSuppressNonCriticalLogs();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
InputDataService.referenceProjectPaths = [];
|
||||
mockWriteToJson(queryResults);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
InputDataService.referenceProjectPaths = originalReferenceProjectPaths;
|
||||
restoreWriteToJson(queryResults);
|
||||
restoreMockedProjects();
|
||||
});
|
||||
|
||||
return queryResults;
|
||||
}
|
||||
|
||||
module.exports = { setupAnalyzerTest };
|
||||
|
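A hedged sketch of how an analyzer spec would consume this helper; the analyzer under test and the assertion are illustrative:

```js
const { expect } = require('chai');
const { setupAnalyzerTest } = require('../../test-helpers/setup-analyzer-test.js');

describe('find-imports analyzer (illustrative)', () => {
  // Registers the before/after/beforeEach/afterEach hooks above and returns the
  // shared array that mockWriteToJson pushes every QueryResult into.
  const queryResults = setupAnalyzerTest();

  it('collects one QueryResult per providence run', async () => {
    // ...run providence against a mocked project here...
    expect(queryResults).to.be.an('array');
  });
});
```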
|
@ -1,4 +1,9 @@
|
|||
const { Analyzer } = require('../../src/program/analyzers/helpers/Analyzer.js');
|
||||
const { Analyzer } = require('../../src/program/core/Analyzer.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('@babel/types').File} File
|
||||
* @typedef {import('../../src/program/types/core').QueryOutputEntry} QueryOutputEntry
|
||||
*/
|
||||
|
||||
/**
|
||||
* This file outlines the minimum required functionality for an analyzer.
|
||||
|
|
@ -23,8 +28,7 @@ const options = {
|
|||
* corresponds to one file.
|
||||
* The contents of this function should be designed in such a way that they
|
||||
* can be directly pasted and edited in https://astexplorer.net/
|
||||
* @param {BabelAST} ast
|
||||
* @returns {TransformedEntry}
|
||||
* @param {File} ast
|
||||
*/
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
function myAnalyzerPerAstEntry(ast) {
|
||||
|
|
@ -36,22 +40,9 @@ function myAnalyzerPerAstEntry(ast) {
|
|||
return transformedEntryResult;
|
||||
}
|
||||
|
||||
class MyAnalyzer extends Analyzer {
|
||||
constructor() {
|
||||
super();
|
||||
/**
|
||||
* This must match with the name in file-system (will be used for reporting)
|
||||
*/
|
||||
this.name = 'my-analyzer';
|
||||
/**
|
||||
* The ast format that the execute function expects
|
||||
* Compatible with formats supported by AstService.getAst()
|
||||
*/
|
||||
this.requiredAst = 'babel';
|
||||
/**
|
||||
* Not all analyzers require a references. Those that do, (usually 'match analyzers'),
|
||||
* must explicitly state so with `requiresReference: true`
|
||||
*/
|
||||
class DummyAnalyzer extends Analyzer {
|
||||
static get analyzerName() {
|
||||
return 'dummy-analyzer';
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -91,7 +82,6 @@ class MyAnalyzer extends Analyzer {
|
|||
|
||||
return { result: transformedEntryResult, meta };
|
||||
});
|
||||
|
||||
// (optional): Post processors on TransformedQueryResult
|
||||
if (cfg.optionB) {
|
||||
// Run your QueryResult transformation based on option B
|
||||
|
|
@ -104,4 +94,4 @@ class MyAnalyzer extends Analyzer {
|
|||
}
|
||||
}
|
||||
|
||||
module.exports = MyAnalyzer;
|
||||
module.exports = { DummyAnalyzer };
|
||||
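A hedged sketch of wiring a custom analyzer like this into a run; the `QueryService`/`providence` call shapes are inferred from the CLI tests below and may differ for custom analyzers:

```js
const { QueryService } = require('../../src/program/core/QueryService.js');
const providenceModule = require('../../src/program/providence.js');

(async () => {
  // 'dummy-analyzer' matches DummyAnalyzer.analyzerName; the config object is illustrative.
  const queryConfig = QueryService.getQueryConfigFromAnalyzer('dummy-analyzer', { optionA: true });
  await providenceModule.providence(queryConfig, {
    targetProjectPaths: ['/path/to/target-project'],
  });
})();
```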
30
packages-node/providence-analytics/test-node/cli/cli.e2e.mjs
Normal file
|
|
@ -0,0 +1,30 @@
|
|||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
import pathLib from 'path';
|
||||
import { expect } from 'chai';
|
||||
import { appendProjectDependencyPaths } from '../../src/cli/cli-helpers.js';
|
||||
import { toPosixPath } from '../../src/program/utils/to-posix-path.js';
|
||||
import { getCurrentDir } from '../../src/program/utils/get-current-dir.mjs';
|
||||
|
||||
/**
|
||||
* These tests are added on top of unit tests. See:
|
||||
* - https://github.com/ing-bank/lion/issues/1565
|
||||
* - https://github.com/ing-bank/lion/issues/1564
|
||||
*/
|
||||
describe('CLI helpers against filesystem', () => {
|
||||
describe('appendProjectDependencyPaths', () => {
|
||||
it('allows a regex filter', async () => {
|
||||
const targetFilePath = toPosixPath(
|
||||
pathLib.resolve(
|
||||
getCurrentDir(import.meta.url),
|
||||
'../../test-helpers/project-mocks/importing-target-project',
|
||||
),
|
||||
);
|
||||
const result = await appendProjectDependencyPaths([targetFilePath], '/^dep-/');
|
||||
expect(result).to.eql([
|
||||
`${targetFilePath}/node_modules/dep-a`,
|
||||
// in windows, it should not add `${targetFilePath}/node_modules/my-dep-b`,
|
||||
targetFilePath,
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,37 +1,48 @@
|
|||
const sinon = require('sinon');
|
||||
const pathLib = require('path');
|
||||
const { expect } = require('chai');
|
||||
const commander = require('commander');
|
||||
const {
|
||||
/* eslint-disable no-unused-expressions */
|
||||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
import sinon from 'sinon';
|
||||
import pathLib from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { expect } from 'chai';
|
||||
import commander from 'commander';
|
||||
import {
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
mockTargetAndReferenceProject,
|
||||
} = require('../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
} from '../../test-helpers/mock-project-helpers.js';
|
||||
import {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
} from '../../test-helpers/mock-report-service-helpers.js';
|
||||
import {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../test-helpers/mock-log-service-helpers.js');
|
||||
const { InputDataService } = require('../../src/program/services/InputDataService.js');
|
||||
const { QueryService } = require('../../src/program/services/QueryService.js');
|
||||
const providenceModule = require('../../src/program/providence.js');
|
||||
const extendDocsModule = require('../../src/cli/launch-providence-with-extend-docs.js');
|
||||
const cliHelpersModule = require('../../src/cli/cli-helpers.js');
|
||||
const { cli } = require('../../src/cli/cli.js');
|
||||
const promptAnalyzerModule = require('../../src/cli/prompt-analyzer-menu.js');
|
||||
const { toPosixPath } = require('../../src/program/utils/to-posix-path.js');
|
||||
const { getExtendDocsResults } = require('../../src/cli/launch-providence-with-extend-docs.js');
|
||||
} from '../../test-helpers/mock-log-service-helpers.js';
|
||||
import { InputDataService } from '../../src/program/core/InputDataService.js';
|
||||
import { QueryService } from '../../src/program/core/QueryService.js';
|
||||
import providenceModule from '../../src/program/providence.js';
|
||||
import cliHelpersModule from '../../src/cli/cli-helpers.js';
|
||||
import { cli } from '../../src/cli/cli.mjs';
|
||||
import promptAnalyzerModule from '../../src/cli/prompt-analyzer-menu.js';
|
||||
import { toPosixPath } from '../../src/program/utils/to-posix-path.js';
|
||||
import { memoizeConfig } from '../../src/program/utils/memoize.js';
|
||||
import extendDocsModule, {
|
||||
getExtendDocsResults,
|
||||
} from '../../src/cli/launch-providence-with-extend-docs.js';
|
||||
import { dashboardServer } from '../../dashboard/server.mjs';
|
||||
|
||||
/**
|
||||
* @typedef {import('../../src/program/types/core').QueryResult} QueryResult
|
||||
*/
|
||||
|
||||
const __dirname = pathLib.dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
const { pathsArrayFromCs, pathsArrayFromCollectionName, appendProjectDependencyPaths } =
|
||||
cliHelpersModule;
|
||||
|
||||
/** @type {QueryResult[]} */
|
||||
const queryResults = [];
|
||||
|
||||
const rootDir = toPosixPath(pathLib.resolve(__dirname, '../../'));
|
||||
|
||||
const externalCfgMock = {
|
||||
searchTargetCollections: {
|
||||
'lion-collection': [
|
||||
|
|
@ -48,21 +59,35 @@ const externalCfgMock = {
|
|||
},
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {string} args
|
||||
* @param {string} cwd
|
||||
*/
|
||||
async function runCli(args, cwd) {
|
||||
process.argv = [
|
||||
const argv = [
|
||||
...process.argv.slice(0, 2),
|
||||
...args.split(' ').map(a => a.replace(/^("|')?(.*)("|')?$/, '$2')),
|
||||
];
|
||||
await cli({ cwd });
|
||||
await cli({ argv, cwd });
|
||||
}
|
||||
|
||||
describe('Providence CLI', () => {
|
||||
const rootDir = '/mocked/path/example-project';
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
let providenceStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let promptCfgStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let iExtConfStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let promptStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let qConfStub;
|
||||
|
||||
const memoizeCacheDisabledInitial = memoizeConfig.isCacheDisabled;
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
|
||||
before(() => {
|
||||
// Prevent MaxListenersExceededWarning
|
||||
commander.setMaxListeners(100);
|
||||
|
|
@ -84,25 +109,26 @@ describe('Providence CLI', () => {
|
|||
},
|
||||
);
|
||||
|
||||
providenceStub = sinon.stub(providenceModule, 'providence').returns(
|
||||
new Promise(resolve => {
|
||||
resolve();
|
||||
}),
|
||||
);
|
||||
/** @type {sinon.SinonStub} */
|
||||
providenceStub = sinon.stub(providenceModule, 'providence').returns(Promise.resolve());
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
promptCfgStub = sinon
|
||||
.stub(promptAnalyzerModule, 'promptAnalyzerConfigMenu')
|
||||
.returns({ analyzerConfig: { con: 'fig' } });
|
||||
.returns(Promise.resolve({ analyzerConfig: { con: 'fig' } }));
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
iExtConfStub = sinon.stub(InputDataService, 'getExternalConfig').returns(externalCfgMock);
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
promptStub = sinon
|
||||
.stub(promptAnalyzerModule, 'promptAnalyzerMenu')
|
||||
.returns({ analyzerName: 'mock-analyzer' });
|
||||
.returns(Promise.resolve({ analyzerName: 'match-analyzer-mock' }));
|
||||
|
||||
/** @type {sinon.SinonStub} */
|
||||
qConfStub = sinon.stub(QueryService, 'getQueryConfigFromAnalyzer').returns({
|
||||
analyzer: {
|
||||
name: 'mock-analyzer',
|
||||
name: 'match-analyzer-mock',
|
||||
requiresReference: true,
|
||||
},
|
||||
});
|
||||
|
|
@ -120,6 +146,12 @@ describe('Providence CLI', () => {
|
|||
iExtConfStub.restore();
|
||||
promptStub.restore();
|
||||
qConfStub.restore();
|
||||
|
||||
memoizeConfig.isCacheDisabled = memoizeCacheDisabledInitial;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
|
|
@ -130,22 +162,25 @@ describe('Providence CLI', () => {
|
|||
qConfStub.resetHistory();
|
||||
});
|
||||
|
||||
const analyzeCmd = 'analyze mock-analyzer';
|
||||
const analyzeCmd = 'analyze match-analyzer-mock';
|
||||
|
||||
it('calls providence', async () => {
|
||||
await runCli(`${analyzeCmd} -t /mocked/path/example-project`);
|
||||
await runCli(`${analyzeCmd} -t /mocked/path/example-project`, rootDir);
|
||||
expect(providenceStub.called).to.be.true;
|
||||
});
|
||||
|
||||
it('creates a QueryConfig', async () => {
|
||||
await runCli(`${analyzeCmd} -t /mocked/path/example-project`);
|
||||
await runCli(`${analyzeCmd} -t /mocked/path/example-project`, rootDir);
|
||||
expect(qConfStub.called).to.be.true;
|
||||
expect(qConfStub.args[0][0]).to.equal('mock-analyzer');
|
||||
expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
|
||||
});
|
||||
|
||||
describe('Global options', () => {
|
||||
/** @type {sinon.SinonStub} */
|
||||
let pathsArrayFromCollectionStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let pathsArrayFromCsStub;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let appendProjectDependencyPathsStub;
|
||||
|
||||
before(() => {
|
||||
|
|
@ -157,11 +192,13 @@ describe('Providence CLI', () => {
|
|||
.returns(['/mocked/path/example-project']);
|
||||
appendProjectDependencyPathsStub = sinon
|
||||
.stub(cliHelpersModule, 'appendProjectDependencyPaths')
|
||||
.returns([
|
||||
.returns(
|
||||
Promise.resolve([
|
||||
'/mocked/path/example-project',
|
||||
'/mocked/path/example-project/node_modules/mock-dep-a',
|
||||
'/mocked/path/example-project/bower_components/mock-dep-b',
|
||||
]);
|
||||
]),
|
||||
);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
|
|
@ -177,12 +214,12 @@ describe('Providence CLI', () => {
|
|||
});
|
||||
|
||||
it('"-e --extensions"', async () => {
|
||||
await runCli(`${analyzeCmd} -e bla,blu`);
|
||||
await runCli(`${analyzeCmd} -e bla,blu`, rootDir);
|
||||
expect(providenceStub.args[0][1].gatherFilesConfig.extensions).to.eql(['.bla', '.blu']);
|
||||
|
||||
providenceStub.resetHistory();
|
||||
|
||||
await runCli(`${analyzeCmd} --extensions bla,blu`);
|
||||
await runCli(`${analyzeCmd} --extensions bla,blu`, rootDir);
|
||||
expect(providenceStub.args[0][1].gatherFilesConfig.extensions).to.eql(['.bla', '.blu']);
|
||||
});
|
||||
|
||||
|
|
@ -325,19 +362,19 @@ describe('Providence CLI', () => {
|
|||
});
|
||||
|
||||
it('"-c --config"', async () => {
|
||||
await runCli(`analyze mock-analyzer -c {"a":"2"}`, rootDir);
|
||||
expect(qConfStub.args[0][0]).to.equal('mock-analyzer');
|
||||
await runCli(`analyze match-analyzer-mock -c {"a":"2"}`, rootDir);
|
||||
expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
|
||||
expect(qConfStub.args[0][1]).to.eql({ a: '2', metaConfig: {} });
|
||||
|
||||
qConfStub.resetHistory();
|
||||
|
||||
await runCli(`analyze mock-analyzer --config {"a":"2"}`, rootDir);
|
||||
expect(qConfStub.args[0][0]).to.equal('mock-analyzer');
|
||||
await runCli(`analyze match-analyzer-mock --config {"a":"2"}`, rootDir);
|
||||
expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
|
||||
expect(qConfStub.args[0][1]).to.eql({ a: '2', metaConfig: {} });
|
||||
});
|
||||
|
||||
it('calls "promptAnalyzerConfigMenu" without config given', async () => {
|
||||
await runCli(`analyze mock-analyzer`, rootDir);
|
||||
await runCli(`analyze match-analyzer-mock`, rootDir);
|
||||
expect(promptCfgStub.called).to.be.true;
|
||||
});
|
||||
});
|
||||
|
|
@ -348,7 +385,17 @@ describe('Providence CLI', () => {
|
|||
|
||||
describe('Manage', () => {});
|
||||
|
||||
describe('Dashboard', () => {
|
||||
/** @type {sinon.SinonStub} */
|
||||
const startStub = sinon.stub(dashboardServer, 'start');
|
||||
it('spawns a dashboard', async () => {
|
||||
runCli(`dashboard`, rootDir);
|
||||
expect(startStub.called).to.be.true;
|
||||
});
|
||||
});
|
||||
|
||||
describe('Extend docs', () => {
|
||||
/** @type {sinon.SinonStub} */
|
||||
let extendDocsStub;
|
||||
|
||||
before(() => {
|
||||
|
|
@ -389,7 +436,7 @@ describe('Providence CLI', () => {
|
|||
extensions: ['.bla'],
|
||||
allowlist: ['al'],
|
||||
allowlistReference: ['alr'],
|
||||
cwd: undefined,
|
||||
cwd: '/mocked/path/example-project',
|
||||
skipCheckMatchCompatibility: true,
|
||||
});
|
||||
});
|
||||
|
|
@ -398,6 +445,8 @@ describe('Providence CLI', () => {
|
|||
});
|
||||
|
||||
describe('CLI helpers', () => {
|
||||
const rootDir = toPosixPath(pathLib.resolve(__dirname, '../../'));
|
||||
|
||||
describe('pathsArrayFromCs', () => {
|
||||
it('allows absolute paths', async () => {
|
||||
expect(pathsArrayFromCs('/mocked/path/example-project', rootDir)).to.eql([
|
||||
|
|
@ -472,6 +521,7 @@ describe('CLI helpers', () => {
|
|||
'./index.js': `export { InBetweenComp as MyComp } from './src/inbetween.js'`,
|
||||
'./node_modules/dependency-a/index.js': '',
|
||||
'./bower_components/dependency-b/index.js': '',
|
||||
'./node_modules/my-dependency/index.js': '',
|
||||
},
|
||||
{
|
||||
projectName: 'example-project',
|
||||
|
|
@ -484,6 +534,7 @@ describe('CLI helpers', () => {
|
|||
const result = await appendProjectDependencyPaths(['/mocked/path/example-project']);
|
||||
expect(result).to.eql([
|
||||
'/mocked/path/example-project/node_modules/dependency-a',
|
||||
'/mocked/path/example-project/node_modules/my-dependency',
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
|
|
@ -496,6 +547,7 @@ describe('CLI helpers', () => {
|
|||
);
|
||||
expect(result).to.eql([
|
||||
'/mocked/path/example-project/node_modules/dependency-a',
|
||||
// in windows, it should not add '/mocked/path/example-project/node_modules/my-dependency',
|
||||
'/mocked/path/example-project/bower_components/dependency-b',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
|
|
@ -513,6 +565,7 @@ describe('CLI helpers', () => {
|
|||
]);
|
||||
expect(result).to.eql([
|
||||
'/mocked/path/example-project/node_modules/dependency-a',
|
||||
'/mocked/path/example-project/node_modules/my-dependency',
|
||||
'/mocked/path/example-project',
|
||||
]);
|
||||
|
||||
|
|
@ -530,7 +583,12 @@ describe('CLI helpers', () => {
|
|||
afterEach(() => {
|
||||
restoreMockedProjects();
|
||||
});
|
||||
|
||||
it('rewrites monorepo package paths when analysis is run from monorepo root', async () => {
|
||||
// This fails after InputDataService.addAstToProjectsData is memoized
|
||||
// (it does pass when run in isolation however, as a quick fix we disable memoization cache here...)
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
|
||||
const theirProjectFiles = {
|
||||
'./package.json': JSON.stringify({
|
||||
name: 'their-components',
|
||||
|
|
@ -0,0 +1,116 @@
|
|||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
import fs from 'fs';
|
||||
import pathLib from 'path';
|
||||
import sinon from 'sinon';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { expect } from 'chai';
|
||||
import fetch from 'node-fetch';
|
||||
import { createTestServer } from '@web/dev-server-core/test-helpers';
|
||||
import { createDashboardServerConfig } from '../../dashboard/server.mjs';
|
||||
import { ReportService } from '../../src/program/core/ReportService.js';
|
||||
import { providenceConfUtil } from '../../src/program/utils/providence-conf-util.mjs';
|
||||
|
||||
/**
|
||||
* @typedef {import('@web/dev-server-core').DevServer} DevServer
|
||||
*/
|
||||
|
||||
const __dirname = pathLib.dirname(fileURLToPath(import.meta.url));
|
||||
const { outputPath: reportServiceOutputPathOriginal } = ReportService;
|
||||
const fixturesPath = pathLib.join(__dirname, 'fixtures');
|
||||
const mockedResponsesPath = pathLib.join(__dirname, 'fixtures/dashboard-responses');
|
||||
const mockedOutputPath = pathLib.join(__dirname, 'fixtures/providence-output');
|
||||
|
||||
async function getConf(url) {
|
||||
const { default: providenceConf } = await import(url);
|
||||
const providenceConfRaw = fs.readFileSync(url, 'utf8');
|
||||
return { providenceConf, providenceConfRaw };
|
||||
}
|
||||
|
||||
describe('Dashboard Server', () => {
|
||||
/** @type {string} */
|
||||
let host;
|
||||
/** @type {DevServer} */
|
||||
let server;
|
||||
/** @type {sinon.SinonStub} */
|
||||
let providenceConfStub;
|
||||
|
||||
before(() => {
|
||||
// N.B. don't use mock-fs, since it doesn't correctly handle dynamic imports and fs.promises
|
||||
ReportService.outputPath = mockedOutputPath;
|
||||
});
|
||||
|
||||
after(() => {
|
||||
ReportService.outputPath = reportServiceOutputPathOriginal;
|
||||
});
|
||||
|
||||
describe('Happy flow', () => {
|
||||
beforeEach(async () => {
|
||||
const conf = await getConf(`${fixturesPath}/providence.conf.mjs`);
|
||||
providenceConfStub = sinon.stub(providenceConfUtil, 'getConf').resolves(conf);
|
||||
({ host, server } = await createTestServer(await createDashboardServerConfig()));
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
providenceConfStub.restore();
|
||||
server.stop();
|
||||
});
|
||||
|
||||
describe('Index', () => {
|
||||
it(`returns an index on '/'`, async () => {
|
||||
const response = await fetch(`${host}/dashboard`);
|
||||
const responseText = await response.text();
|
||||
expect(response.status).to.equal(200);
|
||||
expect(responseText).to.include('<title>Providence dashboard</title>');
|
||||
});
|
||||
});
|
||||
|
||||
describe('App assets', () => {
|
||||
it(`returns (static) js assets via app/*`, async () => {
|
||||
const response = await fetch(`${host}/dashboard/app/p-board.js`);
|
||||
expect(response.status).to.equal(200);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Menu data', () => {
|
||||
it(`returns json object based on output`, async () => {
|
||||
const response = await fetch(`${host}/menu-data.json`);
|
||||
expect(response.status).to.equal(200);
|
||||
const responseJSON = await response.json();
|
||||
const expectedResult = fs.readFileSync(`${mockedResponsesPath}/menu-data.json`, 'utf8');
|
||||
expect(responseJSON).to.eql(JSON.parse(expectedResult));
|
||||
});
|
||||
});
|
||||
|
||||
describe('Results', () => {
|
||||
it(`returns json object based on output`, async () => {
|
||||
const response = await fetch(`${host}/results.json`);
|
||||
expect(response.status).to.equal(200);
|
||||
const responseJson = await response.json();
|
||||
const expectedResult = fs.readFileSync(`${mockedResponsesPath}/results.json`, 'utf8');
|
||||
expect(responseJson).to.eql(JSON.parse(expectedResult));
|
||||
});
|
||||
});
|
||||
|
||||
describe('Config file "providence.conf.(m)js"', () => {
|
||||
it(`returns providence.conf.mjs found in cwd`, async () => {
|
||||
const response = await fetch(`${host}/providence-conf.js`);
|
||||
expect(response.status).to.equal(200);
|
||||
const responseText = await response.text();
|
||||
const { providenceConfRaw } = await getConf(`${fixturesPath}/providence.conf.mjs`);
|
||||
expect(responseText).to.equal(providenceConfRaw);
|
||||
});
|
||||
|
||||
// Since we cannot mock dynamic imports: skip for now...
|
||||
it.skip(`returns providence.conf.js found in cwd`, async () => {});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Unhappy flow', () => {
|
||||
// Since we cannot mock dynamic imports: skip for now...
|
||||
describe.skip('Config file "providence.conf.(m)js"', () => {
|
||||
it(`throws when no providence.conf.(m)js found`, async () => {});
|
||||
|
||||
it(`throws when providence.conf.(m)js is not an esm module`, async () => {});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -0,0 +1,12 @@
|
|||
{
|
||||
"searchTargetCollections": {
|
||||
"@lion-targets": ["@lion/ui"]
|
||||
},
|
||||
"referenceCollections": {
|
||||
"@lion-references": ["@lion/ui"]
|
||||
},
|
||||
"searchTargetDeps": {
|
||||
"@lion/input#0.15.7": ["@lion/input#0.15.7"],
|
||||
"@lion/listbox#0.10.7": ["@lion/listbox#0.10.7"]
|
||||
}
|
||||
}
|
||||
|
|
@ -0,0 +1,634 @@
|
|||
{
|
||||
"match-imports": [
|
||||
{
|
||||
"fileName": "match-imports_-_%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__1410239906.json",
|
||||
"content": {
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-imports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__1410239906",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/input",
|
||||
"version": "0.15.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"targetProjectResult": null,
|
||||
"referenceProjectResult": null,
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "LionField::./index.js::@lion/form-core",
|
||||
"name": "LionField",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./src/LionInput.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "NativeTextFieldMixin::./index.js::@lion/form-core",
|
||||
"name": "NativeTextFieldMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./src/LionInput.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "Validator::./index.js::@lion/form-core",
|
||||
"name": "Validator",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./test/lion-input.test.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileName": "match-imports_-_%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-1773728033.json",
|
||||
"content": {
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-imports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-1773728033",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/listbox",
|
||||
"version": "0.10.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"targetProjectResult": null,
|
||||
"referenceProjectResult": null,
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FocusMixin::./index.js::@lion/form-core",
|
||||
"name": "FocusMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormControlMixin::./index.js::@lion/form-core",
|
||||
"name": "FormControlMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "InteractionStateMixin::./index.js::@lion/form-core",
|
||||
"name": "InteractionStateMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegisteringMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegisteringMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOption.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegistrarMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegistrarMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegistrarPortalMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegistrarPortalMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOptions.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ValidateMixin::./index.js::@lion/form-core",
|
||||
"name": "ValidateMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ChoiceGroupMixin::./index.js::@lion/form-core",
|
||||
"name": "ChoiceGroupMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ChoiceInputMixin::./index.js::@lion/form-core",
|
||||
"name": "ChoiceInputMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOption.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"match-subclasses": [
|
||||
{
|
||||
"fileName": "match-subclasses_-_%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__-1212823364.json",
|
||||
"content": {
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-subclasses",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__-1212823364",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/input",
|
||||
"version": "0.15.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "LionField",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "LionField::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionInput.js",
|
||||
"identifier": "LionInput"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "NativeTextFieldMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "NativeTextFieldMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionInput.js",
|
||||
"identifier": "LionInput"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "Validator",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "Validator::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./test/lion-input.test.js",
|
||||
"identifier": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"fileName": "match-subclasses_-_%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-222436449.json",
|
||||
"content": {
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-subclasses",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-222436449",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/listbox",
|
||||
"version": "0.10.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FocusMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FocusMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormControlMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormControlMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "InteractionStateMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "InteractionStateMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegisteringMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegisteringMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOption.js",
|
||||
"identifier": "LionOption"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegistrarMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegistrarMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegistrarPortalMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegistrarPortalMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOptions.js",
|
||||
"identifier": "LionOptions"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ValidateMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ValidateMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ChoiceGroupMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ChoiceGroupMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ChoiceInputMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ChoiceInputMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOption.js",
|
||||
"identifier": "LionOption"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,98 @@
|
|||
{
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-imports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__1410239906",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/input",
|
||||
"version": "0.15.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"targetProjectResult": null,
|
||||
"referenceProjectResult": null,
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "LionField::./index.js::@lion/form-core",
|
||||
"name": "LionField",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./src/LionInput.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "NativeTextFieldMixin::./index.js::@lion/form-core",
|
||||
"name": "NativeTextFieldMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./src/LionInput.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "Validator::./index.js::@lion/form-core",
|
||||
"name": "Validator",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
"./test/lion-input.test.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,194 @@
|
|||
{
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-imports",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-1773728033",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/listbox",
|
||||
"version": "0.10.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"targetProjectResult": null,
|
||||
"referenceProjectResult": null,
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FocusMixin::./index.js::@lion/form-core",
|
||||
"name": "FocusMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormControlMixin::./index.js::@lion/form-core",
|
||||
"name": "FormControlMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "InteractionStateMixin::./index.js::@lion/form-core",
|
||||
"name": "InteractionStateMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegisteringMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegisteringMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOption.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegistrarMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegistrarMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "FormRegistrarPortalMixin::./index.js::@lion/form-core",
|
||||
"name": "FormRegistrarPortalMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOptions.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ValidateMixin::./index.js::@lion/form-core",
|
||||
"name": "ValidateMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionListbox.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ChoiceGroupMixin::./index.js::@lion/form-core",
|
||||
"name": "ChoiceGroupMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/ListboxMixin.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"id": "ChoiceInputMixin::./index.js::@lion/form-core",
|
||||
"name": "ChoiceInputMixin",
|
||||
"filePath": "./index.js",
|
||||
"project": "@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
"./src/LionOption.js"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
{
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-subclasses",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Finput_0.15.7_+_%40lion%2Fform-core_0.15.4__-1212823364",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/input",
|
||||
"version": "0.15.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "LionField",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "LionField::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionInput.js",
|
||||
"identifier": "LionInput"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "NativeTextFieldMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "NativeTextFieldMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionInput.js",
|
||||
"identifier": "LionInput"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "Validator",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "Validator::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/input",
|
||||
"files": [
|
||||
{
|
||||
"file": "./test/lion-input.test.js",
|
||||
"identifier": "null"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,219 @@
|
|||
{
|
||||
"meta": {
|
||||
"searchType": "ast-analyzer",
|
||||
"analyzerMeta": {
|
||||
"name": "match-subclasses",
|
||||
"requiredAst": "babel",
|
||||
"identifier": "%40lion%2Flistbox_0.10.7_+_%40lion%2Fform-core_0.15.4__-222436449",
|
||||
"targetProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/listbox",
|
||||
"version": "0.10.7",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"referenceProject": {
|
||||
"mainEntry": "./index.js",
|
||||
"name": "@lion/form-core",
|
||||
"version": "0.15.4",
|
||||
"commitHash": "[not-a-git-root]"
|
||||
},
|
||||
"configuration": {
|
||||
"gatherFilesConfig": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"gatherFilesConfigReference": {
|
||||
"extensions": [
|
||||
".js",
|
||||
".html"
|
||||
]
|
||||
},
|
||||
"skipCheckMatchCompatibility": false,
|
||||
"metaConfig": {
|
||||
"categoryConfig": [
|
||||
{
|
||||
"project": "@lion/overlays",
|
||||
"majorVersion": 1,
|
||||
"categories": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"queryOutput": [
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FocusMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FocusMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormControlMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormControlMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "InteractionStateMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "InteractionStateMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegisteringMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegisteringMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOption.js",
|
||||
"identifier": "LionOption"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegistrarMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegistrarMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "FormRegistrarPortalMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "FormRegistrarPortalMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOptions.js",
|
||||
"identifier": "LionOptions"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ValidateMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ValidateMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionListbox.js",
|
||||
"identifier": "LionListbox"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ChoiceGroupMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ChoiceGroupMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/ListboxMixin.js",
|
||||
"identifier": "ListboxMixin"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"exportSpecifier": {
|
||||
"name": "ChoiceInputMixin",
|
||||
"project": "@lion/form-core",
|
||||
"filePath": "./index.js",
|
||||
"id": "ChoiceInputMixin::./index.js::@lion/form-core"
|
||||
},
|
||||
"matchesPerProject": [
|
||||
{
|
||||
"project": "@lion/listbox",
|
||||
"files": [
|
||||
{
|
||||
"file": "./src/LionOption.js",
|
||||
"identifier": "LionOption"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,8 @@
|
|||
{
|
||||
"@lion/input#0.15.7": [
|
||||
"@lion/input#0.15.7"
|
||||
],
|
||||
"@lion/listbox#0.10.7": [
|
||||
"@lion/listbox#0.10.7"
|
||||
]
|
||||
}
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
export default {
|
||||
metaConfig: {
|
||||
categoryConfig: [
|
||||
{
|
||||
// This is the name found in package.json
|
||||
project: '@lion/overlays',
|
||||
majorVersion: 1,
|
||||
// These conditions will be run on every filePath
|
||||
categories: {
|
||||
overlays: localFilePath => {
|
||||
const names = ['dialog', 'tooltip'];
|
||||
const fromPackages = names.some(p =>
|
||||
localFilePath.startsWith(`./packages/ui/components/${p}`),
|
||||
);
|
||||
const fromRoot =
|
||||
names.some(p => localFilePath.startsWith(`./ui-${p}`)) ||
|
||||
localFilePath.startsWith('./overlays.js');
|
||||
return fromPackages || fromRoot;
|
||||
},
|
||||
// etc...
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
// By predefining groups, we can run a query for programs/collections...
|
||||
// Select via " providence analyze --search-target-collection 'exampleCollection' "
|
||||
searchTargetCollections: {
|
||||
'@lion-targets': ['../../packages/ui'],
|
||||
// ...
|
||||
},
|
||||
referenceCollections: {
|
||||
// Usually the references are different from the targets.
|
||||
// In this demo file, we test @lion usage amongst itself
|
||||
// Select via " providence analyze --reference-collection 'exampleCollection' "
|
||||
'@lion-references': ['../../packages/ui'],
|
||||
},
|
||||
};
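The demo config above only declares the collections; selecting them happens at query time via the CLI flags shown in its comments. As a rough illustration (not part of this commit), the same selection can be expressed programmatically with the providence API that the tests further down exercise. The analyzer name 'match-imports' is just an example, and the import paths are assumptions; the target/reference paths mirror the '@lion-targets' and '@lion-references' entries declared above.

import { providence } from './src/program/providence.js';
import { QueryService } from './src/program/core/QueryService.js';

// Build a query config for one analyzer, then run it against the demo collections.
const queryConfig = QueryService.getQueryConfigFromAnalyzer('match-imports');
await providence(queryConfig, {
  targetProjectPaths: ['../../packages/ui'],    // '@lion-targets' in the config above
  referenceProjectPaths: ['../../packages/ui'], // '@lion-references' in the config above
});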
|
||||
|
|
@ -1,15 +1,26 @@
|
|||
const pathLib = require('path');
|
||||
const { expect } = require('chai');
|
||||
const { providence } = require('../../../../src/program/providence.js');
|
||||
const { QueryService } = require('../../../../src/program/services/QueryService.js');
|
||||
const { ReportService } = require('../../../../src/program/services/ReportService.js');
|
||||
/* eslint-disable import/no-extraneous-dependencies */
|
||||
import pathLib, { dirname } from 'path';
|
||||
import { fileURLToPath } from 'url';
|
||||
import fs from 'fs';
|
||||
import { expect } from 'chai';
|
||||
import { providence } from '../../../../src/program/providence.js';
|
||||
import { QueryService } from '../../../../src/program/core/QueryService.js';
|
||||
import { ReportService } from '../../../../src/program/core/ReportService.js';
|
||||
import { memoizeConfig } from '../../../../src/program/utils/memoize.js';
|
||||
|
||||
const {
|
||||
import {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../../../test-helpers/mock-report-service-helpers.js');
|
||||
} from '../../../../test-helpers/mock-report-service-helpers.js';
|
||||
|
||||
const __dirname = dirname(fileURLToPath(import.meta.url));
|
||||
|
||||
describe('Analyzers file-system integration', () => {
|
||||
/**
|
||||
* Flag to enable mode that generates e2e mocks.
|
||||
* We 'abuse' this test file for that purpose for ease of maintenance
|
||||
* @type {boolean}
|
||||
*/
|
||||
const generateE2eMode = process.argv.includes('--generate-e2e-mode');
|
||||
|
||||
const queryResults = [];
|
||||
|
|
@ -25,9 +36,13 @@ describe('Analyzers file-system integration', () => {
|
|||
const originalGetResultFileNameAndPath = ReportService._getResultFileNameAndPath;
|
||||
const originalOutputPath = ReportService.outputPath;
|
||||
|
||||
const memoizeCacheDisabledInitial = memoizeConfig.isCacheDisabled;
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
|
||||
after(() => {
|
||||
ReportService._getResultFileNameAndPath = originalGetResultFileNameAndPath;
|
||||
ReportService.outputPath = originalOutputPath;
|
||||
memoizeConfig.isCacheDisabled = memoizeCacheDisabledInitial;
|
||||
});
|
||||
|
||||
if (generateE2eMode) {
|
||||
|
|
@ -108,8 +123,15 @@ describe('Analyzers file-system integration', () => {
|
|||
);
|
||||
return;
|
||||
}
|
||||
// eslint-disable-next-line import/no-dynamic-require, global-require
|
||||
const expectedOutput = require(`../../../../test-helpers/project-mocks-analyzer-outputs/${analyzerName}.json`);
|
||||
const expectedOutput = JSON.parse(
|
||||
fs.readFileSync(
|
||||
pathLib.resolve(
|
||||
__dirname,
|
||||
`../../../../test-helpers/project-mocks-analyzer-outputs/${analyzerName}.json`,
|
||||
),
|
||||
'utf8',
|
||||
),
|
||||
);
|
||||
const { queryOutput } = JSON.parse(JSON.stringify(queryResults[0]));
|
||||
expect(queryOutput).not.to.eql([]);
|
||||
expect(queryOutput).to.eql(expectedOutput.queryOutput);
|
||||
|
|
@ -1,49 +1,17 @@
|
|||
const { expect } = require('chai');
|
||||
const { providence } = require('../../../src/program/providence.js');
|
||||
const { QueryService } = require('../../../src/program/services/QueryService.js');
|
||||
const {
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
getEntry,
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../../test-helpers/mock-log-service-helpers.js');
|
||||
const { QueryService } = require('../../../src/program/core/QueryService.js');
|
||||
const { setupAnalyzerTest } = require('../../../test-helpers/setup-analyzer-test.js');
|
||||
const { mockProject, getEntry } = require('../../../test-helpers/mock-project-helpers.js');
|
||||
|
||||
const findClassesQueryConfig = QueryService.getQueryConfigFromAnalyzer('find-classes');
|
||||
|
||||
describe('Analyzer "find-classes"', () => {
|
||||
const queryResults = [];
|
||||
const queryResults = setupAnalyzerTest();
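setupAnalyzerTest() is introduced by this change but its implementation is not shown in the diff. Judging purely from the before/after/beforeEach/afterEach boilerplate it replaces in these analyzer tests, a helper along the following lines would cover the same ground; this is a sketch under that assumption (paths and hook usage are guesses), not the actual file.

const { QueryService } = require('../src/program/core/QueryService.js');
const { restoreMockedProjects } = require('./mock-project-helpers.js');
const { mockWriteToJson, restoreWriteToJson } = require('./mock-report-service-helpers.js');
const {
  suppressNonCriticalLogs,
  restoreSuppressNonCriticalLogs,
} = require('./mock-log-service-helpers.js');

/**
 * Registers the shared mocha hooks that each analyzer test previously set up by hand
 * and returns the array that mockWriteToJson fills with query results.
 */
function setupAnalyzerTest() {
  const queryResults = [];
  const initialCacheDisabled = QueryService.cacheDisabled;

  before(() => {
    QueryService.cacheDisabled = true;
    suppressNonCriticalLogs();
  });
  beforeEach(() => {
    mockWriteToJson(queryResults);
  });
  afterEach(() => {
    restoreWriteToJson(queryResults);
    restoreMockedProjects();
  });
  after(() => {
    QueryService.cacheDisabled = initialCacheDisabled;
    restoreSuppressNonCriticalLogs();
  });

  return queryResults;
}

module.exports = { setupAnalyzerTest };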
|
||||
const _providenceCfg = {
|
||||
targetProjectPaths: ['/fictional/project'], // defined in mockProject
|
||||
};
|
||||
|
||||
const cacheDisabledInitialValue = QueryService.cacheDisabled;
|
||||
|
||||
before(() => {
|
||||
QueryService.cacheDisabled = true;
|
||||
});
|
||||
|
||||
after(() => {
|
||||
QueryService.cacheDisabled = cacheDisabledInitialValue;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
suppressNonCriticalLogs();
|
||||
mockWriteToJson(queryResults);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
restoreSuppressNonCriticalLogs();
|
||||
restoreWriteToJson(queryResults);
|
||||
restoreMockedProjects();
|
||||
});
|
||||
|
||||
it(`finds class definitions`, async () => {
|
||||
mockProject([`class EmptyClass {}`]);
|
||||
await providence(findClassesQueryConfig, _providenceCfg);
|
||||
|
|
|
|||
|
|
@ -1,19 +1,9 @@
|
|||
const { expect } = require('chai');
|
||||
const { providence } = require('../../../src/program/providence.js');
|
||||
const { QueryService } = require('../../../src/program/services/QueryService.js');
|
||||
const {
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
getEntry,
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../../test-helpers/mock-log-service-helpers.js');
|
||||
const { QueryService } = require('../../../src/program/core/QueryService.js');
|
||||
const { setupAnalyzerTest } = require('../../../test-helpers/setup-analyzer-test.js');
|
||||
|
||||
const { mockProject, getEntry } = require('../../../test-helpers/mock-project-helpers.js');
|
||||
|
||||
const findCustomelementsQueryConfig =
|
||||
QueryService.getQueryConfigFromAnalyzer('find-customelements');
|
||||
|
|
@ -22,28 +12,7 @@ const _providenceCfg = {
|
|||
};
|
||||
|
||||
describe('Analyzer "find-customelements"', () => {
|
||||
const queryResults = [];
|
||||
|
||||
const cacheDisabledInitialValue = QueryService.cacheDisabled;
|
||||
|
||||
before(() => {
|
||||
QueryService.cacheDisabled = true;
|
||||
});
|
||||
|
||||
after(() => {
|
||||
QueryService.cacheDisabled = cacheDisabledInitialValue;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
suppressNonCriticalLogs();
|
||||
mockWriteToJson(queryResults);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
restoreSuppressNonCriticalLogs();
|
||||
restoreMockedProjects();
|
||||
restoreWriteToJson(queryResults);
|
||||
});
|
||||
const queryResults = setupAnalyzerTest();
|
||||
|
||||
it(`stores the tagName of a custom element`, async () => {
|
||||
mockProject([`customElements.define('custom-el', class extends HTMLElement {});`]);
|
||||
|
|
|
|||
|
|
@ -1,50 +1,22 @@
|
|||
const { expect } = require('chai');
|
||||
const { providence } = require('../../../src/program/providence.js');
|
||||
const { QueryService } = require('../../../src/program/services/QueryService.js');
|
||||
const { QueryService } = require('../../../src/program/core/QueryService.js');
|
||||
const { setupAnalyzerTest } = require('../../../test-helpers/setup-analyzer-test.js');
|
||||
|
||||
const {
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
getEntry,
|
||||
getEntries,
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../../test-helpers/mock-log-service-helpers.js');
|
||||
|
||||
const findExportsQueryConfig = QueryService.getQueryConfigFromAnalyzer('find-exports');
|
||||
|
||||
describe('Analyzer "find-exports"', () => {
|
||||
const queryResults = [];
|
||||
const queryResults = setupAnalyzerTest();
|
||||
const _providenceCfg = {
|
||||
targetProjectPaths: ['/fictional/project'], // defined in mockProject
|
||||
};
|
||||
|
||||
const cacheDisabledInitialValue = QueryService.cacheDisabled;
|
||||
|
||||
before(() => {
|
||||
QueryService.cacheDisabled = true;
|
||||
});
|
||||
|
||||
after(() => {
|
||||
QueryService.cacheDisabled = cacheDisabledInitialValue;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
suppressNonCriticalLogs();
|
||||
mockWriteToJson(queryResults);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
restoreSuppressNonCriticalLogs();
|
||||
restoreWriteToJson(queryResults);
|
||||
restoreMockedProjects();
|
||||
});
|
||||
|
||||
describe('Export notations', () => {
|
||||
it(`supports [export const x = 0] (named specifier)`, async () => {
|
||||
mockProject([`export const x = 0`]);
|
||||
|
|
|
|||
|
|
@ -1,47 +1,21 @@
|
|||
const { expect } = require('chai');
|
||||
const { providence } = require('../../../src/program/providence.js');
|
||||
const { QueryService } = require('../../../src/program/services/QueryService.js');
|
||||
const {
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
getEntry,
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../../test-helpers/mock-log-service-helpers.js');
|
||||
const { QueryService } = require('../../../src/program/core/QueryService.js');
|
||||
const { setupAnalyzerTest } = require('../../../test-helpers/setup-analyzer-test.js');
|
||||
const { mockProject, getEntry } = require('../../../test-helpers/mock-project-helpers.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../../../src/program/types/core').ProvidenceConfig} ProvidenceConfig
|
||||
*/
|
||||
|
||||
const findImportsQueryConfig = QueryService.getQueryConfigFromAnalyzer('find-imports');
|
||||
/** @type {Partial<ProvidenceConfig>} */
|
||||
const _providenceCfg = {
|
||||
targetProjectPaths: ['/fictional/project'], // defined in mockProject
|
||||
};
|
||||
|
||||
describe('Analyzer "find-imports"', () => {
|
||||
const queryResults = [];
|
||||
const cacheDisabledInitialValue = QueryService.cacheDisabled;
|
||||
|
||||
before(() => {
|
||||
QueryService.cacheDisabled = true;
|
||||
});
|
||||
|
||||
after(() => {
|
||||
QueryService.cacheDisabled = cacheDisabledInitialValue;
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
suppressNonCriticalLogs();
|
||||
mockWriteToJson(queryResults);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
restoreSuppressNonCriticalLogs();
|
||||
restoreMockedProjects();
|
||||
restoreWriteToJson(queryResults);
|
||||
});
|
||||
const queryResults = setupAnalyzerTest();
|
||||
|
||||
describe('Import notations', () => {
|
||||
it(`supports [import 'imported/source'] (no specifiers)`, async () => {
|
||||
|
|
@ -190,7 +164,6 @@ describe('Analyzer "find-imports"', () => {
|
|||
await providence(findImportsQueryConfig, _providenceCfg);
|
||||
const queryResult = queryResults[0];
|
||||
const firstEntry = getEntry(queryResult);
|
||||
console.log({ firstEntry });
|
||||
expect(firstEntry.result[0].importSpecifiers[0]).to.equal('[default]');
|
||||
expect(firstEntry.result[0].source).to.equal('@css/lib/styles.css');
|
||||
expect(firstEntry.result[0].assertionType).to.equal('css');
|
||||
|
|
@ -201,7 +174,6 @@ describe('Analyzer "find-imports"', () => {
|
|||
await providence(findImportsQueryConfig, _providenceCfg);
|
||||
const queryResult = queryResults[0];
|
||||
const firstEntry = getEntry(queryResult);
|
||||
console.log({ firstEntry });
|
||||
expect(firstEntry.result[0].importSpecifiers[0]).to.equal('[default]');
|
||||
expect(firstEntry.result[0].source).to.equal('@css/lib/styles.css');
|
||||
expect(firstEntry.result[0].assertionType).to.equal('css');
|
||||
|
|
|
|||
|
|
@ -4,15 +4,19 @@ const {
|
|||
trackDownIdentifier,
|
||||
trackDownIdentifierFromScope,
|
||||
} = require('../../../../src/program/analyzers/helpers/track-down-identifier.js');
|
||||
const { AstService } = require('../../../../src/program/services/AstService.js');
|
||||
|
||||
const { AstService } = require('../../../../src/program/core/AstService.js');
|
||||
const {
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
} = require('../../../../test-helpers/mock-project-helpers.js');
|
||||
const { memoizeConfig } = require('../../../../src/program/utils/memoize.js');
|
||||
|
||||
describe('trackdownIdentifier', () => {
|
||||
beforeEach(() => {
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
});
|
||||
afterEach(() => {
|
||||
memoizeConfig.isCacheDisabled = false;
|
||||
restoreMockedProjects();
|
||||
});
|
||||
|
||||
|
|
@ -168,13 +172,18 @@ describe('trackdownIdentifier', () => {
|
|||
});
|
||||
});
|
||||
|
||||
it(`identifies the current project as internal source`, async () => {
|
||||
it(`self-referencing projects are recognized as internal source`, async () => {
|
||||
// https://nodejs.org/api/packages.html#self-referencing-a-package-using-its-name
|
||||
mockProject(
|
||||
{
|
||||
'./MyClass.js': `export default class {}`,
|
||||
'./currentFile.js': `
|
||||
import MyClass from 'my-project/MyClass.js';
|
||||
`,
|
||||
'./package.json': JSON.stringify({
|
||||
name: 'my-project',
|
||||
exports: { './MyClass.js': './MyClass.js' },
|
||||
}),
|
||||
},
|
||||
{
|
||||
projectName: 'my-project',
|
||||
|
|
@ -183,7 +192,7 @@ describe('trackdownIdentifier', () => {
|
|||
);
|
||||
|
||||
// Let's say we want to track down 'MyClass' in the code above
|
||||
const source = '#internal/source';
|
||||
const source = 'my-project/MyClass.js';
|
||||
const identifierName = '[default]';
|
||||
const currentFilePath = '/my/project/currentFile.js';
|
||||
const rootPath = '/my/project';
|
||||
|
|
|
|||
|
|
@ -1,22 +1,10 @@
|
|||
const { expect } = require('chai');
|
||||
const { providence } = require('../../../src/program/providence.js');
|
||||
const { QueryService } = require('../../../src/program/services/QueryService.js');
|
||||
const { InputDataService } = require('../../../src/program/services/InputDataService.js');
|
||||
const { QueryService } = require('../../../src/program/core/QueryService.js');
|
||||
const FindExportsAnalyzer = require('../../../src/program/analyzers/find-exports.js');
|
||||
const FindImportsAnalyzer = require('../../../src/program/analyzers/find-imports.js');
|
||||
|
||||
const {
|
||||
mockTargetAndReferenceProject,
|
||||
restoreMockedProjects,
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../../test-helpers/mock-log-service-helpers.js');
|
||||
const { setupAnalyzerTest } = require('../../../test-helpers/setup-analyzer-test.js');
|
||||
const { mockTargetAndReferenceProject } = require('../../../test-helpers/mock-project-helpers.js');
|
||||
|
||||
const matchImportsQueryConfig = QueryService.getQueryConfigFromAnalyzer('match-imports');
|
||||
const _providenceCfg = {
|
||||
|
|
@ -210,31 +198,7 @@ const expectedMatchesOutput = [
|
|||
];
|
||||
|
||||
describe('Analyzer "match-imports"', () => {
|
||||
const originalReferenceProjectPaths = InputDataService.referenceProjectPaths;
|
||||
const queryResults = [];
|
||||
|
||||
const cacheDisabledInitialValue = QueryService.cacheDisabled;
|
||||
|
||||
before(() => {
|
||||
QueryService.cacheDisabled = true;
|
||||
suppressNonCriticalLogs();
|
||||
});
|
||||
|
||||
after(() => {
|
||||
QueryService.cacheDisabled = cacheDisabledInitialValue;
|
||||
restoreSuppressNonCriticalLogs();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
mockWriteToJson(queryResults);
|
||||
InputDataService.referenceProjectPaths = [];
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
InputDataService.referenceProjectPaths = originalReferenceProjectPaths;
|
||||
restoreWriteToJson(queryResults);
|
||||
restoreMockedProjects();
|
||||
});
|
||||
const queryResults = setupAnalyzerTest();
|
||||
|
||||
function testMatchedEntry(targetExportedId, queryResult, importedByFiles = []) {
|
||||
const matchedEntry = queryResult.queryOutput.find(
|
||||
|
|
@ -466,6 +430,51 @@ describe('Analyzer "match-imports"', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
it(`correctly merges/dedupes double found file matches when imported in different ways`, async () => {
|
||||
const refProject = {
|
||||
path: '/target/node_modules/ref',
|
||||
name: 'ref',
|
||||
files: [
|
||||
{
|
||||
file: './src/core.js',
|
||||
code: `
|
||||
export default function x() {};
|
||||
export class RefClass extends HTMLElement {}
|
||||
`,
|
||||
},
|
||||
],
|
||||
};
|
||||
const targetProject = {
|
||||
path: '/target',
|
||||
name: 'target',
|
||||
files: [
|
||||
{
|
||||
file: './deep-imports.js',
|
||||
code: `
|
||||
import myFn1 from 'ref/src/core.js';
|
||||
import { RefClass } from 'ref/src/core.js';
|
||||
|
||||
import * as all from 'ref/src/core.js';
|
||||
`,
|
||||
},
|
||||
],
|
||||
};
|
||||
mockTargetAndReferenceProject(targetProject, refProject);
|
||||
await providence(matchImportsQueryConfig, {
|
||||
targetProjectPaths: [targetProject.path],
|
||||
referenceProjectPaths: [refProject.path],
|
||||
});
|
||||
const queryResult = queryResults[0];
|
||||
expect(queryResult.queryOutput[0].exportSpecifier.name).to.equal('[default]');
|
||||
expect(queryResult.queryOutput[0].matchesPerProject).to.eql([
|
||||
{ files: ['./deep-imports.js'], project: 'target' },
|
||||
]);
|
||||
expect(queryResult.queryOutput[1].exportSpecifier.name).to.equal('RefClass');
|
||||
expect(queryResult.queryOutput[1].matchesPerProject).to.eql([
|
||||
{ files: ['./deep-imports.js'], project: 'target' },
|
||||
]);
|
||||
});
|
||||
|
||||
describe('Inside small example project', () => {
|
||||
it(`produces a list of all matches, sorted by project`, async () => {
|
||||
mockTargetAndReferenceProject(searchTargetProject, referenceProject);
|
||||
|
|
|
|||
|
|
@ -1,45 +1,11 @@
|
|||
const { expect } = require('chai');
|
||||
const { providence } = require('../../../src/program/providence.js');
|
||||
const { QueryService } = require('../../../src/program/services/QueryService.js');
|
||||
const { InputDataService } = require('../../../src/program/services/InputDataService.js');
|
||||
const {
|
||||
mockTargetAndReferenceProject,
|
||||
restoreMockedProjects,
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../../test-helpers/mock-log-service-helpers.js');
|
||||
const { QueryService } = require('../../../src/program/core/QueryService.js');
|
||||
const { setupAnalyzerTest } = require('../../../test-helpers/setup-analyzer-test.js');
|
||||
const { mockTargetAndReferenceProject } = require('../../../test-helpers/mock-project-helpers.js');
|
||||
|
||||
describe('Analyzer "match-paths"', () => {
|
||||
const originalReferenceProjectPaths = InputDataService.referenceProjectPaths;
|
||||
const queryResults = [];
|
||||
const cacheDisabledInitialValue = QueryService.cacheDisabled;
|
||||
|
||||
before(() => {
|
||||
QueryService.cacheDisabled = true;
|
||||
suppressNonCriticalLogs();
|
||||
});
|
||||
|
||||
after(() => {
|
||||
QueryService.cacheDisabled = cacheDisabledInitialValue;
|
||||
restoreSuppressNonCriticalLogs();
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
InputDataService.referenceProjectPaths = [];
|
||||
mockWriteToJson(queryResults);
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
InputDataService.referenceProjectPaths = originalReferenceProjectPaths;
|
||||
restoreWriteToJson(queryResults);
|
||||
restoreMockedProjects();
|
||||
});
|
||||
const queryResults = setupAnalyzerTest();
|
||||
|
||||
const referenceProject = {
|
||||
path: '/importing/target/project/node_modules/reference-project',
|
||||
|
|
|
|||
|
|
@ -1,19 +1,8 @@
|
|||
const { expect } = require('chai');
|
||||
const { providence } = require('../../../src/program/providence.js');
|
||||
const { QueryService } = require('../../../src/program/services/QueryService.js');
|
||||
const { InputDataService } = require('../../../src/program/services/InputDataService.js');
|
||||
const {
|
||||
mockTargetAndReferenceProject,
|
||||
restoreMockedProjects,
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../../test-helpers/mock-log-service-helpers.js');
|
||||
const { QueryService } = require('../../../src/program/core/QueryService.js');
|
||||
const { mockTargetAndReferenceProject } = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const { setupAnalyzerTest } = require('../../../test-helpers/setup-analyzer-test.js');
|
||||
|
||||
// 1. Reference input data
|
||||
const referenceProject = {
|
||||
|
|
@ -136,36 +125,168 @@ const expectedMatchesOutput = [
|
|||
// eslint-disable-next-line no-shadow
|
||||
|
||||
describe('Analyzer "match-subclasses"', () => {
|
||||
const originalReferenceProjectPaths = InputDataService.referenceProjectPaths;
|
||||
const queryResults = [];
|
||||
const cacheDisabledQInitialValue = QueryService.cacheDisabled;
|
||||
const cacheDisabledIInitialValue = InputDataService.cacheDisabled;
|
||||
const queryResults = setupAnalyzerTest();
|
||||
|
||||
before(() => {
|
||||
QueryService.cacheDisabled = true;
|
||||
InputDataService.cacheDisabled = true;
|
||||
suppressNonCriticalLogs();
|
||||
describe('Match Features', () => {
|
||||
it(`identifies all directly imported class extensions`, async () => {
|
||||
const refProject = {
|
||||
path: '/target/node_modules/ref',
|
||||
name: 'ref',
|
||||
files: [{ file: './LionComp.js', code: `export class LionComp extends HTMLElement {};` }],
|
||||
};
|
||||
const targetProject = {
|
||||
path: '/target',
|
||||
name: 'target',
|
||||
files: [
|
||||
{
|
||||
file: './WolfComp.js',
|
||||
code: `
|
||||
import { LionComp } from 'ref/LionComp.js';
|
||||
|
||||
export class WolfComp extends LionComp {}
|
||||
`,
|
||||
},
|
||||
],
|
||||
};
|
||||
mockTargetAndReferenceProject(targetProject, refProject);
|
||||
await providence(matchSubclassesQueryConfig, {
|
||||
targetProjectPaths: [targetProject.path],
|
||||
referenceProjectPaths: [refProject.path],
|
||||
});
|
||||
const queryResult = queryResults[0];
|
||||
expect(queryResult.queryOutput).eql([
|
||||
{
|
||||
exportSpecifier: {
|
||||
filePath: './LionComp.js',
|
||||
id: 'LionComp::./LionComp.js::ref',
|
||||
name: 'LionComp',
|
||||
project: 'ref',
|
||||
},
|
||||
matchesPerProject: [
|
||||
{
|
||||
files: [
|
||||
{ file: './WolfComp.js', identifier: 'WolfComp', memberOverrides: undefined },
|
||||
],
|
||||
project: 'target',
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
QueryService.cacheDisabled = cacheDisabledQInitialValue;
|
||||
InputDataService.cacheDisabled = cacheDisabledIInitialValue;
|
||||
restoreSuppressNonCriticalLogs();
|
||||
it(`identifies all indirectly imported (transitive) class extensions`, async () => {
|
||||
const refProject = {
|
||||
path: '/target/node_modules/ref',
|
||||
name: 'ref',
|
||||
files: [
|
||||
{ file: './LionComp.js', code: `export class LionComp extends HTMLElement {};` },
|
||||
{
|
||||
file: './RenamedLionComp.js',
|
||||
code: `export { LionComp as RenamedLionComp } from './LionComp.js';`,
|
||||
},
|
||||
],
|
||||
};
|
||||
const targetProject = {
|
||||
path: '/target',
|
||||
name: 'target',
|
||||
files: [
|
||||
{
|
||||
file: './WolfComp2.js',
|
||||
code: `
|
||||
import { RenamedLionComp } from 'ref/RenamedLionComp.js';
|
||||
|
||||
export class WolfComp2 extends RenamedLionComp {}
|
||||
`,
|
||||
},
|
||||
],
|
||||
};
|
||||
mockTargetAndReferenceProject(targetProject, refProject);
|
||||
await providence(matchSubclassesQueryConfig, {
|
||||
targetProjectPaths: [targetProject.path],
|
||||
referenceProjectPaths: [refProject.path],
|
||||
});
|
||||
const queryResult = queryResults[0];
|
||||
expect(queryResult.queryOutput).eql([
|
||||
{
|
||||
exportSpecifier: {
|
||||
filePath: './RenamedLionComp.js',
|
||||
id: 'RenamedLionComp::./RenamedLionComp.js::ref',
|
||||
name: 'RenamedLionComp',
|
||||
project: 'ref',
|
||||
},
|
||||
matchesPerProject: [
|
||||
{
|
||||
files: [
|
||||
{ file: './WolfComp2.js', identifier: 'WolfComp2', memberOverrides: undefined },
|
||||
],
|
||||
project: 'target',
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
beforeEach(() => {
|
||||
InputDataService.cacheDisabled = true;
|
||||
InputDataService.referenceProjectPaths = [];
|
||||
mockWriteToJson(queryResults);
|
||||
});
|
||||
it(`identifies Mixins`, async () => {
|
||||
const refProject = {
|
||||
path: '/target/node_modules/ref',
|
||||
name: 'ref',
|
||||
files: [
|
||||
{
|
||||
file: './LionMixin.js',
|
||||
code: `
|
||||
export function LionMixin(superclass) {
|
||||
return class extends superclass {};
|
||||
}`,
|
||||
},
|
||||
],
|
||||
};
|
||||
const targetProject = {
|
||||
path: '/target',
|
||||
name: 'target',
|
||||
files: [
|
||||
{
|
||||
file: './WolfCompUsingMixin.js',
|
||||
code: `
|
||||
import { LionMixin } from 'ref/LionMixin.js';
|
||||
|
||||
afterEach(() => {
|
||||
InputDataService.referenceProjectPaths = originalReferenceProjectPaths;
|
||||
restoreWriteToJson(queryResults);
|
||||
restoreMockedProjects();
|
||||
export class WolfCompUsingMixin extends LionMixin(HTMLElement) {}
|
||||
`,
|
||||
},
|
||||
],
|
||||
};
|
||||
mockTargetAndReferenceProject(targetProject, refProject);
|
||||
await providence(matchSubclassesQueryConfig, {
|
||||
targetProjectPaths: [targetProject.path],
|
||||
referenceProjectPaths: [refProject.path],
|
||||
});
|
||||
const queryResult = queryResults[0];
|
||||
expect(queryResult.queryOutput).eql([
|
||||
{
|
||||
exportSpecifier: {
|
||||
filePath: './LionMixin.js',
|
||||
id: 'LionMixin::./LionMixin.js::ref',
|
||||
name: 'LionMixin',
|
||||
project: 'ref',
|
||||
},
|
||||
matchesPerProject: [
|
||||
{
|
||||
files: [
|
||||
{
|
||||
file: './WolfCompUsingMixin.js',
|
||||
identifier: 'WolfCompUsingMixin',
|
||||
memberOverrides: undefined,
|
||||
},
|
||||
],
|
||||
project: 'target',
|
||||
},
|
||||
],
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Extracting exports', () => {
|
||||
describe('Inside small example project', () => {
|
||||
it(`identifies all indirect export specifiers consumed by "importing-target-project"`, async () => {
|
||||
mockTargetAndReferenceProject(searchTargetProject, referenceProject);
|
||||
await providence(matchSubclassesQueryConfig, _providenceCfg);
|
||||
|
|
@ -192,6 +313,7 @@ describe('Analyzer "match-subclasses"', () => {
|
|||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Matching', () => {
|
||||
// TODO: because we introduced an object in match-classes, we find duplicate entries in
|
||||
|
|
|
|||
|
|
@ -1,34 +1,20 @@
|
|||
const { expect } = require('chai');
|
||||
const {
|
||||
// mockTargetAndReferenceProject,
|
||||
mockProject,
|
||||
restoreMockedProjects,
|
||||
} = require('../../test-helpers/mock-project-helpers.js');
|
||||
const {
|
||||
mockWriteToJson,
|
||||
restoreWriteToJson,
|
||||
} = require('../../test-helpers/mock-report-service-helpers.js');
|
||||
const {
|
||||
suppressNonCriticalLogs,
|
||||
restoreSuppressNonCriticalLogs,
|
||||
} = require('../../test-helpers/mock-log-service-helpers.js');
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const { setupAnalyzerTest } = require('../../../test-helpers/setup-analyzer-test.js');
|
||||
const { QueryService } = require('../../../src/program/core/QueryService.js');
|
||||
const { providence } = require('../../../src/program/providence.js');
|
||||
const { DummyAnalyzer } = require('../../../test-helpers/templates/DummyAnalyzer.js');
|
||||
|
||||
const { QueryService } = require('../../src/program/services/QueryService.js');
|
||||
const { providence } = require('../../src/program/providence.js');
|
||||
const dummyAnalyzer = require('../../test-helpers/templates/analyzer-template.js');
|
||||
|
||||
const queryResults = [];
|
||||
/**
|
||||
* @typedef {import('../../../src/program/types/core').ProvidenceConfig} ProvidenceConfig
|
||||
*/
|
||||
|
||||
describe('Analyzer', () => {
|
||||
before(() => {
|
||||
suppressNonCriticalLogs();
|
||||
mockWriteToJson(queryResults);
|
||||
});
|
||||
|
||||
after(() => {
|
||||
restoreSuppressNonCriticalLogs();
|
||||
restoreWriteToJson(queryResults);
|
||||
});
|
||||
const dummyAnalyzer = new DummyAnalyzer();
|
||||
const queryResults = setupAnalyzerTest();
|
||||
|
||||
describe('Public api', () => {
|
||||
it('has a "name" string', async () => {
|
||||
|
|
@ -41,34 +27,32 @@ describe('Analyzer', () => {
|
|||
|
||||
it('has a "requiredAst" string', async () => {
|
||||
expect(typeof dummyAnalyzer.requiredAst).to.equal('string');
|
||||
const allowedAsts = ['babel', 'typescript', 'es-module-lexer'];
|
||||
const allowedAsts = ['babel'];
|
||||
expect(allowedAsts).to.include(dummyAnalyzer.requiredAst);
|
||||
});
|
||||
|
||||
it('has a "requiresReference" boolean', async () => {
|
||||
expect(typeof dummyAnalyzer.requiresReference).to.equal('boolean');
|
||||
expect(typeof DummyAnalyzer.requiresReference).to.equal('boolean');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Find Analyzers', async () => {
|
||||
describe.skip('Find Analyzers', async () => {
|
||||
afterEach(() => {
|
||||
restoreMockedProjects();
|
||||
});
|
||||
|
||||
// Our configuration object
|
||||
const myQueryConfigObject = QueryService.getQueryConfigFromAnalyzer(dummyAnalyzer);
|
||||
mockProject([`const validJs = true;`, `let invalidJs = false;`], {
|
||||
projectName: 'my-project',
|
||||
projectPath: '/path/to/my-project',
|
||||
filePaths: ['./test-file1.js', './test-file2.js'],
|
||||
});
|
||||
|
||||
await providence(myQueryConfigObject, {
|
||||
targetProjectPaths: ['/path/to/my-project'],
|
||||
});
|
||||
const myQueryConfigObject = QueryService.getQueryConfigFromAnalyzer(DummyAnalyzer);
|
||||
/** @type {Partial<ProvidenceConfig>} */
|
||||
const _providenceCfg = {
|
||||
targetProjectPaths: ['/fictional/project'],
|
||||
};
|
||||
|
||||
describe('Prepare phase', () => {
|
||||
it('looks for a cached result', async () => {});
|
||||
it('looks for a cached result', async () => {
|
||||
// Our configuration object
|
||||
mockProject([`const validJs = true;`, `let invalidJs = false;`]);
|
||||
await providence(myQueryConfigObject, _providenceCfg);
|
||||
});
|
||||
|
||||
it('exposes a ".targetMeta" object', async () => {});
|
||||
|
||||
|
|
@ -77,15 +61,29 @@ describe('Analyzer', () => {
|
|||
it('exposes a ".identifier" string', async () => {});
|
||||
});
|
||||
|
||||
describe('Traverse phase', () => {});
|
||||
describe('Traverse phase', () => {
|
||||
it('schedules a Babel visitor', async () => {});
|
||||
it('merges multiple Babel visitors for performance', async () => {});
|
||||
it('traverses Babel visitor and stores traversal result', async () => {});
|
||||
});
|
||||
|
||||
describe('Postprocess phase', () => {
|
||||
it('optionally post processes traversal result', async () => {});
|
||||
});
|
||||
|
||||
describe('Performance', () => {
|
||||
it('memoizes execute functions', async () => {});
|
||||
});
|
||||
|
||||
describe('Finalize phase', () => {
|
||||
it('returns an AnalyzerQueryResult', async () => {
|
||||
await providence(myQueryConfigObject, _providenceCfg);
|
||||
|
||||
const queryResult = queryResults[0];
|
||||
const { queryOutput, meta } = queryResult;
|
||||
|
||||
expect(queryOutput[0]).to.eql({
|
||||
file: './test-file1.js',
|
||||
file: './test-file-0.js',
|
||||
meta: {},
|
||||
result: [{ matched: 'entry' }],
|
||||
});
|
||||
|
|
@ -226,3 +224,7 @@ describe('Analyzer', () => {
|
|||
// });
|
||||
// });
|
||||
});
|
||||
|
||||
describe('FindAnalyzer', () => {});
|
||||
|
||||
describe('MatchAnalyzer', () => {});
|
||||
|
|
@ -1,31 +1,46 @@
|
|||
const { expect } = require('chai');
|
||||
const pathLib = require('path');
|
||||
const { InputDataService } = require('../../../src/index.js');
|
||||
const { InputDataService } = require('../../../src/program/core/InputDataService.js');
|
||||
const { memoizeConfig } = require('../../../src/program/utils/memoize.js');
|
||||
const {
|
||||
restoreMockedProjects,
|
||||
mockProject,
|
||||
mock,
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
|
||||
function restoreOriginalInputDataPaths() {
|
||||
InputDataService.referenceProjectPaths = undefined;
|
||||
InputDataService.targetProjectPaths = undefined;
|
||||
}
|
||||
/**
|
||||
* @typedef {import('../../../src/program/types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
describe('InputDataService', () => {
|
||||
const initialReferenceProjectPaths = InputDataService.referenceProjectPaths;
|
||||
const initialTargetProjectPaths = InputDataService.referenceProjectPaths;
|
||||
|
||||
function restoreOriginalInputDataPaths() {
|
||||
InputDataService.referenceProjectPaths = initialReferenceProjectPaths;
|
||||
InputDataService.targetProjectPaths = initialTargetProjectPaths;
|
||||
}
|
||||
|
||||
beforeEach(() => {
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
restoreOriginalInputDataPaths();
|
||||
restoreMockedProjects();
|
||||
memoizeConfig.isCacheDisabled = false;
|
||||
});
|
||||
|
||||
describe('Configuration', () => {
|
||||
it('allows to set referenceProjectPaths', async () => {
|
||||
/** @type {PathFromSystemRoot[]} */
|
||||
const newPaths = ['/my/path', '/my/other/path'];
|
||||
InputDataService.referenceProjectPaths = newPaths;
|
||||
expect(InputDataService.referenceProjectPaths).to.equal(newPaths);
|
||||
});
|
||||
|
||||
it('allows to set targetProjectPaths', async () => {
|
||||
/** @type {PathFromSystemRoot[]} */
|
||||
const newPaths = ['/my/path', '/my/other/path'];
|
||||
InputDataService.targetProjectPaths = newPaths;
|
||||
expect(InputDataService.targetProjectPaths).to.equal(newPaths);
|
||||
|
|
@ -35,10 +50,13 @@ describe('InputDataService', () => {
|
|||
describe('Methods', () => {
|
||||
// TODO: mock file system...
|
||||
it('"createDataObject"', async () => {
|
||||
const projectPaths = [
|
||||
pathLib.resolve(__dirname, '../../../test-helpers/project-mocks/importing-target-project'),
|
||||
];
|
||||
const inputDataPerProject = InputDataService.createDataObject(projectPaths);
|
||||
/** @type {* & PathFromSystemRoot} */
|
||||
const projectPath = pathLib.resolve(
|
||||
__dirname,
|
||||
'../../../test-helpers/project-mocks/importing-target-project',
|
||||
);
|
||||
|
||||
const inputDataPerProject = InputDataService.createDataObject([projectPath]);
|
||||
expect(Object.keys(inputDataPerProject[0].project)).to.eql([
|
||||
'path',
|
||||
'mainEntry',
|
||||
|
|
@ -0,0 +1,200 @@
|
|||
const { expect } = require('chai');
|
||||
const { QueryService } = require('../../../src/program/core/QueryService.js');
|
||||
const { DummyAnalyzer } = require('../../../test-helpers/templates/DummyAnalyzer.js');
|
||||
const FindImportsAnalyzer = require('../../../src/program/analyzers/find-imports.js');
|
||||
|
||||
/**
|
||||
* @typedef {import('../../../src/program/types/core').Analyzer} Analyzer
|
||||
* @typedef {import('../../../src/program/types/core').PathFromSystemRoot} PathFromSystemRoot
|
||||
*/
|
||||
|
||||
describe('QueryService', () => {
|
||||
describe('Methods', () => {
|
||||
describe('Retrieving QueryConfig', () => {
|
||||
it('"getQueryConfigFromRegexSearchString"', async () => {
|
||||
const result = QueryService.getQueryConfigFromRegexSearchString('x');
|
||||
expect(result).to.eql({ type: 'search', regexString: 'x' });
|
||||
|
||||
expect(() => {
|
||||
// @ts-expect-error
|
||||
QueryService.getQueryConfigFromRegexSearchString();
|
||||
}).to.throw('[QueryService.getQueryConfigFromRegexSearchString]: provide a string');
|
||||
});
|
||||
|
||||
describe('"getQueryConfigFromFeatureString"', () => {
|
||||
it('with tag, attr-key and attr-value', async () => {
|
||||
const result = QueryService.getQueryConfigFromFeatureString('tg-icon[size=xs]');
|
||||
expect(result).to.eql({
|
||||
type: 'feature',
|
||||
feature: {
|
||||
name: 'size',
|
||||
value: 'xs',
|
||||
tag: 'tg-icon',
|
||||
isAttribute: true,
|
||||
usesValueContains: false,
|
||||
usesValuePartialMatch: false,
|
||||
usesTagPartialMatch: false,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('with only tag', async () => {
|
||||
const result = QueryService.getQueryConfigFromFeatureString('tg-icon');
|
||||
expect(result).to.eql({
|
||||
type: 'feature',
|
||||
feature: {
|
||||
tag: 'tg-icon',
|
||||
usesTagPartialMatch: false,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('with only attr-key', async () => {
|
||||
const result = QueryService.getQueryConfigFromFeatureString('[attr]');
|
||||
expect(result).to.eql({
|
||||
type: 'feature',
|
||||
feature: {
|
||||
name: 'attr',
|
||||
value: undefined,
|
||||
tag: '',
|
||||
isAttribute: true,
|
||||
usesValueContains: false,
|
||||
usesValuePartialMatch: false,
|
||||
usesTagPartialMatch: false,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('with only attr-key and attr-value', async () => {
|
||||
const result = QueryService.getQueryConfigFromFeatureString('[attr=x]');
|
||||
expect(result).to.eql({
|
||||
type: 'feature',
|
||||
feature: {
|
||||
name: 'attr',
|
||||
value: 'x',
|
||||
tag: '',
|
||||
isAttribute: true,
|
||||
usesValueContains: false,
|
||||
usesValuePartialMatch: false,
|
||||
usesTagPartialMatch: false,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
describe('With partial value', async () => {
|
||||
it('with tag, attr-key and attr-value', async () => {
|
||||
const result = QueryService.getQueryConfigFromFeatureString('tg-icon*[size*=xs*]');
|
||||
expect(result).to.eql({
|
||||
type: 'feature',
|
||||
feature: {
|
||||
name: 'size',
|
||||
value: 'xs',
|
||||
tag: 'tg-icon',
|
||||
isAttribute: true,
|
||||
usesValueContains: true,
|
||||
usesValuePartialMatch: true,
|
||||
usesTagPartialMatch: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('with only tag', async () => {
|
||||
const result = QueryService.getQueryConfigFromFeatureString('tg-icon*');
|
||||
expect(result).to.eql({
|
||||
type: 'feature',
|
||||
feature: {
|
||||
tag: 'tg-icon',
|
||||
usesTagPartialMatch: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('with only attr-key', async () => {
|
||||
const result = QueryService.getQueryConfigFromFeatureString('[attr*]');
|
||||
expect(result).to.eql({
|
||||
type: 'feature',
|
||||
feature: {
|
||||
name: 'attr',
|
||||
value: undefined,
|
||||
tag: '',
|
||||
isAttribute: true,
|
||||
usesValueContains: true,
|
||||
usesValuePartialMatch: false,
|
||||
usesTagPartialMatch: false,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('with only attr-key and attr-value', async () => {
|
||||
const result = QueryService.getQueryConfigFromFeatureString('[attr*=x*]');
|
||||
expect(result).to.eql({
|
||||
type: 'feature',
|
||||
feature: {
|
||||
name: 'attr',
|
||||
value: 'x',
|
||||
tag: '',
|
||||
isAttribute: true,
|
||||
usesValueContains: true,
|
||||
usesValuePartialMatch: true,
|
||||
usesTagPartialMatch: false,
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('throws when no string provided', async () => {
|
||||
expect(() => {
|
||||
QueryService.getQueryConfigFromFeatureString();
|
||||
}).to.throw('[QueryService.getQueryConfigFromFeatureString]: provide a string');
|
||||
});
|
||||
});
|
||||
|
||||
describe('"getQueryConfigFromAnalyzer"', () => {
|
||||
const myAnalyzerCfg = { targetProjectPath: /** @type {PathFromSystemRoot} */ ('/my/path') };
|
||||
it('accepts a string as first argument', async () => {
|
||||
const result = QueryService.getQueryConfigFromAnalyzer('find-imports', myAnalyzerCfg);
|
||||
expect(result).to.eql({
|
||||
type: 'ast-analyzer',
|
||||
analyzerName: 'find-imports',
|
||||
analyzerConfig: myAnalyzerCfg,
|
||||
analyzer: FindImportsAnalyzer,
|
||||
});
|
||||
});
|
||||
|
||||
it('accepts a constructor as first argument', async () => {
|
||||
const result = QueryService.getQueryConfigFromAnalyzer(
|
||||
/** @type {* & Analyzer} */ (DummyAnalyzer),
|
||||
myAnalyzerCfg,
|
||||
);
|
||||
expect(result).to.eql({
|
||||
type: 'ast-analyzer',
|
||||
analyzerName: 'dummy-analyzer',
|
||||
analyzerConfig: myAnalyzerCfg,
|
||||
analyzer: DummyAnalyzer,
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('QueryResults', () => {
|
||||
describe.skip('"grepSearch"', () => {
|
||||
it('with FeatureConfig', async () => {
|
||||
const featureCfg = QueryService.getQueryConfigFromFeatureString('tg-icon[size=xs]');
|
||||
const result = QueryService.grepSearch(featureCfg);
|
||||
expect(result).to.eql({
|
||||
type: 'ast-analyzer',
|
||||
analyzerName: 'find-imports',
|
||||
analyzerConfig: { x: 'y' },
|
||||
analyzer: FindImportsAnalyzer,
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
it('"astSearch"', async () => {});
|
||||
});
|
||||
|
||||
describe('Ast retrieval', () => {
|
||||
it('"addAstToProjectsData"', async () => {});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
@ -1,16 +1,15 @@
|
|||
const { expect } = require('chai');
|
||||
const { memoize } = require('../../../src/program/utils/memoize.js');
|
||||
const { InputDataService } = require('../../../src/program/services/InputDataService.js');
|
||||
const { memoize, memoizeConfig } = require('../../../src/program/utils/memoize.js');
|
||||
|
||||
const cacheDisabledInitialValue = InputDataService.cacheDisabled;
|
||||
const cacheDisabledInitialValue = memoizeConfig.isCacheDisabled;
|
||||
|
||||
describe('Memoize', () => {
|
||||
beforeEach(() => {
|
||||
// This is important, since memoization only works when the cache is not disabled
|
||||
InputDataService.cacheDisabled = false;
|
||||
memoizeConfig.isCacheDisabled = false;
|
||||
});
|
||||
afterEach(() => {
|
||||
InputDataService.cacheDisabled = cacheDisabledInitialValue;
|
||||
memoizeConfig.isCacheDisabled = cacheDisabledInitialValue;
|
||||
});
|
||||
|
||||
describe('With primitives', () => {
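For context on why these tests flip memoizeConfig.isCacheDisabled around each run, here is a minimal usage sketch. It assumes memoize(fn) returns an argument-keyed caching wrapper and that isCacheDisabled makes the wrapper call through on every invocation; neither detail is confirmed by this diff.

const { memoize, memoizeConfig } = require('../../../src/program/utils/memoize.js');

let calls = 0;
// Hypothetical expensive function; memoize presumably caches per distinct argument.
const double = memoize(n => {
  calls += 1;
  return n * 2;
});

memoizeConfig.isCacheDisabled = false;
double(2);
double(2); // expected to hit the cache, so `calls` would stay at 1

memoizeConfig.isCacheDisabled = true; // what the tests above do to avoid cross-test leakage
double(2); // expected to recompute, bumping `calls` to 2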
|
||||
|
|
|
|||
|
|
@ -5,9 +5,14 @@ const {
|
|||
mockTargetAndReferenceProject,
|
||||
} = require('../../../test-helpers/mock-project-helpers.js');
|
||||
const { resolveImportPath } = require('../../../src/program/utils/resolve-import-path.js');
|
||||
const { memoizeConfig } = require('../../../src/program/utils/memoize.js');
|
||||
|
||||
describe('resolveImportPath', () => {
|
||||
beforeEach(() => {
|
||||
memoizeConfig.isCacheDisabled = true;
|
||||
});
|
||||
afterEach(() => {
|
||||
memoizeConfig.isCacheDisabled = false;
|
||||
restoreMockedProjects();
|
||||
});
|
||||
|
||||
|
|
|
|||