fix: cleanup dependencies, code and deprecated cli commands (extend-docs, search, etc.)
parent f6693b3bc4
commit 519cb9c91a
38 changed files with 39828 additions and 2254 deletions
@@ -51,4 +51,20 @@ module.exports = {
  env: {
    es2020: true,
  },
  // ignores: [
  //   'node_modules',
  //   'coverage/',
  //   'bundlesize/',
  //   '.history/',
  //   'storybook-static/',
  //   '*.d.ts',
  //   '_site-dev',
  //   '_site',
  //   'docs/_merged_*',
  //   'patches/',

  //   '/docs/_assets/scoped-custom-element-registry.min.js',
  //   '/docs/_assets/scoped-custom-element-registry.min.js.map',
  //   '/docs/_merged_assets/scoped-custom-element-registry.min.js',
  // ],
};
 2841  package-lock.json  (generated)
File diff suppressed because it is too large.

 37792  package-lock.jsonx  (new file)
File diff suppressed because it is too large.
@@ -29,12 +29,12 @@
    "types": "wireit"
  },
  "dependencies": {
    "@babel/generator": "^7.22.5",
    "@babel/parser": "^7.22.5",
    "@babel/traverse": "^7.23.2",
    "@babel/types": "^7.22.5",
    "es-module-lexer": "^0.3.6",
    "globby": "^13.2.0",
    "@babel/generator": "^7.24.4",
    "@babel/parser": "^7.24.4",
    "@babel/traverse": "^7.24.1",
    "@babel/types": "^7.24.0",
    "es-module-lexer": "^0.3.26",
    "globby": "^14.0.1",
    "prettier": "^2.8.8"
  },
  "keywords": [
@@ -30,8 +30,8 @@
    "postinstall": "npx patch-package",
    "match-lion-imports": "npm run providence -- analyze match-imports --search-target-collection @lion-targets --reference-collection @lion-references --measure-perf --skip-check-match-compatibility",
    "providence": "node --max-old-space-size=8192 ./src/cli/index.js",
    "publish-docs": "node ../../packages-node/publish-docs/src/cli.js --github-url https://github.com/ing-bank/lion/ --git-root-dir ../../",
    "prepublishOnly": "npm run publish-docs",
    "publish-docs": "node ../../packages-node/publish-docs/src/cli.js --github-url https://github.com/ing-bank/lion/ --git-root-dir ../../",
    "test:node": "npm run test:node:unit && npm run test:node:e2e",
    "test:node:e2e": "mocha './test-node/**/*.e2e.js' --timeout 60000",
    "test:node:unit": "mocha './{test-node,src}/**/*.test.js'"

@@ -43,20 +43,19 @@
    "@babel/types": "^7.24.5",
    "@rollup/plugin-node-resolve": "^15.2.3",
    "@swc/core": "^1.5.5",
    "@web/dev-server": "^0.4.4",
    "commander": "^2.20.3",
    "inquirer": "^9.2.20",
    "lit-element": "^4.0.5",
    "parse5": "^7.1.2",
    "semver": "^7.6.1",
    "swc-to-babel": "^1.26.0"
    "swc-to-babel": "^3.0.1"
  },
  "devDependencies": {
    "@types/chai": "^4.3.16",
    "@types/inquirer": "^9.0.7",
    "@types/mocha": "^10.0.6",
    "@web/dev-server": "^0.4.4",
    "@web/dev-server-core": "^0.7.1",
    "globby": "^14.0.1",
    "lit-element": "^4.0.5",
    "mock-fs": "^5.2.0",
    "mock-require": "^3.0.3"
  },

@@ -71,6 +70,9 @@
    "semver",
    "software"
  ],
  "engines": {
    "node": ">=18.0.0"
  },
  "publishConfig": {
    "access": "public"
  }
@@ -1,10 +1,11 @@
/* eslint-disable no-shadow */
import path from 'path';
import child_process from 'child_process'; // eslint-disable-line camelcase
import path from 'path';

import { globbySync } from 'globby'; // eslint-disable-line import/no-extraneous-dependencies
import { optimisedGlob } from '../program/utils/optimised-glob.js';
import { LogService } from '../program/core/LogService.js';
import { toPosixPath } from '../program/utils/to-posix-path.js';
import { LogService } from '../program/core/LogService.js';
import { fsAdapter } from '../program/utils/fs-adapter.js';

/**

@@ -195,31 +196,6 @@ export async function appendProjectDependencyPaths(
  return depProjectPaths.concat(rootPaths).map(toPosixPath);
}

/**
 * Will install all npm and bower deps, so an analysis can be performed on them as well.
 * Relevant when '--target-dependencies' is supplied.
 * @param {string[]} searchTargetPaths
 */
export async function installDeps(searchTargetPaths) {
  for (const targetPath of searchTargetPaths) {
    LogService.info(`Installing npm dependencies for ${path.basename(targetPath)}`);
    try {
      await spawnProcess('npm i --no-progress', { cwd: targetPath });
    } catch (e) {
      // @ts-expect-error
      LogService.error(e);
    }

    LogService.info(`Installing bower dependencies for ${path.basename(targetPath)}`);
    try {
      await spawnProcess(`bower i --production --force-latest`, { cwd: targetPath });
    } catch (e) {
      // @ts-expect-error
      LogService.error(e);
    }
  }
}

export const _cliHelpersModule = {
  appendProjectDependencyPaths,
  pathsArrayFromCollectionName,

@@ -228,7 +204,6 @@ export const _cliHelpersModule = {
  setQueryMethod,
  targetDefault,
  spawnProcess,
  installDeps,
  csToArray,
  flatten,
};
@@ -1,23 +1,17 @@
import child_process from 'child_process'; // eslint-disable-line camelcase
import path from 'path';

import commander from 'commander';

import { LogService } from '../program/core/LogService.js';
import { QueryService } from '../program/core/QueryService.js';
import { InputDataService } from '../program/core/InputDataService.js';
import { toPosixPath } from '../program/utils/to-posix-path.js';
import { getCurrentDir } from '../program/utils/get-current-dir.js';
import { dashboardServer } from '../dashboard/server.js';
import { QueryService } from '../program/core/QueryService.js';
import { _providenceModule } from '../program/providence.js';
import { _cliHelpersModule } from './cli-helpers.js';
import { _extendDocsModule } from './launch-providence-with-extend-docs.js';
import { _promptAnalyzerMenuModule } from './prompt-analyzer-menu.js';
import { fsAdapter } from '../program/utils/fs-adapter.js';
import { _cliHelpersModule } from './cli-helpers.js';

/**
 * @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
 * @typedef {import('../../types/index.js').ProvidenceCliConf} ProvidenceCliConf
 * @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
 */

const { version } = JSON.parse(

@@ -26,7 +20,7 @@ const { version } = JSON.parse(
    'utf8',
  ),
);
const { extensionsFromCs, setQueryMethod, targetDefault, installDeps } = _cliHelpersModule;
const { extensionsFromCs, targetDefault } = _cliHelpersModule;

/**
 * @param {{cwd?:string; argv?: string[]; providenceConf?: Partial<ProvidenceCliConf>}} cfg

@@ -42,77 +36,34 @@ export async function cli({ cwd = process.cwd(), providenceConf, argv = process.
    rejectCli = reject;
  });

  /** @type {'analyzer'|'queryString'} */
  let searchMode;
  /** @type {object} */
  let analyzerOptions;
  /** @type {object} */
  let featureOptions;
  /** @type {object} */
  let regexSearchOptions;

  // TODO: change back to "InputDataService.getExternalConfig();" once full package ESM
  const externalConfig = providenceConf;

  /**
   * @param {'search-query'|'feature-query'|'analyzer-query'} searchMode
   * @param {{regexString: string}} regexSearchOptions
   * @param {{queryString: string}} featureOptions
   * @param {{name:AnalyzerName; config:object;promptOptionalConfig:object}} analyzerOptions
   * @returns
   * @param {{analyzerOptions:{name:AnalyzerName; config:object;promptOptionalConfig:object}}} opts
   */
  async function getQueryConfigAndMeta(
    /* eslint-disable no-shadow */
    searchMode,
    regexSearchOptions,
    featureOptions,
    analyzerOptions,
    /* eslint-enable no-shadow */
  ) {
  async function getQueryConfigAndMeta(opts) {
    let queryConfig = null;
    let queryMethod = null;

    if (searchMode === 'search-query') {
      queryConfig = QueryService.getQueryConfigFromRegexSearchString(
        regexSearchOptions.regexString,
      );
      queryMethod = 'grep';
    } else if (searchMode === 'feature-query') {
      queryConfig = QueryService.getQueryConfigFromFeatureString(featureOptions.queryString);
      queryMethod = 'grep';
    } else if (searchMode === 'analyzer-query') {
      let { name, config } = analyzerOptions;
    // eslint-disable-next-line prefer-const
    let { name, config } = opts.analyzerOptions;
    if (!name) {
      const answers = await _promptAnalyzerMenuModule.promptAnalyzerMenu();

      name = answers.analyzerName;
    }
    if (!config) {
      const answers = await _promptAnalyzerMenuModule.promptAnalyzerConfigMenu(
        name,
        analyzerOptions.promptOptionalConfig,
      );
      config = answers.analyzerConfig;
      throw new Error('Please provide an analyzer name');
    }
    // Will get metaConfig from ./providence.conf.js
    const metaConfig = externalConfig ? externalConfig.metaConfig : {};
    config = { ...config, metaConfig };
    queryConfig = await QueryService.getQueryConfigFromAnalyzer(name, config);
    queryMethod = 'ast';
    } else {
      LogService.error('Please define a feature, analyzer or search');
      process.exit(1);
    }
    return { queryConfig, queryMethod };
  }

  async function launchProvidence() {
    const { queryConfig, queryMethod } = await getQueryConfigAndMeta(
      searchMode,
      regexSearchOptions,
      featureOptions,
      analyzerOptions,
    );
    const { queryConfig, queryMethod } = await getQueryConfigAndMeta({ analyzerOptions });

    const searchTargetPaths = commander.searchTargetCollection || commander.searchTargetPaths;
    let referencePaths;

@@ -163,29 +114,6 @@ export async function cli({ cwd = process.cwd(), providenceConf, argv = process.
    });
  }

  /**
   * @param {{update:boolean; deps:boolean;createVersionHistory:boolean}} options
   */
  async function manageSearchTargets(options) {
    const basePath = path.join(__dirname, '../..');
    if (options.update) {
      LogService.info('git submodule update --init --recursive');

      // eslint-disable-next-line camelcase
      const updateResult = child_process.execSync('git submodule update --init --recursive', {
        cwd: basePath,
      });

      LogService.info(String(updateResult));
    }
    if (options.deps) {
      await installDeps(commander.searchTargetPaths);
    }
    if (options.createVersionHistory) {
      await installDeps(commander.searchTargetPaths);
    }
  }

  commander
    .version(version, '-v, --version')
    .option('-e, --extensions [extensions]', 'extensions like "js,html"', extensionsFromCs, [

@@ -264,29 +192,6 @@ export async function cli({ cwd = process.cwd(), providenceConf, argv = process.
      'Uses babel instead of swc. This will be slower, but guaranteed to be 100% compatible with @babel/generate and @babel/traverse',
    );

  commander
    .command('search <regex>')
    .alias('s')
    .description('perfoms regex search string like "my-.*-comp"')
    .action((regexString, options) => {
      searchMode = 'search-query';
      regexSearchOptions = options;
      regexSearchOptions.regexString = regexString;
      launchProvidence().then(resolveCli).catch(rejectCli);
    });

  commander
    .command('feature <query-string>')
    .alias('f')
    .description('query like "tg-icon[size=xs]"')
    .option('-m, --method [method]', 'query method: "grep" or "ast"', setQueryMethod, 'grep')
    .action((queryString, options) => {
      searchMode = 'feature-query';
      featureOptions = options;
      featureOptions.queryString = queryString;
      launchProvidence().then(resolveCli).catch(rejectCli);
    });

  commander
    .command('analyze [analyzer-name]')
    .alias('a')

@@ -301,84 +206,11 @@ export async function cli({ cwd = process.cwd(), providenceConf, argv = process.
    )
    .option('-c, --config [config]', 'configuration object for analyzer', c => JSON.parse(c))
    .action((analyzerName, options) => {
      searchMode = 'analyzer-query';
      analyzerOptions = options;
      analyzerOptions.name = analyzerName;
      launchProvidence().then(resolveCli).catch(rejectCli);
    });

  commander
    .command('extend-docs')
    .alias('e')
    .description(
      `Generates data for "babel-extend-docs" plugin. These data are generated by the "match-paths"
      plugin, which automatically resolves import paths from reference projects
      (say [@lion/input, @lion/textarea, ...etc]) to a target project (say "wolf-ui").`,
    )
    .option(
      '--prefix-from [prefix-from]',
      `Prefix for components of reference layer. By default "lion"`,
      a => a,
      'lion',
    )
    .option(
      '--prefix-to [prefix-to]',
      `Prefix for components of reference layer. For instance "wolf"`,
    )
    .option(
      '--output-folder [output-folder]',
      `This is the file path where the result file "providence-extend-docs-data.json" will be written to`,
      p => toPosixPath(path.resolve(process.cwd(), p.trim())),
      process.cwd(),
    )
    .action(options => {
      if (!options.prefixTo) {
        LogService.error(`Please provide a "prefix to" like '--prefix-to "myprefix"'`);
        process.exit(1);
      }
      if (!commander.referencePaths) {
        LogService.error(`Please provide referencePaths path like '-r "node_modules/@lion/*"'`);
        process.exit(1);
      }
      const prefixCfg = { from: options.prefixFrom, to: options.prefixTo };
      _extendDocsModule
        .launchProvidenceWithExtendDocs({
          referenceProjectPaths: commander.referencePaths,
          prefixCfg,
          outputFolder: options.outputFolder,
          extensions: commander.extensions,
          allowlist: commander.allowlist,
          allowlistReference: commander.allowlistReference,
          skipCheckMatchCompatibility: commander.skipCheckMatchCompatibility,
          cwd,
        })
        .then(resolveCli)
        .catch(rejectCli);
    });

  commander
    .command('manage-projects')
    .description(
      `Before running a query, be sure to have search-targets up to date (think of
      npm/bower dependencies, latest version etc.)`,
    )
    .option('-u, --update', 'gets latest of all search-targets and references')
    .option('-d, --deps', 'installs npm/bower dependencies of search-targets')
    .option('-h, --create-version-history', 'gets latest of all search-targets and references')
    .action(options => {
      manageSearchTargets(options);
    });

  commander
    .command('dashboard')
    .description(
      `Runs an interactive dashboard that shows all aggregated data from proivdence-output, configured
      via providence.conf`,
    )
    .action(() => {
      dashboardServer.start();
    });

  commander.parse(argv);

  await cliPromise;
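With the search, feature, extend-docs, manage-projects and dashboard commands removed, analyze is the only flow the CLI still wires up. A minimal sketch of the equivalent programmatic call, using only modules imported above (the analyzer name comes from the match-lion-imports script; the target path is an illustrative placeholder, not a value from this commit):

// Sketch only, not part of the commit: roughly what `providence analyze match-imports` resolves to.
const queryConfig = await QueryService.getQueryConfigFromAnalyzer('match-imports', { metaConfig: {} });
await _providenceModule.providence(queryConfig, { targetProjectPaths: ['/path/to/target-project'] });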
@ -1,163 +0,0 @@
|
|||
import path from 'path';
|
||||
import inquirer from 'inquirer';
|
||||
import traverse from '@babel/traverse';
|
||||
import { InputDataService } from '../program/core/InputDataService.js';
|
||||
import { AstService } from '../program/core/AstService.js';
|
||||
import { LogService } from '../program/core/LogService.js';
|
||||
import JsdocCommentParser from '../program/utils/jsdoc-comment-parser.js';
|
||||
import { getCurrentDir } from '../program/utils/get-current-dir.js';
|
||||
import { fsAdapter } from '../program/utils/fs-adapter.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('../../types/index.js').TargetDepsObj} TargetDepsObj
|
||||
* @typedef {import('../../types/index.js').TargetOrRefCollectionsObj} TargetOrRefCollectionsObj
|
||||
* @typedef {import('../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
|
||||
*/
|
||||
|
||||
/**
|
||||
* Extracts name, defaultValue, optional, type, desc from JsdocCommentParser.parse method
|
||||
* result
|
||||
* @param {{tagName:string;tagValue:string}[]} jsdoc
|
||||
* @returns {{ name:string, defaultValue:string, optional:boolean, type:string, desc:string }[]}
|
||||
*/
|
||||
function getPropsFromParsedJsDoc(jsdoc) {
|
||||
const jsdocProps = jsdoc.filter(p => p.tagName === '@property');
|
||||
const options = jsdocProps.map(({ tagValue }) => {
|
||||
// eslint-disable-next-line no-unused-vars
|
||||
const [_, type, nameOptionalDefault, desc] = tagValue.match(/\{(.*)\}\s*([^\s]*)\s*(.*)/) || [];
|
||||
let nameDefault = nameOptionalDefault;
|
||||
let optional = false;
|
||||
if (nameOptionalDefault.startsWith('[') && nameOptionalDefault.endsWith(']')) {
|
||||
optional = true;
|
||||
nameDefault = nameOptionalDefault.slice(1).slice(0, -1);
|
||||
}
|
||||
const [name, defaultValue] = nameDefault.split('=');
|
||||
return { name, defaultValue, optional, type, desc };
|
||||
});
|
||||
return options;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} file
|
||||
*/
|
||||
function getAnalyzerOptions(file) {
|
||||
const code = fsAdapter.fs.readFileSync(file, 'utf8');
|
||||
const babelAst = AstService.getAst(code, 'swc-to-babel', { filePath: file });
|
||||
|
||||
let commentNode;
|
||||
traverse.default(babelAst, {
|
||||
// eslint-disable-next-line no-shadow
|
||||
VariableDeclaration(astPath) {
|
||||
const { node } = astPath;
|
||||
if (!node.leadingComments) {
|
||||
return;
|
||||
}
|
||||
node.declarations.forEach(decl => {
|
||||
// @ts-expect-error
|
||||
if (decl?.id?.name === 'cfg') {
|
||||
// eslint-disable-next-line prefer-destructuring
|
||||
commentNode = node.leadingComments?.[0];
|
||||
}
|
||||
});
|
||||
},
|
||||
});
|
||||
|
||||
if (commentNode) {
|
||||
const jsdoc = JsdocCommentParser.parse(commentNode);
|
||||
return getPropsFromParsedJsDoc(jsdoc);
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {PathFromSystemRoot} dir
|
||||
* @param {boolean} [shouldGetOptions]
|
||||
*/
|
||||
async function gatherAnalyzers(dir, shouldGetOptions) {
|
||||
return InputDataService.gatherFilesFromDir(dir, { depth: 0 }).map(file => {
|
||||
const analyzerObj = { file, name: path.basename(file, '.js') };
|
||||
if (shouldGetOptions) {
|
||||
analyzerObj.options = getAnalyzerOptions(file);
|
||||
}
|
||||
return analyzerObj;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {AnalyzerName} analyzerName
|
||||
* @param {*} promptOptionalConfig
|
||||
* @param {PathFromSystemRoot} [dir]
|
||||
* @returns
|
||||
*/
|
||||
export async function promptAnalyzerConfigMenu(
|
||||
analyzerName,
|
||||
promptOptionalConfig,
|
||||
dir = /** @type {PathFromSystemRoot} */ (
|
||||
path.resolve(getCurrentDir(import.meta.url), '../program/analyzers')
|
||||
),
|
||||
) {
|
||||
const menuOptions = await gatherAnalyzers(dir, true);
|
||||
const analyzer = menuOptions.find(o => o.name === analyzerName);
|
||||
if (!analyzer) {
|
||||
LogService.error(`[promptAnalyzerConfigMenu] analyzer "${analyzerName}" not found.`);
|
||||
process.exit(1);
|
||||
}
|
||||
let configAnswers;
|
||||
if (analyzer.options) {
|
||||
configAnswers = await inquirer.prompt(
|
||||
analyzer.options
|
||||
.filter(a => promptOptionalConfig || !a.optional)
|
||||
.map(a => ({
|
||||
name: a.name,
|
||||
message: a.description,
|
||||
...(a.defaultValue ? { default: a.defaultValue } : {}),
|
||||
})),
|
||||
);
|
||||
|
||||
Object.entries(configAnswers).forEach(([key, value]) => {
|
||||
const { type } = analyzer.options.find(o => o.name === key);
|
||||
if (type.toLowerCase() === 'boolean') {
|
||||
configAnswers[key] = value === 'false' ? false : Boolean(value);
|
||||
} else if (type.toLowerCase() === 'number') {
|
||||
configAnswers[key] = Number(value);
|
||||
} else if (type.toLowerCase() !== 'string') {
|
||||
if (value) {
|
||||
configAnswers[key] = JSON.parse(value);
|
||||
} else {
|
||||
// Make sure to not override predefined values with undefined ones
|
||||
delete configAnswers[key];
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return {
|
||||
analyzerConfig: configAnswers,
|
||||
};
|
||||
}
|
||||
|
||||
export async function promptAnalyzerMenu(
|
||||
dir = /** @type {PathFromSystemRoot} */ (
|
||||
path.resolve(getCurrentDir(import.meta.url), '../program/analyzers')
|
||||
),
|
||||
) {
|
||||
const menuOptions = gatherAnalyzers(dir);
|
||||
const answers = await inquirer.prompt([
|
||||
{
|
||||
type: 'list',
|
||||
name: 'analyzerName',
|
||||
message: 'Which analyzer do you want to run?',
|
||||
choices: menuOptions.map(o => o.name),
|
||||
},
|
||||
]);
|
||||
return {
|
||||
analyzerName: answers.analyzerName,
|
||||
};
|
||||
}
|
||||
|
||||
export const _promptAnalyzerMenuModule = {
|
||||
promptAnalyzerMenu,
|
||||
promptAnalyzerConfigMenu,
|
||||
};
|
||||
|
|
@ -1,10 +1,10 @@
|
|||
/* eslint-disable no-shadow, no-param-reassign */
|
||||
import path from 'path';
|
||||
|
||||
import t from '@babel/types';
|
||||
// @ts-ignore
|
||||
import babelTraverse from '@babel/traverse';
|
||||
import { Analyzer } from '../core/Analyzer.js';
|
||||
import { trackDownIdentifierFromScope } from './helpers/track-down-identifier--legacy.js';
|
||||
import { trackDownIdentifierFromScope } from '../utils/track-down-identifier--legacy.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('@babel/types').File} File
|
||||
|
|
|
|||
|
|
@ -1,13 +1,15 @@
|
|||
import path from 'path';
|
||||
import t from '@babel/types';
|
||||
|
||||
import babelTraverse from '@babel/traverse';
|
||||
import t from '@babel/types';
|
||||
|
||||
import { trackDownIdentifierFromScope } from '../utils/track-down-identifier--legacy.js';
|
||||
import { Analyzer } from '../core/Analyzer.js';
|
||||
import { trackDownIdentifierFromScope } from './helpers/track-down-identifier--legacy.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('@babel/types').File} File
|
||||
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../../types/index.js').FindCustomelementsConfig} FindCustomelementsConfig
|
||||
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('@babel/types').File} File
|
||||
*/
|
||||
|
||||
function cleanup(transformedEntry) {
|
||||
|
|
|
|||
|
|
@ -1,32 +1,33 @@
|
|||
/* eslint-disable no-shadow, no-param-reassign */
|
||||
import path from 'path';
|
||||
import { swcTraverse } from '../utils/swc-traverse.js';
|
||||
import { getAssertionType } from '../utils/get-assertion-type.js';
|
||||
import { Analyzer } from '../core/Analyzer.js';
|
||||
import { trackDownIdentifier } from './helpers/track-down-identifier.js';
|
||||
import { normalizeSourcePaths } from './helpers/normalize-source-paths.js';
|
||||
|
||||
import { getReferencedDeclaration } from '../utils/get-source-code-fragment-of-declaration.js';
|
||||
import { normalizeSourcePaths } from './helpers/normalize-source-paths.js';
|
||||
import { trackDownIdentifier } from '../utils/track-down-identifier.js';
|
||||
import { getAssertionType } from '../utils/get-assertion-type.js';
|
||||
import { swcTraverse } from '../utils/swc-traverse.js';
|
||||
import { LogService } from '../core/LogService.js';
|
||||
import { Analyzer } from '../core/Analyzer.js';
|
||||
|
||||
/**
|
||||
* @typedef {import("@swc/core").Module} SwcAstModule
|
||||
* @typedef {import("@swc/core").Node} SwcNode
|
||||
* @typedef {import("@swc/core").VariableDeclaration} SwcVariableDeclaration
|
||||
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
|
||||
* @typedef {{ exportSpecifiers:string[]; localMap: object; source:string, __tmp: { path:string } }} FindExportsSpecifierObj
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').FindExportsAnalyzerResult} FindExportsAnalyzerResult
|
||||
* @typedef {import('../../../types/index.js').FindExportsAnalyzerEntry} FindExportsAnalyzerEntry
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').SwcScope} SwcScope
|
||||
* @typedef {import("@swc/core").VariableDeclaration} SwcVariableDeclaration
|
||||
* @typedef {import('../utils/track-down-identifier.js').RootFile} RootFile
|
||||
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
|
||||
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
|
||||
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
|
||||
* @typedef {import('../../../types/index.js').SwcVisitor} SwcVisitor
|
||||
* @typedef {import('./helpers/track-down-identifier.js').RootFile} RootFile
|
||||
* @typedef {object} RootFileMapEntry
|
||||
* @typedef {string} currentFileSpecifier this is the local name in the file we track from
|
||||
* @typedef {RootFile} rootFile contains file(filePath) and specifier
|
||||
* @typedef {import('../../../types/index.js').SwcScope} SwcScope
|
||||
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
|
||||
* @typedef {import("@swc/core").Module} SwcAstModule
|
||||
* @typedef {import("@swc/core").Node} SwcNode
|
||||
* @typedef {RootFileMapEntry[]} RootFileMap
|
||||
* @typedef {{ exportSpecifiers:string[]; localMap: object; source:string, __tmp: { path:string } }} FindExportsSpecifierObj
|
||||
* @typedef {string} currentFileSpecifier this is the local name in the file we track from
|
||||
* @typedef {object} RootFileMapEntry
|
||||
* @typedef {RootFile} rootFile contains file(filePath) and specifier
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
@ -108,14 +109,12 @@ function cleanup(transformedFile) {
|
|||
*/
|
||||
function getExportSpecifiers(node) {
|
||||
// handles default [export const g = 4];
|
||||
if (node.declaration) {
|
||||
if (node.declaration.declarations) {
|
||||
if (node.declaration?.declarations) {
|
||||
return [node.declaration.declarations[0].id.value];
|
||||
}
|
||||
if (node.declaration.identifier) {
|
||||
if (node.declaration?.identifier) {
|
||||
return [node.declaration.identifier.value];
|
||||
}
|
||||
}
|
||||
|
||||
// handles (re)named specifiers [export { x (as y)} from 'y'];
|
||||
return (node.specifiers || []).map(s => {
|
||||
|
|
|
|||
|
|
@ -1,20 +1,20 @@
|
|||
/* eslint-disable no-shadow, no-param-reassign */
|
||||
import { isRelativeSourcePath } from '../utils/relative-source-path.js';
|
||||
import { swcTraverse } from '../utils/swc-traverse.js';
|
||||
import { getAssertionType } from '../utils/get-assertion-type.js';
|
||||
import { normalizeSourcePaths } from './helpers/normalize-source-paths.js';
|
||||
import { Analyzer } from '../core/Analyzer.js';
|
||||
import { isRelativeSourcePath } from '../utils/relative-source-path.js';
|
||||
import { getAssertionType } from '../utils/get-assertion-type.js';
|
||||
import { swcTraverse } from '../utils/swc-traverse.js';
|
||||
import { LogService } from '../core/LogService.js';
|
||||
import { Analyzer } from '../core/Analyzer.js';
|
||||
|
||||
/**
|
||||
* @typedef {import("@swc/core").Module} SwcAstModule
|
||||
* @typedef {import("@swc/core").Node} SwcNode
|
||||
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
|
||||
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
|
||||
* @typedef {import('../../../types/index.js').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
|
||||
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
|
||||
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
|
||||
* @typedef {import("@swc/core").Module} SwcAstModule
|
||||
* @typedef {import("@swc/core").Node} SwcNode
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import pathLib from 'path';
|
|||
import FindImportsAnalyzer from './find-imports.js';
|
||||
import FindExportsAnalyzer from './find-exports.js';
|
||||
import { Analyzer } from '../core/Analyzer.js';
|
||||
import { fromImportToExportPerspective } from './helpers/from-import-to-export-perspective.js';
|
||||
import { fromImportToExportPerspective } from '../utils/from-import-to-export-perspective.js';
|
||||
import { transformIntoIterableFindExportsOutput } from './helpers/transform-into-iterable-find-exports-output.js';
|
||||
import { transformIntoIterableFindImportsOutput } from './helpers/transform-into-iterable-find-imports-output.js';
|
||||
|
||||
|
|
|
|||
|
|
@ -4,7 +4,7 @@ import pathLib from 'path';
|
|||
import FindClassesAnalyzer from './find-classes.js';
|
||||
import FindExportsAnalyzer from './find-exports.js';
|
||||
import { Analyzer } from '../core/Analyzer.js';
|
||||
import { fromImportToExportPerspective } from './helpers/from-import-to-export-perspective.js';
|
||||
import { fromImportToExportPerspective } from '../utils/from-import-to-export-perspective.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('../../../types/index.js').FindClassesAnalyzerResult} FindClassesAnalyzerResult
|
||||
|
|
|
|||
|
|
@ -1,8 +1,7 @@
|
|||
import child_process from 'child_process'; // eslint-disable-line camelcase
|
||||
import path from 'path';
|
||||
|
||||
import { AstService } from './AstService.js';
|
||||
import { LogService } from './LogService.js';
|
||||
import { getFilePathRelativeFromRoot } from '../utils/get-file-path-relative-from-root.js';
|
||||
import { getCurrentDir } from '../utils/get-current-dir.js';
|
||||
// import { memoize } from '../utils/memoize.js';
|
||||
|
||||
|
|
@ -31,81 +30,6 @@ const memoize = fn => fn;
|
|||
const astProjectsDataCache = new Map();
|
||||
|
||||
export class QueryService {
|
||||
/**
|
||||
* @param {string} regexString string for 'free' regex searches.
|
||||
* @returns {SearchQueryConfig}
|
||||
*/
|
||||
static getQueryConfigFromRegexSearchString(regexString) {
|
||||
if (typeof regexString !== 'string') {
|
||||
throw new Error('[QueryService.getQueryConfigFromRegexSearchString]: provide a string');
|
||||
}
|
||||
return { type: 'search', regexString };
|
||||
}
|
||||
|
||||
/**
|
||||
* Util function that can be used to parse cli input and feed the result object to a new
|
||||
* instance of QueryResult
|
||||
* @example
|
||||
* const queryConfig = QueryService.getQueryConfigFromFeatureString(”tg-icon[size=xs]”)
|
||||
* const myQueryResult = QueryService.grepSearch(inputData, queryConfig)
|
||||
* @param {string} queryString - string like ”tg-icon[size=xs]”
|
||||
* @returns {FeatureQueryConfig}
|
||||
*/
|
||||
static getQueryConfigFromFeatureString(queryString) {
|
||||
if (typeof queryString !== 'string') {
|
||||
throw new Error('[QueryService.getQueryConfigFromFeatureString]: provide a string');
|
||||
}
|
||||
|
||||
/**
|
||||
* Each candidate (tag, attrKey or attrValue) can end with asterisk.
|
||||
* @param {string} candidate for my-*[attr*=x*] 'my-*', 'attr*' or 'x*'
|
||||
* @returns {[string, boolean]}
|
||||
*/
|
||||
function parseContains(candidate) {
|
||||
const hasAsterisk = candidate ? candidate.endsWith('*') : false;
|
||||
const filtered = hasAsterisk ? candidate.slice(0, -1) : candidate;
|
||||
return [filtered, hasAsterisk];
|
||||
}
|
||||
|
||||
// Detect the features in the query
|
||||
let tagCandidate;
|
||||
let featString;
|
||||
|
||||
// Creates tag ('tg-icon') and featString ('font-icon+size=xs')
|
||||
const attrMatch = queryString.match(/(^.*)(\[(.+)\])+/);
|
||||
if (attrMatch) {
|
||||
// eslint-disable-next-line prefer-destructuring
|
||||
tagCandidate = attrMatch[1];
|
||||
// eslint-disable-next-line prefer-destructuring
|
||||
featString = attrMatch[3];
|
||||
} else {
|
||||
tagCandidate = queryString;
|
||||
}
|
||||
|
||||
const [tag, usesTagPartialMatch] = parseContains(tagCandidate);
|
||||
|
||||
let featureObj;
|
||||
if (featString) {
|
||||
const [nameCandidate, valueCandidate] = featString.split('=');
|
||||
const [name, usesValueContains] = parseContains(nameCandidate);
|
||||
const [value, usesValuePartialMatch] = parseContains(valueCandidate);
|
||||
featureObj = /** @type {Feature} */ {
|
||||
name,
|
||||
value,
|
||||
tag,
|
||||
isAttribute: true,
|
||||
usesValueContains,
|
||||
usesValuePartialMatch,
|
||||
usesTagPartialMatch,
|
||||
};
|
||||
} else {
|
||||
// Just look for tag name
|
||||
featureObj = /** @type {Feature} */ ({ tag, usesTagPartialMatch });
|
||||
}
|
||||
|
||||
return { type: 'feature', feature: featureObj };
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieves the default export found in ./program/analyzers/find-import.js
|
||||
* @param {typeof Analyzer} analyzerCtor
|
||||
|
|
@ -147,68 +71,6 @@ export class QueryService {
|
|||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Search via unix grep
|
||||
* @param {ProjectInputData} inputData
|
||||
* @param {FeatureQueryConfig|SearchQueryConfig} queryConfig
|
||||
* @param {{hasVerboseReporting:boolean;gatherFilesConfig:GatherFilesConfig}} [customConfig]
|
||||
* @returns {Promise<QueryResult>}
|
||||
*/
|
||||
static async grepSearch(inputData, queryConfig, customConfig) {
|
||||
const cfg = {
|
||||
hasVerboseReporting: false,
|
||||
gatherFilesConfig: {},
|
||||
...customConfig,
|
||||
};
|
||||
|
||||
const results = [];
|
||||
// 1. Analyze the type of query from the QueryConfig (for instance 'feature' or 'search').
|
||||
let regex;
|
||||
if (queryConfig.type === 'feature') {
|
||||
regex = this._getFeatureRegex(queryConfig.feature);
|
||||
} else if (queryConfig.type === 'search') {
|
||||
regex = queryConfig.regexString;
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
inputData.map(async projectData => {
|
||||
// 2. For all files found in project, we will do a different grep
|
||||
const projectResult = {};
|
||||
const countStdOut = await this._performGrep(projectData.project.path, regex, {
|
||||
count: true,
|
||||
gatherFilesConfig: cfg.gatherFilesConfig,
|
||||
});
|
||||
projectResult.count = Number(countStdOut);
|
||||
|
||||
if (cfg.hasVerboseReporting) {
|
||||
const detailStdout = await this._performGrep(projectData.project.path, regex, {
|
||||
count: false,
|
||||
gatherFilesConfig: cfg.gatherFilesConfig,
|
||||
});
|
||||
projectResult.files = detailStdout
|
||||
.split('\n')
|
||||
.filter(l => l)
|
||||
.map(l => {
|
||||
const [absolutePath, line] = l.split(':');
|
||||
const file = getFilePathRelativeFromRoot(absolutePath, projectData.path);
|
||||
const link = l.split(':').slice(0, 2).join(':');
|
||||
const match = l.split(':').slice(2);
|
||||
return { file, line: Number(line), match, link };
|
||||
});
|
||||
}
|
||||
results.push({ project: projectData.project, ...projectResult });
|
||||
}),
|
||||
);
|
||||
|
||||
return /** @type {QueryResult} */ {
|
||||
meta: {
|
||||
searchType: 'grep',
|
||||
query: queryConfig,
|
||||
},
|
||||
queryOutput: results,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform ast analysis
|
||||
* @param {AnalyzerQueryConfig} analyzerQueryConfig
|
||||
|
|
@ -282,81 +144,6 @@ export class QueryService {
|
|||
}
|
||||
astProjectsDataCache.set(pathAndRequiredAst, astData);
|
||||
}
|
||||
|
||||
/**
|
||||
* Performs a grep on given path for a certain tag name and feature
|
||||
* @param {Feature} feature
|
||||
*/
|
||||
static _getFeatureRegex(feature) {
|
||||
const { name, value, tag } = feature;
|
||||
let potentialTag;
|
||||
if (tag) {
|
||||
potentialTag = feature.usesTagPartialMatch ? `.*${tag}.+` : tag;
|
||||
} else {
|
||||
potentialTag = '.*';
|
||||
}
|
||||
|
||||
let regex;
|
||||
if (name) {
|
||||
if (value) {
|
||||
// We are looking for an exact match: div[class=foo] -> <div class="foo">
|
||||
let valueRe = value;
|
||||
if (feature.usesValueContains) {
|
||||
if (feature.usesValuePartialMatch) {
|
||||
// We are looking for a partial match: div[class*=foo*] -> <div class="baz foo-bar">
|
||||
valueRe = `.+${value}.+`;
|
||||
} else {
|
||||
// We are looking for an exact match inside a space separated list within an
|
||||
// attr: div[class*=foo] -> <div class="baz foo bar">
|
||||
valueRe = `((${value})|("${value} .*)|(.* ${value}")|(.* ${value} .*))`;
|
||||
}
|
||||
}
|
||||
regex = `<${potentialTag} .*${name}="${valueRe}".+>`;
|
||||
} else {
|
||||
regex = `<${potentialTag} .*${name}(>|( |=).+>)`;
|
||||
}
|
||||
} else if (tag) {
|
||||
regex = `<${potentialTag} .+>`;
|
||||
} else {
|
||||
LogService.error('Please provide a proper Feature');
|
||||
}
|
||||
|
||||
return regex;
|
||||
}
|
||||
|
||||
/**
|
||||
*
|
||||
* @param {PathFromSystemRoot} searchPath
|
||||
* @param {string} regex
|
||||
* @param {{ count:number; gatherFilesConfig:GatherFilesConfig; hasDebugEnabled:boolean }} customConfig
|
||||
* @returns
|
||||
*/
|
||||
static _performGrep(searchPath, regex, customConfig) {
|
||||
const cfg = {
|
||||
count: false,
|
||||
gatherFilesConfig: {},
|
||||
hasDebugEnabled: false,
|
||||
...customConfig,
|
||||
};
|
||||
|
||||
const /** @type {string[]} */ ext = cfg.gatherFilesConfig.extensions;
|
||||
const include = ext ? `--include="\\.(${ext.map(e => e.slice(1)).join('|')})" ` : '';
|
||||
const count = cfg.count ? ' | wc -l' : '';
|
||||
|
||||
// TODO: test on Linux (only tested on Mac)
|
||||
const cmd = `pcregrep -ornM ${include} '${regex}' ${searchPath} ${count}`;
|
||||
|
||||
if (cfg.hasDebugEnabled) {
|
||||
LogService.debug(cmd, 'grep command');
|
||||
}
|
||||
|
||||
return new Promise(resolve => {
|
||||
// eslint-disable-next-line camelcase
|
||||
child_process.exec(cmd, { maxBuffer: 200000000 }, (err, stdout) => {
|
||||
resolve(stdout);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
QueryService.cacheDisabled = false;
|
||||
QueryService.addAstToProjectsData = memoize(QueryService.addAstToProjectsData);
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import path from 'path';
|
||||
import { getHash } from '../utils/get-hash.js';
|
||||
import { hash } from '../utils/hash.js';
|
||||
import { fsAdapter } from '../utils/fs-adapter.js';
|
||||
|
||||
import { memoize } from '../utils/memoize.js';
|
||||
|
|
@ -27,7 +27,7 @@ function createResultIdentifier(searchP, cfg, refP) {
|
|||
// why encodeURIComponent: filters out slashes for path names for stuff like @lion/button
|
||||
const format = (/** @type {Project} */ p) =>
|
||||
`${encodeURIComponent(p.name)}_${p.version || (p.commitHash && p.commitHash.slice(0, 5))}`;
|
||||
const cfgHash = getHash(cfg);
|
||||
const cfgHash = hash(cfg);
|
||||
return `${format(searchP)}${refP ? `_+_${format(refP)}` : ''}__${cfgHash}`;
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -148,28 +148,6 @@ async function handleAnalyzer(queryConfig, cfg) {
|
|||
return queryResults;
|
||||
}
|
||||
|
||||
async function handleFeature(queryConfig, cfg, inputData) {
|
||||
if (cfg.queryMethod === 'grep') {
|
||||
const queryResult = await QueryService.grepSearch(inputData, queryConfig, {
|
||||
gatherFilesConfig: cfg.gatherFilesConfig,
|
||||
gatherFilesConfigReference: cfg.gatherFilesConfigReference,
|
||||
});
|
||||
return queryResult;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
async function handleRegexSearch(queryConfig, cfg, inputData) {
|
||||
if (cfg.queryMethod === 'grep') {
|
||||
const queryResult = await QueryService.grepSearch(inputData, queryConfig, {
|
||||
gatherFilesConfig: cfg.gatherFilesConfig,
|
||||
gatherFilesConfigReference: cfg.gatherFilesConfigReference,
|
||||
});
|
||||
return queryResult;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a report with usage metrics, based on a queryConfig.
|
||||
*
|
||||
|
|
@ -220,23 +198,7 @@ export async function providence(queryConfig, customConfig) {
|
|||
AstService.fallbackToBabel = true;
|
||||
}
|
||||
|
||||
let queryResults;
|
||||
if (queryConfig.type === 'ast-analyzer') {
|
||||
queryResults = await handleAnalyzer(queryConfig, cfg);
|
||||
} else {
|
||||
const inputData = await InputDataService.createDataObject(
|
||||
cfg.targetProjectPaths,
|
||||
cfg.gatherFilesConfig,
|
||||
);
|
||||
|
||||
if (queryConfig.type === 'feature') {
|
||||
queryResults = await handleFeature(queryConfig, cfg, inputData);
|
||||
report(queryResults, cfg);
|
||||
} else if (queryConfig.type === 'search') {
|
||||
queryResults = await handleRegexSearch(queryConfig, cfg, inputData);
|
||||
report(queryResults, cfg);
|
||||
}
|
||||
}
|
||||
const queryResults = await handleAnalyzer(queryConfig, cfg);
|
||||
|
||||
if (cfg.writeLogFile) {
|
||||
LogService.writeLogFile();
|
||||
|
|
|
|||
|
|
@ -1,13 +1,14 @@
|
|||
import path from 'path';
|
||||
import { isRelativeSourcePath } from '../../utils/relative-source-path.js';
|
||||
import { LogService } from '../../core/LogService.js';
|
||||
import { resolveImportPath } from '../../utils/resolve-import-path.js';
|
||||
import { toPosixPath } from '../../utils/to-posix-path.js';
|
||||
|
||||
import { isRelativeSourcePath } from './relative-source-path.js';
|
||||
import { resolveImportPath } from './resolve-import-path.js';
|
||||
import { LogService } from '../core/LogService.js';
|
||||
import { toPosixPath } from './to-posix-path.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('../../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../../../types/index.js').SpecifierSource} SpecifierSource
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../../types/index.js').SpecifierSource} SpecifierSource
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
@ -1,16 +0,0 @@
|
|||
/**
|
||||
* @param {string|object} inputValue
|
||||
* @returns {number}
|
||||
*/
|
||||
export function getHash(inputValue) {
|
||||
if (typeof inputValue === 'object') {
|
||||
// eslint-disable-next-line no-param-reassign
|
||||
inputValue = JSON.stringify(inputValue);
|
||||
}
|
||||
return inputValue.split('').reduce(
|
||||
(prevHash, currVal) =>
|
||||
// eslint-disable-next-line no-bitwise
|
||||
((prevHash << 5) - prevHash + currVal.charCodeAt(0)) | 0,
|
||||
0,
|
||||
);
|
||||
}
|
||||
|
|
@ -1,15 +1,17 @@
|
|||
import path from 'path';
|
||||
|
||||
import babelTraversePkg from '@babel/traverse';
|
||||
|
||||
import { trackDownIdentifier } from './track-down-identifier.js';
|
||||
import { AstService } from '../core/AstService.js';
|
||||
import { trackDownIdentifier } from '../analyzers/helpers/track-down-identifier.js';
|
||||
import { toPosixPath } from './to-posix-path.js';
|
||||
import { fsAdapter } from './fs-adapter.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('@babel/types').Node} Node
|
||||
* @typedef {import('@babel/traverse').NodePath} NodePath
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('@babel/traverse').NodePath} NodePath
|
||||
* @typedef {import('@babel/types').Node} Node
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -1,21 +1,22 @@
|
|||
import path from 'path';
|
||||
|
||||
import { trackDownIdentifier } from './track-down-identifier.js';
|
||||
import { swcTraverse, getPathFromNode } from './swc-traverse.js';
|
||||
import { AstService } from '../core/AstService.js';
|
||||
import { trackDownIdentifier } from '../analyzers/helpers/track-down-identifier.js';
|
||||
import { toPosixPath } from './to-posix-path.js';
|
||||
import { fsAdapter } from './fs-adapter.js';
|
||||
|
||||
/**
|
||||
* @typedef {import('@swc/core').Node} SwcNode
|
||||
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
|
||||
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
|
||||
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
|
||||
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
|
||||
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
|
||||
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
|
||||
* @typedef {import('@swc/core').Node} SwcNode
|
||||
*/
|
||||
|
||||
/**
|
||||
* @param {{rootPath:PathFromSystemRoot; localPath:PathRelativeFromProjectRoot}} opts
|
||||
* @returns
|
||||
* @returns {PathRelativeFromProjectRoot}
|
||||
*/
|
||||
export function getFilePathOrExternalSource({ rootPath, localPath }) {
|
||||
if (!localPath.startsWith('.')) {
|
||||
|
|
@ -23,7 +24,9 @@ export function getFilePathOrExternalSource({ rootPath, localPath }) {
|
|||
// but we give a 100% score if from and to are same here..
|
||||
return localPath;
|
||||
}
|
||||
return toPosixPath(path.resolve(rootPath, localPath));
|
||||
return /** @type {PathRelativeFromProjectRoot} */ (
|
||||
toPosixPath(path.resolve(rootPath, localPath))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -80,9 +83,9 @@ export function getReferencedDeclaration({ referencedIdentifierName, globalScope
|
|||
* @returns {Promise<{ sourceNodePath: SwcPath; sourceFragment: string|null; externalImportSource: string|null; }>}
|
||||
*/
|
||||
export async function getSourceCodeFragmentOfDeclaration({
|
||||
filePath,
|
||||
exportedIdentifier,
|
||||
projectRootPath,
|
||||
filePath,
|
||||
}) {
|
||||
const code = fsAdapter.fs.readFileSync(filePath, 'utf8');
|
||||
|
||||
|
|
|
|||
 18  packages-node/providence-analytics/src/program/utils/hash.js  (new file)

@@ -0,0 +1,18 @@
/**
 * @param {string|object} inputValue
 * @returns {string}
 */
export function hash(inputValue) {
  if (typeof inputValue === 'object') {
    // eslint-disable-next-line no-param-reassign
    inputValue = JSON.stringify(inputValue);
  }
  return String(
    inputValue.split('').reduce(
      (prevHash, currVal) =>
        // eslint-disable-next-line no-bitwise
        ((prevHash << 5) - prevHash + currVal.charCodeAt(0)) | 0,
      0,
    ),
  );
}
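The new hash util replaces the deleted get-hash.js; the only behavioural difference is that the reduced value is wrapped in String(), so callers such as createResultIdentifier now interpolate a string rather than a number. A small usage sketch (the input object is only an example):

import { hash } from './hash.js';

// Same bitwise reduction as the old getHash, but the result comes back as a string.
const cfgHash = hash({ analyzerName: 'find-imports' });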
@@ -1,9 +1,13 @@
export { toRelativeSourcePath, isRelativeSourcePath } from './relative-source-path.js';
export { trackDownIdentifier } from './track-down-identifier.js';
export {
  getSourceCodeFragmentOfDeclaration,
  getFilePathOrExternalSource,
} from './get-source-code-fragment-of-declaration.js';
export { optimisedGlob } from './optimised-glob.js';
export { swcTraverse } from './swc-traverse.js';
export { fsAdapter } from './fs-adapter.js';
export { memoize } from './memoize.js';
export { hash } from './hash.js';

// TODO: move trackdownIdentifier to utils as well
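Since the barrel file now also re-exports hash and optimisedGlob, downstream code can pull these utils from a single entry point. A hypothetical consumer (not part of this commit) would look like:

// Hypothetical module sitting next to the utils folder inside src/program.
import { hash, optimisedGlob, memoize } from './utils/index.js';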
|
|
|
|||
|
|
@ -1,124 +0,0 @@
|
|||
// @ts-nocheck
|
||||
/* eslint-disable */
|
||||
|
||||
/**
|
||||
* The MIT License (MIT)
|
||||
*
|
||||
* Copyright (c) 2015 Ryo Maruyama
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*/
|
||||
|
||||
// From: https://github.com/esdoc/esdoc/blob/master/src/Parser/CommentParser.js
|
||||
|
||||
/**
|
||||
* Doc Comment Parser class.
|
||||
*
|
||||
* @example
|
||||
* for (let comment of node.leadingComments) {
|
||||
* let tags = CommentParser.parse(comment);
|
||||
* console.log(tags);
|
||||
* }
|
||||
*/
|
||||
export default class JsdocCommentParser {
|
||||
/**
|
||||
* parse comment to tags.
|
||||
* @param {ASTNode} commentNode - comment node.
|
||||
* @param {string} commentNode.value - comment body.
|
||||
* @param {string} commentNode.type - CommentBlock or CommentLine.
|
||||
* @returns {Tag[]} parsed comment.
|
||||
*/
|
||||
static parse(commentNode) {
|
||||
if (!this.isESDoc(commentNode)) return [];
|
||||
|
||||
let comment = commentNode.value;
|
||||
|
||||
// TODO: refactor
|
||||
comment = comment.replace(/\r\n/gm, '\n'); // for windows
|
||||
comment = comment.replace(/^[\t ]*/gm, ''); // remove line head space
|
||||
comment = comment.replace(/^\*[\t ]?/, ''); // remove first '*'
|
||||
comment = comment.replace(/[\t ]$/, ''); // remove last space
|
||||
comment = comment.replace(/^\*[\t ]?/gm, ''); // remove line head '*'
|
||||
if (comment.charAt(0) !== '@') comment = `@desc ${comment}`; // auto insert @desc
|
||||
comment = comment.replace(/[\t ]*$/, ''); // remove tail space.
|
||||
comment = comment.replace(/```[\s\S]*?```/g, match => match.replace(/@/g, '\\ESCAPED_AT\\')); // escape code in descriptions
|
||||
comment = comment.replace(/^[\t ]*(@\w+)$/gm, '$1 \\TRUE'); // auto insert tag text to non-text tag (e.g. @interface)
|
||||
comment = comment.replace(/^[\t ]*(@\w+)[\t ](.*)/gm, '\\Z$1\\Z$2'); // insert separator (\\Z@tag\\Ztext)
|
||||
const lines = comment.split('\\Z');
|
||||
|
||||
let tagName = '';
|
||||
let tagValue = '';
|
||||
const tags = [];
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (line.charAt(0) === '@') {
|
||||
tagName = line;
|
||||
const nextLine = lines[i + 1];
|
||||
if (nextLine.charAt(0) === '@') {
|
||||
tagValue = '';
|
||||
} else {
|
||||
tagValue = nextLine;
|
||||
i++;
|
||||
}
|
||||
tagValue = tagValue
|
||||
.replace('\\TRUE', '')
|
||||
.replace(/\\ESCAPED_AT\\/g, '@')
|
||||
.replace(/^\n/, '')
|
||||
.replace(/\n*$/, '');
|
||||
tags.push({ tagName, tagValue });
|
||||
}
|
||||
}
|
||||
return tags;
|
||||
}
|
||||
|
||||
/**
|
||||
* parse node to tags.
|
||||
* @param {ASTNode} node - node.
|
||||
* @returns {{tags: Tag[], commentNode: CommentNode}} parsed comment.
|
||||
*/
|
||||
static parseFromNode(node) {
|
||||
if (!node.leadingComments) node.leadingComments = [{ type: 'CommentBlock', value: '' }];
|
||||
const commentNode = node.leadingComments[node.leadingComments.length - 1];
|
||||
const tags = this.parse(commentNode);
|
||||
|
||||
return { tags, commentNode };
|
||||
}
|
||||
|
||||
/**
|
||||
* judge doc comment or not.
|
||||
* @param {ASTNode} commentNode - comment node.
|
||||
* @returns {boolean} if true, this comment node is doc comment.
|
||||
*/
|
||||
static isESDoc(commentNode) {
|
||||
if (commentNode.type !== 'CommentBlock') return false;
|
||||
return commentNode.value.charAt(0) === '*';
|
||||
}
|
||||
|
||||
/**
|
||||
* build comment from tags
|
||||
* @param {Tag[]} tags
|
||||
* @returns {string} block comment value.
|
||||
*/
|
||||
static buildComment(tags) {
|
||||
return tags.reduce((comment, tag) => {
|
||||
const line = tag.tagValue.replace(/\n/g, '\n * ');
|
||||
return `${comment} * ${tag.tagName} \n * ${line} \n`;
|
||||
}, '*\n');
|
||||
}
|
||||
}
|
||||
|
|
@ -58,6 +58,7 @@ memoize.disableCaching = () => {
|
|||
};
|
||||
/**
|
||||
* Once testing is done, it is possible to restore caching.
|
||||
* @param {boolean} [initialValue]
|
||||
*/
|
||||
memoize.restoreCaching = initialValue => {
|
||||
shouldCache = initialValue || true;
|
||||
|
|
|
|||
|
|
@ -1,5 +1,6 @@
|
|||
import path from 'path';
|
||||
import { pathToFileURL } from 'url';
|
||||
import path from 'path';
|
||||
|
||||
import { fsAdapter } from './fs-adapter.js';
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -1,13 +1,13 @@
|
|||
/**
|
||||
* @typedef {import('../../../types/index.js').SwcTraversalContext} SwcTraversalContext
|
||||
* @typedef {import('@swc/core').VariableDeclarator} SwcVariableDeclarator
|
||||
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
|
||||
* @typedef {import('../../../types/index.js').SwcVisitor} SwcVisitor
|
||||
* @typedef {import('../../../types/index.js').SwcScope} SwcScope
|
||||
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
|
||||
* @typedef {import('@swc/core').Identifier} SwcIdentifierNode
|
||||
* @typedef {import('@swc/core').Module} SwcAstModule
|
||||
* @typedef {import('@swc/core').Node} SwcNode
|
||||
* @typedef {import('@swc/core').VariableDeclarator} SwcVariableDeclarator
|
||||
* @typedef {import('@swc/core').Identifier} SwcIdentifierNode
|
||||
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
|
||||
* @typedef {import('../../../types/index.js').SwcScope} SwcScope
|
||||
* @typedef {import('../../../types/index.js').SwcVisitor} SwcVisitor
|
||||
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
|
||||
* @typedef {import('../../../types/index.js').SwcTraversalContext} SwcTraversalContext
|
||||
*/
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@@ -1,20 +1,22 @@
/* eslint-disable no-shadow */
// @ts-nocheck
import pathLib from 'path';
import path from 'path';

import babelTraverse from '@babel/traverse';
import { isRelativeSourcePath, toRelativeSourcePath } from '../../utils/relative-source-path.js';
import { InputDataService } from '../../core/InputDataService.js';
import { resolveImportPath } from '../../utils/resolve-import-path.js';
import { AstService } from '../../core/AstService.js';
import { LogService } from '../../core/LogService.js';
import { memoize } from '../../utils/memoize.js';
import { fsAdapter } from '../../utils/fs-adapter.js';

import { isRelativeSourcePath, toRelativeSourcePath } from './relative-source-path.js';
import { InputDataService } from '../core/InputDataService.js';
import { resolveImportPath } from './resolve-import-path.js';
import { AstService } from '../core/AstService.js';
import { LogService } from '../core/LogService.js';
import { fsAdapter } from './fs-adapter.js';
import { memoize } from './memoize.js';

/**
 * @typedef {import('../../../../types/index.js').RootFile} RootFile
 * @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
 * @typedef {import('../../../../types/index.js').SpecifierSource} SpecifierSource
 * @typedef {import('../../../../types/index.js').IdentifierName} IdentifierName
 * @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
 * @typedef {import('../../../../types/index.js').RootFile} RootFile
 * @typedef {import('@babel/traverse').NodePath} NodePath
 */
@@ -177,7 +179,7 @@ async function trackDownIdentifierFn(

  LogService.debug(`[trackDownIdentifier] ${resolvedSourcePath}`);
  const allowedJsModuleExtensions = ['.mjs', '.js'];
  if (!allowedJsModuleExtensions.includes(pathLib.extname(resolvedSourcePath))) {
  if (!allowedJsModuleExtensions.includes(path.extname(resolvedSourcePath))) {
    // We have an import assertion
    return /** @type { RootFile } */ {
      file: toRelativeSourcePath(resolvedSourcePath, rootPath),
@@ -1,18 +1,19 @@
import path from 'path';
import { swcTraverse } from '../../utils/swc-traverse.js';
import { isRelativeSourcePath, toRelativeSourcePath } from '../../utils/relative-source-path.js';
import { InputDataService } from '../../core/InputDataService.js';
import { resolveImportPath } from '../../utils/resolve-import-path.js';
import { AstService } from '../../core/AstService.js';
import { memoize } from '../../utils/memoize.js';
import { fsAdapter } from '../../utils/fs-adapter.js';

import { isRelativeSourcePath, toRelativeSourcePath } from './relative-source-path.js';
import { InputDataService } from '../core/InputDataService.js';
import { resolveImportPath } from './resolve-import-path.js';
import { AstService } from '../core/AstService.js';
import { swcTraverse } from './swc-traverse.js';
import { fsAdapter } from './fs-adapter.js';
import { memoize } from './memoize.js';

/**
 * @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
 * @typedef {import('../../../../types/index.js').SpecifierSource} SpecifierSource
 * @typedef {import('../../../../types/index.js').IdentifierName} IdentifierName
 * @typedef {import('../../../../types/index.js').RootFile} RootFile
 * @typedef {import('../../../../types/index.js').SwcPath} SwcPath
 * @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
 * @typedef {import('../../../types/index.js').SpecifierSource} SpecifierSource
 * @typedef {import('../../../types/index.js').IdentifierName} IdentifierName
 * @typedef {import('../../../types/index.js').RootFile} RootFile
 * @typedef {import('../../../types/index.js').SwcPath} SwcPath
 */

/**
@@ -1,19 +1,17 @@
/* eslint-disable no-unused-expressions */
/* eslint-disable import/no-extraneous-dependencies */
import sinon from 'sinon';
import commander from 'commander';
import { expect } from 'chai';
import { it } from 'mocha';
import commander from 'commander';
import sinon from 'sinon';

import { mockProject } from '../../test-helpers/mock-project-helpers.js';
import { InputDataService } from '../../src/program/core/InputDataService.js';
import { QueryService } from '../../src/program/core/QueryService.js';
import { _providenceModule } from '../../src/program/providence.js';
import { _cliHelpersModule } from '../../src/cli/cli-helpers.js';
import { cli } from '../../src/cli/cli.js';
import { _promptAnalyzerMenuModule } from '../../src/cli/prompt-analyzer-menu.js';
import { memoize } from '../../src/program/utils/memoize.js';
import { _extendDocsModule } from '../../src/cli/launch-providence-with-extend-docs.js';
import { dashboardServer } from '../../src/dashboard/server.js';
import { setupAnalyzerTest } from '../../test-helpers/setup-analyzer-test.js';

/**
@@ -56,12 +54,8 @@ describe('Providence CLI', () => {
  /** @type {sinon.SinonStub} */
  let providenceStub;
  /** @type {sinon.SinonStub} */
  let promptCfgStub;
  /** @type {sinon.SinonStub} */
  let iExtConfStub;
  /** @type {sinon.SinonStub} */
  let promptStub;
  /** @type {sinon.SinonStub} */
  let qConfStub;

  before(() => {
@@ -71,19 +65,9 @@ describe('Providence CLI', () => {
    /** @type {sinon.SinonStub} */
    providenceStub = sinon.stub(_providenceModule, 'providence').returns(Promise.resolve());

    /** @type {sinon.SinonStub} */
    promptCfgStub = sinon
      .stub(_promptAnalyzerMenuModule, 'promptAnalyzerConfigMenu')
      .returns(Promise.resolve({ analyzerConfig: { con: 'fig' } }));

    /** @type {sinon.SinonStub} */
    iExtConfStub = sinon.stub(InputDataService, 'getExternalConfig').returns(externalCfgMock);

    /** @type {sinon.SinonStub} */
    promptStub = sinon
      .stub(_promptAnalyzerMenuModule, 'promptAnalyzerMenu')
      .returns(Promise.resolve({ analyzerName: 'match-analyzer-mock' }));

    /** @type {sinon.SinonStub} */
    qConfStub = sinon.stub(QueryService, 'getQueryConfigFromAnalyzer').returns(
      // @ts-expect-error
@@ -100,9 +84,7 @@ describe('Providence CLI', () => {
    commander.setMaxListeners(10);

    providenceStub.restore();
    promptCfgStub.restore();
    iExtConfStub.restore();
    promptStub.restore();
    qConfStub.restore();
  });

@@ -126,9 +108,7 @@ describe('Providence CLI', () => {

  afterEach(() => {
    providenceStub.resetHistory();
    promptCfgStub.resetHistory();
    iExtConfStub.resetHistory();
    promptStub.resetHistory();
    qConfStub.resetHistory();
  });

@@ -355,16 +335,6 @@ describe('Providence CLI', () => {
    });

    describe('Options', () => {
      it('"-o --prompt-optional-config"', async () => {
        await runCli(`analyze -o`, rootDir);
        expect(promptStub.called).to.be.true;

        promptStub.resetHistory();

        await runCli(`analyze --prompt-optional-config`, rootDir);
        expect(promptStub.called).to.be.true;
      });

      it('"-c --config"', async () => {
        await runCli(`analyze match-analyzer-mock -c {"a":"2"}`, rootDir);
        expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
@@ -376,73 +346,6 @@ describe('Providence CLI', () => {
        expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
        expect(qConfStub.args[0][1]).to.deep.equal({ a: '2', metaConfig: {} });
      });

      it('calls "promptAnalyzerConfigMenu" without config given', async () => {
        await runCli(`analyze match-analyzer-mock`, rootDir);
        expect(promptCfgStub.called).to.be.true;
      });
    });
  });

  describe.skip('Query', () => {});
  describe.skip('Search', () => {});

  describe('Manage', () => {});

  describe('Dashboard', () => {
    /** @type {sinon.SinonStub} */
    const startStub = sinon.stub(dashboardServer, 'start');
    it('spawns a dashboard', async () => {
      runCli(`dashboard`, rootDir);
      expect(startStub.called).to.be.true;
    });
  });

  describe('Extend docs', () => {
    /** @type {sinon.SinonStub} */
    let extendDocsStub;

    before(() => {
      extendDocsStub = sinon
        .stub(_extendDocsModule, 'launchProvidenceWithExtendDocs')
        .returns(Promise.resolve());
    });

    after(() => {
      extendDocsStub.restore();
    });

    afterEach(() => {
      extendDocsStub.resetHistory();
    });

    it('allows configuration', async () => {
      await runCli(
        [
          'extend-docs',
          '-t /xyz',
          '-r /xyz/x',
          '--prefix-from pfrom --prefix-to pto',
          '--output-folder /outp',
          '--extensions bla',
          '--allowlist al --allowlist-reference alr',
        ].join(' '),
        rootDir,
      );
      expect(extendDocsStub.called).to.be.true;
      expect(extendDocsStub.args[0][0]).to.deep.equal({
        referenceProjectPaths: ['/xyz/x'],
        prefixCfg: {
          from: 'pfrom',
          to: 'pto',
        },
        outputFolder: '/outp',
        extensions: ['.bla'],
        allowlist: ['al'],
        allowlistReference: ['alr'],
        cwd: '/mocked/path/example-project',
        skipCheckMatchCompatibility: true,
      });
    });
  });
});
@@ -1,13 +1,14 @@
import { expect } from 'chai';
import { it } from 'mocha';

import { setupAnalyzerTest } from '../../../../test-helpers/setup-analyzer-test.js';
import { mockProject } from '../../../../test-helpers/mock-project-helpers.js';
import { swcTraverse } from '../../../../src/program/utils/swc-traverse.js';
import { AstService } from '../../../../src/program/core/AstService.js';
import {
  trackDownIdentifier,
  trackDownIdentifierFromScope,
} from '../../../../src/program/analyzers/helpers/track-down-identifier.js';
import { AstService } from '../../../../src/program/core/AstService.js';
import { mockProject } from '../../../../test-helpers/mock-project-helpers.js';
import { setupAnalyzerTest } from '../../../../test-helpers/setup-analyzer-test.js';
} from '../../../../src/program/utils/track-down-identifier.js';

/**
 * @typedef {import('@babel/traverse').NodePath} NodePath
@@ -1,156 +1,18 @@
import { expect } from 'chai';
import { it } from 'mocha';
import { QueryService } from '../../../src/program/core/QueryService.js';

import { DummyAnalyzer } from '../../../test-helpers/templates/DummyAnalyzer.js';
import FindImportsAnalyzer from '../../../src/program/analyzers/find-imports.js';
import { QueryService } from '../../../src/program/core/QueryService.js';

/**
 * @typedef {import('../../../types/index.js').Analyzer} Analyzer
 * @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
 * @typedef {import('../../../types/index.js').Analyzer} Analyzer
 */

describe('QueryService', () => {
  describe('Methods', () => {
    describe('Retrieving QueryConfig', () => {
      it('"getQueryConfigFromRegexSearchString"', async () => {
        const result = QueryService.getQueryConfigFromRegexSearchString('x');
        expect(result).to.deep.equal({ type: 'search', regexString: 'x' });

        expect(() => {
          // @ts-expect-error
          QueryService.getQueryConfigFromRegexSearchString();
        }).to.throw('[QueryService.getQueryConfigFromRegexSearchString]: provide a string');
      });

      describe('"getQueryConfigFromFeatureString"', () => {
        it('with tag, attr-key and attr-value', async () => {
          const result = QueryService.getQueryConfigFromFeatureString('tg-icon[size=xs]');
          expect(result).to.deep.equal({
            type: 'feature',
            feature: {
              name: 'size',
              value: 'xs',
              tag: 'tg-icon',
              isAttribute: true,
              usesValueContains: false,
              usesValuePartialMatch: false,
              usesTagPartialMatch: false,
            },
          });
        });

        it('with only tag', async () => {
          const result = QueryService.getQueryConfigFromFeatureString('tg-icon');
          expect(result).to.deep.equal({
            type: 'feature',
            feature: {
              tag: 'tg-icon',
              usesTagPartialMatch: false,
            },
          });
        });

        it('with only attr-key', async () => {
          const result = QueryService.getQueryConfigFromFeatureString('[attr]');
          expect(result).to.deep.equal({
            type: 'feature',
            feature: {
              name: 'attr',
              value: undefined,
              tag: '',
              isAttribute: true,
              usesValueContains: false,
              usesValuePartialMatch: false,
              usesTagPartialMatch: false,
            },
          });
        });

        it('with only attr-key and attr-value', async () => {
          const result = QueryService.getQueryConfigFromFeatureString('[attr=x]');
          expect(result).to.deep.equal({
            type: 'feature',
            feature: {
              name: 'attr',
              value: 'x',
              tag: '',
              isAttribute: true,
              usesValueContains: false,
              usesValuePartialMatch: false,
              usesTagPartialMatch: false,
            },
          });
        });

        describe('With partial value', async () => {
          it('with tag, attr-key and attr-value', async () => {
            const result = QueryService.getQueryConfigFromFeatureString('tg-icon*[size*=xs*]');
            expect(result).to.deep.equal({
              type: 'feature',
              feature: {
                name: 'size',
                value: 'xs',
                tag: 'tg-icon',
                isAttribute: true,
                usesValueContains: true,
                usesValuePartialMatch: true,
                usesTagPartialMatch: true,
              },
            });
          });

          it('with only tag', async () => {
            const result = QueryService.getQueryConfigFromFeatureString('tg-icon*');
            expect(result).to.deep.equal({
              type: 'feature',
              feature: {
                tag: 'tg-icon',
                usesTagPartialMatch: true,
              },
            });
          });

          it('with only attr-key', async () => {
            const result = QueryService.getQueryConfigFromFeatureString('[attr*]');
            expect(result).to.deep.equal({
              type: 'feature',
              feature: {
                name: 'attr',
                value: undefined,
                tag: '',
                isAttribute: true,
                usesValueContains: true,
                usesValuePartialMatch: false,
                usesTagPartialMatch: false,
              },
            });
          });

          it('with only attr-key and attr-value', async () => {
            const result = QueryService.getQueryConfigFromFeatureString('[attr*=x*]');
            expect(result).to.deep.equal({
              type: 'feature',
              feature: {
                name: 'attr',
                value: 'x',
                tag: '',
                isAttribute: true,
                usesValueContains: true,
                usesValuePartialMatch: true,
                usesTagPartialMatch: false,
              },
            });
          });
        });

        it('throws when no string provided', async () => {
          expect(() => {
            // @ts-ignore
            QueryService.getQueryConfigFromFeatureString();
          }).to.throw('[QueryService.getQueryConfigFromFeatureString]: provide a string');
        });
      });

      describe('"getQueryConfigFromAnalyzer"', () => {
        const myAnalyzerCfg = { targetProjectPath: /** @type {PathFromSystemRoot} */ ('/my/path') };
        it('accepts a constructor as first argument', async () => {
@@ -702,8 +702,10 @@ describe('Ajax', () => {

      const errors = [
        "Failed to execute 'fetch' on 'Window': The user aborted a request.", // chromium
        'signal is aborted without reason', // newer chromium (?)
        'The operation was aborted. ', // firefox
        'Request signal is aborted', // webkit
        'The operation was aborted.', // newer webkit
      ];

      expect(errors.includes(/** @type {Error} */ (err).message)).to.be.true;
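The added strings cover the abort messages thrown by newer engine versions; roughly, the scenario exercised is aborting an in-flight fetch. A sketch assuming an async test body (not the actual test code, and the URL is illustrative):

const controller = new AbortController();
const request = fetch('/some-endpoint', { signal: controller.signal });
controller.abort();
try {
  await request;
} catch (err) {
  // err.message differs per engine, e.g. 'signal is aborted without reason' (newer Chromium)
  // or 'The operation was aborted.' (newer WebKit)
}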
@@ -9,7 +9,9 @@ function checkChrome(flavor = 'google-chrome') {
    return flavor === 'google-chrome';
  }

  const isChromium = /** @type {window & { chrome?: boolean}} */ (globalThis).chrome;
  const isChromium =
    /** @type {window & { chrome?: boolean}} */ (globalThis).chrome ||
    globalThis.navigator.userAgent.indexOf('Chrome') > -1;

  if (flavor === 'chromium') {
    return isChromium;
@@ -48,11 +50,12 @@ export const browserDetection = {
  isFirefox: globalThis.navigator?.userAgent.toLowerCase().indexOf('firefox') > -1,
  isMac: globalThis.navigator?.appVersion?.indexOf('Mac') !== -1,
  isIOS: /iPhone|iPad|iPod/i.test(globalThis.navigator?.userAgent),
  isMacSafari:
  isMacSafari: Boolean(
    globalThis.navigator?.vendor &&
      globalThis.navigator?.vendor.indexOf('Apple') > -1 &&
      globalThis.navigator?.userAgent &&
      globalThis.navigator?.userAgent.indexOf('CriOS') === -1 &&
      globalThis.navigator?.userAgent.indexOf('FxiOS') === -1 &&
      globalThis.navigator?.appVersion.indexOf('Mac') !== -1,
  ),
};
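Wrapping the chain in Boolean() makes isMacSafari a strict true/false; without it, the && chain short-circuits to '' or undefined when navigator.vendor is absent. A condensed sketch of the difference (not the actual property definitions):

// loose: may evaluate to '' or undefined in non-browser or vendor-less environments
const loose = globalThis.navigator?.vendor && globalThis.navigator?.vendor.indexOf('Apple') > -1;
// strict: always a boolean
const strict = Boolean(globalThis.navigator?.vendor && globalThis.navigator?.vendor.indexOf('Apple') > -1);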
@@ -16,6 +16,7 @@ import {
import { sendKeys } from '@web/test-runner-commands';
import sinon from 'sinon';
import { getListboxMembers } from '../../../exports/listbox-test-helpers.js';
import { browserDetection } from '../../core/src/browserDetection.js';

/**
 * @typedef {import('../src/LionListbox.js').LionListbox} LionListbox
@@ -380,7 +381,12 @@ export function runListboxMixinSuite(customConfig = {}) {
      await aTimeout(1000);

      // top should be offset 2x40px (sticky header elems) instead of 0px
      if (browserDetection.isChrome || browserDetection.isChromium) {
        // TODO: find out why this is different in recent Chromium
        expect(el.scrollTop).to.equal(160);
      } else {
        expect(el.scrollTop).to.equal(116);
      }
    });
  });

@@ -21,7 +21,6 @@ export function normalizeIntlDate(str, locale = '', { weekday, year, month, day
  }

  const result = dateString.join('');

  // Normalize webkit date formatting without year
  if (!year && weekday === 'long' && month === 'long' && day === '2-digit') {
    const CHINESE_LOCALES = [
@@ -43,7 +42,7 @@ export function normalizeIntlDate(str, locale = '', { weekday, year, month, day
      return result.replace(' ', '');
    }

    if (result.indexOf(',') === -1 && locale === 'en-GB') {
    if ((result.indexOf(',') === -1 && locale === 'en-GB') || locale === 'en-AU') {
      // Saturday 12 October -> Saturday, 12 October
      const match = result.match(/^(\w*) (\d*) (\w*)$/);
      if (match !== null) {
@@ -63,6 +62,13 @@ export function normalizeIntlDate(str, locale = '', { weekday, year, month, day
        return `${match[1]}, ${match[3]} ${match[2]}`;
      }
    }
  } else if (weekday === 'long' && month === 'long' && day === '2-digit') {
    if (result.indexOf(',') === -1 && locale.startsWith('en-')) {
      // Saturday 12 October 2023 -> Saturday, 12 October 2023
      const [, _weekDayName, _monthDayNumber, _monthName, _year] =
        result.match(/^(\w*) (\d*) (\w*) (\d*)$/) || [];
      return `${_weekDayName}, ${_monthDayNumber} ${_monthName} ${_year}`;
    }
  }

  return result;
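For clarity, the new branch only rewrites long en-* dates that Intl renders without a comma. A small standalone illustration of the match-and-rebuild step (the input string is an example, not test data from this commit):

const result = 'Saturday 12 October 2023';
const [, _weekDayName, _monthDayNumber, _monthName, _year] =
  result.match(/^(\w*) (\d*) (\w*) (\d*)$/) || [];
// yields 'Saturday, 12 October 2023'
const normalized = `${_weekDayName}, ${_monthDayNumber} ${_monthName} ${_year}`;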
@@ -187,7 +187,7 @@ describe('formatDate', () => {
      'ru-RU': 'суббота, 12 октября',
      'sk-SK': 'sobota 12. októbra',
      'tr-TR': '12 Ekim Cumartesi',
      'uk-UA': 'субота, 12 жовтня',
      'uk-UA': 'суботу, 12 жовтня',
      'zh-CN': '10月12日星期六',
      'zh-Hans': '10月12日星期六',
      'zh-Hans-CN': '10月12日星期六',
@@ -1,105 +0,0 @@
import { exec } from 'child_process';
import fs from 'fs';
// eslint-disable-next-line import/no-extraneous-dependencies
import lockfile from '@yarnpkg/lockfile';

/**
 * === Generic Helpers ===
 */

const execPromise = cmd =>
  new Promise(resolve => exec(cmd, { maxBuffer: 200000000 }, (err, stdout) => resolve(stdout)));

const arrDiff = (arrA, arrB, eq = (a, b) => a === b) =>
  arrA.filter(a => arrB.every(b => !eq(a, b)));

/**
 * === yarn-lock-diff ===
 */

function groupByPackageName(obj) {
  const packages = [];
  Object.keys(obj.object).forEach(key => {
    const names = key.split('@');
    let name = names[0];
    if (name === '') {
      // handle scoped packages
      name = `@${names[1]}`;
    }
    const { version } = obj.object[key];
    const found = packages.find(p => p.name === name);
    if (found) {
      found.versions.push(version);
    } else {
      packages.push({
        name,
        versions: [version],
      });
    }
  });
  return packages;
}

function yarnLockDiff(prevLockContents, curLockContents) {
  const previous = lockfile.parse(prevLockContents);
  const current = lockfile.parse(curLockContents);

  const previousPackages = groupByPackageName(previous);
  const currentPackages = groupByPackageName(current);

  const removedResult = [];
  const changedResult = [];

  previousPackages.forEach(prevPkg => {
    const foundCurPkg = currentPackages.find(curPkg => curPkg.name === prevPkg.name);
    if (!foundCurPkg) {
      removedResult.push(prevPkg);
    } else {
      const diff = arrDiff(foundCurPkg.versions, prevPkg.versions);
      if (diff.length) {
        changedResult.push({
          name: prevPkg.name,
          previousVersions: Array.from(new Set(prevPkg.versions)),
          currentVersions: Array.from(new Set(foundCurPkg.versions)),
          diff,
        });
      }
    }
  });
  return { removed: removedResult, changed: changedResult };
}

/**
 * === cli ===
 */

function getArgs() {
  const idx = process.argv.findIndex(a => a === '--versions-back');
  let versionsBack;
  if (idx > 0) {
    versionsBack = Number(process.argv[idx + 1]);
    if (Number.isNaN(versionsBack)) {
      throw new Error('Please provide a number for --versions-back');
    }
  } else {
    versionsBack = 1;
  }
  return { versionsBack };
}

async function main() {
  const { versionsBack } = getArgs();
  const changeHistory = await execPromise(`git log yarn.lock`);
  const commits = changeHistory
    .match(/commit (.*)\n/g)
    .map(c => c.replace('commit ', '').replace('\n', ''));

  // For now, we pick latest commit. When needed in the future, allow '--age 2-months' or smth
  const prevLockContents = await execPromise(`git show ${commits[versionsBack - 1]}:yarn.lock`);
  const curLockContents = await fs.promises.readFile('yarn.lock', 'utf8');

  // eslint-disable-next-line no-console
  console.log(JSON.stringify(yarnLockDiff(prevLockContents, curLockContents), null, 2));
}

main();