feat(providence-analytics): add option skipCheckMatchCompatibility
commit ca210dae73
parent 7b4a0c4aef
8 changed files with 119 additions and 11 deletions
.changeset/tasty-rabbits-fry.md (new file, 5 additions)

@@ -0,0 +1,5 @@
+---
+"providence-analytics": feat
+---
+
+add option skipCheckMatchCompatibility and enable for monorepos in extend-docs
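For orientation before the hunks below: this is roughly how the new option reaches the programmatic API. A minimal sketch only, assuming the `providenceModule.providence(queryConfig, config)` call shape shown in the extend-docs hunks further down; the require paths are the package-internal ones used in this diff, and the target/reference paths and prefix values are placeholders.

const providenceModule = require('../program/providence.js');
const { QueryService } = require('../program/services/QueryService.js');

async function runMatchPaths() {
  return providenceModule.providence(
    QueryService.getQueryConfigFromAnalyzer('match-paths', { prefix: { from: 'their', to: 'my' } }),
    {
      report: false,
      targetProjectPaths: ['/path/to/monorepo-root'], // placeholder
      referenceProjectPaths: ['/path/to/their-components'], // placeholder
      // New option: opt out of the target/reference compatibility check,
      // e.g. when the target is a monorepo root.
      skipCheckMatchCompatibility: true,
    },
  );
}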
@@ -2,7 +2,7 @@
 const fs = require('fs');
 const pathLib = require('path');
 const { performance } = require('perf_hooks');
-const { providence } = require('../program/providence.js');
+const providenceModule = require('../program/providence.js');
 const { QueryService } = require('../program/services/QueryService.js');
 const { InputDataService } = require('../program/services/InputDataService.js');
 const { LogService } = require('../program/services/LogService.js');
@@ -16,7 +16,9 @@ async function getExtendDocsResults({
   allowlistReference,
   cwd,
 }) {
-  const results = await providence(
+  const monoPkgs = InputDataService.getMonoRepoPackages(cwd);
+
+  const results = await providenceModule.providence(
     QueryService.getQueryConfigFromAnalyzer('match-paths', { prefix: prefixCfg }),
     {
       gatherFilesConfig: {
@@ -31,6 +33,9 @@ async function getExtendDocsResults({
       report: false,
       targetProjectPaths: [pathLib.resolve(cwd)],
       referenceProjectPaths,
+      // For mono repos, a match between root package.json and ref project will not exist.
+      // Disable this check, so it won't be a blocker for extending docs.
+      skipCheckMatchCompatibility: Boolean(monoPkgs),
     },
   );
 
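The flag is derived from the monorepo detection added above. A small sketch of the intended mapping, assuming `InputDataService.getMonoRepoPackages(cwd)` returns a truthy value (the workspace packages) for a monorepo root and a falsy one otherwise, which is what `Boolean(monoPkgs)` and the test at the end of this commit rely on:

const { InputDataService } = require('../program/services/InputDataService.js');

function shouldSkipCompatibilityCheck(cwd) {
  // Truthy (list of workspace packages) for a monorepo root, falsy otherwise (assumption).
  const monoPkgs = InputDataService.getMonoRepoPackages(cwd);
  return Boolean(monoPkgs); // true => disable the target/reference compatibility check
}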
@@ -57,20 +62,18 @@ async function getExtendDocsResults({
     return result;
   }
 
-  const pkgs = InputDataService.getMonoRepoPackages(cwd);
-
-  if (pkgs) {
+  if (monoPkgs) {
     queryOutputs.forEach(resultObj => {
       if (resultObj.variable) {
         resultObj.variable.paths.forEach(pathObj => {
           // eslint-disable-next-line no-param-reassign
-          pathObj.to = replaceToMonoRepoPath(pathObj.to, pkgs);
+          pathObj.to = replaceToMonoRepoPath(pathObj.to, monoPkgs);
         });
       }
       if (resultObj.tag) {
         resultObj.tag.paths.forEach(pathObj => {
           // eslint-disable-next-line no-param-reassign
-          pathObj.to = replaceToMonoRepoPath(pathObj.to, pkgs);
+          pathObj.to = replaceToMonoRepoPath(pathObj.to, monoPkgs);
         });
       }
     });

@@ -205,7 +205,7 @@ class Analyzer {
 
     // If we have a provided result cfg.referenceProjectResult, we assume the providing
     // party provides compatible results for now...
-    if (cfg.referenceProjectPath) {
+    if (cfg.referenceProjectPath && !cfg.skipCheckMatchCompatibility) {
       const { compatible, reason } = checkForMatchCompatibility(
         cfg.referenceProjectPath,
         cfg.targetProjectPath,
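The guard added above is the actual opt-out point: the compatibility check still runs by default, and only when a reference project is configured. An illustrative predicate that mirrors the condition (for explanation only; `checkForMatchCompatibility` itself is unchanged):

// Mirrors the condition in the Analyzer hunk above.
function needsCompatibilityCheck(cfg) {
  return Boolean(cfg.referenceProjectPath) && !cfg.skipCheckMatchCompatibility;
}

// needsCompatibilityCheck({ referenceProjectPath: '/ref' })                                    // true
// needsCompatibilityCheck({ referenceProjectPath: '/ref', skipCheckMatchCompatibility: true }) // false
// needsCompatibilityCheck({})                                                                  // false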
@@ -249,6 +249,7 @@ class MatchImportsAnalyzer extends Analyzer {
       referenceProjectResult = await findExportsAnalyzer.execute({
         metaConfig: cfg.metaConfig,
         targetProjectPath: cfg.referenceProjectPath,
+        skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
       });
     }
 
@@ -258,6 +259,7 @@ class MatchImportsAnalyzer extends Analyzer {
       targetProjectResult = await findImportsAnalyzer.execute({
         metaConfig: cfg.metaConfig,
         targetProjectPath: cfg.targetProjectPath,
+        skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
       });
     }
 
@@ -216,8 +216,7 @@ function getTagPaths(
   let targetResult;
   targetFindCustomelementsResult.queryOutput.some(({ file, result }) => {
     const targetPathMatch = result.find(entry => {
-      const sameRoot =
-        entry.rootFile.file === targetMatchedFile || entry.rootFile.file === '[current]';
+      const sameRoot = entry.rootFile.file === targetMatchedFile;
       const sameIdentifier = entry.rootFile.specifier === toClass;
       return sameRoot && sameIdentifier;
     });
@@ -429,6 +428,7 @@ class MatchPathsAnalyzer extends Analyzer {
       referenceProjectPath: cfg.referenceProjectPath,
       gatherFilesConfig: cfg.gatherFilesConfig,
       gatherFilesConfigReference: cfg.gatherFilesConfigReference,
+      skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
     });
 
     // [A2]
@@ -437,6 +437,7 @@ class MatchPathsAnalyzer extends Analyzer {
     const targetExportsResult = await targetFindExportsAnalyzer.execute({
       targetProjectPath: cfg.targetProjectPath,
       gatherFilesConfig: cfg.gatherFilesConfig,
+      skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
     });
 
     // [A3]
@@ -445,6 +446,7 @@ class MatchPathsAnalyzer extends Analyzer {
     const refFindExportsResult = await refFindExportsAnalyzer.execute({
       targetProjectPath: cfg.referenceProjectPath,
       gatherFilesConfig: cfg.gatherFilesConfigReference,
+      skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
     });
 
     /**
@@ -472,6 +474,7 @@ class MatchPathsAnalyzer extends Analyzer {
     const targetFindCustomelementsResult = await targetFindCustomelementsAnalyzer.execute({
       targetProjectPath: cfg.targetProjectPath,
       gatherFilesConfig: cfg.gatherFilesConfig,
+      skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
     });
 
     // [B2]
@@ -480,6 +483,7 @@ class MatchPathsAnalyzer extends Analyzer {
     const refFindCustomelementsResult = await refFindCustomelementsAnalyzer.execute({
       targetProjectPath: cfg.referenceProjectPath,
       gatherFilesConfig: cfg.gatherFilesConfigReference,
+      skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
     });
     // refFindExportsAnalyzer was already created in A3
 
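In the MatchImportsAnalyzer and MatchPathsAnalyzer hunks above, the flag is not acted on directly; it is only forwarded into every nested find-* analyzer `execute()` call, so a single top-level opt-out covers the whole analyzer chain. A hypothetical direct call, assuming `execute()` accepts the same cfg keys shown in the hunks (the require path is illustrative; the diff only shows the class name):

const MatchPathsAnalyzer = require('./match-paths.js'); // hypothetical path

async function matchPathsWithoutCompatCheck() {
  const analyzer = new MatchPathsAnalyzer();
  return analyzer.execute({
    targetProjectPath: '/my/monorepo-root', // placeholder
    referenceProjectPath: '/their/components', // placeholder
    // Forwarded to each nested FindExports/FindCustomelements execute() call above.
    skipCheckMatchCompatibility: true,
  });
}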
@@ -297,17 +297,20 @@ class MatchSubclassesAnalyzer extends Analyzer {
     const exportsAnalyzerResult = await findExportsAnalyzer.execute({
       targetProjectPath: cfg.referenceProjectPath,
       gatherFilesConfig: cfg.gatherFilesConfigReference,
+      skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
     });
     const findClassesAnalyzer = new FindClassesAnalyzer();
     /** @type {FindClassesAnalyzerResult} */
     const targetClassesAnalyzerResult = await findClassesAnalyzer.execute({
       targetProjectPath: cfg.targetProjectPath,
+      skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
     });
     const findRefClassesAnalyzer = new FindClassesAnalyzer();
     /** @type {FindClassesAnalyzerResult} */
     const refClassesAnalyzerResult = await findRefClassesAnalyzer.execute({
       targetProjectPath: cfg.referenceProjectPath,
       gatherFilesConfig: cfg.gatherFilesConfigReference,
+      skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
     });
 
     const queryOutput = matchSubclassesPostprocess(

@@ -63,6 +63,7 @@ async function handleAnalyzerForProjectCombo(slicedQConfig, cfg) {
   const queryResult = await QueryService.astSearch(slicedQConfig, {
     gatherFilesConfig: cfg.gatherFilesConfig,
     gatherFilesConfigReference: cfg.gatherFilesConfigReference,
+    skipCheckMatchCompatibility: cfg.skipCheckMatchCompatibility,
     ...slicedQConfig.analyzerConfig,
   });
   if (queryResult) {
@@ -179,6 +180,7 @@ async function providenceMain(queryConfig, customConfig) {
       report: true,
       debugEnabled: false,
       writeLogFile: false,
+      skipCheckMatchCompatibility: false,
     },
     customConfig,
   );
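providenceMain now declares an explicit default, so the flag is always defined downstream. A sketch of the effective behaviour, assuming the defaults shown above are shallow-merged with the caller-supplied customConfig and caller values win (the actual merge helper is not visible in this hunk):

const defaults = {
  report: true,
  debugEnabled: false,
  writeLogFile: false,
  skipCheckMatchCompatibility: false, // new default: the check stays enabled
};

const customConfig = { skipCheckMatchCompatibility: true };
const cfg = { ...defaults, ...customConfig };
// cfg.skipCheckMatchCompatibility === true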
@@ -515,7 +515,6 @@ describe('CLI helpers', () => {
     afterEach(() => {
       restoreMockedProjects();
     });
-
     it('rewrites monorepo package paths when analysis is run from monorepo root', async () => {
       const theirProjectFiles = {
         './package.json': JSON.stringify({
@@ -641,5 +640,95 @@ describe('CLI helpers', () => {
         },
       ]);
     });
+
+    it('does not check for match compatibility (target and reference) in monorepo targets', async () => {
+      // ===== REFERENCE AND TARGET PROJECTS =====
+
+      const theirProjectFiles = {
+        './package.json': JSON.stringify({
+          name: 'their-components',
+          version: '1.0.0',
+        }),
+        './src/TheirButton.js': `export class TheirButton extends HTMLElement {}`,
+      };
+
+      // This will be detected as being a monorepo
+      const monoProjectFiles = {
+        './package.json': JSON.stringify({
+          name: '@mono/root',
+          workspaces: ['packages/*'],
+          dependencies: {
+            'their-components': '1.0.0',
+          },
+        }),
+        // Package: @mono/button
+        './packages/button/package.json': JSON.stringify({
+          name: '@mono/button',
+        }),
+      };
+
+      // This will be detected as NOT being a monorepo
+      const nonMonoProjectFiles = {
+        './package.json': JSON.stringify({
+          name: 'non-mono',
+          dependencies: {
+            'their-components': '1.0.0',
+          },
+        }),
+      };
+
+      const theirProject = {
+        path: '/their-components',
+        name: 'their-components',
+        files: Object.entries(theirProjectFiles).map(([file, code]) => ({ file, code })),
+      };
+
+      const monoProject = {
+        path: '/mono-components',
+        name: 'mono-components',
+        files: Object.entries(monoProjectFiles).map(([file, code]) => ({ file, code })),
+      };
+
+      const nonMonoProject = {
+        path: '/non-mono-components',
+        name: 'non-mono-components',
+        files: Object.entries(nonMonoProjectFiles).map(([file, code]) => ({ file, code })),
+      };
+
+      // ===== TESTS =====
+
+      const providenceStub = sinon.stub(providenceModule, 'providence').returns(
+        new Promise(resolve => {
+          resolve([]);
+        }),
+      );
+
+      // ===== mono =====
+
+      mockTargetAndReferenceProject(theirProject, monoProject);
+      await getExtendDocsResults({
+        referenceProjectPaths: ['/their-components'],
+        prefixCfg: { from: 'their', to: 'my' },
+        extensions: ['.js'],
+        cwd: '/mono-components',
+      });
+
+      expect(providenceStub.args[0][1].skipCheckMatchCompatibility).to.equal(true);
+      providenceStub.resetHistory();
+      restoreMockedProjects();
+
+      // ===== non mono =====
+
+      mockTargetAndReferenceProject(theirProject, nonMonoProject);
+      await getExtendDocsResults({
+        referenceProjectPaths: ['/their-components'],
+        prefixCfg: { from: 'their', to: 'my' },
+        extensions: ['.js'],
+        cwd: '/non-mono-components',
+      });
+      expect(providenceStub.args[0][1].skipCheckMatchCompatibility).to.equal(false);
+
+      providenceStub.restore();
+    });
   });
 });
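The assertions hinge on sinon's call recording: `providenceStub.args[0][1]` is the config object passed as the second argument of the first `providence()` call, so the test can verify the flag that `getExtendDocsResults` computed (true for the monorepo cwd, false otherwise) without running a real analysis.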