Merge pull request #2278 from ing-bank/feat/providence-updates

Feat/providence updates
This commit is contained in:
Thijs Louisse 2024-05-14 16:01:34 +02:00 committed by GitHub
commit e29e14b246
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
98 changed files with 7594 additions and 21533 deletions

View file

@ -0,0 +1,7 @@
---
'providence-analytics': minor
---
- Added `optimised-glob` util: a drop-in replacement for globby. It is faster, smaller, and has zero dependencies.
- Added `fs-adapter` util, which allows providing a virtual fs instead of the default one.
- BREAKING: Cleanup of code and dependencies, removing deprecated CLI commands.
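Further down this diff, the CLI helpers use these two utils together. As a rough sketch of how they combine (the import paths and option names are copied from that usage; the glob pattern and logging are illustrative):

```js
// Sketch only: glob through the new utils instead of globby.
// The import paths mirror the source layout shown later in this diff.
import { optimisedGlob } from './src/program/utils/optimised-glob.js';
import { fsAdapter } from './src/program/utils/fs-adapter.js';

// Familiar globby-style options: cwd, absolute, onlyFiles.
// The `fs` option is where a virtual file system can be swapped in via fs-adapter.
const files = await optimisedGlob('packages/**/*.js', {
  cwd: process.cwd(),
  absolute: true,
  onlyFiles: true,
  fs: fsAdapter.fs,
});
console.log(`${files.length} files found`);
```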

73
.github/workflows/verify-pr.yml vendored Normal file
View file

@ -0,0 +1,73 @@
name: Verify changes
on: pull_request
jobs:
verify:
name: Verify changes
runs-on: ubuntu-latest
steps:
- uses: google/wireit@setup-github-actions-caching/v1
- uses: actions/checkout@v4
- name: Sanity check
run: node ./scripts/lock-scan.mjs
- name: Setup Node 18.x
uses: actions/setup-node@v4
with:
node-version: 18.x
cache: npm
- name: Install Dependencies
run: npm install --ci
- name: Lint
run: npm run lint
# - name: Bundlesize
# run: npm run bundlesize
browser-tests:
name: Browser tests
runs-on: ubuntu-latest
steps:
- uses: google/wireit@setup-github-actions-caching/v1
- uses: actions/checkout@v4
- name: Setup Node 18.x
uses: actions/setup-node@v4
with:
node-version: 18.x
cache: npm
- name: Install Dependencies
run: npm install --ci
- uses: microsoft/playwright-github-action@v1
- name: Test
run: npm run test:browser
node-tests:
name: Node tests
runs-on: ${{ matrix.os }}
strategy:
matrix:
node-version: [16.x, 18.x]
os: [ubuntu-latest, windows-latest]
steps:
- uses: google/wireit@setup-github-actions-caching/v1
- uses: actions/checkout@v4
- name: Setup Node ${{ matrix.node-version }}
uses: actions/setup-node@v4
with:
node-version: ${{ matrix.node-version }}
cache: npm
- name: Install Dependencies
run: npm install --ci --force
- name: Test
run: npm run test:node

View file

@ -1,93 +0,0 @@
name: Verify changes
on: pull_request
jobs:
verify:
name: Verify changes
runs-on: ubuntu-latest
steps:
- uses: google/wireit@setup-github-actions-caching/v1
- uses: actions/checkout@v2
- name: Sanity check
run: node ./scripts/lock-scan.js
- name: Setup Node 16.x
uses: actions/setup-node@v1
with:
node-version: 16.x
- name: Install Dependencies
run: npm install --ci
- name: Lint
run: npm run lint
- name: Bundlesize
run: npm run bundlesize
browser-tests:
name: Browser tests
runs-on: ubuntu-latest
steps:
- uses: google/wireit@setup-github-actions-caching/v1
- uses: actions/checkout@v2
- name: Setup Node 16.x
uses: actions/setup-node@v1
with:
node-version: 16.x
- name: Install Dependencies
run: npm install --ci
- uses: microsoft/playwright-github-action@v1
- name: Test
run: npm run test:browser
node-tests:
name: Node tests
runs-on: ${{ matrix.os }}
strategy:
matrix:
node-version: [16.x]
os: [ubuntu-latest]
steps:
- uses: google/wireit@setup-github-actions-caching/v1
- uses: actions/checkout@v2
- name: Setup Node ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Install Dependencies
run: npm install --ci
- name: Test
run: npm run test:node
# Note this is a duplicate of the matrix (so we have 2 1x1 matrices). Up for improvement...
node-tests-windows:
name: Node tests
runs-on: ${{ matrix.os }}
strategy:
matrix:
node-version: [16.x]
os: [windows-latest]
steps:
- uses: google/wireit@setup-github-actions-caching/v1
- uses: actions/checkout@v2
- name: Setup Node ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Install Dependencies
run: npm install --ci
- name: Test
run: npm run test:node

View file

@ -4,7 +4,6 @@ module.exports = {
'*.md': [
'prettier --write',
"markdownlint --ignore '{.github/**/*.md,.changeset/*.md,**/CHANGELOG.md,packages/ui/_legacy-changelogs/*.md}'",
'git add',
],
'package-lock.json': ['node ./scripts/lock-scan.js'],
'*package.json': absolutePaths => {

View file

@ -59,6 +59,7 @@ class MdRipple extends LitElement {
}
disconnectedCallback() {
super.disconnectedCallback();
this.removeEventListener('mousedown', this.__onRipple);
}

View file

@ -612,7 +612,7 @@ However, the validation system also supports three non-blocking validation feedback types:
- **error**: blocking the field from being submitted to the server. For example:
"Please enter an amount higher than 1000,00 euro."
- **warning**: something looks wrong, but it is not blocking. For example an optional email input:
"Please enter a valid e-mail address in the format "name@example.com"."
"Please enter a valid e-mail address in the format `name@example.com`."
- **info**: shows extra information. For example a message of a scheduled payment planner:
"Ends on 15/05/2020 after 5 payments."
- **success**: will only be triggered if there was a Message from one of the above validation types and is now correct. For example: "Ok, correct."
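A rough sketch of wiring up such a non-blocking message, assuming the `Validator` base class exported from `@lion/ui/form-core.js` and its `(param, config)` constructor (the validator name and regex are illustrative):

```js
// Sketch only: a validator that produces a non-blocking "warning" message.
import { Validator } from '@lion/ui/form-core.js';

class IsExampleEmail extends Validator {
  static get validatorName() {
    return 'IsExampleEmail';
  }

  /** Returns true when feedback should be shown (the value looks wrong). */
  execute(modelValue) {
    return !/^[^@\s]+@[^@\s]+\.[^@\s]+$/.test(modelValue);
  }
}

// type: 'warning' keeps the field submittable; the default 'error' would block submission.
const emailValidators = [new IsExampleEmail(null, { type: 'warning' })];
// <lion-input .validators="${emailValidators}" label="E-mail (optional)"></lion-input>
```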

View file

@ -99,15 +99,6 @@ export const placementGlobal = () => {
## popperConfig
/** Viewport configuration. Will be used when placementMode is 'global' */
viewportConfig?: ViewportConfig;
/** Hides other overlays when multiple are opened (currently exclusive to globalOverlayController) */
isBlocking?: boolean;
/** Will align contentNode with referenceNode (invokerNode by default) for local overlays. Usually needed for dropdowns. 'max' will prevent contentNode from exceeding width of referenceNode, 'min' guarantees that contentNode will be at least as wide as referenceNode. 'full' will make sure that the invoker width always is the same. */
inheritsReferenceWidth?: 'max' | 'full' | 'min' | 'none';
/** Change the default of 9999 */
zIndex?: number;

| Prop          | Description                                                                | Type              |
| ------------- | -------------------------------------------------------------------------- | ----------------- |
| placementMode | Determines the positioning anchor (viewport vs invokerNode/referenceNode) | 'global'\|'local' |
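A minimal sketch of how these props combine for a dropdown-style overlay, assuming the `OverlayController` export from `@lion/ui/overlays.js` (element ids are illustrative):

```js
import { OverlayController } from '@lion/ui/overlays.js';

const invokerNode = document.querySelector('#dropdown-invoker');
const contentNode = document.querySelector('#dropdown-content');

const ctrl = new OverlayController({
  placementMode: 'local', // anchor to invokerNode/referenceNode instead of the viewport
  invokerNode,
  contentNode,
  inheritsReferenceWidth: 'min', // contentNode at least as wide as the invoker
  zIndex: 9999, // the documented default
});
await ctrl.show();
```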

24129
package-lock.json generated

File diff suppressed because it is too large.

View file

@ -22,15 +22,14 @@
"lint:versions": "node ./scripts/lint-versions.js",
"prepare": "husky install",
"release": "changeset publish",
"rm-all-node_modules": "npm exec --workspaces -- npx rimraf node_modules && npx rimraf node_modules",
"rocket:build": "rocket build",
"rocket:build:start": "web-dev-server --root-dir _site --open",
"repo:clean-slate": "git clean -dfX",
"repo:diff-package-lock": "npx diff-package-lock",
"start": "rocket start",
"test": "run-p test:browser test:node",
"test:browser": "web-test-runner --coverage",
"test:browserstack": "web-test-runner --config ./web-test-runner-browserstack.config.js",
"test:node": "npm run test:node --workspaces --if-present",
"test:screenshots": "rimraf screenshots/.diff/ && rimraf screenshots/.current/ && mocha --require scripts/screenshots/bootstrap.js --exit --timeout 10000 \"packages/**/test/*.screenshots-test.js\"",
"test:screenshots": "npx rimraf screenshots/.diff/ && npx rimraf screenshots/.current/ && mocha --require scripts/screenshots/bootstrap.js --exit --timeout 10000 \"packages/**/test/*.screenshots-test.js\"",
"test:screenshots:update": "cross-env UPDATE_SCREENSHOTS=true npm run test:screenshots",
"types": "npm run types --workspaces --if-present",
"types-check-only": "npm run types-check-only --workspaces --if-present"
@ -40,71 +39,57 @@
"packages-node/*"
],
"devDependencies": {
"@babel/core": "^7.21.3",
"@babel/core": "^7.24.5",
"@bundled-es-modules/fetch-mock": "^6.5.2",
"@changesets/cli": "^2.26.1",
"@custom-elements-manifest/analyzer": "^0.8.0",
"@open-wc/building-rollup": "^1.10.0",
"@open-wc/eslint-config": "^10.0.0",
"@changesets/cli": "^2.27.1",
"@custom-elements-manifest/analyzer": "^0.10.2",
"@open-wc/building-rollup": "^2.2.3",
"@open-wc/eslint-config": "^12.0.3",
"@open-wc/scoped-elements": "^3.0.5",
"@open-wc/testing": "^3.1.7",
"@open-wc/testing-helpers": "^2.2.0",
"@rocket/blog": "^0.4.0",
"@open-wc/testing": "^4.0.0",
"@open-wc/testing-helpers": "^3.0.1",
"@rocket/blog": "0.4.0",
"@rocket/cli": "^0.10.2",
"@rocket/launch": "^0.6.0",
"@rocket/search": "^0.5.1",
"@types/autosize": "^4.0.3",
"@types/chai-as-promised": "^7.1.5",
"@types/chai-dom": "^0.0.8",
"@types/convert-source-map": "^1.5.2",
"@types/fs-extra": "^9.0.13",
"@types/glob": "^7.1.3",
"@types/istanbul-reports": "^3.0.1",
"@types/mocha": "^10.0.1",
"@types/prettier": "^2.7.2",
"@web/dev-server": "^0.4.2",
"@types/chai-as-promised": "^7.1.8",
"@types/chai-dom": "^1.11.3",
"@types/fs-extra": "^11.0.4",
"@types/glob": "^8.1.0",
"@types/mocha": "^10.0.6",
"@types/prettier": "^2.7.3",
"@web/dev-server-legacy": "^0.1.7",
"@web/test-runner": "^0.15.2",
"@web/test-runner-browserstack": "^0.5.1",
"@web/test-runner-commands": "^0.6.5",
"@web/test-runner-playwright": "^0.9.0",
"@webcomponents/scoped-custom-element-registry": "^0.0.8",
"@yarnpkg/lockfile": "^1.1.0",
"babel-polyfill": "^6.26.0",
"@web/test-runner": "^0.18.1",
"@web/test-runner-browserstack": "^0.7.1",
"@web/test-runner-commands": "^0.9.0",
"@web/test-runner-playwright": "^0.11.0",
"@webcomponents/scoped-custom-element-registry": "^0.0.9",
"bundlesize": "^1.0.0-beta.2",
"cem-plugin-vs-code-custom-data-generator": "^1.4.1",
"chai": "^4.2.0",
"chai-as-promised": "^7.1.1",
"changeset": "^0.2.6",
"cross-env": "^7.0.2",
"es6-promisify": "^6.1.1",
"cem-plugin-vs-code-custom-data-generator": "^1.4.2",
"chai": "^4.4.1",
"chai-as-promised": "^7.1.2",
"cross-env": "^7.0.3",
"eslint": "^8.57.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-import": "^2.29.1",
"eslint-plugin-lit": "^1.11.0",
"eslint-plugin-lit-a11y": "^4.1.2",
"eslint-plugin-wc": "^2.1.0",
"globby": "^13.1.3",
"husky": "^6.0.0",
"lint-staged": "^10.5.4",
"globby": "^14.0.1",
"husky": "^9.0.11",
"lint-staged": "^15.2.2",
"looks-same": "^7.3.0",
"markdownlint-cli": "^0.17.0",
"minimist": "^1.2.6",
"mkdirp-promise": "^5.0.1",
"mocha": "^10.1.0",
"markdownlint-cli": "^0.40.0",
"mocha": "^10.4.0",
"npm-run-all": "^4.1.5",
"playwright": "^1.32.1",
"postinstall-postinstall": "^2.1.0",
"prettier": "^2.0.5",
"prettier-package-json": "^2.1.3",
"remark-html": "^13.0.1",
"rimraf": "^2.6.3",
"prettier": "^2.8.8",
"prettier-package-json": "^2.8.0",
"remark-html": "^13.0.2",
"rollup": "^2.79.1",
"semver": "^7.5.2",
"sinon": "^7.5.0",
"ssl-root-cas": "^1.3.1",
"semver": "^7.6.2",
"sinon": "^17.0.2",
"typescript": "^4.9.5",
"wireit": "^0.7.2"
"wireit": "^0.14.4"
},
"bundlesize": [
{
@ -114,11 +99,57 @@
],
"comments": {
"overrides": {
"//": [
"For best compatibility, we sometimes need to override a package version depended upon by multiple packages."
],
"sharp": [
"Version of 'sharp' package we get via '@rocket/cli' is too old to be built on a Mac.",
"But updating package '@rocket-cli' causes our portal to break. Reason unknown.",
"So, we override the sharp version in here until '@rocket-cli' can be updated."
]
},
"olderVersions": {
"//": [
"Our aim is to keep all depencies up-to-date(for maintainability, performance and security).",
"We use [npm-outdated](https://marketplace.visualstudio.com/items?itemName=mskelton.npm-outdated) as a helper for this.",
"Sometimes we can't bc/o incompatibility issues."
],
"eslint": [
"Can't be updated yet to 9.x, because of eslint-plugin-import"
],
"chai": [
"Can't be updated to 5.x, because of (unmaintained) chai-as-promised (TODO: phase out chai-as-promised)"
],
"typescript": [
"Since changes in types can be reflected in the code, we want to keep this stable for a longer period of time.",
"As semver is not followed, we keep our major versions aligned with a minot of TS (hence '~' instead of '^' is used)"
],
"remark-html": [
"Can't be updated to 14.x, because remark-html is still in commonjs."
],
"@rocket/*": [
"Pinned, as newer versions require a complete overhaul of docs. Later we will move to astro."
],
"looks-same": [
"Part of ./scripts/screenshots, which is not incorporated atm in automated test suite. TODO: re-evaluate solution and whether visual regression testing needs to be part of this repo"
],
"@open-wc/building-rollup": [
"Can't be updated to 3.x, as v2 seems to be better compatible with rocket setup"
]
},
"toBeRemoved": {
"//": [
"For maintainability, performance and security, we want to keep the number of dependencies as low as possible (in case functionality can be achieved via existing dependencies or platform functionality)."
],
"@bundled-es-modules/fetch-mock": [
"Can be achieved via sinon as well"
],
"publish-docs/fs-extra | @types/fs-extra": [
"Copy can be achieved via node's fs module"
],
"@web/dev-server-legacy": [
"Only needed for browserstack config. Can be achieved via @web/dev-server"
]
}
},
"overrides": {

View file

@ -29,12 +29,12 @@
"types": "wireit"
},
"dependencies": {
"@babel/generator": "^7.22.5",
"@babel/parser": "^7.22.5",
"@babel/traverse": "^7.23.2",
"@babel/types": "^7.22.5",
"es-module-lexer": "^0.3.6",
"globby": "^13.2.0",
"@babel/generator": "^7.24.4",
"@babel/parser": "^7.24.4",
"@babel/traverse": "^7.24.1",
"@babel/types": "^7.24.0",
"es-module-lexer": "^0.3.26",
"globby": "^14.0.1",
"prettier": "^2.8.8"
},
"keywords": [

View file

@ -0,0 +1,266 @@
2023.10.18, v3.0.1
fix:
- d37e664 types
2023.10.17, v3.0.0
feature:
- 7aefd72 types
- aa0ef7b babel v8
- be3c7a6 ImportAttributes
- f927c37 package: putout v32.2.2
- 4a8b9e3 package: eslint-plugin-putout v20.0.0
2023.07.18, v2.2.0
fix:
- ce8a51a incorrect line number calculation and fix bug with JSX elements not having loc keys (#20)
2023.07.12, v2.1.0
feature:
- 68dfe2d package: eslint-plugin-putout v18.1.0
- a303550 package: c8 v8.0.0
- 8e70e4c package: putout v30.4.0
- 8f5df0a package: eslint-plugin-n v16.0.1
- 19700e5 package: nodemon v3.0.1
2023.04.26, v2.0.0
fix:
- 606fd45 handle null element in holey arrays (#18)
feature:
- 6a1e3a3 swc-to-babel: drop support of node < 16
- ab3263e swc-to-babel: use @putout/printer
- d21f30e package: eslint-plugin-putout v17.5.1
- e14d18c package: check-dts v0.7.1
- a3cabd8 package: typescript v5.0.4
- 48a0b6c package: putout v29.3.0
2022.10.12, v1.26.0
fix:
- swc-to-babel: parenthesized const assertion error (#15)
feature:
- package: supertape v8.1.0
2022.08.24, v1.25.1
fix:
- getters/setters: make getters work properly and fix tests (#12)
2022.08.23, v1.25.0
feature:
- make source an optional parameter (#10)
2022.08.23, v1.24.0
feature:
- add type definitions (#8)
2022.08.23, v1.23.0
feature:
- add support for GetterProperty and SetterProperty (#7)
2022.08.21, v1.22.0
feature:
- add support of KeyValueProperty nodes
2022.08.21, v1.21.0
feature:
- package: eslint-plugin-putout v16.0.1
- package: putout v27.1.0
- add support of TSKeywordType (close #1)
2022.06.12, v1.20.1
fix:
- swc-to-babel: add directives
2022.06.12, v1.20.0
feature:
- swc-to-babel: add support of ExportDefaultDeclaration
2022.06.11, v1.19.0
feature:
- swc-to-babel: ObjectProperty instead of AssignmentPatternProperty
2022.06.11, v1.18.0
feature:
- swc-to-babel: add support of ClassExpression and ExportSpecifier
2022.06.11, v1.17.1
fix:
- swc-to-babel: ImportSpecifier
2022.06.11, v1.17.0
feature:
- swc-to-babel: add support of ObjectProperty
2022.06.11, v1.16.1
fix:
- swc-to-babel: ArrowFunctionExpression
2022.06.11, v1.16.0
feature:
- swc-to-babel: NewExpression: arguments field should always be present
2022.06.11, v1.15.0
feature:
- swc-to-babel: add support of ArrayExpression
2022.06.11, v1.14.0
feature:
- swc-to-babel: add support of FunctionDeclaration
2022.06.10, v1.13.0
feature:
- swc-to-babel: TSAliasDeclaration
2022.06.10, v1.12.0
feature:
- swc-to-babel: handle typeParameters
2022.06.10, v1.11.2
feature:
- swc-to-babel: improve SpreadElement support
2022.06.10, v1.11.1
feature:
- swc-to-babel: improve support of SpreadElement
2022.06.10, v1.11.0
feature:
- swc-to-babel: add support of NewExpression
2022.06.10, v1.10.0
feature:
- swc-to-babel: improve support of MemberExpression
2022.06.10, v1.9.0
feature:
- swc-to-babel: add support of ClassDeclaration
2022.06.10, v1.8.0
feature:
- swc-to-babel: add support of ParenthesisExpression
2022.06.10, v1.7.1
feature:
- swc-to-babel: improve support of typeAnnotation in Identifiers
- swc-to-babel: add support of ExportNamedDeclaration and ExportDefaultDeclaration
2022.06.10, v1.7.0
feature:
- swc-to-babel: add support of ExportNamedDeclaration and ExportDefaultDeclaration
2022.06.09, v1.6.0
feature:
- swc-to-babel: CallExpression has no typeArguments
- swc-to-babel: TemplateElement
2022.06.09, v1.5.0
feature:
- swc-to-babel: TemplateElement
- package: eslint-plugin-putout v15.6.0
2022.06.09, v1.4.0
feature:
- swc-to-babel: add support of typescript
2022.06.09, v1.3.1
fix:
- swc-to-babel: position
2022.06.09, v1.3.0
feature:
- swc-to-babel: add support of BlockStatement
2022.06.09, v1.2.0
feature:
- swc-to-babel: CallExpression
2022.06.09, v1.1.1
fix:
- swc-to-babel: no type
2022.06.09, v1.1.0
feature:
- (package) supertape v7.3.0
- (package) putout v26.13.0
- (package) madrun v9.0.4
- swc-to-babel: add support of Identifier
2022.02.06, v1.0.2
feature:
- swc-to-babel: rm unused
2022.02.05, v1.0.1
fix:
- lint

View file

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) coderaiser
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -0,0 +1,2 @@
Temp inline version of [swc-to-babel](http://github.com/coderaiser/swc-to-babel) (version 3.0.1), as we need to change a few things for 100% compatibility with our analyzers.

View file

@ -0,0 +1,17 @@
'use strict';
module.exports = ({tokens, ...program}) => {
const ast = {
type: 'File',
program: {
...program,
directives: [],
},
comments: [],
tokens,
};
return ast;
};

View file

@ -0,0 +1,134 @@
'use strict';
const { types, traverse } = require('@putout/babel');
const {
convertModuleToProgram,
convertSpanToPosition,
convertVariableDeclarator,
convertStringLiteral,
convertIdentifier,
convertCallExpression,
convertObjectProperty,
BlockStatement,
TemplateElement,
convertTSTypeParameter,
convertExportDeclaration,
convertExportDefaultExpression,
convertParenthesisExpression,
convertGetterSetter,
ClassMethod,
ClassDeclaration,
ArrayExpression,
MemberExpression,
NewExpression,
Function,
ImportDeclaration,
ImportSpecifier,
ExportNamedDeclaration,
ExportDefaultDeclaration,
ExportSpecifier,
TSTypeAliasDeclaration,
TSMappedType,
TSTypeReference,
TSTypeOperator,
TSTypeParameter,
TSIndexedAccessType,
TSAsExpression,
JSXElement,
JSXFragment,
} = require('./swc/index.cjs');
const getAST = require('./get-ast.cjs');
const { isIdentifier } = types;
/**
* Convert an SWC ast to a babel ast
* @param {Module} node SWC ast
* @param {string} [source=''] Source code
* @returns {ParseResult<File>} Babel ast
*/
function toBabel(node, source = '') {
const ast = getAST(node);
traverse(ast, {
noScope: true,
BlockStatement,
TemplateElement,
ClassMethod,
ClassDeclaration,
ClassExpression: ClassDeclaration,
ArrayExpression,
MemberExpression,
NewExpression,
Function,
ImportDeclaration,
ImportSpecifier,
ExportNamedDeclaration,
ExportSpecifier,
ExportDefaultDeclaration,
TSTypeAliasDeclaration,
TSMappedType,
TSTypeReference,
TSTypeOperator,
TSTypeParameter,
TSIndexedAccessType,
TSAsExpression,
JSXElement,
JSXFragment,
enter(path) {
const { node } = path;
const { type } = node;
if ('span' in path.node) convertSpanToPosition(path, source);
delete node.start;
delete node.end;
if (type?.startsWith('Ts')) node.type = type.replace('Ts', 'TS');
if (type?.endsWith('Literal')) setEsprimaRaw(node);
if (isIdentifier(path)) return convertIdentifier(path);
if (path.isStringLiteral()) return convertStringLiteral(path);
if (type === 'Module') return convertModuleToProgram(path);
if (path.isVariableDeclarator()) return convertVariableDeclarator(path);
if (path.isCallExpression()) return convertCallExpression(path);
if (path.isTSTypeParameter()) return convertTSTypeParameter(path);
if (path.type === 'ExportDeclaration') return convertExportDeclaration(path);
if (path.type === 'ExportDefaultExpression') return convertExportDefaultExpression(path);
if (path.type === 'ParenthesisExpression') return convertParenthesisExpression(path);
if (/^(KeyValue|KeyValuePattern|AssignmentPattern)Property$/.test(path.type))
return convertObjectProperty(path);
if (path.type === 'GetterProperty' || path.type === 'SetterProperty')
return convertGetterSetter(path);
},
});
return ast;
}
module.exports = toBabel;
function setEsprimaRaw(node) {
const { raw } = node;
node.raw = raw || node.extra?.raw;
node.extra = node.extra || {
raw,
};
}

View file

@ -0,0 +1,24 @@
'use strict';
module.exports.getPositionByOffset = (offset, source) => {
let line = 1;
let column = 0;
if (offset > source.length)
throw Error('end cannot be more than length ' + offset + ', ' + source.length);
for (let i = 0; i < offset; i++) {
if (source[i] === '\n' && i !== offset - 1) {
line++;
column = 0;
} else {
column++;
}
}
return {
line,
column,
index: offset - 1,
};
};

View file

@ -0,0 +1,390 @@
'use strict';
const { getPositionByOffset } = require('./get-position-by-offset.cjs');
const isNull = a => !a && typeof a === 'object';
const { assign } = Object;
module.exports.convertModuleToProgram = path => {
path.node.type = 'Program';
path.node.sourceType = 'module';
};
module.exports.convertSpanToPosition = (path, source) => {
const { start, end } = path.node.span;
delete path.node.span;
if (end > source.length)
return assign(path.node, {
start,
end,
});
const startPosition = getPositionByOffset(start, source);
const endPosition = getPositionByOffset(end, source);
assign(path.node, {
start: startPosition.index,
end: endPosition.index,
loc: {
start: startPosition,
end: endPosition,
},
});
};
module.exports.convertVariableDeclarator = path => {
delete path.parentPath.node.declare;
delete path.node.optional;
delete path.node.definite;
};
module.exports.convertStringLiteral = path => {
delete path.node.hasEscape;
delete path.node.kind;
};
module.exports.convertIdentifier = ({ node }) => {
convertIdentifier(node);
};
function convertIdentifier(node) {
const { typeAnnotation } = node;
node.name = node.value;
if (isNull(typeAnnotation)) {
delete node.typeAnnotation;
}
delete node.value;
delete node.optional;
delete node.span;
}
module.exports.convertCallExpression = path => {
const newArgs = [];
for (const arg of path.node.arguments) {
newArgs.push(arg.expression);
}
delete path.node.typeArguments;
path.node.arguments = newArgs;
};
module.exports.BlockStatement = path => {
path.node.body = path.node.stmts;
delete path.node.stmts;
path.node.directives = [];
};
module.exports.TSMappedType = path => {
path.node.typeParameter = path.node.typeParam;
if (!path.node.nameType) path.node.nameType = null;
if (!path.node.readonly) delete path.node.readonly;
if (!path.node.optional) delete path.node.optional;
delete path.node.typeParam;
};
module.exports.convertTSTypeParameter = path => {
convertIdentifier(path.node.name);
};
module.exports.TemplateElement = path => {
const { cooked, raw } = path.node;
path.node.value = {
cooked,
raw,
};
delete path.node.cooked;
delete path.node.raw;
delete path.node.tail;
};
module.exports.convertExportDeclaration = path => {
path.node.type = 'ExportNamedDeclaration';
};
module.exports.convertExportDefaultExpression = path => {
path.node.type = 'ExportDefaultDeclaration';
path.node.declaration = path.node.expression;
delete path.node.expression;
delete path.node.declare;
};
module.exports.convertParenthesisExpression = path => {
const expressionPath = path.get('expression');
if (expressionPath.type === 'TsAsExpression') convertTSAsExpression(expressionPath);
else if (expressionPath.type === 'TsConstAssertion') convertTSConstAssertion(expressionPath);
path.replaceWith(expressionPath.node);
};
module.exports.ClassMethod = path => {
const { node } = path;
const { key } = path.node;
Object.assign(node, {
...path.node.function,
key,
});
if (node.kind === 'getter') {
node.kind = 'get';
}
if (node.kind === 'setter') {
node.kind = 'set';
}
node.static = node.isStatic;
delete path.node.isStatic;
delete path.node.accessibility;
delete path.node.isAbstract;
delete path.node.isOptional;
delete path.node.isOverride;
delete path.node.optional;
delete path.node.function;
delete path.node.decorators;
delete path.node.typeParameters;
delete path.node.returnType;
delete path.node.span;
};
module.exports.ClassDeclaration = path => {
path.node.id = path.node.identifier;
path.node.body = {
type: 'ClassBody',
body: path.node.body,
};
delete path.node.identifier;
delete path.node.declare;
delete path.node.decorators;
delete path.node.isAbstract;
delete path.node.typeParams;
delete path.node.superTypeParams;
delete path.node.implements;
};
module.exports.MemberExpression = ({ node }) => {
node.computed = node.property.type === 'Computed';
if (node.computed) node.property = node.property.expression;
};
function convertSpreadElement(node) {
const { expression } = node;
assign(node, {
type: 'SpreadElement',
argument: expression,
});
delete node.spread;
delete node.expression;
}
function maybeConvertSpread(arg) {
if (arg === null) return;
const { spread } = arg;
if (spread) {
convertSpreadElement(arg);
return;
}
assign(arg, arg.expression);
delete arg.spread;
delete arg.expression;
}
module.exports.NewExpression = path => {
path.node.arguments = path.node.arguments || [];
path.node.arguments.forEach(maybeConvertSpread);
delete path.node.typeArguments;
};
module.exports.ArrayExpression = path => {
path.node.elements.forEach(maybeConvertSpread);
};
module.exports.Function = path => {
const { node } = path;
if (path.parentPath.isExportDefaultDeclaration()) path.node.type = 'FunctionDeclaration';
const { params, typeParameters } = node;
node.id = node.identifier || null;
delete node.identifier;
delete node.decorators;
if (!node.returnType) delete node.returnType;
for (const [index, param] of params.entries()) {
if (param.type === 'Parameter') params[index] = param.pat;
}
if (isNull(typeParameters)) delete node.typeParameters;
delete node.declare;
};
module.exports.TSTypeAliasDeclaration = path => {
delete path.node.declare;
delete path.node.typeParams;
};
module.exports.TSAsExpression = convertTSAsExpression;
function convertTSAsExpression({ node }) {
node.type = 'TSAsExpression';
if (node.typeAnnotation.kind === 'any')
assign(node.typeAnnotation, {
type: 'TSAnyKeyword',
});
}
module.exports.TSConstAssertion = convertTSConstAssertion;
function convertTSConstAssertion({ node }) {
assign(node, {
type: 'TSAsExpression',
extra: {
parenthesized: true,
parenStart: 0,
},
typeAnnotation: {
type: 'TSTypeReference',
typeName: {
type: 'Identifier',
name: 'const',
},
},
});
}
module.exports.TSTypeReference = path => {
delete path.node.typeParams;
};
module.exports.TSTypeOperator = path => {
path.node.operator = path.node.op;
delete path.node.op;
};
module.exports.TSTypeParameter = path => {
path.node.name = path.node.name.name;
delete path.node.in;
delete path.node.out;
delete path.node.default;
};
module.exports.TSIndexedAccessType = path => {
delete path.node.readonly;
};
module.exports.ImportDeclaration = ({ node }) => {
const { typeOnly } = node;
node.assertions = node.asserts?.properties || [];
node.importKind = typeOnly ? 'type' : 'value';
delete node.asserts;
delete node.typeOnly;
};
module.exports.ImportSpecifier = ({ node }) => {
if (!node.imported)
node.imported = {
...node.local,
};
delete node.isTypeOnly;
};
module.exports.convertObjectProperty = path => {
const { node } = path;
node.type = 'ObjectProperty';
node.shorthand = !node.value;
if (!node.value)
node.value = {
...node.key,
};
delete path.parentPath.node.optional;
};
module.exports.convertGetterSetter = ({ node }) => {
node.kind = node.type === 'GetterProperty' ? 'get' : 'set';
node.type = 'ObjectMethod';
node.params = node.param ? [node.param] : [];
delete node.param;
};
module.exports.ExportDefaultDeclaration = ({ node }) => {
// node.declaration may have been already provided by convertExportDefaultExpression
node.declaration = node.declaration || node.decl;
node.exportKind = 'value';
node.assertions = node.asserts?.properties || [];
delete node.decl;
};
module.exports.ExportNamedDeclaration = ({ node }) => {
const { typeOnly } = node;
node.assertions = node.asserts?.properties || [];
// node.source = null;
node.specifiers = node.specifiers || [];
node.exportKind = typeOnly ? 'type' : 'value';
delete node.asserts;
delete node.typeOnly;
};
module.exports.ExportSpecifier = ({ node }) => {
const { orig, exported } = node;
node.local = orig;
node.exported = exported || {
...orig,
};
delete node.isTypeOnly;
delete node.orig;
};
module.exports.JSXElement = path => {
path.node.openingElement = path.node.opening;
delete path.node.opening;
path.node.closingElement = path.node.closing;
delete path.node.closing;
};
module.exports.JSXFragment = path => {
path.node.openingFragment = path.node.opening;
delete path.node.opening;
path.node.closingFragment = path.node.closing;
delete path.node.closing;
};

View file

@ -30,40 +30,32 @@
"postinstall": "npx patch-package",
"match-lion-imports": "npm run providence -- analyze match-imports --search-target-collection @lion-targets --reference-collection @lion-references --measure-perf --skip-check-match-compatibility",
"providence": "node --max-old-space-size=8192 ./src/cli/index.js",
"publish-docs": "node ../../packages-node/publish-docs/src/cli.js --github-url https://github.com/ing-bank/lion/ --git-root-dir ../../",
"prepublishOnly": "npm run publish-docs",
"publish-docs": "node ../../packages-node/publish-docs/src/cli.js --github-url https://github.com/ing-bank/lion/ --git-root-dir ../../",
"test:node": "npm run test:node:unit && npm run test:node:e2e",
"test:node:e2e": "mocha './test-node/**/*.e2e.js' --timeout 60000",
"test:node:unit": "mocha './test-node/**/*.test.js'"
"test:node:unit": "mocha './{test-node,src}/**/*.test.js'"
},
"dependencies": {
"@babel/core": "^7.21.4",
"@babel/parser": "^7.21.4",
"@babel/plugin-proposal-class-properties": "^7.18.6",
"@babel/plugin-syntax-export-default-from": "^7.18.6",
"@babel/plugin-syntax-import-assertions": "^7.20.0",
"@babel/register": "^7.21.0",
"@babel/traverse": "^7.21.4",
"@babel/types": "^7.21.4",
"@rollup/plugin-node-resolve": "^15.0.2",
"@swc/core": "^1.3.46",
"@web/dev-server": "^0.4.2",
"anymatch": "^3.1.3",
"@babel/parser": "^7.24.5",
"@babel/plugin-syntax-import-assertions": "^7.24.1",
"@babel/traverse": "^7.24.5",
"@babel/types": "^7.24.5",
"@putout/babel": "^2.4.0",
"@rollup/plugin-node-resolve": "^15.2.3",
"@swc/core": "^1.5.6",
"commander": "^2.20.3",
"glob": "^8.1.0",
"inquirer": "^9.1.5",
"is-negated-glob": "^1.0.0",
"lit-element": "~3.3.1",
"parse5": "^7.1.2",
"read-package-tree": "5.3.1",
"semver": "^7.3.8",
"swc-to-babel": "^1.26.0"
"semver": "^7.6.2"
},
"devDependencies": {
"@types/chai": "^4.3.4",
"@types/inquirer": "^9.0.3",
"@types/mocha": "^10.0.1",
"@web/dev-server-core": "^0.4.0",
"@types/chai": "^4.3.16",
"@types/inquirer": "^9.0.7",
"@types/mocha": "^10.0.6",
"@web/dev-server": "^0.4.5",
"@web/dev-server-core": "^0.7.2",
"globby": "^14.0.1",
"lit-element": "^4.0.5",
"mock-fs": "^5.2.0",
"mock-require": "^3.0.3"
},
@ -78,6 +70,9 @@
"semver",
"software"
],
"engines": {
"node": ">=18.0.0"
},
"publishConfig": {
"access": "public"
}

View file

@ -1,11 +0,0 @@
diff --git a/node_modules/@web/dev-server-core/test-helpers.mjs b/node_modules/@web/dev-server-core/test-helpers.mjs
index 1a4d604..9c0d714 100644
--- a/node_modules/@web/dev-server-core/test-helpers.mjs
+++ b/node_modules/@web/dev-server-core/test-helpers.mjs
@@ -1,5 +1,5 @@
// this file is autogenerated with the generate-mjs-dts-entrypoints script
-import cjsEntrypoint from './dist/index.js';
+import cjsEntrypoint from './dist/test-helpers.js';
const {
virtualFilesPlugin,

View file

@ -1,10 +1,11 @@
/* eslint-disable no-shadow */
import pathLib from 'path';
import child_process from 'child_process'; // eslint-disable-line camelcase
import glob from 'glob';
import readPackageTree from '../program/utils/read-package-tree-with-bower-support.js';
import { LogService } from '../program/core/LogService.js';
import path from 'path';
import { optimisedGlob } from '../program/utils/optimised-glob.js';
import { toPosixPath } from '../program/utils/to-posix-path.js';
import { LogService } from '../program/core/LogService.js';
import { fsAdapter } from '../program/utils/fs-adapter.js';
/**
* @param {any[]} arr
@ -31,7 +32,6 @@ export function extensionsFromCs(v) {
}
/**
*
* @param {*} m
* @returns
*/
@ -45,29 +45,29 @@ export function setQueryMethod(m) {
}
/**
* @param {string} t
* @returns {string[]|undefined}
* @param {string} targets
* @returns {Promise<string[]|undefined>}
*/
export function pathsArrayFromCs(t, cwd = process.cwd()) {
if (!t) {
return undefined;
}
export async function pathsArrayFromCs(targets, cwd = process.cwd()) {
if (!targets) return undefined;
return flatten(
t.split(',').map(t => {
const resultPaths = [];
for (const t of targets.split(',')) {
if (t.startsWith('/')) {
return t;
resultPaths.push(t);
continue; // eslint-disable-line no-continue
}
if (t.includes('*')) {
if (!t.endsWith('/')) {
// eslint-disable-next-line no-param-reassign
t = `${t}/`;
}
return glob.sync(t, { cwd, absolute: true }).map(toPosixPath);
}
return toPosixPath(pathLib.resolve(cwd, t.trim()));
}),
const x = (await optimisedGlob(t, { cwd, absolute: true, onlyFiles: false })).map(
toPosixPath,
);
resultPaths.push(...x);
continue; // eslint-disable-line no-continue
}
resultPaths.push(toPosixPath(path.resolve(cwd, t.trim())));
}
return resultPaths;
}
/**
@ -75,9 +75,9 @@ export function pathsArrayFromCs(t, cwd = process.cwd()) {
* @param {'search-target'|'reference'} collectionType collection type
* @param {{searchTargetCollections: {[repo:string]:string[]}; referenceCollections:{[repo:string]:string[]}}} [eCfg] external configuration. Usually providence.conf.js
* @param {string} [cwd]
* @returns {string[]|undefined}
* @returns {Promise<string[]|undefined>}
*/
export function pathsArrayFromCollectionName(
export async function pathsArrayFromCollectionName(
name,
collectionType = 'search-target',
eCfg = undefined,
@ -132,6 +132,49 @@ export function targetDefault(cwd) {
return [toPosixPath(cwd)];
}
/**
* @param {string} targetPath
* @param {((s:string) => boolean)|null} matcher
* @param {'npm'|'bower'} [mode]
*/
async function readPackageTree(targetPath, matcher, mode) {
const folderName = mode === 'npm' ? 'node_modules' : 'bower_components';
const potentialPaths = await optimisedGlob(`${folderName}/**/*`, {
onlyDirectories: true,
fs: fsAdapter.fs,
cwd: targetPath,
absolute: true,
});
const matchingPaths = potentialPaths.filter(potentialPath => {
// Only dirs that are direct children of node_modules. So '**/node_modules/a' will match, but '**/node_modules/a/b' won't
const [, projectName] =
toPosixPath(potentialPath).match(new RegExp(`^.*/${folderName}/([^/]*)$`)) || [];
return matcher ? matcher(projectName) : true;
});
return matchingPaths;
}
/**
* @param {string|undefined} matchPattern
*/
function getMatcher(matchPattern) {
if (!matchPattern) return null;
const isValidMatchPattern = matchPattern.startsWith('/') && matchPattern.endsWith('/');
if (!isValidMatchPattern) {
LogService.error(
`[appendProjectDependencyPaths] Please provide a matchPattern enclosed by '/'. Found: ${matchPattern}`,
);
return null;
}
return (/** @type {string} */ d) => {
const reString = matchPattern.slice(1, -1);
const result = new RegExp(reString).test(d);
LogService.debug(`[appendProjectDependencyPaths]: /${reString}/.test(${d} => ${result})`);
return result;
};
}
/**
* Returns all sub projects matching condition supplied in matchFn
* @param {string[]} rootPaths all search-target project paths
@ -143,82 +186,27 @@ export async function appendProjectDependencyPaths(
matchPattern,
modes = ['npm', 'bower'],
) {
let matchFn;
if (matchPattern) {
if (matchPattern.startsWith('/') && matchPattern.endsWith('/')) {
matchFn = (/** @type {any} */ _, /** @type {string} */ d) => {
const reString = matchPattern.slice(1, -1);
const result = new RegExp(reString).test(d);
LogService.debug(`[appendProjectDependencyPaths]: /${reString}/.test(${d} => ${result})`);
return result;
};
} else {
LogService.error(
`[appendProjectDependencyPaths] Please provide a matchPattern enclosed by '/'. Found: ${matchPattern}`,
);
}
}
const matcher = getMatcher(matchPattern);
/** @type {string[]} */
const depProjectPaths = [];
for (const targetPath of rootPaths) {
for (const mode of modes) {
await readPackageTree(
targetPath,
matchFn,
(/** @type {string | undefined} */ err, /** @type {{ children: any[]; }} */ tree) => {
if (err) {
throw new Error(err);
}
const paths = tree.children.map(child => child.realpath);
depProjectPaths.push(...paths);
},
mode,
);
depProjectPaths.push(...(await readPackageTree(targetPath, matcher, mode)));
}
}
// Write all data to {outputPath}/projectDeps.json
// const projectDeps = {};
// rootPaths.forEach(rootP => {
// depProjectPaths.filter(depP => depP.startsWith(rootP)).;
// });
return depProjectPaths.concat(rootPaths).map(toPosixPath);
}
/**
* Will install all npm and bower deps, so an analysis can be performed on them as well.
* Relevant when '--target-dependencies' is supplied.
* @param {string[]} searchTargetPaths
*/
export async function installDeps(searchTargetPaths) {
for (const targetPath of searchTargetPaths) {
LogService.info(`Installing npm dependencies for ${pathLib.basename(targetPath)}`);
try {
await spawnProcess('npm i --no-progress', { cwd: targetPath });
} catch (e) {
// @ts-expect-error
LogService.error(e);
}
LogService.info(`Installing bower dependencies for ${pathLib.basename(targetPath)}`);
try {
await spawnProcess(`bower i --production --force-latest`, { cwd: targetPath });
} catch (e) {
// @ts-expect-error
LogService.error(e);
}
}
}
export const _cliHelpersModule = {
csToArray,
extensionsFromCs,
setQueryMethod,
pathsArrayFromCs,
targetDefault,
appendProjectDependencyPaths,
spawnProcess,
installDeps,
pathsArrayFromCollectionName,
extensionsFromCs,
pathsArrayFromCs,
setQueryMethod,
targetDefault,
spawnProcess,
csToArray,
flatten,
};

View file

@ -1,27 +1,26 @@
import child_process from 'child_process'; // eslint-disable-line camelcase
import path from 'path';
import fs from 'fs';
import commander from 'commander';
import { LogService } from '../program/core/LogService.js';
import { QueryService } from '../program/core/QueryService.js';
import { InputDataService } from '../program/core/InputDataService.js';
import { toPosixPath } from '../program/utils/to-posix-path.js';
import { getCurrentDir } from '../program/utils/get-current-dir.js';
import { dashboardServer } from '../dashboard/server.js';
import { QueryService } from '../program/core/QueryService.js';
import { _providenceModule } from '../program/providence.js';
import { fsAdapter } from '../program/utils/fs-adapter.js';
import { _cliHelpersModule } from './cli-helpers.js';
import { _extendDocsModule } from './launch-providence-with-extend-docs.js';
import { _promptAnalyzerMenuModule } from './prompt-analyzer-menu.js';
/**
* @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../types/index.js').ProvidenceCliConf} ProvidenceCliConf
* @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
*/
const { version } = JSON.parse(
fs.readFileSync(path.resolve(getCurrentDir(import.meta.url), '../../package.json'), 'utf8'),
fsAdapter.fs.readFileSync(
path.resolve(getCurrentDir(import.meta.url), '../../package.json'),
'utf8',
),
);
const { extensionsFromCs, setQueryMethod, targetDefault, installDeps } = _cliHelpersModule;
const { extensionsFromCs, targetDefault } = _cliHelpersModule;
/**
* @param {{cwd?:string; argv?: string[]; providenceConf?: Partial<ProvidenceCliConf>}} cfg
@ -37,77 +36,34 @@ export async function cli({ cwd = process.cwd(), providenceConf, argv = process.
rejectCli = reject;
});
/** @type {'analyzer'|'queryString'} */
let searchMode;
/** @type {object} */
let analyzerOptions;
/** @type {object} */
let featureOptions;
/** @type {object} */
let regexSearchOptions;
// TODO: change back to "InputDataService.getExternalConfig();" once full package ESM
const externalConfig = providenceConf;
/**
* @param {'search-query'|'feature-query'|'analyzer-query'} searchMode
* @param {{regexString: string}} regexSearchOptions
* @param {{queryString: string}} featureOptions
* @param {{name:AnalyzerName; config:object;promptOptionalConfig:object}} analyzerOptions
* @returns
* @param {{analyzerOptions:{name:AnalyzerName; config:object;promptOptionalConfig:object}}} opts
*/
async function getQueryConfigAndMeta(
/* eslint-disable no-shadow */
searchMode,
regexSearchOptions,
featureOptions,
analyzerOptions,
/* eslint-enable no-shadow */
) {
async function getQueryConfigAndMeta(opts) {
let queryConfig = null;
let queryMethod = null;
if (searchMode === 'search-query') {
queryConfig = QueryService.getQueryConfigFromRegexSearchString(
regexSearchOptions.regexString,
);
queryMethod = 'grep';
} else if (searchMode === 'feature-query') {
queryConfig = QueryService.getQueryConfigFromFeatureString(featureOptions.queryString);
queryMethod = 'grep';
} else if (searchMode === 'analyzer-query') {
let { name, config } = analyzerOptions;
// eslint-disable-next-line prefer-const
let { name, config } = opts.analyzerOptions;
if (!name) {
const answers = await _promptAnalyzerMenuModule.promptAnalyzerMenu();
name = answers.analyzerName;
}
if (!config) {
const answers = await _promptAnalyzerMenuModule.promptAnalyzerConfigMenu(
name,
analyzerOptions.promptOptionalConfig,
);
config = answers.analyzerConfig;
throw new Error('Please provide an analyzer name');
}
// Will get metaConfig from ./providence.conf.js
const metaConfig = externalConfig ? externalConfig.metaConfig : {};
config = { ...config, metaConfig };
queryConfig = await QueryService.getQueryConfigFromAnalyzer(name, config);
queryMethod = 'ast';
} else {
LogService.error('Please define a feature, analyzer or search');
process.exit(1);
}
return { queryConfig, queryMethod };
}
async function launchProvidence() {
const { queryConfig, queryMethod } = await getQueryConfigAndMeta(
searchMode,
regexSearchOptions,
featureOptions,
analyzerOptions,
);
const { queryConfig, queryMethod } = await getQueryConfigAndMeta({ analyzerOptions });
const searchTargetPaths = commander.searchTargetCollection || commander.searchTargetPaths;
let referencePaths;
@ -158,29 +114,6 @@ export async function cli({ cwd = process.cwd(), providenceConf, argv = process.
});
}
/**
* @param {{update:boolean; deps:boolean;createVersionHistory:boolean}} options
*/
async function manageSearchTargets(options) {
const basePath = path.join(__dirname, '../..');
if (options.update) {
LogService.info('git submodule update --init --recursive');
// eslint-disable-next-line camelcase
const updateResult = child_process.execSync('git submodule update --init --recursive', {
cwd: basePath,
});
LogService.info(String(updateResult));
}
if (options.deps) {
await installDeps(commander.searchTargetPaths);
}
if (options.createVersionHistory) {
await installDeps(commander.searchTargetPaths);
}
}
commander
.version(version, '-v, --version')
.option('-e, --extensions [extensions]', 'extensions like "js,html"', extensionsFromCs, [
@ -259,29 +192,6 @@ export async function cli({ cwd = process.cwd(), providenceConf, argv = process.
'Uses babel instead of swc. This will be slower, but guaranteed to be 100% compatible with @babel/generate and @babel/traverse',
);
commander
.command('search <regex>')
.alias('s')
.description('performs regex search string like "my-.*-comp"')
.action((regexString, options) => {
searchMode = 'search-query';
regexSearchOptions = options;
regexSearchOptions.regexString = regexString;
launchProvidence().then(resolveCli).catch(rejectCli);
});
commander
.command('feature <query-string>')
.alias('f')
.description('query like "tg-icon[size=xs]"')
.option('-m, --method [method]', 'query method: "grep" or "ast"', setQueryMethod, 'grep')
.action((queryString, options) => {
searchMode = 'feature-query';
featureOptions = options;
featureOptions.queryString = queryString;
launchProvidence().then(resolveCli).catch(rejectCli);
});
commander
.command('analyze [analyzer-name]')
.alias('a')
@ -296,84 +206,11 @@ export async function cli({ cwd = process.cwd(), providenceConf, argv = process.
)
.option('-c, --config [config]', 'configuration object for analyzer', c => JSON.parse(c))
.action((analyzerName, options) => {
searchMode = 'analyzer-query';
analyzerOptions = options;
analyzerOptions.name = analyzerName;
launchProvidence().then(resolveCli).catch(rejectCli);
});
commander
.command('extend-docs')
.alias('e')
.description(
`Generates data for "babel-extend-docs" plugin. These data are generated by the "match-paths"
plugin, which automatically resolves import paths from reference projects
(say [@lion/input, @lion/textarea, ...etc]) to a target project (say "wolf-ui").`,
)
.option(
'--prefix-from [prefix-from]',
`Prefix for components of reference layer. By default "lion"`,
a => a,
'lion',
)
.option(
'--prefix-to [prefix-to]',
`Prefix for components of reference layer. For instance "wolf"`,
)
.option(
'--output-folder [output-folder]',
`This is the file path where the result file "providence-extend-docs-data.json" will be written to`,
p => toPosixPath(path.resolve(process.cwd(), p.trim())),
process.cwd(),
)
.action(options => {
if (!options.prefixTo) {
LogService.error(`Please provide a "prefix to" like '--prefix-to "myprefix"'`);
process.exit(1);
}
if (!commander.referencePaths) {
LogService.error(`Please provide referencePaths path like '-r "node_modules/@lion/*"'`);
process.exit(1);
}
const prefixCfg = { from: options.prefixFrom, to: options.prefixTo };
_extendDocsModule
.launchProvidenceWithExtendDocs({
referenceProjectPaths: commander.referencePaths,
prefixCfg,
outputFolder: options.outputFolder,
extensions: commander.extensions,
allowlist: commander.allowlist,
allowlistReference: commander.allowlistReference,
skipCheckMatchCompatibility: commander.skipCheckMatchCompatibility,
cwd,
})
.then(resolveCli)
.catch(rejectCli);
});
commander
.command('manage-projects')
.description(
`Before running a query, be sure to have search-targets up to date (think of
npm/bower dependencies, latest version etc.)`,
)
.option('-u, --update', 'gets latest of all search-targets and references')
.option('-d, --deps', 'installs npm/bower dependencies of search-targets')
.option('-h, --create-version-history', 'gets latest of all search-targets and references')
.action(options => {
manageSearchTargets(options);
});
commander
.command('dashboard')
.description(
`Runs an interactive dashboard that shows all aggregated data from providence-output, configured
via providence.conf`,
)
.action(() => {
dashboardServer.start();
});
commander.parse(argv);
await cliPromise;

View file

@ -1,13 +1,15 @@
/* eslint-disable import/no-extraneous-dependencies */
import fs from 'fs';
import pathLib from 'path';
import { performance } from 'perf_hooks';
import { _providenceModule } from '../program/providence.js';
import { QueryService } from '../program/core/QueryService.js';
import path from 'path';
import { InputDataService } from '../program/core/InputDataService.js';
import { LogService } from '../program/core/LogService.js';
import { flatten } from './cli-helpers.js';
import MatchPathsAnalyzer from '../program/analyzers/match-paths.js';
import { toPosixPath } from '../program/utils/to-posix-path.js';
import { QueryService } from '../program/core/QueryService.js';
import { _providenceModule } from '../program/providence.js';
import { LogService } from '../program/core/LogService.js';
import { fsAdapter } from '../program/utils/fs-adapter.js';
import { flatten } from './cli-helpers.js';
/**
* @typedef {import('../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
@ -27,13 +29,13 @@ import MatchPathsAnalyzer from '../program/analyzers/match-paths.js';
*/
export async function getExtendDocsResults({
referenceProjectPaths,
prefixCfg,
extensions,
allowlist,
allowlistReference,
extensions,
prefixCfg,
allowlist,
cwd,
}) {
const monoPkgs = InputDataService.getMonoRepoPackages(cwd);
const monoPkgs = await InputDataService.getMonoRepoPackages(cwd);
const results = await _providenceModule.providence(
await QueryService.getQueryConfigFromAnalyzer(MatchPathsAnalyzer, { prefix: prefixCfg }),
@ -71,7 +73,7 @@ export async function getExtendDocsResults({
const normalizedP = `./${p}`;
if (pathStr.startsWith(normalizedP)) {
const localPath = pathStr.replace(normalizedP, ''); // 'lea-tabs.js'
result = `${name}/${localPath}`; // 'lea-tabs/lea-tabs.js'
result = toPosixPath(path.join(name, localPath)); // 'lea-tabs/lea-tabs.js'
return true;
}
return false;
@ -124,12 +126,12 @@ export async function launchProvidenceWithExtendDocs({
});
// Write results
const outputFilePath = pathLib.join(outputFolder, 'providence-extend-docs-data.json');
const outputFilePath = path.join(outputFolder, 'providence-extend-docs-data.json');
if (fs.existsSync(outputFilePath)) {
fs.unlinkSync(outputFilePath);
if (fsAdapter.fs.existsSync(outputFilePath)) {
fsAdapter.fs.unlinkSync(outputFilePath);
}
fs.writeFile(outputFilePath, JSON.stringify(queryOutputs, null, 2), err => {
fsAdapter.fs.writeFile(outputFilePath, JSON.stringify(queryOutputs, null, 2), err => {
if (err) {
throw err;
}

View file

@ -1,163 +0,0 @@
import fs from 'fs';
import path from 'path';
import inquirer from 'inquirer';
import traverse from '@babel/traverse';
import { InputDataService } from '../program/core/InputDataService.js';
import { AstService } from '../program/core/AstService.js';
import { LogService } from '../program/core/LogService.js';
import JsdocCommentParser from '../program/utils/jsdoc-comment-parser.js';
import { getCurrentDir } from '../program/utils/get-current-dir.js';
/**
* @typedef {import('../../types/index.js').TargetDepsObj} TargetDepsObj
* @typedef {import('../../types/index.js').TargetOrRefCollectionsObj} TargetOrRefCollectionsObj
* @typedef {import('../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../types/index.js').AnalyzerName} AnalyzerName
*/
/**
* Extracts name, defaultValue, optional, type, desc from JsdocCommentParser.parse method
* result
* @param {{tagName:string;tagValue:string}[]} jsdoc
* @returns {{ name:string, defaultValue:string, optional:boolean, type:string, desc:string }[]}
*/
function getPropsFromParsedJsDoc(jsdoc) {
const jsdocProps = jsdoc.filter(p => p.tagName === '@property');
const options = jsdocProps.map(({ tagValue }) => {
// eslint-disable-next-line no-unused-vars
const [_, type, nameOptionalDefault, desc] = tagValue.match(/\{(.*)\}\s*([^\s]*)\s*(.*)/) || [];
let nameDefault = nameOptionalDefault;
let optional = false;
if (nameOptionalDefault.startsWith('[') && nameOptionalDefault.endsWith(']')) {
optional = true;
nameDefault = nameOptionalDefault.slice(1).slice(0, -1);
}
const [name, defaultValue] = nameDefault.split('=');
return { name, defaultValue, optional, type, desc };
});
return options;
}
/**
* @param {PathFromSystemRoot} file
*/
function getAnalyzerOptions(file) {
const code = fs.readFileSync(file, 'utf8');
const babelAst = AstService.getAst(code, 'swc-to-babel', { filePath: file });
let commentNode;
traverse.default(babelAst, {
// eslint-disable-next-line no-shadow
VariableDeclaration(astPath) {
const { node } = astPath;
if (!node.leadingComments) {
return;
}
node.declarations.forEach(decl => {
// @ts-expect-error
if (decl?.id?.name === 'cfg') {
// eslint-disable-next-line prefer-destructuring
commentNode = node.leadingComments?.[0];
}
});
},
});
if (commentNode) {
const jsdoc = JsdocCommentParser.parse(commentNode);
return getPropsFromParsedJsDoc(jsdoc);
}
return undefined;
}
/**
* @param {PathFromSystemRoot} dir
* @param {boolean} [shouldGetOptions]
*/
function gatherAnalyzers(dir, shouldGetOptions) {
return InputDataService.gatherFilesFromDir(dir, { depth: 0 }).map(file => {
const analyzerObj = { file, name: path.basename(file, '.js') };
if (shouldGetOptions) {
analyzerObj.options = getAnalyzerOptions(file);
}
return analyzerObj;
});
}
/**
*
* @param {AnalyzerName} analyzerName
* @param {*} promptOptionalConfig
* @param {PathFromSystemRoot} [dir]
* @returns
*/
export async function promptAnalyzerConfigMenu(
analyzerName,
promptOptionalConfig,
dir = /** @type {PathFromSystemRoot} */ (
path.resolve(getCurrentDir(import.meta.url), '../program/analyzers')
),
) {
const menuOptions = gatherAnalyzers(dir, true);
const analyzer = menuOptions.find(o => o.name === analyzerName);
if (!analyzer) {
LogService.error(`[promptAnalyzerConfigMenu] analyzer "${analyzerName}" not found.`);
process.exit(1);
}
let configAnswers;
if (analyzer.options) {
configAnswers = await inquirer.prompt(
analyzer.options
.filter(a => promptOptionalConfig || !a.optional)
.map(a => ({
name: a.name,
message: a.description,
...(a.defaultValue ? { default: a.defaultValue } : {}),
})),
);
Object.entries(configAnswers).forEach(([key, value]) => {
const { type } = analyzer.options.find(o => o.name === key);
if (type.toLowerCase() === 'boolean') {
configAnswers[key] = value === 'false' ? false : Boolean(value);
} else if (type.toLowerCase() === 'number') {
configAnswers[key] = Number(value);
} else if (type.toLowerCase() !== 'string') {
if (value) {
configAnswers[key] = JSON.parse(value);
} else {
// Make sure to not override predefined values with undefined ones
delete configAnswers[key];
}
}
});
}
return {
analyzerConfig: configAnswers,
};
}
export async function promptAnalyzerMenu(
dir = /** @type {PathFromSystemRoot} */ (
path.resolve(getCurrentDir(import.meta.url), '../program/analyzers')
),
) {
const menuOptions = gatherAnalyzers(dir);
const answers = await inquirer.prompt([
{
type: 'list',
name: 'analyzerName',
message: 'Which analyzer do you want to run?',
choices: menuOptions.map(o => o.name),
},
]);
return {
analyzerName: answers.analyzerName,
};
}
export const _promptAnalyzerMenuModule = {
promptAnalyzerMenu,
promptAnalyzerConfigMenu,
};

View file

@ -1,4 +1,4 @@
// eslint-disable-next-line max-classes-per-file
// eslint-disable-next-line max-classes-per-file, import/no-extraneous-dependencies
import { LitElement, html, css } from 'lit-element';
import { DecorateMixin } from '../../utils/DecorateMixin.js';

View file

@ -1,5 +1,7 @@
/* eslint-disable lit-a11y/no-invalid-change-handler */
/* eslint-disable max-classes-per-file */
// eslint-disable-next-line import/no-extraneous-dependencies
import { LitElement, html, css } from 'lit-element';
import { tooltip as tooltipStyles } from './styles/tooltip.css.js';
import { global as globalStyles } from './styles/global.css.js';

View file

@ -1,9 +1,10 @@
import fs from 'fs';
import pathLib from 'path';
import path from 'path';
// eslint-disable-next-line import/no-extraneous-dependencies
import { startDevServer } from '@web/dev-server';
import { ReportService } from '../program/core/ReportService.js';
import { providenceConfUtil } from '../program/utils/providence-conf-util.js';
import { getCurrentDir } from '../program/utils/get-current-dir.js';
import { fsAdapter } from '../program/utils/fs-adapter.js';
/**
* @typedef {import('../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
@ -25,7 +26,7 @@ async function getCachedProvidenceResults({
*/
let outputFilePaths;
try {
outputFilePaths = fs.readdirSync(resultsPath);
outputFilePaths = fsAdapter.fs.readdirSync(resultsPath);
} catch (_) {
throw new Error(`Please make sure providence results can be found in ${resultsPath}`);
}
@ -33,7 +34,9 @@ async function getCachedProvidenceResults({
const resultFiles = {};
let searchTargetDeps;
outputFilePaths.forEach(fileName => {
const content = JSON.parse(fs.readFileSync(pathLib.join(resultsPath, fileName), 'utf-8'));
const content = JSON.parse(
fsAdapter.fs.readFileSync(path.join(resultsPath, fileName), 'utf-8'),
);
if (fileName === 'search-target-deps-file.json') {
searchTargetDeps = content;
} else {
@ -62,8 +65,8 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
*/
function getPackageJson(projectPath) {
try {
const file = pathLib.resolve(projectPath, 'package.json');
return JSON.parse(fs.readFileSync(file, 'utf8'));
const file = path.resolve(projectPath, 'package.json');
return JSON.parse(fsAdapter.fs.readFileSync(file, 'utf8'));
} catch (_) {
return null;
}
@ -85,7 +88,7 @@ function createMiddleWares({ providenceConf, providenceConfRaw, searchTargetDeps
return res;
}
const pathFromServerRootToHere = `/${pathLib.relative(
const pathFromServerRootToHere = `/${path.relative(
process.cwd(),
getCurrentDir(import.meta.url),
)}`;
@ -148,13 +151,13 @@ export async function createDashboardServerConfig() {
// Needed for dev purposes (we call it from ./packages-node/providence-analytics/ instead of ./)
// Allows es-dev-server to find the right moduleDirs
const fromPackageRoot = process.argv.includes('--serve-from-package-root');
const moduleRoot = fromPackageRoot ? pathLib.resolve(process.cwd(), '../../') : process.cwd();
const moduleRoot = fromPackageRoot ? path.resolve(process.cwd(), '../../') : process.cwd();
return {
appIndex: pathLib.resolve(getCurrentDir(import.meta.url), 'index.html'),
appIndex: path.resolve(getCurrentDir(import.meta.url), 'index.html'),
rootDir: moduleRoot,
nodeResolve: true,
moduleDirs: pathLib.resolve(moduleRoot, 'node_modules'),
moduleDirs: path.resolve(moduleRoot, 'node_modules'),
watch: false,
open: true,
middleware: createMiddleWares({

View file

@ -1,10 +1,10 @@
/* eslint-disable no-shadow, no-param-reassign */
import path from 'path';
import t from '@babel/types';
// @ts-ignore
import babelTraverse from '@babel/traverse';
import { Analyzer } from '../core/Analyzer.js';
import { trackDownIdentifierFromScope } from './helpers/track-down-identifier--legacy.js';
import { trackDownIdentifierFromScope } from '../utils/track-down-identifier--legacy.js';
/**
* @typedef {import('@babel/types').File} File
@ -241,7 +241,7 @@ export default class FindClassesAnalyzer extends Analyzer {
/**
* Prepare
*/
const analyzerResult = this._prepare(cfg);
const analyzerResult = await this._prepare(cfg);
if (analyzerResult) {
return analyzerResult;
}

View file

@ -1,13 +1,15 @@
import path from 'path';
import t from '@babel/types';
import babelTraverse from '@babel/traverse';
import t from '@babel/types';
import { trackDownIdentifierFromScope } from '../utils/track-down-identifier--legacy.js';
import { Analyzer } from '../core/Analyzer.js';
import { trackDownIdentifierFromScope } from './helpers/track-down-identifier--legacy.js';
/**
* @typedef {import('@babel/types').File} File
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').FindCustomelementsConfig} FindCustomelementsConfig
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('@babel/types').File} File
*/
function cleanup(transformedEntry) {
@ -109,7 +111,7 @@ export default class FindCustomelementsAnalyzer extends Analyzer {
/**
* Prepare
*/
const cachedAnalyzerResult = this._prepare(cfg);
const cachedAnalyzerResult = await this._prepare(cfg);
if (cachedAnalyzerResult) {
return cachedAnalyzerResult;
}

View file

@ -1,32 +1,33 @@
/* eslint-disable no-shadow, no-param-reassign */
import path from 'path';
import { swcTraverse } from '../utils/swc-traverse.js';
import { getAssertionType } from '../utils/get-assertion-type.js';
import { Analyzer } from '../core/Analyzer.js';
import { trackDownIdentifier } from './helpers/track-down-identifier.js';
import { normalizeSourcePaths } from './helpers/normalize-source-paths.js';
import { getReferencedDeclaration } from '../utils/get-source-code-fragment-of-declaration.js';
import { normalizeSourcePaths } from './helpers/normalize-source-paths.js';
import { trackDownIdentifier } from '../utils/track-down-identifier.js';
import { getAssertionType } from '../utils/get-assertion-type.js';
import { swcTraverse } from '../utils/swc-traverse.js';
import { LogService } from '../core/LogService.js';
import { Analyzer } from '../core/Analyzer.js';
/**
* @typedef {import("@swc/core").Module} SwcAstModule
* @typedef {import("@swc/core").Node} SwcNode
* @typedef {import("@swc/core").VariableDeclaration} SwcVariableDeclaration
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
* @typedef {{ exportSpecifiers:string[]; localMap: object; source:string, __tmp: { path:string } }} FindExportsSpecifierObj
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').FindExportsAnalyzerResult} FindExportsAnalyzerResult
* @typedef {import('../../../types/index.js').FindExportsAnalyzerEntry} FindExportsAnalyzerEntry
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').SwcScope} SwcScope
* @typedef {import("@swc/core").VariableDeclaration} SwcVariableDeclaration
* @typedef {import('../utils/track-down-identifier.js').RootFile} RootFile
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
* @typedef {import('../../../types/index.js').SwcVisitor} SwcVisitor
* @typedef {import('./helpers/track-down-identifier.js').RootFile} RootFile
* @typedef {object} RootFileMapEntry
* @typedef {string} currentFileSpecifier this is the local name in the file we track from
* @typedef {RootFile} rootFile contains file(filePath) and specifier
* @typedef {import('../../../types/index.js').SwcScope} SwcScope
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
* @typedef {import("@swc/core").Module} SwcAstModule
* @typedef {import("@swc/core").Node} SwcNode
* @typedef {RootFileMapEntry[]} RootFileMap
* @typedef {{ exportSpecifiers:string[]; localMap: object; source:string, __tmp: { path:string } }} FindExportsSpecifierObj
* @typedef {string} currentFileSpecifier this is the local name in the file we track from
* @typedef {object} RootFileMapEntry
* @typedef {RootFile} rootFile contains file(filePath) and specifier
*/
/**
@ -108,14 +109,12 @@ function cleanup(transformedFile) {
*/
function getExportSpecifiers(node) {
  // handles declarations like [export const g = 4];
if (node.declaration) {
if (node.declaration.declarations) {
if (node.declaration?.declarations) {
return [node.declaration.declarations[0].id.value];
}
if (node.declaration.identifier) {
if (node.declaration?.identifier) {
return [node.declaration.identifier.value];
}
}
// handles (re)named specifiers [export { x (as y)} from 'y'];
return (node.specifiers || []).map(s => {

View file

@ -1,20 +1,20 @@
/* eslint-disable no-shadow, no-param-reassign */
import { isRelativeSourcePath } from '../utils/relative-source-path.js';
import { swcTraverse } from '../utils/swc-traverse.js';
import { getAssertionType } from '../utils/get-assertion-type.js';
import { normalizeSourcePaths } from './helpers/normalize-source-paths.js';
import { Analyzer } from '../core/Analyzer.js';
import { isRelativeSourcePath } from '../utils/relative-source-path.js';
import { getAssertionType } from '../utils/get-assertion-type.js';
import { swcTraverse } from '../utils/swc-traverse.js';
import { LogService } from '../core/LogService.js';
import { Analyzer } from '../core/Analyzer.js';
/**
* @typedef {import("@swc/core").Module} SwcAstModule
* @typedef {import("@swc/core").Node} SwcNode
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
* @typedef {import('../../../types/index.js').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').AnalyzerAst} AnalyzerAst
* @typedef {import("@swc/core").Module} SwcAstModule
* @typedef {import("@swc/core").Node} SwcNode
*/
/**
@ -60,7 +60,7 @@ function findImportsPerAstFile(swcAst) {
const entry = /** @type {Partial<FindImportsAnalyzerEntry>} */ ({ importSpecifiers, source });
const assertionType = getAssertionType(node);
if (assertionType) {
entry.assertionType = getAssertionType(node);
entry.assertionType = assertionType;
}
transformedFile.push(entry);
},
@ -124,7 +124,7 @@ export default class FindImportsSwcAnalyzer extends Analyzer {
/**
* Prepare
*/
const cachedAnalyzerResult = this._prepare(cfg);
const cachedAnalyzerResult = await this._prepare(cfg);
if (cachedAnalyzerResult) {
return cachedAnalyzerResult;
}

View file

@ -1,5 +1,6 @@
/* eslint-disable no-param-reassign */
import path from 'path';
import { isRelativeSourcePath } from '../../utils/relative-source-path.js';
import { resolveImportPath } from '../../utils/resolve-import-path.js';
import { toPosixPath } from '../../utils/to-posix-path.js';

View file

@ -4,7 +4,7 @@ import pathLib from 'path';
import FindImportsAnalyzer from './find-imports.js';
import FindExportsAnalyzer from './find-exports.js';
import { Analyzer } from '../core/Analyzer.js';
import { fromImportToExportPerspective } from './helpers/from-import-to-export-perspective.js';
import { fromImportToExportPerspective } from '../utils/from-import-to-export-perspective.js';
import { transformIntoIterableFindExportsOutput } from './helpers/transform-into-iterable-find-exports-output.js';
import { transformIntoIterableFindImportsOutput } from './helpers/transform-into-iterable-find-imports-output.js';
@ -192,14 +192,11 @@ export default class MatchImportsAnalyzer extends Analyzer {
/**
* Prepare
*/
const cachedAnalyzerResult = this._prepare(cfg);
const cachedAnalyzerResult = await this._prepare(cfg);
if (cachedAnalyzerResult) {
return cachedAnalyzerResult;
}
/**
* Traverse
*/
let { referenceProjectResult } = cfg;
if (!referenceProjectResult) {
const findExportsAnalyzer = new FindExportsAnalyzer();
@ -222,6 +219,9 @@ export default class MatchImportsAnalyzer extends Analyzer {
});
}
/**
* Traverse
*/
const queryOutput = await matchImportsPostprocess(
referenceProjectResult,
targetProjectResult,

View file

@ -109,7 +109,6 @@ function getClosestToRootTargetPath(targetPaths, targetExportsResult) {
}
/**
*
* @param {FindExportsAnalyzerResult} targetExportsResult
* @param {FindExportsAnalyzerResult} refFindExportsResult
* @param {string} targetMatchedFile file where `toClass` from match-subclasses is defined
@ -198,7 +197,6 @@ function getVariablePaths(
}
/**
*
* @param {FindCustomelementsAnalyzerResult} targetFindCustomelementsResult
* @param {FindCustomelementsAnalyzerResult} refFindCustomelementsResult
* @param {FindExportsAnalyzerResult} refFindExportsResult
@ -240,8 +238,10 @@ function getTagPaths(
if (!matchSubclassSpecifierRootFile) {
return false;
}
const sameRoot = entry.rootFile.file === matchSubclassSpecifierRootFile.file;
const sameIdentifier = entry.rootFile.specifier === matchSubclassEntry.exportSpecifier.name;
return sameRoot && sameIdentifier;
});
if (refPathMatch) {
@ -274,11 +274,11 @@ function matchPathsPostprocess(
/** @type {AnalyzerQueryResult} */
const resultsArray = [];
targetMatchSubclassesResult.queryOutput.forEach(matchSubclassEntry => {
for (const matchSubclassEntry of targetMatchSubclassesResult.queryOutput) {
const fromClass = matchSubclassEntry.exportSpecifier.name;
matchSubclassEntry.matchesPerProject.forEach(projectMatch => {
projectMatch.files.forEach(({ identifier: toClass, file: targetMatchedFile }) => {
for (const projectMatch of matchSubclassEntry.matchesPerProject) {
for (const { identifier: toClass, file: targetMatchedFile } of projectMatch.files) {
const resultEntry = {
name: fromClass,
};
@ -293,7 +293,7 @@ function matchPathsPostprocess(
refProjectName,
);
if (paths && paths.length) {
if (paths?.length) {
resultEntry.variable = {
from: fromClass,
to: toClass,
@ -324,9 +324,9 @@ function matchPathsPostprocess(
if (resultEntry.variable || resultEntry.tag) {
resultsArray.push(resultEntry);
}
});
});
});
}
}
}
return resultsArray;
}
@ -394,7 +394,7 @@ export default class MatchPathsAnalyzer extends Analyzer {
/**
* Prepare
*/
const analyzerResult = this._prepare(cfg);
const analyzerResult = await this._prepare(cfg);
if (analyzerResult) {
return analyzerResult;
}

View file

@ -1,10 +1,10 @@
/* eslint-disable no-continue */
import pathLib from 'path';
import path from 'path';
/* eslint-disable no-shadow, no-param-reassign */
import FindClassesAnalyzer from './find-classes.js';
import FindExportsAnalyzer from './find-exports.js';
import { Analyzer } from '../core/Analyzer.js';
import { fromImportToExportPerspective } from './helpers/from-import-to-export-perspective.js';
import { fromImportToExportPerspective } from '../utils/from-import-to-export-perspective.js';
/**
* @typedef {import('../../../types/index.js').FindClassesAnalyzerResult} FindClassesAnalyzerResult
@ -126,7 +126,7 @@ async function matchSubclassesPostprocess(
const importProjectPath = cfg.targetProjectPath;
for (const { result, file } of targetClassesAnalyzerResult.queryOutput) {
const importerFilePath = /** @type {PathFromSystemRoot} */ (
pathLib.resolve(importProjectPath, file)
path.resolve(importProjectPath, file)
);
for (const classEntryResult of result) {
/**
@ -312,7 +312,7 @@ export default class MatchSubclassesAnalyzer extends Analyzer {
/**
* Prepare
*/
const analyzerResult = this._prepare(cfg);
const analyzerResult = await this._prepare(cfg);
if (analyzerResult) {
return analyzerResult;
}

View file

@ -54,9 +54,8 @@ async function analyzePerAstFile(projectData, astAnalysis, analyzerCfg) {
* @param {object[]|object} data
*/
function posixify(data) {
if (!data) {
return;
}
if (!data) return;
if (Array.isArray(data)) {
data.forEach(posixify);
} else if (typeof data === 'object') {
@ -212,7 +211,7 @@ export class Analyzer {
* @param {AnalyzerConfig} cfg
* @returns {CachedAnalyzerResult|undefined}
*/
_prepare(cfg) {
async _prepare(cfg) {
LogService.debug(`Analyzer "${this.name}": started _prepare method`);
/** @type {typeof Analyzer} */ (this.constructor).__unwindProvidedResults(cfg);
@ -281,14 +280,14 @@ export class Analyzer {
* Get reference and search-target data
*/
if (!cfg.targetProjectResult) {
this.targetData = InputDataService.createDataObject(
this.targetData = await InputDataService.createDataObject(
[cfg.targetProjectPath],
cfg.gatherFilesConfig,
);
}
if (cfg.referenceProjectPath) {
this.referenceData = InputDataService.createDataObject(
this.referenceData = await InputDataService.createDataObject(
[cfg.referenceProjectPath],
cfg.gatherFilesConfigReference || cfg.gatherFilesConfig,
);
@ -333,7 +332,7 @@ export class Analyzer {
if (!projectPath) {
LogService.error(`[Analyzer._traverse]: you must provide a projectPath`);
}
finalTargetData = InputDataService.createDataObject([
finalTargetData = await InputDataService.createDataObject([
{
project: {
name: projectName || '[n/a]',
@ -366,7 +365,7 @@ export class Analyzer {
/**
* Prepare
*/
const cachedAnalyzerResult = this._prepare(cfg);
const cachedAnalyzerResult = await this._prepare(cfg);
if (cachedAnalyzerResult) {
return cachedAnalyzerResult;
}

View file

@ -126,6 +126,5 @@ export class AstService {
/**
 * This option can be used as a last resort when an swc AST, combined with swc-to-babel, is backwards incompatible
* (for instance when @babel/generator expects a different ast structure and fails).
* Analyzers should use guarded-swc-to-babel util.
*/
AstService.fallbackToBabel = false;

View file

@ -1,42 +1,37 @@
/* eslint-disable no-param-reassign */
import fs from 'fs';
import pathLib from 'path';
import child_process from 'child_process'; // eslint-disable-line camelcase
import glob from 'glob';
import anymatch from 'anymatch';
// @ts-expect-error
import isNegatedGlob from 'is-negated-glob';
import path from 'path';
import { getFilePathRelativeFromRoot } from '../utils/get-file-path-relative-from-root.js';
import { optimisedGlob } from '../utils/optimised-glob.js';
import { toPosixPath } from '../utils/to-posix-path.js';
import { fsAdapter } from '../utils/fs-adapter.js';
import { memoize } from '../utils/memoize.js';
import { LogService } from './LogService.js';
import { AstService } from './AstService.js';
import { getFilePathRelativeFromRoot } from '../utils/get-file-path-relative-from-root.js';
import { toPosixPath } from '../utils/to-posix-path.js';
import { memoize } from '../utils/memoize.js';
// const memoize = fn => fn;
/**
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').FindImportsAnalyzerResult} FindImportsAnalyzerResult
* @typedef {import('../../../types/index.js').FindImportsAnalyzerEntry} FindImportsAnalyzerEntry
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').ProjectInputDataWithMeta} ProjectInputDataWithMeta
* @typedef {import('../../../types/index.js').AnalyzerQueryResult} AnalyzerQueryResult
* @typedef {import('../../../types/index.js').AnalyzerQueryConfig} AnalyzerQueryConfig
* @typedef {import('../../../types/index.js').FeatureQueryConfig} FeatureQueryConfig
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').SearchQueryConfig} SearchQueryConfig
* @typedef {import('../../../types/index.js').GatherFilesConfig} GatherFilesConfig
* @typedef {import('../../../types/index.js').ProjectInputData} ProjectInputData
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
* @typedef {{path:PathFromSystemRoot; name:ProjectName}} ProjectNameAndPath
* @typedef {import('../../../types/index.js').PathRelative} PathRelative
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').QueryConfig} QueryConfig
* @typedef {import('../../../types/index.js').QueryResult} QueryResult
* @typedef {import('../../../types/index.js').FeatureQueryConfig} FeatureQueryConfig
* @typedef {import('../../../types/index.js').SearchQueryConfig} SearchQueryConfig
* @typedef {import('../../../types/index.js').AnalyzerQueryConfig} AnalyzerQueryConfig
* @typedef {import('../../../types/index.js').Feature} Feature
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
* @typedef {import('../../../types/index.js').Analyzer} Analyzer
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').GatherFilesConfig} GatherFilesConfig
* @typedef {import('../../../types/index.js').AnalyzerQueryResult} AnalyzerQueryResult
* @typedef {import('../../../types/index.js').ProjectInputData} ProjectInputData
* @typedef {import('../../../types/index.js').ProjectInputDataWithMeta} ProjectInputDataWithMeta
* @typedef {import('../../../types/index.js').Project} Project
* @typedef {import('../../../types/index.js').ProjectName} ProjectName
* @typedef {import('../../../types/index.js').PackageJson} PackageJson
* @typedef {{path:PathFromSystemRoot; name:ProjectName}} ProjectNameAndPath
* @typedef {import('../../../types/index.js').Analyzer} Analyzer
* @typedef {import('../../../types/index.js').Project} Project
* @typedef {import('../../../types/index.js').Feature} Feature
*/
/**
@ -45,13 +40,13 @@ import { memoize } from '../utils/memoize.js';
*/
const getPackageJson = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
try {
const fileContent = fs.readFileSync(`${rootPath}/package.json`, 'utf8');
const fileContent = fsAdapter.fs.readFileSync(`${rootPath}/package.json`, 'utf8');
return JSON.parse(fileContent);
} catch (_) {
try {
      // For testing purposes, we allow a package.mock.json that contains 'fictional'
      // packages (like 'exporting-ref-project') that are not on the npm registry
const fileContent = fs.readFileSync(`${rootPath}/package.mock.json`, 'utf8');
const fileContent = fsAdapter.fs.readFileSync(`${rootPath}/package.mock.json`, 'utf8');
return JSON.parse(fileContent);
} catch (__) {
return undefined;
@ -65,7 +60,7 @@ const getPackageJson = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
*/
const getLernaJson = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
try {
const fileContent = fs.readFileSync(`${rootPath}/lerna.json`, 'utf8');
const fileContent = fsAdapter.fs.readFileSync(`${rootPath}/lerna.json`, 'utf8');
return JSON.parse(fileContent);
} catch (_) {
return undefined;
@ -73,34 +68,31 @@ const getLernaJson = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
});
/**
* @typedef {(list:PathFromSystemRoot[]|string[], rootPath:PathFromSystemRoot) => ProjectNameAndPath[]} GetPathsFromGlobListFn
* @typedef {(list:PathFromSystemRoot[]|string[], rootPath:PathFromSystemRoot) => Promise<ProjectNameAndPath[]>} GetPathsFromGlobListFn
* @type {GetPathsFromGlobListFn}
*/
const getPathsFromGlobList = memoize(
(
async (
/** @type {PathFromSystemRoot[]|string[]} */ list,
/** @type {PathFromSystemRoot} */ rootPath,
) => {
/** @type {string[]} */
const results = [];
list.forEach(pathOrGlob => {
if (!pathOrGlob.endsWith('/')) {
// eslint-disable-next-line no-param-reassign
pathOrGlob = `${pathOrGlob}/`;
}
for (const pathOrGlob of list) {
if (pathOrGlob.includes('*')) {
const globResults = glob.sync(pathOrGlob, { cwd: rootPath, absolute: false });
globResults.forEach(r => {
results.push(r);
const globResults = await optimisedGlob(pathOrGlob, {
cwd: rootPath,
absolute: false,
onlyFiles: false,
});
results.push(...globResults);
} else {
results.push(pathOrGlob);
}
});
}
return results.map(pkgPath => {
const packageRoot = pathLib.resolve(rootPath, pkgPath);
const basename = pathLib.basename(pkgPath);
const packageRoot = path.resolve(rootPath, pkgPath);
const basename = path.basename(pkgPath);
const pkgJson = getPackageJson(/** @type {PathFromSystemRoot} */ (packageRoot));
const name = /** @type {ProjectName} */ ((pkgJson && pkgJson.name) || basename);
return { name, path: /** @type {PathFromSystemRoot} */ (pkgPath) };
@ -114,7 +106,7 @@ const getPathsFromGlobList = memoize(
*/
const getGitignoreFile = memoize((/** @type {PathFromSystemRoot} */ rootPath) => {
try {
return fs.readFileSync(`${rootPath}/.gitignore`, 'utf8');
return fsAdapter.fs.readFileSync(`${rootPath}/.gitignore`, 'utf8');
} catch (_) {
return undefined;
}
@ -131,6 +123,7 @@ const getGitIgnorePaths = memoize((/** @type {PathFromSystemRoot} */ rootPath) =
}
const entries = fileContent.split('\n').filter(entry => {
// eslint-disable-next-line no-param-reassign
entry = entry.trim();
if (entry.startsWith('#')) {
return false;
@ -143,15 +136,19 @@ const getGitIgnorePaths = memoize((/** @type {PathFromSystemRoot} */ rootPath) =
// normalize entries to be compatible with anymatch
const normalizedEntries = entries.map(entry => {
// eslint-disable-next-line no-param-reassign
entry = toPosixPath(entry);
if (entry.startsWith('/')) {
// eslint-disable-next-line no-param-reassign
entry = entry.slice(1);
}
const isFile = entry.indexOf('.') > 0; // index of 0 means hidden file.
if (entry.endsWith('/')) {
// eslint-disable-next-line no-param-reassign
entry += '**';
} else if (!isFile) {
// eslint-disable-next-line no-param-reassign
entry += '/**';
}
return entry;
@ -189,30 +186,6 @@ function ensureArray(v) {
return Array.isArray(v) ? v : [v];
}
/**
* @param {string|string[]} patterns
* @param {Partial<{keepDirs:boolean;root:string}>} [options]
*
* @typedef {(patterns:string|string[], opts: {keepDirs?:boolean;root:string}) => string[]} MultiGlobSyncFn
* @type {MultiGlobSyncFn}
*/
const multiGlobSync = memoize(
(/** @type {string|string[]} */ patterns, { keepDirs = false, root } = {}) => {
patterns = ensureArray(patterns);
const res = new Set();
patterns.forEach(pattern => {
const files = glob.sync(pattern, { root });
files.forEach(filePath => {
if (fs.lstatSync(filePath).isDirectory() && !keepDirs) {
return;
}
res.add(filePath);
});
});
return Array.from(res);
},
);
/**
* @param {string} localPathWithDotSlash
* @returns {string}
@ -233,14 +206,14 @@ function normalizeLocalPathWithDotSlash(localPathWithoutDotSlash) {
}
/**
* @param {{valObjOrStr:object|string;nodeResolveMode:string}} opts
* @param {{valObjOrStr:object|string|null;nodeResolveMode:string}} opts
* @returns {string|null}
*/
function getStringOrObjectValOfExportMapEntry({ valObjOrStr, nodeResolveMode }) {
if (typeof valObjOrStr !== 'object') {
return valObjOrStr;
}
if (!valObjOrStr[nodeResolveMode]) {
if (!valObjOrStr?.[nodeResolveMode]) {
// This is allowed: it makes sense to have an entrypoint on the root for typescript, not for others
return null;
}
@ -259,29 +232,30 @@ export class InputDataService {
* Create an array of ProjectData
* @param {(PathFromSystemRoot|ProjectInputData)[]} projectPaths
* @param {Partial<GatherFilesConfig>} gatherFilesConfig
* @returns {ProjectInputDataWithMeta[]}
* @returns {Promise<ProjectInputDataWithMeta[]>}
*/
static createDataObject(projectPaths, gatherFilesConfig = {}) {
static async createDataObject(projectPaths, gatherFilesConfig = {}) {
/** @type {ProjectInputData[]} */
const inputData = projectPaths.map(projectPathOrObj => {
const inputData = [];
for (const projectPathOrObj of projectPaths) {
if (typeof projectPathOrObj === 'object') {
        // ProjectInputData was already provided manually
return projectPathOrObj;
inputData.push(projectPathOrObj);
continue; // eslint-disable-line no-continue
}
const projectPath = projectPathOrObj;
return {
inputData.push({
project: /** @type {Project} */ ({
name: pathLib.basename(projectPath),
name: path.basename(projectPath),
path: projectPath,
}),
entries: this.gatherFilesFromDir(projectPath, {
entries: await this.gatherFilesFromDir(projectPath, {
...this.defaultGatherFilesConfig,
...gatherFilesConfig,
}),
};
});
// @ts-ignore
}
return this._addMetaToProjectsData(inputData);
}
@ -333,7 +307,7 @@ export class InputDataService {
let commitHash;
let isGitRepo;
try {
isGitRepo = fs.lstatSync(pathLib.resolve(projectPath, '.git')).isDirectory();
isGitRepo = fsAdapter.fs.lstatSync(path.resolve(projectPath, '.git')).isDirectory();
// eslint-disable-next-line no-empty
} catch (_) {}
@ -372,7 +346,7 @@ export class InputDataService {
projectObj.entries.forEach(entry => {
let code;
try {
code = fs.readFileSync(entry, 'utf8');
code = fsAdapter.fs.readFileSync(entry, 'utf8');
} catch (e) {
LogService.error(`Could not find "${entry}"`);
}
@ -380,7 +354,7 @@ export class InputDataService {
toPosixPath(entry),
toPosixPath(projectObj.project.path),
);
if (pathLib.extname(file) === '.html') {
if (path.extname(file) === '.html') {
const extractedScripts = AstService.getScriptsFromHtml(/** @type {string} */ (code));
// eslint-disable-next-line no-shadow
extractedScripts.forEach((code, i) => {
@ -409,19 +383,16 @@ export class InputDataService {
if (this.__targetProjectPaths) {
return this.__targetProjectPaths;
}
const submoduleDir = pathLib.resolve(
__dirname,
'../../../providence-input-data/search-targets',
);
const submoduleDir = path.resolve(__dirname, '../../../providence-input-data/search-targets');
let dirs;
try {
dirs = fs.readdirSync(submoduleDir);
dirs = fsAdapter.fs.readdirSync(submoduleDir);
} catch (_) {
return [];
}
return dirs
.map(dir => /** @type {PathFromSystemRoot} */ (pathLib.join(submoduleDir, dir)))
.filter(dirPath => fs.lstatSync(dirPath).isDirectory());
.map(dir => /** @type {PathFromSystemRoot} */ (path.join(submoduleDir, dir)))
.filter(dirPath => fsAdapter.fs.lstatSync(dirPath).isDirectory());
}
static set targetProjectPaths(v) {
@ -438,11 +409,11 @@ export class InputDataService {
let dirs;
try {
const referencesDir = pathLib.resolve(__dirname, '../../../providence-input-data/references');
dirs = fs.readdirSync(referencesDir);
const referencesDir = path.resolve(__dirname, '../../../providence-input-data/references');
dirs = fsAdapter.fs.readdirSync(referencesDir);
dirs = dirs
.map(dir => pathLib.join(referencesDir, dir))
.filter(dirPath => fs.lstatSync(dirPath).isDirectory());
.map(dir => path.join(referencesDir, dir))
.filter(dirPath => fsAdapter.fs.lstatSync(dirPath).isDirectory());
// eslint-disable-next-line no-empty
} catch (_) {}
return /** @type {PathFromSystemRoot[]} */ (dirs);
@ -457,31 +428,31 @@ export class InputDataService {
*/
static get defaultGatherFilesConfig() {
return {
extensions: ['.js'],
allowlist: ['!node_modules/**', '!bower_components/**', '!**/*.conf.js', '!**/*.config.js'],
extensions: ['.js'],
depth: Infinity,
};
}
/**
* @param {PathFromSystemRoot} startPath
* @param {GatherFilesConfig} cfg
* @param {boolean} withoutDepth
* @protected
* @param {number} depth
* @param {string[]} extensions
* @returns {string}
*/
static getGlobPattern(startPath, cfg, withoutDepth = false) {
// if startPath ends with '/', remove
let globPattern = startPath.replace(/\/$/, '');
if (process.platform === 'win32') {
globPattern = globPattern.replace(/^.:/, '').replace(/\\/g, '/');
static _getDefaultGlobDepthPattern(depth = Infinity, extensions = ['.js']) {
// `.{${cfg.extensions.map(e => e.slice(1)).join(',')},}`;
const extensionsGlobPart = `.{${extensions.map(extension => extension.slice(1)).join(',')},}`;
if (depth === Infinity) {
return `**/*${extensionsGlobPart}`;
}
if (!withoutDepth) {
if (typeof cfg.depth === 'number' && cfg.depth !== Infinity) {
globPattern += `/*`.repeat(cfg.depth + 1);
} else {
globPattern += `/**/*`;
if (depth > 1) {
return `${`/*`.repeat(depth + 1)}${extensionsGlobPart}`;
}
if (depth === 0) {
return `*${extensionsGlobPart}`;
}
return { globPattern };
return '';
}
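  // Sketch of the patterns this helper produces (values derived from the logic above):
  //   _getDefaultGlobDepthPattern()                   -> '**/*.{js,}'
  //   _getDefaultGlobDepthPattern(0, ['.js'])         -> '*.{js,}'
  //   _getDefaultGlobDepthPattern(2, ['.js', '.ts'])  -> '/*/*/*.{js,ts,}'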
/**
@ -498,9 +469,9 @@ export class InputDataService {
* Gets an array of files for given extension
* @param {PathFromSystemRoot} startPath - local filesystem path
* @param {Partial<GatherFilesConfig>} customConfig - configuration object
* @returns {PathFromSystemRoot[]} result list of file paths
* @returns {Promise<PathFromSystemRoot[]>} result list of file paths
*/
static gatherFilesFromDir(startPath, customConfig = {}) {
static async gatherFilesFromDir(startPath, customConfig = {}) {
const cfg = {
...this.defaultGatherFilesConfig,
...customConfig,
@ -523,88 +494,89 @@ export class InputDataService {
if (cfg.allowlistMode === 'export-map') {
const pkgJson = getPackageJson(startPath);
if (!pkgJson.exports) {
if (!pkgJson?.exports) {
LogService.error(`No exports found in package.json of ${startPath}`);
}
const exposedAndInternalPaths = this.getPathsFromExportMap(pkgJson.exports, {
if (pkgJson?.exports) {
const exposedAndInternalPaths = await this.getPathsFromExportMap(pkgJson?.exports, {
packageRootPath: startPath,
});
return exposedAndInternalPaths
return /** @type {PathFromSystemRoot[]} */ (
exposedAndInternalPaths
// TODO: path.resolve(startPath, p.internal)?
.map(p => p.internal)
.filter(p => cfg.extensions.includes(`${pathLib.extname(p)}`));
.filter(p =>
cfg.extensions.includes(/** @type {`.${string}`} */ (`${path.extname(p)}`)),
)
);
}
}
/** @type {string[]} */
let gitIgnorePaths = [];
const negativeGitGlobs = [];
/** @type {string[]} */
let npmPackagePaths = [];
const npmGlobs = [];
const allowlistMode = cfg.allowlistMode || this._determineAllowListMode(startPath);
if (allowlistMode === 'git') {
gitIgnorePaths = getGitIgnorePaths(startPath);
negativeGitGlobs.push(
...getGitIgnorePaths(startPath).map(gitIgnorePath => `!${gitIgnorePath}`),
);
} else if (allowlistMode === 'npm') {
npmPackagePaths = getNpmPackagePaths(startPath);
}
const removeFilter = gitIgnorePaths;
const keepFilter = npmPackagePaths;
cfg.allowlist.forEach(allowEntry => {
const { negated, pattern } = isNegatedGlob(allowEntry);
if (negated) {
removeFilter.push(pattern);
} else {
keepFilter.push(allowEntry);
}
});
let { globPattern } = this.getGlobPattern(startPath, cfg);
globPattern += `.{${cfg.extensions.map(e => e.slice(1)).join(',')},}`;
const globRes = multiGlobSync(globPattern);
let filteredGlobRes;
if (removeFilter.length || keepFilter.length) {
filteredGlobRes = globRes.filter(filePath => {
const localFilePath = toPosixPath(filePath).replace(`${toPosixPath(startPath)}/`, '');
// @ts-expect-error
let shouldRemove = removeFilter.length && anymatch(removeFilter, localFilePath);
// @ts-expect-error
let shouldKeep = keepFilter.length && anymatch(keepFilter, localFilePath);
if (shouldRemove && shouldKeep) {
// Contradicting configs: the one defined by end user takes highest precedence
// If the match came from allowListMode, it loses.
// @ts-expect-error
if (allowlistMode === 'git' && anymatch(gitIgnorePaths, localFilePath)) {
// shouldRemove was caused by .gitignore, shouldKeep by custom allowlist
shouldRemove = false;
// @ts-expect-error
} else if (allowlistMode === 'npm' && anymatch(npmPackagePaths, localFilePath)) {
// shouldKeep was caused by npm "files", shouldRemove by custom allowlist
shouldKeep = false;
}
npmGlobs.push(...getNpmPackagePaths(startPath));
}
if (removeFilter.length && shouldRemove) {
return false;
}
if (!keepFilter.length) {
return true;
}
return shouldKeep;
});
const combinedGlobs = [...cfg.allowlist, ...npmGlobs, ...negativeGitGlobs];
const hasProvidedPositiveGlob = cfg.allowlist.some(glob => !glob.startsWith('!'));
    // We need to expand to all files in the project when no positive globs were provided
const shouldLookForAllFilesInProject =
allowlistMode === 'all' || (!npmGlobs.length && !hasProvidedPositiveGlob);
if (shouldLookForAllFilesInProject) {
combinedGlobs.push(this._getDefaultGlobDepthPattern(cfg.depth, cfg.extensions));
}
if (!filteredGlobRes || !filteredGlobRes.length) {
const globbyCfg = {
expandDirectories: false,
onlyFiles: true,
absolute: true,
cwd: startPath,
};
let filteredGlobRes = await optimisedGlob(combinedGlobs, globbyCfg);
    // Unfortunately, globby does not correctly remove the negated globs,
    // so we have to do it manually
const negatedGlobs = combinedGlobs.filter(p => p.startsWith('!'));
if (negatedGlobs.length) {
const subtract = await optimisedGlob(
negatedGlobs.map(p => p.slice(1)),
globbyCfg,
);
// eslint-disable-next-line no-shadow
filteredGlobRes = filteredGlobRes.filter(file => !subtract.includes(file));
}
    // Make sure we don't delete too much by giving customConfig.allowlist priority
if (customConfig.allowlist?.length) {
const customResults = await optimisedGlob(customConfig.allowlist, globbyCfg);
filteredGlobRes = Array.from(new Set([...filteredGlobRes, ...customResults]));
}
// Filter by extension (in the future: use globs exclusively for this?)
if (cfg.extensions.length) {
filteredGlobRes = filteredGlobRes.filter(glob =>
cfg.extensions.some(ext => glob.endsWith(ext)),
);
}
if (!filteredGlobRes?.length) {
LogService.warn(`No files found for path '${startPath}'`);
return [];
}
// reappend startPath
// const res = filteredGlobRes.map(f => pathLib.resolve(startPath, f));
return /** @type {PathFromSystemRoot[]} */ (filteredGlobRes.map(toPosixPath));
}
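  // Usage sketch (paths and options below are illustrative):
  //
  //   const files = await InputDataService.gatherFilesFromDir('/my/project', {
  //     extensions: ['.js', '.mjs'],
  //     allowlist: ['!test/**'],
  //     depth: Infinity,
  //   });
  //   // => absolute, posix-style file paths found under /my/project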
// TODO: use modern web config helper
/**
* Allows the user to provide a providence.conf.js file in its repository root
*/
@ -619,7 +591,7 @@ export class InputDataService {
* @param {PathFromSystemRoot} rootPath
* @returns {ProjectNameAndPath[]|undefined}
*/
static getMonoRepoPackages(rootPath) {
static async getMonoRepoPackages(rootPath) {
// [1] Look for npm/yarn workspaces
const pkgJson = getPackageJson(rootPath);
if (pkgJson?.workspaces) {
@ -639,9 +611,9 @@ export class InputDataService {
* @param {object} opts
* @param {'default'|'development'|string} [opts.nodeResolveMode='default']
* @param {string} opts.packageRootPath
* @returns {Promise<{internalExportMapPaths:string[]; exposedExportMapPaths:string[]}>}
* @returns {Promise<{internal:string; exposed:string}[]>}
*/
static getPathsFromExportMap(exports, { nodeResolveMode = 'default', packageRootPath }) {
static async getPathsFromExportMap(exports, { nodeResolveMode = 'default', packageRootPath }) {
const exportMapPaths = [];
for (const [key, valObjOrStr] of Object.entries(exports)) {
@ -672,25 +644,24 @@ export class InputDataService {
const valueToUseForGlob = stripDotSlashFromLocalPath(resolvedVal).replace('*', '**/*');
// Generate all possible entries via glob, first strip './'
const internalExportMapPathsForKeyRaw = glob.sync(valueToUseForGlob, {
const internalExportMapPathsForKeyRaw = await optimisedGlob(valueToUseForGlob, {
cwd: packageRootPath,
nodir: true,
onlyFiles: true,
});
const exposedExportMapPathsForKeyRaw = internalExportMapPathsForKeyRaw.map(pathInside => {
// Say we have "exports": { "./*.js": "./src/*.js" }
// => internalExportMapPathsForKey: ['./src/a.js', './src/b.js']
// => exposedExportMapPathsForKey: ['./a.js', './b.js']
const [, variablePart] = pathInside.match(
new RegExp(valueToUseForGlob.replace('*', '(.*)')),
);
const [, variablePart] =
pathInside.match(new RegExp(valueToUseForGlob.replace('*', '(.*)'))) || [];
return resolvedKey.replace('*', variablePart);
});
const internalExportMapPathsForKey = internalExportMapPathsForKeyRaw.map(filePath =>
normalizeLocalPathWithDotSlash(filePath),
const internalExportMapPathsForKey = internalExportMapPathsForKeyRaw.map(
normalizeLocalPathWithDotSlash,
);
const exposedExportMapPathsForKey = exposedExportMapPathsForKeyRaw.map(filePath =>
normalizeLocalPathWithDotSlash(filePath),
const exposedExportMapPathsForKey = exposedExportMapPathsForKeyRaw.map(
normalizeLocalPathWithDotSlash,
);
exportMapPaths.push(
@ -704,9 +675,6 @@ export class InputDataService {
return exportMapPaths;
}
}
// TODO: Remove memoizeConfig.isCacheDisabled once the whole of providence uses cacheConfig instead of
// memoizeConfig.isCacheDisabled
// InputDataService.cacheDisabled = memoizeConfig.isCacheDisabled;
InputDataService.getProjectMeta = memoize(InputDataService.getProjectMeta);
InputDataService.gatherFilesFromDir = memoize(InputDataService.gatherFilesFromDir);

View file

@ -1,5 +1,5 @@
import pathLib from 'path';
import fs from 'fs';
import path from 'path';
import { fsAdapter } from '../utils/fs-adapter.js';
const { log } = console;
@ -111,14 +111,14 @@ export class LogService {
}
static writeLogFile() {
const filePath = pathLib.join(process.cwd(), 'providence.log');
const filePath = path.join(process.cwd(), 'providence.log');
let file = `[log ${new Date()}]\n`;
// @ts-ignore
this._logHistory.forEach(l => {
file += `${l}\n`;
});
file += `[/log ${new Date()}]\n\n`;
fs.writeFileSync(filePath, file, { flag: 'a' });
fsAdapter.fs.writeFileSync(filePath, file, { flag: 'a' });
// @ts-ignore
this._logHistory = [];
}

View file

@ -1,8 +1,7 @@
import child_process from 'child_process'; // eslint-disable-line camelcase
import path from 'path';
import { AstService } from './AstService.js';
import { LogService } from './LogService.js';
import { getFilePathRelativeFromRoot } from '../utils/get-file-path-relative-from-root.js';
import { getCurrentDir } from '../utils/get-current-dir.js';
// import { memoize } from '../utils/memoize.js';
@ -31,81 +30,6 @@ const memoize = fn => fn;
const astProjectsDataCache = new Map();
export class QueryService {
/**
* @param {string} regexString string for 'free' regex searches.
* @returns {SearchQueryConfig}
*/
static getQueryConfigFromRegexSearchString(regexString) {
if (typeof regexString !== 'string') {
throw new Error('[QueryService.getQueryConfigFromRegexSearchString]: provide a string');
}
return { type: 'search', regexString };
}
/**
* Util function that can be used to parse cli input and feed the result object to a new
* instance of QueryResult
* @example
* const queryConfig = QueryService.getQueryConfigFromFeatureString(tg-icon[size=xs])
* const myQueryResult = QueryService.grepSearch(inputData, queryConfig)
* @param {string} queryString - string like tg-icon[size=xs]
* @returns {FeatureQueryConfig}
*/
static getQueryConfigFromFeatureString(queryString) {
if (typeof queryString !== 'string') {
throw new Error('[QueryService.getQueryConfigFromFeatureString]: provide a string');
}
/**
* Each candidate (tag, attrKey or attrValue) can end with asterisk.
* @param {string} candidate for my-*[attr*=x*] 'my-*', 'attr*' or 'x*'
* @returns {[string, boolean]}
*/
function parseContains(candidate) {
const hasAsterisk = candidate ? candidate.endsWith('*') : false;
const filtered = hasAsterisk ? candidate.slice(0, -1) : candidate;
return [filtered, hasAsterisk];
}
// Detect the features in the query
let tagCandidate;
let featString;
// Creates tag ('tg-icon') and featString ('font-icon+size=xs')
const attrMatch = queryString.match(/(^.*)(\[(.+)\])+/);
if (attrMatch) {
// eslint-disable-next-line prefer-destructuring
tagCandidate = attrMatch[1];
// eslint-disable-next-line prefer-destructuring
featString = attrMatch[3];
} else {
tagCandidate = queryString;
}
const [tag, usesTagPartialMatch] = parseContains(tagCandidate);
let featureObj;
if (featString) {
const [nameCandidate, valueCandidate] = featString.split('=');
const [name, usesValueContains] = parseContains(nameCandidate);
const [value, usesValuePartialMatch] = parseContains(valueCandidate);
featureObj = /** @type {Feature} */ {
name,
value,
tag,
isAttribute: true,
usesValueContains,
usesValuePartialMatch,
usesTagPartialMatch,
};
} else {
// Just look for tag name
featureObj = /** @type {Feature} */ ({ tag, usesTagPartialMatch });
}
return { type: 'feature', feature: featureObj };
}
/**
* Retrieves the default export found in ./program/analyzers/find-import.js
* @param {typeof Analyzer} analyzerCtor
@ -147,68 +71,6 @@ export class QueryService {
});
}
/**
* Search via unix grep
* @param {ProjectInputData} inputData
* @param {FeatureQueryConfig|SearchQueryConfig} queryConfig
* @param {{hasVerboseReporting:boolean;gatherFilesConfig:GatherFilesConfig}} [customConfig]
* @returns {Promise<QueryResult>}
*/
static async grepSearch(inputData, queryConfig, customConfig) {
const cfg = {
hasVerboseReporting: false,
gatherFilesConfig: {},
...customConfig,
};
const results = [];
// 1. Analyze the type of query from the QueryConfig (for instance 'feature' or 'search').
let regex;
if (queryConfig.type === 'feature') {
regex = this._getFeatureRegex(queryConfig.feature);
} else if (queryConfig.type === 'search') {
regex = queryConfig.regexString;
}
await Promise.all(
inputData.map(async projectData => {
// 2. For all files found in project, we will do a different grep
const projectResult = {};
const countStdOut = await this._performGrep(projectData.project.path, regex, {
count: true,
gatherFilesConfig: cfg.gatherFilesConfig,
});
projectResult.count = Number(countStdOut);
if (cfg.hasVerboseReporting) {
const detailStdout = await this._performGrep(projectData.project.path, regex, {
count: false,
gatherFilesConfig: cfg.gatherFilesConfig,
});
projectResult.files = detailStdout
.split('\n')
.filter(l => l)
.map(l => {
const [absolutePath, line] = l.split(':');
const file = getFilePathRelativeFromRoot(absolutePath, projectData.path);
const link = l.split(':').slice(0, 2).join(':');
const match = l.split(':').slice(2);
return { file, line: Number(line), match, link };
});
}
results.push({ project: projectData.project, ...projectResult });
}),
);
return /** @type {QueryResult} */ {
meta: {
searchType: 'grep',
query: queryConfig,
},
queryOutput: results,
};
}
/**
* Perform ast analysis
* @param {AnalyzerQueryConfig} analyzerQueryConfig
@ -282,81 +144,6 @@ export class QueryService {
}
astProjectsDataCache.set(pathAndRequiredAst, astData);
}
/**
* Performs a grep on given path for a certain tag name and feature
* @param {Feature} feature
*/
static _getFeatureRegex(feature) {
const { name, value, tag } = feature;
let potentialTag;
if (tag) {
potentialTag = feature.usesTagPartialMatch ? `.*${tag}.+` : tag;
} else {
potentialTag = '.*';
}
let regex;
if (name) {
if (value) {
// We are looking for an exact match: div[class=foo] -> <div class="foo">
let valueRe = value;
if (feature.usesValueContains) {
if (feature.usesValuePartialMatch) {
// We are looking for a partial match: div[class*=foo*] -> <div class="baz foo-bar">
valueRe = `.+${value}.+`;
} else {
// We are looking for an exact match inside a space separated list within an
// attr: div[class*=foo] -> <div class="baz foo bar">
valueRe = `((${value})|("${value} .*)|(.* ${value}")|(.* ${value} .*))`;
}
}
regex = `<${potentialTag} .*${name}="${valueRe}".+>`;
} else {
regex = `<${potentialTag} .*${name}(>|( |=).+>)`;
}
} else if (tag) {
regex = `<${potentialTag} .+>`;
} else {
LogService.error('Please provide a proper Feature');
}
return regex;
}
/**
*
* @param {PathFromSystemRoot} searchPath
* @param {string} regex
* @param {{ count:number; gatherFilesConfig:GatherFilesConfig; hasDebugEnabled:boolean }} customConfig
* @returns
*/
static _performGrep(searchPath, regex, customConfig) {
const cfg = {
count: false,
gatherFilesConfig: {},
hasDebugEnabled: false,
...customConfig,
};
const /** @type {string[]} */ ext = cfg.gatherFilesConfig.extensions;
const include = ext ? `--include="\\.(${ext.map(e => e.slice(1)).join('|')})" ` : '';
const count = cfg.count ? ' | wc -l' : '';
// TODO: test on Linux (only tested on Mac)
const cmd = `pcregrep -ornM ${include} '${regex}' ${searchPath} ${count}`;
if (cfg.hasDebugEnabled) {
LogService.debug(cmd, 'grep command');
}
return new Promise(resolve => {
// eslint-disable-next-line camelcase
child_process.exec(cmd, { maxBuffer: 200000000 }, (err, stdout) => {
resolve(stdout);
});
});
}
}
QueryService.cacheDisabled = false;
QueryService.addAstToProjectsData = memoize(QueryService.addAstToProjectsData);

View file

@ -1,17 +1,18 @@
import fs from 'fs';
import pathLib from 'path';
import { getHash } from '../utils/get-hash.js';
// import { memoize } from '../utils/memoize.js';
const memoize = fn => fn;
import path from 'path';
import { hash } from '../utils/hash.js';
import { fsAdapter } from '../utils/fs-adapter.js';
import { memoize } from '../utils/memoize.js';
// const memoize = fn => fn;
/**
* @typedef {import('../../../types/index.js').Project} Project
* @typedef {import('../../../types/index.js').ProjectName} ProjectName
* @typedef {import('../../../types/index.js').AnalyzerQueryResult} AnalyzerQueryResult
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').AnalyzerConfig} AnalyzerConfig
* @typedef {import('../../../types/index.js').AnalyzerName} AnalyzerName
* @typedef {import('../../../types/index.js').ProjectName} ProjectName
* @typedef {import('../../../types/index.js').QueryResult} QueryResult
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').Project} Project
*/
/**
@ -26,7 +27,7 @@ function createResultIdentifier(searchP, cfg, refP) {
  // why encodeURIComponent: it escapes the slashes in scoped package names like @lion/button, so they are safe in file names
const format = (/** @type {Project} */ p) =>
`${encodeURIComponent(p.name)}_${p.version || (p.commitHash && p.commitHash.slice(0, 5))}`;
const cfgHash = getHash(cfg);
const cfgHash = hash(cfg);
return `${format(searchP)}${refP ? `_+_${format(refP)}` : ''}__${cfgHash}`;
}
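// Example shape of a resulting identifier (values are illustrative):
//   createResultIdentifier({ name: '@lion/button', version: '0.1.0' }, cfg)
//   // => '%40lion%2Fbutton_0.1.0__-1203999081'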
@ -57,20 +58,20 @@ export class ReportService {
outputPath = this.outputPath,
) {
const output = JSON.stringify(queryResult, null, 2);
if (!fs.existsSync(outputPath)) {
fs.mkdirSync(outputPath);
if (!fsAdapter.fs.existsSync(outputPath)) {
fsAdapter.fs.mkdirSync(outputPath);
}
const { name } = queryResult.meta.analyzerMeta;
const filePath = this._getResultFileNameAndPath(name, identifier);
fs.writeFileSync(filePath, output, { flag: 'w' });
fsAdapter.fs.writeFileSync(filePath, output, { flag: 'w' });
}
/**
* @type {string}
*/
static get outputPath() {
return this.__outputPath || pathLib.join(process.cwd(), '/providence-output');
return this.__outputPath || path.join(process.cwd(), '/providence-output');
}
static set outputPath(p) {
@ -93,7 +94,10 @@ export class ReportService {
let cachedResult;
try {
cachedResult = JSON.parse(
fs.readFileSync(this._getResultFileNameAndPath(analyzerName, identifier), 'utf-8'),
fsAdapter.fs.readFileSync(
this._getResultFileNameAndPath(analyzerName, identifier),
'utf-8',
),
);
// eslint-disable-next-line no-empty
} catch (_) {}
@ -107,7 +111,7 @@ export class ReportService {
*/
static _getResultFileNameAndPath(name, identifier) {
return /** @type {PathFromSystemRoot} */ (
pathLib.join(this.outputPath, `${name || 'query'}_-_${identifier}.json`)
path.join(this.outputPath, `${name || 'query'}_-_${identifier}.json`)
);
}
@ -117,15 +121,15 @@ export class ReportService {
*/
static writeEntryToSearchTargetDepsFile(depProj, rootProjectMeta) {
const rootProj = `${rootProjectMeta.name}#${rootProjectMeta.version}`;
const filePath = pathLib.join(this.outputPath, 'search-target-deps-file.json');
const filePath = path.join(this.outputPath, 'search-target-deps-file.json');
let file = {};
try {
file = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
file = JSON.parse(fsAdapter.fs.readFileSync(filePath, 'utf-8'));
// eslint-disable-next-line no-empty
} catch (_) {}
const deps = [...(file[rootProj] || []), depProj];
file[rootProj] = [...new Set(deps)];
fs.writeFileSync(filePath, JSON.stringify(file, null, 2), { flag: 'w' });
fsAdapter.fs.writeFileSync(filePath, JSON.stringify(file, null, 2), { flag: 'w' });
}
}
ReportService.createIdentifier = memoize(ReportService.createIdentifier);

View file

@ -1,8 +1,11 @@
import { performance } from 'perf_hooks';
import { ReportService } from './core/ReportService.js';
import nodeFs from 'fs';
import { InputDataService } from './core/InputDataService.js';
import { LogService } from './core/LogService.js';
import { ReportService } from './core/ReportService.js';
import { QueryService } from './core/QueryService.js';
import { fsAdapter } from './utils/fs-adapter.js';
import { LogService } from './core/LogService.js';
import { AstService } from './core/AstService.js';
/**
@ -145,28 +148,6 @@ async function handleAnalyzer(queryConfig, cfg) {
return queryResults;
}
async function handleFeature(queryConfig, cfg, inputData) {
if (cfg.queryMethod === 'grep') {
const queryResult = await QueryService.grepSearch(inputData, queryConfig, {
gatherFilesConfig: cfg.gatherFilesConfig,
gatherFilesConfigReference: cfg.gatherFilesConfigReference,
});
return queryResult;
}
return undefined;
}
async function handleRegexSearch(queryConfig, cfg, inputData) {
if (cfg.queryMethod === 'grep') {
const queryResult = await QueryService.grepSearch(inputData, queryConfig, {
gatherFilesConfig: cfg.gatherFilesConfig,
gatherFilesConfigReference: cfg.gatherFilesConfigReference,
});
return queryResult;
}
return undefined;
}
/**
* Creates a report with usage metrics, based on a queryConfig.
*
@ -196,9 +177,15 @@ export async function providence(queryConfig, customConfig) {
/** Allows to navigate to source file in code editor */
addSystemPathsInResult: false,
fallbackToBabel: false,
fs: nodeFs,
...customConfig,
});
if (cfg.fs) {
// Allow to mock fs for testing
fsAdapter.setFs(cfg.fs);
}
if (cfg.debugEnabled) {
LogService.debugEnabled = true;
}
@ -211,23 +198,7 @@ export async function providence(queryConfig, customConfig) {
AstService.fallbackToBabel = true;
}
let queryResults;
if (queryConfig.type === 'ast-analyzer') {
queryResults = await handleAnalyzer(queryConfig, cfg);
} else {
const inputData = InputDataService.createDataObject(
cfg.targetProjectPaths,
cfg.gatherFilesConfig,
);
if (queryConfig.type === 'feature') {
queryResults = await handleFeature(queryConfig, cfg, inputData);
report(queryResults, cfg);
} else if (queryConfig.type === 'search') {
queryResults = await handleRegexSearch(queryConfig, cfg, inputData);
report(queryResults, cfg);
}
}
const queryResults = await handleAnalyzer(queryConfig, cfg);
if (cfg.writeLogFile) {
LogService.writeLogFile();

View file

@ -1,13 +1,14 @@
import path from 'path';
import { isRelativeSourcePath } from '../../utils/relative-source-path.js';
import { LogService } from '../../core/LogService.js';
import { resolveImportPath } from '../../utils/resolve-import-path.js';
import { toPosixPath } from '../../utils/to-posix-path.js';
import { isRelativeSourcePath } from './relative-source-path.js';
import { resolveImportPath } from './resolve-import-path.js';
import { LogService } from '../core/LogService.js';
import { toPosixPath } from './to-posix-path.js';
/**
* @typedef {import('../../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../../types/index.js').SpecifierSource} SpecifierSource
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').SpecifierSource} SpecifierSource
*/
/**

View file

@ -0,0 +1,27 @@
import originalNodeFs from 'fs';
/**
* Provides access to the file system (fs) which can be the real file system or a mock.
*/
class FsAdapter {
constructor() {
this.fs = originalNodeFs;
}
/**
* Call this for mocking or compatibility with non-node environments.
* @param {originalNodeFs} fs
*/
setFs(fs) {
this.fs = fs;
}
/**
* When done testing, call this to restore the real file system.
*/
restoreFs() {
this.fs = originalNodeFs;
}
}
export const fsAdapter = new FsAdapter();
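// Usage sketch (the mock below is a hypothetical, minimal fs stand-in):
//
//   import { fsAdapter } from './fs-adapter.js';
//
//   const mockFs = { readFileSync: () => '{"name":"mocked-project"}' };
//   fsAdapter.setFs(/** @type {any} */ (mockFs)); // all fsAdapter.fs.* calls now hit the mock
//   // ...run the code under test...
//   fsAdapter.restoreFs(); // back to the real node:fs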

View file

@ -1,16 +0,0 @@
/**
* @param {string|object} inputValue
* @returns {number}
*/
export function getHash(inputValue) {
if (typeof inputValue === 'object') {
// eslint-disable-next-line no-param-reassign
inputValue = JSON.stringify(inputValue);
}
return inputValue.split('').reduce(
(prevHash, currVal) =>
// eslint-disable-next-line no-bitwise
((prevHash << 5) - prevHash + currVal.charCodeAt(0)) | 0,
0,
);
}

View file

@ -1,15 +1,17 @@
import fs from 'fs';
import path from 'path';
import babelTraversePkg from '@babel/traverse';
import { trackDownIdentifier } from './track-down-identifier.js';
import { AstService } from '../core/AstService.js';
import { trackDownIdentifier } from '../analyzers/helpers/track-down-identifier.js';
import { toPosixPath } from './to-posix-path.js';
import { fsAdapter } from './fs-adapter.js';
/**
* @typedef {import('@babel/types').Node} Node
* @typedef {import('@babel/traverse').NodePath} NodePath
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('@babel/traverse').NodePath} NodePath
* @typedef {import('@babel/types').Node} Node
*/
/**
@ -82,7 +84,7 @@ export async function getSourceCodeFragmentOfDeclaration({
exportedIdentifier,
projectRootPath,
}) {
const code = fs.readFileSync(filePath, 'utf8');
const code = fsAdapter.fs.readFileSync(filePath, 'utf8');
// TODO: fix swc-to-babel lib to make this compatible with 'swc-to-babel' mode of getAst
const babelAst = AstService.getAst(code, 'babel', { filePath });

View file

@ -1,21 +1,22 @@
import fs from 'fs';
import path from 'path';
import { trackDownIdentifier } from './track-down-identifier.js';
import { swcTraverse, getPathFromNode } from './swc-traverse.js';
import { AstService } from '../core/AstService.js';
import { trackDownIdentifier } from '../analyzers/helpers/track-down-identifier.js';
import { toPosixPath } from './to-posix-path.js';
import { fsAdapter } from './fs-adapter.js';
/**
* @typedef {import('@swc/core').Node} SwcNode
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').SwcBinding} SwcBinding
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
* @typedef {import('@swc/core').Node} SwcNode
*/
/**
* @param {{rootPath:PathFromSystemRoot; localPath:PathRelativeFromProjectRoot}} opts
* @returns
* @returns {PathRelativeFromProjectRoot}
*/
export function getFilePathOrExternalSource({ rootPath, localPath }) {
if (!localPath.startsWith('.')) {
@ -23,7 +24,9 @@ export function getFilePathOrExternalSource({ rootPath, localPath }) {
// but we give a 100% score if from and to are same here..
return localPath;
}
return toPosixPath(path.resolve(rootPath, localPath));
return /** @type {PathRelativeFromProjectRoot} */ (
toPosixPath(path.resolve(rootPath, localPath))
);
}
/**
@ -80,11 +83,11 @@ export function getReferencedDeclaration({ referencedIdentifierName, globalScope
* @returns {Promise<{ sourceNodePath: SwcPath; sourceFragment: string|null; externalImportSource: string|null; }>}
*/
export async function getSourceCodeFragmentOfDeclaration({
filePath,
exportedIdentifier,
projectRootPath,
filePath,
}) {
const code = fs.readFileSync(filePath, 'utf8');
const code = fsAdapter.fs.readFileSync(filePath, 'utf8');
// compensate for swc span bug: https://github.com/swc-project/swc/issues/1366#issuecomment-1516539812
const offset = AstService._getSwcOffset();

View file

@ -1,4 +1,4 @@
import toBabel from 'swc-to-babel';
import toBabel from '../../../inlined-swc-to-babel/lib/swc-to-babel.cjs';
/**
* @typedef {import('@babel/types').File} File

View file

@ -0,0 +1,18 @@
/**
* @param {string|object} inputValue
* @returns {string}
*/
export function hash(inputValue) {
if (typeof inputValue === 'object') {
// eslint-disable-next-line no-param-reassign
inputValue = JSON.stringify(inputValue);
}
return String(
inputValue.split('').reduce(
(prevHash, currVal) =>
// eslint-disable-next-line no-bitwise
((prevHash << 5) - prevHash + currVal.charCodeAt(0)) | 0,
0,
),
);
}
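// For illustration (not part of the diff): the result is a string-encoded 32-bit hash,
// and objects are JSON-stringified before hashing, so structurally equal objects hash the same:
//
//   hash('foo'); // '101574'
//   hash({ a: 1 }) === hash({ a: 1 }); // true
//   hash({ a: 1 }) === hash('{"a":1}'); // also true: the object is stringified first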

View file

@ -1,8 +1,13 @@
export { toRelativeSourcePath, isRelativeSourcePath } from './relative-source-path.js';
export { trackDownIdentifier } from './track-down-identifier.js';
export {
getSourceCodeFragmentOfDeclaration,
getFilePathOrExternalSource,
} from './get-source-code-fragment-of-declaration.js';
export { optimisedGlob } from './optimised-glob.js';
export { swcTraverse } from './swc-traverse.js';
export { fsAdapter } from './fs-adapter.js';
export { memoize } from './memoize.js';
export { toRelativeSourcePath, isRelativeSourcePath } from './relative-source-path.js';
export { hash } from './hash.js';
// TODO: move trackdownIdentifier to utils as well

View file

@ -1,124 +0,0 @@
// @ts-nocheck
/* eslint-disable */
/**
* The MIT License (MIT)
*
* Copyright (c) 2015 Ryo Maruyama
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
// From: https://github.com/esdoc/esdoc/blob/master/src/Parser/CommentParser.js
/**
* Doc Comment Parser class.
*
* @example
* for (let comment of node.leadingComments) {
* let tags = CommentParser.parse(comment);
* console.log(tags);
* }
*/
export default class JsdocCommentParser {
/**
* parse comment to tags.
* @param {ASTNode} commentNode - comment node.
* @param {string} commentNode.value - comment body.
* @param {string} commentNode.type - CommentBlock or CommentLine.
* @returns {Tag[]} parsed comment.
*/
static parse(commentNode) {
if (!this.isESDoc(commentNode)) return [];
let comment = commentNode.value;
// TODO: refactor
comment = comment.replace(/\r\n/gm, '\n'); // for windows
comment = comment.replace(/^[\t ]*/gm, ''); // remove line head space
comment = comment.replace(/^\*[\t ]?/, ''); // remove first '*'
comment = comment.replace(/[\t ]$/, ''); // remove last space
comment = comment.replace(/^\*[\t ]?/gm, ''); // remove line head '*'
if (comment.charAt(0) !== '@') comment = `@desc ${comment}`; // auto insert @desc
comment = comment.replace(/[\t ]*$/, ''); // remove tail space.
comment = comment.replace(/```[\s\S]*?```/g, match => match.replace(/@/g, '\\ESCAPED_AT\\')); // escape code in descriptions
comment = comment.replace(/^[\t ]*(@\w+)$/gm, '$1 \\TRUE'); // auto insert tag text to non-text tag (e.g. @interface)
comment = comment.replace(/^[\t ]*(@\w+)[\t ](.*)/gm, '\\Z$1\\Z$2'); // insert separator (\\Z@tag\\Ztext)
const lines = comment.split('\\Z');
let tagName = '';
let tagValue = '';
const tags = [];
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
if (line.charAt(0) === '@') {
tagName = line;
const nextLine = lines[i + 1];
if (nextLine.charAt(0) === '@') {
tagValue = '';
} else {
tagValue = nextLine;
i++;
}
tagValue = tagValue
.replace('\\TRUE', '')
.replace(/\\ESCAPED_AT\\/g, '@')
.replace(/^\n/, '')
.replace(/\n*$/, '');
tags.push({ tagName, tagValue });
}
}
return tags;
}
/**
* parse node to tags.
* @param {ASTNode} node - node.
* @returns {{tags: Tag[], commentNode: CommentNode}} parsed comment.
*/
static parseFromNode(node) {
if (!node.leadingComments) node.leadingComments = [{ type: 'CommentBlock', value: '' }];
const commentNode = node.leadingComments[node.leadingComments.length - 1];
const tags = this.parse(commentNode);
return { tags, commentNode };
}
/**
* judge doc comment or not.
* @param {ASTNode} commentNode - comment node.
* @returns {boolean} if true, this comment node is doc comment.
*/
static isESDoc(commentNode) {
if (commentNode.type !== 'CommentBlock') return false;
return commentNode.value.charAt(0) === '*';
}
/**
* build comment from tags
* @param {Tag[]} tags
* @returns {string} block comment value.
*/
static buildComment(tags) {
return tags.reduce((comment, tag) => {
const line = tag.tagValue.replace(/\n/g, '\n * ');
return `${comment} * ${tag.tagName} \n * ${line} \n`;
}, '*\n');
}
}

View file

@ -1,6 +1,7 @@
export const memoizeConfig = {
isCacheDisabled: false,
};
/**
* For testing purposes, it is possible to disable caching.
*/
let shouldCache = true;
/**
* @param {object|any[]|string} arg
@ -13,37 +14,60 @@ function isObject(arg) {
* @param {object|any[]|string} arg
*/
function createCachableArg(arg) {
if (isObject(arg)) {
if (!isObject(arg)) return arg;
try {
return JSON.stringify(arg);
} catch {
return arg;
}
}
return arg;
}
/**
* @template T
* @type {<T>(functionToMemoize:T, opts?:{ storage?:object; serializeObjects?: boolean }) => T}
*/
export function memoize(functionToMemoize, { storage = {}, serializeObjects = false } = {}) {
// @ts-ignore
  // @ts-expect-error
// eslint-disable-next-line func-names
return function () {
return /** @type {* & T} */ (
function memoizedFn() {
// eslint-disable-next-line prefer-rest-params
const args = [...arguments];
const cachableArgs = !serializeObjects ? args : args.map(createCachableArg);
// Allow disabling of cache for testing purposes
// @ts-ignore
if (!memoizeConfig.isCacheDisabled && cachableArgs in storage) {
// @ts-ignore
// @ts-expect-error
if (shouldCache && cachableArgs in storage) {
// @ts-expect-error
return storage[cachableArgs];
}
// @ts-ignore
// @ts-expect-error
const outcome = functionToMemoize.apply(this, args);
// @ts-ignore
// @ts-expect-error
// eslint-disable-next-line no-param-reassign
storage[cachableArgs] = outcome;
return outcome;
};
}
);
}
/**
* For testing purposes, it is possible to disable caching.
*/
memoize.disableCaching = () => {
shouldCache = false;
};
/**
* Once testing is done, it is possible to restore caching.
* @param {boolean} [initialValue]
*/
memoize.restoreCaching = initialValue => {
  shouldCache = initialValue ?? true;
};
Object.defineProperty(memoize, 'isCacheEnabled', {
// writable: false,
// enumerable: true,
get() {
return shouldCache;
},
});
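// Usage sketch (illustrative only; expensiveResolve is a hypothetical function), showing the
// cache behaviour and the test helpers that replace the old memoizeConfig flag:
//
//   const memoizedResolve = memoize(specifier => expensiveResolve(specifier));
//   memoizedResolve('lion-based-ui/button.js'); // computed and stored
//   memoizedResolve('lion-based-ui/button.js'); // served from the cache
//
//   memoize.disableCaching(); // every call recomputes (for tests)
//   memoize.restoreCaching(); // caching is on again; memoize.isCacheEnabled === true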

View file

@ -0,0 +1,286 @@
/* eslint-disable no-case-declarations */
/* eslint-disable no-fallthrough */
import nodeFs from 'fs';
import path from 'path';
import { toPosixPath } from './to-posix-path.js';
/**
* @typedef {nodeFs} FsLike
* @typedef {{onlyDirectories:boolean;onlyFiles:boolean;deep:number;suppressErrors:boolean;fs: FsLike;cwd:string;absolute:boolean;extglob:boolean;}} FastGlobtions
*/
const [nodeMajor] = process.versions.node.split('.').map(Number);
/**
* @param {string} glob
* @param {object} [providedOpts]
* @param {boolean} [providedOpts.globstar=true] if true, '/foo/*' => '^\/foo\/[^/]*$' (not allowing folders inside *), else '/foo/*' => '^\/foo\/.*$'
* @param {boolean} [providedOpts.extglob=true] if true, supports so called "extended" globs (like bash) and single character matching, matching ranges of characters, group matching etc.
* @returns {RegExp}
*/
export function parseGlobToRegex(glob, providedOpts) {
if (typeof glob !== 'string') throw new TypeError('Expected a string');
const options = {
globstar: true,
extglob: true,
...providedOpts,
};
let regexResultStr = '';
let isInGroup = false;
let currentChar;
for (let i = 0; i < glob.length; i += 1) {
currentChar = glob[i];
const charsToEscape = ['/', '$', '^', '+', '.', '(', ')', '=', '!', '|'];
if (charsToEscape.includes(currentChar)) {
regexResultStr += `\\${currentChar}`;
continue; // eslint-disable-line no-continue
}
if (options.extglob) {
if (currentChar === '?') {
regexResultStr += '.';
continue; // eslint-disable-line no-continue
}
if (['[', ']'].includes(currentChar)) {
regexResultStr += currentChar;
continue; // eslint-disable-line no-continue
}
if (currentChar === '{') {
isInGroup = true;
regexResultStr += '(';
continue; // eslint-disable-line no-continue
}
if (currentChar === '}') {
isInGroup = false;
regexResultStr += ')';
continue; // eslint-disable-line no-continue
}
}
if (currentChar === ',') {
if (isInGroup) {
regexResultStr += '|';
continue; // eslint-disable-line no-continue
}
regexResultStr += `\\${currentChar}`;
continue; // eslint-disable-line no-continue
}
if (currentChar === '*') {
const prevChar = glob[i - 1];
let isMultiStar = false;
while (glob[i + 1] === '*') {
isMultiStar = true;
i += 1;
}
const nextChar = glob[i + 1];
if (!options.globstar) {
// Treat any number of "*" as one
regexResultStr += '.*';
} else {
const isGlobstarSegment =
isMultiStar && ['/', undefined].includes(prevChar) && ['/', undefined].includes(nextChar);
if (isGlobstarSegment) {
// Match zero or more path segments
regexResultStr += '((?:[^/]*(?:/|$))*)';
// Move over the "/"
i += 1;
} else {
// Only match one path segment
regexResultStr += '([^/]*)';
}
}
continue; // eslint-disable-line no-continue
}
regexResultStr += currentChar;
}
return new RegExp(`^${regexResultStr}$`);
}
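// Illustrative behaviour with the defaults above (examples, not part of this file):
//
//   parseGlobToRegex('foo/*.js').test('foo/bar.js');      // true: '*' stays within one path segment
//   parseGlobToRegex('foo/*.js').test('foo/a/bar.js');    // false
//   parseGlobToRegex('foo/**/*.js').test('foo/a/b/x.js'); // true: '**' spans path segments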
/**
* @param {string} glob
*/
function getStartPath(glob) {
const reservedChars = ['?', '[', ']', '{', '}', ',', '.', '*'];
let hasFoundReservedChar = false;
return glob
.split('/')
.map(part => {
if (hasFoundReservedChar) return undefined;
hasFoundReservedChar = reservedChars.some(reservedChar => part.includes(reservedChar));
return hasFoundReservedChar ? undefined : part;
})
.filter(Boolean)
.join('/');
}
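// e.g. (illustrative) getStartPath('packages/*/src/**') -> 'packages'
//                     getStartPath('docs/components/**') -> 'docs/components'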
let isCacheEnabled = false;
/** @type {{[path:string]:nodeFs.Dirent[]}} */
const cache = {};
/**
* @param {string} startPath
* @param {{fs?:FsLike, dirents?:nodeFs.Dirent[]}} providedOptions
* @returns {Promise<nodeFs.Dirent[]>}
*/
async function getAllFilesFromStartPath(
startPath,
{ fs = /** @type {* & FsLike} */ (nodeFs), dirents = [] } = {},
) {
if (isCacheEnabled && cache[startPath]) return cache[startPath];
  // Older Node versions don't support the recursive readdir option
if (nodeMajor < 18) {
/** @type {nodeFs.Dirent[]} */
const direntsForLvl = await fs.promises.readdir(startPath, { withFileTypes: true });
for (const dirent of direntsForLvl) {
// @ts-expect-error
dirent.parentPath = startPath;
dirents.push(dirent);
if (dirent.isDirectory()) {
const subDir = path.join(startPath, dirent.name);
await getAllFilesFromStartPath(subDir, { fs, dirents });
}
}
return /** @type {nodeFs.Dirent[]} */ (dirents);
}
// @ts-expect-error
dirents.push(...(await fs.promises.readdir(startPath, { withFileTypes: true, recursive: true })));
cache[startPath] = dirents;
return dirents;
}
/**
* Lightweight glob implementation.
 * It's a drop-in replacement for globby: faster, only a few hundred lines of code, and free of dependencies.
* @param {string|string[]} globOrGlobs
* @param {Partial<FastGlobtions>} providedOptions
* @returns {Promise<string[]>}
*/
export async function optimisedGlob(globOrGlobs, providedOptions = {}) {
const options = {
fs: /** @type {* & FsLike} */ (nodeFs),
onlyDirectories: false,
suppressErrors: true,
cwd: process.cwd(),
absolute: false,
onlyFiles: true,
deep: Infinity,
globstar: true,
extglob: true,
unique: true,
sync: false,
dot: false,
// TODO: ignore, throwErrorOnBrokenSymbolicLink, markDirectories, objectMode, onlyDirectories, onlyFiles, stats
// https://github.com/mrmlnc/fast-glob?tab=readme-ov-file
...providedOptions,
};
if (!options.onlyFiles) {
// This makes behavior aligned with globby
options.onlyDirectories = true;
}
const globs = Array.isArray(globOrGlobs) ? globOrGlobs : [globOrGlobs];
/** @type {RegExp[]} */
const matchRegexesNegative = [];
/** @type {RegExp[]} */
const matchRegexes = [];
/** @type {{dirent:nodeFs.Dirent;relativeToCwdPath:string}[]} */
const globEntries = [];
for (const glob of globs) {
const isNegative = glob.startsWith('!');
// Relative paths like './my/folder/**/*.js' are changed to 'my/folder/**/*.js'
const globNormalized = glob.replace(/^\.\//g, '').slice(isNegative ? 1 : 0);
const regexForGlob = parseGlobToRegex(globNormalized, {
globstar: options.globstar,
extglob: options.extglob,
});
if (isNegative) {
matchRegexesNegative.push(regexForGlob);
} else {
matchRegexes.push(regexForGlob);
}
// Search for the "deepest" starting point in the filesystem that we can use to search the fs
const startPath = getStartPath(globNormalized);
const fullStartPath = path.join(options.cwd, startPath);
try {
const allDirentsRelativeToStartPath = await getAllFilesFromStartPath(fullStartPath, {
fs: options.fs,
});
const allDirEntsRelativeToCwd = allDirentsRelativeToStartPath.map(dirent => ({
// @ts-expect-error
relativeToCwdPath: toPosixPath(path.join(dirent.parentPath, dirent.name)).replace(
`${toPosixPath(options.cwd)}/`,
'',
),
dirent,
}));
globEntries.push(...allDirEntsRelativeToCwd);
} catch (e) {
if (!options.suppressErrors) {
throw e;
}
}
}
// TODO: for perf, combine options checks instead of doing multiple filters and maps
const matchedEntries = globEntries.filter(
globEntry =>
matchRegexes.some(globRe => globRe.test(globEntry.relativeToCwdPath)) &&
!matchRegexesNegative.some(globReNeg => globReNeg.test(globEntry.relativeToCwdPath)),
);
const allFileOrDirectoryEntries = matchedEntries.filter(({ dirent }) =>
options.onlyDirectories ? dirent.isDirectory() : dirent.isFile(),
);
let filteredPaths = allFileOrDirectoryEntries.map(({ relativeToCwdPath }) => relativeToCwdPath);
if (!options.dot) {
filteredPaths = filteredPaths.filter(
f => !f.split('/').some(folderOrFile => folderOrFile.startsWith('.')),
);
}
if (options.absolute) {
filteredPaths = filteredPaths.map(f => toPosixPath(path.join(options.cwd, f)));
if (process.platform === 'win32') {
const driveLetter = path.win32.resolve(options.cwd).slice(0, 1).toUpperCase();
filteredPaths = filteredPaths.map(f => `${driveLetter}:${f}`);
}
}
if (options.deep !== Infinity) {
filteredPaths = filteredPaths.filter(f => f.split('/').length <= options.deep + 2);
}
const result = options.unique ? Array.from(new Set(filteredPaths)) : filteredPaths;
return result.sort((a, b) => {
const pathDiff = a.split('/').length - b.split('/').length;
return pathDiff !== 0 ? pathDiff : a.localeCompare(b);
});
}
optimisedGlob.disableCache = () => {
isCacheEnabled = false;
};
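// Illustrative call (project paths are hypothetical), using the globby-compatible options above:
//
//   const files = await optimisedGlob('packages/**/*.{js,mjs}', {
//     cwd: '/my/repo',  // defaults to process.cwd()
//     absolute: true,   // return posix-style absolute paths
//     dot: false,       // skip dot-files and dot-folders
//   });
//   // e.g. ['/my/repo/packages/a/index.js', '/my/repo/packages/b/src/helper.mjs']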

View file

@ -1,6 +1,7 @@
import pathLib from 'path';
import fs from 'fs';
import { pathToFileURL } from 'url';
import path from 'path';
import { fsAdapter } from './fs-adapter.js';
/**
* @typedef {import('../../../types/index.js').ProvidenceCliConf} ProvidenceCliConf
@ -10,12 +11,12 @@ import { pathToFileURL } from 'url';
* @returns {Promise<{providenceConf:Partial<ProvidenceCliConf>;providenceConfRaw:string}|null>}
*/
async function getConf() {
const confPathWithoutExtension = `${pathLib.join(process.cwd(), 'providence.conf')}`;
const confPathWithoutExtension = `${path.join(process.cwd(), 'providence.conf')}`;
let confPathFound;
try {
if (fs.existsSync(`${confPathWithoutExtension}.js`)) {
if (fsAdapter.fs.existsSync(`${confPathWithoutExtension}.js`)) {
confPathFound = `${confPathWithoutExtension}.js`;
} else if (fs.existsSync(`${confPathWithoutExtension}.mjs`)) {
} else if (fsAdapter.fs.existsSync(`${confPathWithoutExtension}.mjs`)) {
confPathFound = `${confPathWithoutExtension}.mjs`;
}
} catch (_) {
@ -36,7 +37,7 @@ async function getConf() {
);
}
const providenceConfRaw = fs.readFileSync(confPathFound, 'utf8');
const providenceConfRaw = fsAdapter.fs.readFileSync(confPathFound, 'utf8');
return { providenceConf, providenceConfRaw };
}

View file

@ -1,227 +0,0 @@
// @ts-nocheck
/* eslint-disable */
/**
* This is a modified version of https://github.com/npm/read-package-tree/blob/master/rpt.js
* The original is meant for npm dependencies only. In our (rare) case, we have a hybrid landscape
* where we also want to look for npm dependencies inside bower dependencies (bower_components folder).
*
* Original: https://github.com/npm/read-package-tree
*
* The ISC License
*
* Copyright (c) Isaac Z. Schlueter and Contributors
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
* IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/
import fs from 'fs';
/* istanbul ignore next */
import { promisify } from 'util';
import { basename, dirname, join } from 'path';
import rpjSync from 'read-package-json';
import readdirSync from 'readdir-scoped-modules';
import realpath from 'read-package-tree/realpath.js';
const rpj = promisify(rpjSync);
const readdir = promisify(readdirSync);
let ID = 0;
class Node {
constructor(pkg, logical, physical, er, cache) {
// should be impossible.
const cached = cache.get(physical);
/* istanbul ignore next */
if (cached && !cached.then) throw new Error('re-creating already instantiated node');
cache.set(physical, this);
const parent = basename(dirname(logical));
if (parent.charAt(0) === '@') this.name = `${parent}/${basename(logical)}`;
else this.name = basename(logical);
this.path = logical;
this.realpath = physical;
this.error = er;
this.id = ID++;
this.package = pkg || {};
this.parent = null;
this.isLink = false;
this.children = [];
}
}
class Link extends Node {
constructor(pkg, logical, physical, realpath, er, cache) {
super(pkg, logical, physical, er, cache);
// if the target has started, but not completed, then
// a Promise will be in the cache to indicate this.
const cachedTarget = cache.get(realpath);
if (cachedTarget && cachedTarget.then)
cachedTarget.then(node => {
this.target = node;
this.children = node.children;
});
this.target = cachedTarget || new Node(pkg, logical, realpath, er, cache);
this.realpath = realpath;
this.isLink = true;
this.error = er;
this.children = this.target.children;
}
}
// this is the way it is to expose a timing issue which is difficult to
// test otherwise. The creation of a Node may take slightly longer than
// the creation of a Link that targets it. If the Node has _begun_ its
// creation phase (and put a Promise in the cache) then the Link will
// get a Promise as its cachedTarget instead of an actual Node object.
// This is not a problem, because it gets resolved prior to returning
// the tree or attempting to load children. However, it IS remarkably
// difficult to get to happen in a test environment to verify reliably.
// Hence this kludge.
const newNode = (pkg, logical, physical, er, cache) =>
process.env._TEST_RPT_SLOW_LINK_TARGET_ === '1'
? new Promise(res => setTimeout(() => res(new Node(pkg, logical, physical, er, cache)), 10))
: new Node(pkg, logical, physical, er, cache);
const loadNode = (logical, physical, cache, rpcache, stcache) => {
// cache temporarily holds a promise placeholder so we
// don't try to create the same node multiple times.
// this is very rare to encounter, given the aggressive
// caching on fs.realpath and fs.lstat calls, but
// it can happen in theory.
const cached = cache.get(physical);
/* istanbul ignore next */
if (cached) return Promise.resolve(cached);
const p = realpath(physical, rpcache, stcache, 0).then(
real =>
rpj(join(real, 'package.json'))
.then(
pkg => [pkg, null],
er => [null, er],
)
.then(([pkg, er]) =>
physical === real
? newNode(pkg, logical, physical, er, cache)
: new Link(pkg, logical, physical, real, er, cache),
),
// if the realpath fails, don't bother with the rest
er => new Node(null, logical, physical, er, cache),
);
cache.set(physical, p);
return p;
};
const loadChildren = (node, cache, filterWith, rpcache, stcache, mode) => {
// if a Link target has started, but not completed, then
// a Promise will be in the cache to indicate this.
//
// XXX When we can one day loadChildren on the link *target* instead of
// the link itself, to match real dep resolution, then we may end up with
// a node target in the cache that isn't yet done resolving when we get
// here. For now, though, this line will never be reached, so it's hidden
//
// if (node.then)
// return node.then(node => loadChildren(node, cache, filterWith, rpcache, stcache))
let depFolder = 'node_modules';
if (mode === 'bower') {
// TODO: if people rename their bower_components folder to smth like "lib", please handle
depFolder = 'bower_components';
try {
const bowerrc = JSON.parse(fs.readFileSync(join(node.path, '.bowerrc')));
if (bowerrc && bowerrc.directory) {
depFolder = bowerrc.directory;
}
} catch (_) {}
}
const nm = join(node.path, depFolder);
// const nm = join(node.path, 'bower_components')
return realpath(nm, rpcache, stcache, 0)
.then(rm => readdir(rm).then(kids => [rm, kids]))
.then(([rm, kids]) =>
Promise.all(
kids
.filter(kid => kid.charAt(0) !== '.' && (!filterWith || filterWith(node, kid)))
.map(kid => loadNode(join(nm, kid), join(rm, kid), cache, rpcache, stcache)),
),
)
.then(kidNodes => {
kidNodes.forEach(k => (k.parent = node));
node.children.push.apply(
node.children,
kidNodes.sort((a, b) =>
(a.package.name ? a.package.name.toLowerCase() : a.path).localeCompare(
b.package.name ? b.package.name.toLowerCase() : b.path,
),
),
);
return node;
})
.catch(() => node);
};
const loadTree = (node, did, cache, filterWith, rpcache, stcache, mode) => {
// impossible except in pathological ELOOP cases
/* istanbul ignore next */
if (did.has(node.realpath)) return Promise.resolve(node);
did.add(node.realpath);
// load children on the target, not the link
return loadChildren(node, cache, filterWith, rpcache, stcache, mode)
.then(node =>
Promise.all(
node.children
.filter(kid => !did.has(kid.realpath))
.map(kid => loadTree(kid, did, cache, filterWith, rpcache, stcache, mode)),
),
)
.then(() => node);
};
// XXX Drop filterWith and/or cb in next semver major bump
/**
*
* @param {*} root
* @param {*} filterWith
* @param {*} cb
* @param {'npm'|'bower'} [mode='npm'] if mode is 'bower', will look in 'bower_components' instead
* of 'node_modules'
*/
const rpt = (root, filterWith, cb, mode = 'npm') => {
if (!cb && typeof filterWith === 'function') {
cb = filterWith;
filterWith = null;
}
const cache = new Map();
// we can assume that the cwd is real enough
const cwd = process.cwd();
const rpcache = new Map([[cwd, cwd]]);
const stcache = new Map();
const p = realpath(root, rpcache, stcache, 0)
.then(realRoot => loadNode(root, realRoot, cache, rpcache, stcache))
.then(node => loadTree(node, new Set(), cache, filterWith, rpcache, stcache, mode));
if (typeof cb === 'function') p.then(tree => cb(null, tree), cb);
return p;
};
rpt.Node = Node;
rpt.Link = Link;
export default rpt;

View file

@ -1,10 +1,12 @@
import { builtinModules } from 'module';
import path from 'path';
import { nodeResolve } from '@rollup/plugin-node-resolve';
import { LogService } from '../core/LogService.js';
import { memoize } from './memoize.js';
import { toPosixPath } from './to-posix-path.js';
import { isRelativeSourcePath } from './relative-source-path.js';
import { LogService } from '../core/LogService.js';
import { toPosixPath } from './to-posix-path.js';
import { memoize } from './memoize.js';
/**
* @typedef {import('../../../types/index.js').PathRelativeFromProjectRoot} PathRelativeFromProjectRoot

View file

@ -1,20 +1,22 @@
/* eslint-disable no-shadow */
// @ts-nocheck
import fs from 'fs';
import pathLib from 'path';
import path from 'path';
import babelTraverse from '@babel/traverse';
import { isRelativeSourcePath, toRelativeSourcePath } from '../../utils/relative-source-path.js';
import { InputDataService } from '../../core/InputDataService.js';
import { resolveImportPath } from '../../utils/resolve-import-path.js';
import { AstService } from '../../core/AstService.js';
import { LogService } from '../../core/LogService.js';
import { memoize } from '../../utils/memoize.js';
import { isRelativeSourcePath, toRelativeSourcePath } from './relative-source-path.js';
import { InputDataService } from '../core/InputDataService.js';
import { resolveImportPath } from './resolve-import-path.js';
import { AstService } from '../core/AstService.js';
import { LogService } from '../core/LogService.js';
import { fsAdapter } from './fs-adapter.js';
import { memoize } from './memoize.js';
/**
* @typedef {import('../../../../types/index.js').RootFile} RootFile
* @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../../types/index.js').SpecifierSource} SpecifierSource
* @typedef {import('../../../../types/index.js').IdentifierName} IdentifierName
* @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../../types/index.js').RootFile} RootFile
* @typedef {import('@babel/traverse').NodePath} NodePath
*/
@ -23,7 +25,7 @@ import { memoize } from '../../utils/memoize.js';
* @param {string} projectName
*/
function isSelfReferencingProject(source, projectName) {
return source.startsWith(`${projectName}`);
return source.split('/')[0] === projectName;
}
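// Illustrative effect of the change above, for projectName 'my-lib':
//   isSelfReferencingProject('my-lib/src/a.js', 'my-lib');        // true
//   isSelfReferencingProject('my-lib-extras/src/a.js', 'my-lib'); // false (startsWith would wrongly match)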
/**
@ -177,14 +179,14 @@ async function trackDownIdentifierFn(
LogService.debug(`[trackDownIdentifier] ${resolvedSourcePath}`);
const allowedJsModuleExtensions = ['.mjs', '.js'];
if (!allowedJsModuleExtensions.includes(pathLib.extname(resolvedSourcePath))) {
if (!allowedJsModuleExtensions.includes(path.extname(resolvedSourcePath))) {
// We have an import assertion
return /** @type { RootFile } */ {
file: toRelativeSourcePath(resolvedSourcePath, rootPath),
specifier: '[default]',
};
}
const code = fs.readFileSync(resolvedSourcePath, 'utf8');
const code = fsAdapter.fs.readFileSync(resolvedSourcePath, 'utf8');
const babelAst = AstService.getAst(code, 'swc-to-babel', { filePath: resolvedSourcePath });
const shouldLookForDefaultExport = identifierName === '[default]';

View file

@ -1,18 +1,19 @@
import fs from 'fs';
import path from 'path';
import { swcTraverse } from '../../utils/swc-traverse.js';
import { isRelativeSourcePath, toRelativeSourcePath } from '../../utils/relative-source-path.js';
import { InputDataService } from '../../core/InputDataService.js';
import { resolveImportPath } from '../../utils/resolve-import-path.js';
import { AstService } from '../../core/AstService.js';
import { memoize } from '../../utils/memoize.js';
import { isRelativeSourcePath, toRelativeSourcePath } from './relative-source-path.js';
import { InputDataService } from '../core/InputDataService.js';
import { resolveImportPath } from './resolve-import-path.js';
import { AstService } from '../core/AstService.js';
import { swcTraverse } from './swc-traverse.js';
import { fsAdapter } from './fs-adapter.js';
import { memoize } from './memoize.js';
/**
* @typedef {import('../../../../types/index.js').RootFile} RootFile
* @typedef {import('../../../../types/index.js').SpecifierSource} SpecifierSource
* @typedef {import('../../../../types/index.js').IdentifierName} IdentifierName
* @typedef {import('../../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../../types/index.js').SwcPath} SwcPath
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').SpecifierSource} SpecifierSource
* @typedef {import('../../../types/index.js').IdentifierName} IdentifierName
* @typedef {import('../../../types/index.js').RootFile} RootFile
* @typedef {import('../../../types/index.js').SwcPath} SwcPath
*/
/**
@ -20,7 +21,7 @@ import { memoize } from '../../utils/memoize.js';
* @param {string} projectName
*/
function isSelfReferencingProject(source, projectName) {
return source.startsWith(`${projectName}`);
return source.split('/')[0] === projectName;
}
/**
@ -193,7 +194,7 @@ async function trackDownIdentifierFn(
specifier: '[default]',
};
}
const code = fs.readFileSync(/** @type {string} */ (resolvedSourcePath), 'utf8');
const code = fsAdapter.fs.readFileSync(/** @type {string} */ (resolvedSourcePath), 'utf8');
const swcAst = AstService._getSwcAst(code);
const shouldLookForDefaultExport = identifierName === '[default]';

View file

@ -5,7 +5,7 @@ import {
suppressNonCriticalLogs,
restoreSuppressNonCriticalLogs,
} from './mock-log-service-helpers.js';
import { memoizeConfig } from '../src/program/utils/memoize.js';
import { memoize } from '../src/program/utils/memoize.js';
/**
* @typedef {import('../types/index.js').QueryResult} QueryResult
@ -20,17 +20,17 @@ export function setupAnalyzerTest() {
const originalReferenceProjectPaths = InputDataService.referenceProjectPaths;
const cacheDisabledQInitialValue = QueryService.cacheDisabled;
const cacheDisabledIInitialValue = memoizeConfig.isCacheDisabled;
const cacheEnabledIInitialValue = memoize.isCacheEnabled;
before(() => {
QueryService.cacheDisabled = true;
memoizeConfig.isCacheDisabled = true;
memoize.disableCaching();
suppressNonCriticalLogs();
});
after(() => {
QueryService.cacheDisabled = cacheDisabledQInitialValue;
memoizeConfig.isCacheDisabled = cacheDisabledIInitialValue;
memoize.restoreCaching(cacheEnabledIInitialValue);
restoreSuppressNonCriticalLogs();
});

View file

@ -61,7 +61,7 @@ export class DummyAnalyzer extends Analyzer {
/**
* Prepare
*/
const analyzerResult = this._prepare(cfg);
const analyzerResult = await this._prepare(cfg);
if (analyzerResult) {
return analyzerResult;
}

View file

@ -1,22 +1,24 @@
/* eslint-disable no-unused-expressions */
/* eslint-disable import/no-extraneous-dependencies */
import sinon from 'sinon';
import pathLib from 'path';
import { fileURLToPath } from 'url';
import pathLib from 'path';
import { expect } from 'chai';
import { it } from 'mocha';
import {
mockProject,
restoreMockedProjects,
mockTargetAndReferenceProject,
} from '../../test-helpers/mock-project-helpers.js';
import sinon from 'sinon';
import { getExtendDocsResults } from '../../src/cli/launch-providence-with-extend-docs.js';
import { setupAnalyzerTest } from '../../test-helpers/setup-analyzer-test.js';
import { toPosixPath } from '../../src/program/utils/to-posix-path.js';
import { _providenceModule } from '../../src/program/providence.js';
import { _cliHelpersModule } from '../../src/cli/cli-helpers.js';
import { toPosixPath } from '../../src/program/utils/to-posix-path.js';
import { memoizeConfig } from '../../src/program/utils/memoize.js';
import { getExtendDocsResults } from '../../src/cli/launch-providence-with-extend-docs.js';
import { memoize } from '../../src/program/utils/memoize.js';
import {
mockTargetAndReferenceProject,
restoreMockedProjects,
mockProject,
} from '../../test-helpers/mock-project-helpers.js';
import { AstService } from '../../src/index.js';
import { setupAnalyzerTest } from '../../test-helpers/setup-analyzer-test.js';
/**
* @typedef {import('../../types/index.js').QueryResult} QueryResult
@ -50,22 +52,22 @@ describe('CLI helpers', () => {
describe('pathsArrayFromCs', () => {
it('allows absolute paths', async () => {
expect(pathsArrayFromCs('/mocked/path/example-project', rootDir)).to.eql([
expect(await pathsArrayFromCs('/mocked/path/example-project', rootDir)).to.deep.equal([
'/mocked/path/example-project',
]);
});
it('allows relative paths', async () => {
expect(
pathsArrayFromCs('./test-helpers/project-mocks/importing-target-project', rootDir),
).to.eql([`${rootDir}/test-helpers/project-mocks/importing-target-project`]);
await pathsArrayFromCs('./test-helpers/project-mocks/importing-target-project', rootDir),
).to.deep.equal([`${rootDir}/test-helpers/project-mocks/importing-target-project`]);
expect(
pathsArrayFromCs('test-helpers/project-mocks/importing-target-project', rootDir),
).to.eql([`${rootDir}/test-helpers/project-mocks/importing-target-project`]);
await pathsArrayFromCs('test-helpers/project-mocks/importing-target-project', rootDir),
).to.deep.equal([`${rootDir}/test-helpers/project-mocks/importing-target-project`]);
});
it('allows globs', async () => {
expect(pathsArrayFromCs('test-helpers/project-mocks*', rootDir)).to.eql([
expect(await pathsArrayFromCs('test-helpers/project-mocks*', rootDir)).to.deep.equal([
`${rootDir}/test-helpers/project-mocks`,
`${rootDir}/test-helpers/project-mocks-analyzer-outputs`,
]);
@ -74,7 +76,7 @@ describe('CLI helpers', () => {
it('allows multiple comma separated paths', async () => {
const paths =
'test-helpers/project-mocks*, ./test-helpers/project-mocks/importing-target-project,/mocked/path/example-project';
expect(pathsArrayFromCs(paths, rootDir)).to.eql([
expect(await pathsArrayFromCs(paths, rootDir)).to.deep.equal([
`${rootDir}/test-helpers/project-mocks`,
`${rootDir}/test-helpers/project-mocks-analyzer-outputs`,
`${rootDir}/test-helpers/project-mocks/importing-target-project`,
@ -86,8 +88,13 @@ describe('CLI helpers', () => {
describe('pathsArrayFromCollectionName', () => {
it('gets collections from external target config', async () => {
expect(
pathsArrayFromCollectionName('lion-collection', 'search-target', externalCfgMock, rootDir),
).to.eql(
await pathsArrayFromCollectionName(
'lion-collection',
'search-target',
externalCfgMock,
rootDir,
),
).to.deep.equal(
externalCfgMock.searchTargetCollections['lion-collection'].map(p =>
toPosixPath(pathLib.join(rootDir, p)),
),
@ -96,13 +103,13 @@ describe('CLI helpers', () => {
it('gets collections from external reference config', async () => {
expect(
pathsArrayFromCollectionName(
await pathsArrayFromCollectionName(
'lion-based-ui-collection',
'reference',
externalCfgMock,
rootDir,
),
).to.eql(
).to.deep.equal(
externalCfgMock.referenceCollections['lion-based-ui-collection'].map(p =>
toPosixPath(pathLib.join(rootDir, p)),
),
@ -130,7 +137,7 @@ describe('CLI helpers', () => {
it('adds bower and node dependencies', async () => {
const result = await appendProjectDependencyPaths(['/mocked/path/example-project']);
expect(result).to.eql([
expect(result).to.deep.equal([
'/mocked/path/example-project/node_modules/dependency-a',
'/mocked/path/example-project/node_modules/my-dependency',
'/mocked/path/example-project/bower_components/dependency-b',
@ -143,7 +150,7 @@ describe('CLI helpers', () => {
['/mocked/path/example-project'],
'/^dependency-/',
);
expect(result).to.eql([
expect(result).to.deep.equal([
'/mocked/path/example-project/node_modules/dependency-a',
// in windows, it should not add '/mocked/path/example-project/node_modules/my-dependency',
'/mocked/path/example-project/bower_components/dependency-b',
@ -151,7 +158,7 @@ describe('CLI helpers', () => {
]);
const result2 = await appendProjectDependencyPaths(['/mocked/path/example-project'], '/b$/');
expect(result2).to.eql([
expect(result2).to.deep.equal([
'/mocked/path/example-project/bower_components/dependency-b',
'/mocked/path/example-project',
]);
@ -163,7 +170,7 @@ describe('CLI helpers', () => {
undefined,
['npm'],
);
expect(result).to.eql([
expect(result).to.deep.equal([
'/mocked/path/example-project/node_modules/dependency-a',
'/mocked/path/example-project/node_modules/my-dependency',
'/mocked/path/example-project',
@ -174,7 +181,7 @@ describe('CLI helpers', () => {
undefined,
['bower'],
);
expect(result2).to.eql([
expect(result2).to.deep.equal([
'/mocked/path/example-project/bower_components/dependency-b',
'/mocked/path/example-project',
]);
@ -189,7 +196,7 @@ describe('CLI helpers', () => {
it('rewrites monorepo package paths when analysis is run from monorepo root', async () => {
// This fails after InputDataService.addAstToProjectsData is memoized
// (it does pass when run in isolation however, as a quick fix we disable memoization cache here...)
memoizeConfig.isCacheDisabled = true;
memoize.disableCaching();
      // Since we use the print method here, we need to force Babel, because swc-to-babel output is not compatible
// with @babel/generate
const initialAstServiceFallbackToBabel = AstService.fallbackToBabel;
@ -268,7 +275,7 @@ describe('CLI helpers', () => {
cwd: '/my-components',
});
expect(result).to.eql([
expect(result).to.deep.equal([
{
name: 'TheirButton',
variable: {

View file

@ -1,5 +1,5 @@
/* eslint-disable import/no-extraneous-dependencies */
import pathLib from 'path';
import path from 'path';
import { expect } from 'chai';
import { it } from 'mocha';
import { appendProjectDependencyPaths } from '../../src/cli/cli-helpers.js';
@ -15,13 +15,13 @@ describe('CLI helpers against filesystem', () => {
describe('appendProjectDependencyPaths', () => {
it('allows a regex filter', async () => {
const targetFilePath = toPosixPath(
pathLib.resolve(
path.resolve(
getCurrentDir(import.meta.url),
'../../test-helpers/project-mocks/importing-target-project',
),
);
const result = await appendProjectDependencyPaths([targetFilePath], '/^dep-/');
expect(result).to.eql([
expect(result).to.deep.equal([
`${targetFilePath}/node_modules/dep-a`,
// in windows, it should not add `${targetFilePath}/node_modules/my-dep-b`,
targetFilePath,

View file

@ -1,19 +1,17 @@
/* eslint-disable no-unused-expressions */
/* eslint-disable import/no-extraneous-dependencies */
import sinon from 'sinon';
import commander from 'commander';
import { expect } from 'chai';
import { it } from 'mocha';
import commander from 'commander';
import sinon from 'sinon';
import { mockProject } from '../../test-helpers/mock-project-helpers.js';
import { InputDataService } from '../../src/program/core/InputDataService.js';
import { QueryService } from '../../src/program/core/QueryService.js';
import { _providenceModule } from '../../src/program/providence.js';
import { _cliHelpersModule } from '../../src/cli/cli-helpers.js';
import { cli } from '../../src/cli/cli.js';
import { _promptAnalyzerMenuModule } from '../../src/cli/prompt-analyzer-menu.js';
import { memoizeConfig } from '../../src/program/utils/memoize.js';
import { _extendDocsModule } from '../../src/cli/launch-providence-with-extend-docs.js';
import { dashboardServer } from '../../src/dashboard/server.js';
import { memoize } from '../../src/program/utils/memoize.js';
import { setupAnalyzerTest } from '../../test-helpers/setup-analyzer-test.js';
/**
@ -56,12 +54,8 @@ describe('Providence CLI', () => {
/** @type {sinon.SinonStub} */
let providenceStub;
/** @type {sinon.SinonStub} */
let promptCfgStub;
/** @type {sinon.SinonStub} */
let iExtConfStub;
/** @type {sinon.SinonStub} */
let promptStub;
/** @type {sinon.SinonStub} */
let qConfStub;
before(() => {
@ -71,19 +65,9 @@ describe('Providence CLI', () => {
/** @type {sinon.SinonStub} */
providenceStub = sinon.stub(_providenceModule, 'providence').returns(Promise.resolve());
/** @type {sinon.SinonStub} */
promptCfgStub = sinon
.stub(_promptAnalyzerMenuModule, 'promptAnalyzerConfigMenu')
.returns(Promise.resolve({ analyzerConfig: { con: 'fig' } }));
/** @type {sinon.SinonStub} */
iExtConfStub = sinon.stub(InputDataService, 'getExternalConfig').returns(externalCfgMock);
/** @type {sinon.SinonStub} */
promptStub = sinon
.stub(_promptAnalyzerMenuModule, 'promptAnalyzerMenu')
.returns(Promise.resolve({ analyzerName: 'match-analyzer-mock' }));
/** @type {sinon.SinonStub} */
qConfStub = sinon.stub(QueryService, 'getQueryConfigFromAnalyzer').returns(
// @ts-expect-error
@ -100,9 +84,7 @@ describe('Providence CLI', () => {
commander.setMaxListeners(10);
providenceStub.restore();
promptCfgStub.restore();
iExtConfStub.restore();
promptStub.restore();
qConfStub.restore();
});
@ -120,14 +102,13 @@ describe('Providence CLI', () => {
projectPath: '/mocked/path/example-project',
},
);
memoizeConfig.isCacheDisabled = true;
// memoizeConfig.isCacheDisabled = true;
memoize.disableCaching();
});
afterEach(() => {
providenceStub.resetHistory();
promptCfgStub.resetHistory();
iExtConfStub.resetHistory();
promptStub.resetHistory();
qConfStub.resetHistory();
});
@ -186,18 +167,26 @@ describe('Providence CLI', () => {
it('"-e --extensions"', async () => {
await runCli(`${anyCmdThatAcceptsGlobalOpts} -e bla,blu`, rootDir);
expect(providenceStub.args[0][1].gatherFilesConfig.extensions).to.eql(['.bla', '.blu']);
expect(providenceStub.args[0][1].gatherFilesConfig.extensions).to.deep.equal([
'.bla',
'.blu',
]);
providenceStub.resetHistory();
await runCli(`${anyCmdThatAcceptsGlobalOpts} --extensions bla,blu`, rootDir);
expect(providenceStub.args[0][1].gatherFilesConfig.extensions).to.eql(['.bla', '.blu']);
expect(providenceStub.args[0][1].gatherFilesConfig.extensions).to.deep.equal([
'.bla',
'.blu',
]);
});
it('"-t --search-target-paths"', async () => {
await runCli(`${anyCmdThatAcceptsGlobalOpts} -t /mocked/path/example-project`, rootDir);
expect(pathsArrayFromCsStub.args[0][0]).to.equal('/mocked/path/example-project');
expect(providenceStub.args[0][1].targetProjectPaths).to.eql(['/mocked/path/example-project']);
expect(providenceStub.args[0][1].targetProjectPaths).to.deep.equal([
'/mocked/path/example-project',
]);
pathsArrayFromCsStub.resetHistory();
providenceStub.resetHistory();
@ -207,13 +196,15 @@ describe('Providence CLI', () => {
rootDir,
);
expect(pathsArrayFromCsStub.args[0][0]).to.equal('/mocked/path/example-project');
expect(providenceStub.args[0][1].targetProjectPaths).to.eql(['/mocked/path/example-project']);
expect(providenceStub.args[0][1].targetProjectPaths).to.deep.equal([
'/mocked/path/example-project',
]);
});
it('"-r --reference-paths"', async () => {
await runCli(`${anyCmdThatAcceptsGlobalOpts} -r /mocked/path/example-project`, rootDir);
expect(pathsArrayFromCsStub.args[0][0]).to.equal('/mocked/path/example-project');
expect(providenceStub.args[0][1].referenceProjectPaths).to.eql([
expect(providenceStub.args[0][1].referenceProjectPaths).to.deep.equal([
'/mocked/path/example-project',
]);
@ -225,7 +216,7 @@ describe('Providence CLI', () => {
rootDir,
);
expect(pathsArrayFromCsStub.args[0][0]).to.equal('/mocked/path/example-project');
expect(providenceStub.args[0][1].referenceProjectPaths).to.eql([
expect(providenceStub.args[0][1].referenceProjectPaths).to.deep.equal([
'/mocked/path/example-project',
]);
});
@ -236,7 +227,9 @@ describe('Providence CLI', () => {
rootDir,
);
expect(pathsArrayFromCollectionStub.args[0][0]).to.equal('lion-collection');
expect(providenceStub.args[0][1].targetProjectPaths).to.eql(['/mocked/path/example-project']);
expect(providenceStub.args[0][1].targetProjectPaths).to.deep.equal([
'/mocked/path/example-project',
]);
});
it('"--reference-collection"', async () => {
@ -245,14 +238,14 @@ describe('Providence CLI', () => {
rootDir,
);
expect(pathsArrayFromCollectionStub.args[0][0]).to.equal('lion-based-ui-collection');
expect(providenceStub.args[0][1].referenceProjectPaths).to.eql([
expect(providenceStub.args[0][1].referenceProjectPaths).to.deep.equal([
'/mocked/path/example-project',
]);
});
it('"-a --allowlist"', async () => {
await runCli(`${anyCmdThatAcceptsGlobalOpts} -a mocked/**/*,rocked/*`, rootDir);
expect(providenceStub.args[0][1].gatherFilesConfig.allowlist).to.eql([
expect(providenceStub.args[0][1].gatherFilesConfig.allowlist).to.deep.equal([
'mocked/**/*',
'rocked/*',
]);
@ -260,7 +253,7 @@ describe('Providence CLI', () => {
providenceStub.resetHistory();
await runCli(`${anyCmdThatAcceptsGlobalOpts} --allowlist mocked/**/*,rocked/*`, rootDir);
expect(providenceStub.args[0][1].gatherFilesConfig.allowlist).to.eql([
expect(providenceStub.args[0][1].gatherFilesConfig.allowlist).to.deep.equal([
'mocked/**/*',
'rocked/*',
]);
@ -271,7 +264,7 @@ describe('Providence CLI', () => {
`${anyCmdThatAcceptsGlobalOpts} --allowlist-reference mocked/**/*,rocked/*`,
rootDir,
);
expect(providenceStub.args[0][1].gatherFilesConfigReference.allowlist).to.eql([
expect(providenceStub.args[0][1].gatherFilesConfigReference.allowlist).to.deep.equal([
'mocked/**/*',
'rocked/*',
]);
@ -311,7 +304,7 @@ describe('Providence CLI', () => {
await runCli(`${anyCmdThatAcceptsGlobalOpts} --target-dependencies`, rootDir);
expect(appendProjectDependencyPathsStub.called).to.be.true;
expect(providenceStub.args[0][1].targetProjectPaths).to.eql([
expect(providenceStub.args[0][1].targetProjectPaths).to.deep.equal([
'/mocked/path/example-project',
'/mocked/path/example-project/node_modules/mock-dep-a',
'/mocked/path/example-project/bower_components/mock-dep-b',
@ -342,93 +335,16 @@ describe('Providence CLI', () => {
});
describe('Options', () => {
it('"-o --prompt-optional-config"', async () => {
await runCli(`analyze -o`, rootDir);
expect(promptStub.called).to.be.true;
promptStub.resetHistory();
await runCli(`analyze --prompt-optional-config`, rootDir);
expect(promptStub.called).to.be.true;
});
it('"-c --config"', async () => {
await runCli(`analyze match-analyzer-mock -c {"a":"2"}`, rootDir);
expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
expect(qConfStub.args[0][1]).to.eql({ a: '2', metaConfig: {} });
expect(qConfStub.args[0][1]).to.deep.equal({ a: '2', metaConfig: {} });
qConfStub.resetHistory();
await runCli(`analyze match-analyzer-mock --config {"a":"2"}`, rootDir);
expect(qConfStub.args[0][0]).to.equal('match-analyzer-mock');
expect(qConfStub.args[0][1]).to.eql({ a: '2', metaConfig: {} });
});
it('calls "promptAnalyzerConfigMenu" without config given', async () => {
await runCli(`analyze match-analyzer-mock`, rootDir);
expect(promptCfgStub.called).to.be.true;
});
});
});
describe.skip('Query', () => {});
describe.skip('Search', () => {});
describe('Manage', () => {});
describe('Dashboard', () => {
/** @type {sinon.SinonStub} */
const startStub = sinon.stub(dashboardServer, 'start');
it('spawns a dashboard', async () => {
runCli(`dashboard`, rootDir);
expect(startStub.called).to.be.true;
});
});
describe('Extend docs', () => {
/** @type {sinon.SinonStub} */
let extendDocsStub;
before(() => {
extendDocsStub = sinon
.stub(_extendDocsModule, 'launchProvidenceWithExtendDocs')
.returns(Promise.resolve());
});
after(() => {
extendDocsStub.restore();
});
afterEach(() => {
extendDocsStub.resetHistory();
});
it('allows configuration', async () => {
await runCli(
[
'extend-docs',
'-t /xyz',
'-r /xyz/x',
'--prefix-from pfrom --prefix-to pto',
'--output-folder /outp',
'--extensions bla',
'--allowlist al --allowlist-reference alr',
].join(' '),
rootDir,
);
expect(extendDocsStub.called).to.be.true;
expect(extendDocsStub.args[0][0]).to.eql({
referenceProjectPaths: ['/xyz/x'],
prefixCfg: {
from: 'pfrom',
to: 'pto',
},
outputFolder: '/outp',
extensions: ['.bla'],
allowlist: ['al'],
allowlistReference: ['alr'],
cwd: '/mocked/path/example-project',
skipCheckMatchCompatibility: true,
expect(qConfStub.args[0][1]).to.deep.equal({ a: '2', metaConfig: {} });
});
});
});

View file

@ -1,15 +1,16 @@
/* eslint-disable import/no-extraneous-dependencies */
import fs from 'fs';
import { fileURLToPath, pathToFileURL } from 'url';
import pathLib from 'path';
import sinon from 'sinon';
import { fileURLToPath, pathToFileURL } from 'url';
import { createTestServer } from '@web/dev-server-core/test-helpers';
import { expect } from 'chai';
import { it } from 'mocha';
import fetch from 'node-fetch';
import { createTestServer } from '@web/dev-server-core/test-helpers';
import { providenceConfUtil } from '../../src/program/utils/providence-conf-util.js';
import { createDashboardServerConfig } from '../../src/dashboard/server.js';
import { ReportService } from '../../src/program/core/ReportService.js';
import { providenceConfUtil } from '../../src/program/utils/providence-conf-util.js';
import { fsAdapter } from '../../src/program/utils/fs-adapter.js';
/**
* @typedef {import('@web/dev-server-core').DevServer} DevServer
@ -27,11 +28,18 @@ const mockedOutputPath = pathLib.join(__dirname, 'fixtures/providence-output');
async function getConf(url) {
const { href } = pathToFileURL(url);
const { default: providenceConf } = await import(href);
const providenceConfRaw = fs.readFileSync(url, 'utf8');
const providenceConfRaw = fsAdapter.fs.readFileSync(url, 'utf8');
return { providenceConf, providenceConfRaw };
}
describe('Dashboard Server', () => {
const [nodeMajor] = process.versions.node.split('.').map(Number);
if (nodeMajor < 18) {
    // Skipping tests for now, since node < 18 will be phased out and we want to use native fetch...
return;
}
/** @type {string} */
let host;
/** @type {DevServer} */
@ -40,7 +48,7 @@ describe('Dashboard Server', () => {
let providenceConfStub;
before(() => {
// N.B. don't use mock-fs, since it doesn't correctly handle dynamic imports and fs.promises
// N.B. don't use mock-fs, since it doesn't correctly handle dynamic imports and fsAdapter.fs.promises
ReportService.outputPath = mockedOutputPath;
});
@ -81,8 +89,11 @@ describe('Dashboard Server', () => {
const response = await fetch(`${host}/menu-data.json`);
expect(response.status).to.equal(200);
const responseJSON = await response.json();
const expectedResult = fs.readFileSync(`${mockedResponsesPath}/menu-data.json`, 'utf8');
expect(responseJSON).to.eql(JSON.parse(expectedResult));
const expectedResult = fsAdapter.fs.readFileSync(
`${mockedResponsesPath}/menu-data.json`,
'utf8',
);
expect(responseJSON).to.deep.equal(JSON.parse(expectedResult));
});
});
@ -91,8 +102,11 @@ describe('Dashboard Server', () => {
const response = await fetch(`${host}/results.json`);
expect(response.status).to.equal(200);
const responseJson = await response.json();
const expectedResult = fs.readFileSync(`${mockedResponsesPath}/results.json`, 'utf8');
expect(responseJson).to.eql(JSON.parse(expectedResult));
const expectedResult = fsAdapter.fs.readFileSync(
`${mockedResponsesPath}/results.json`,
'utf8',
);
expect(responseJson).to.deep.equal(JSON.parse(expectedResult));
});
});

View file

@ -1,13 +1,12 @@
/* eslint-disable import/no-extraneous-dependencies */
import pathLib, { dirname } from 'path';
import { fileURLToPath } from 'url';
import fs from 'fs';
import { expect } from 'chai';
import { it } from 'mocha';
import { providence } from '../../../../src/program/providence.js';
import { QueryService } from '../../../../src/program/core/QueryService.js';
import { ReportService } from '../../../../src/program/core/ReportService.js';
import { memoizeConfig } from '../../../../src/program/utils/memoize.js';
import { memoize } from '../../../../src/program/utils/memoize.js';
import { setupAnalyzerTest } from '../../../../test-helpers/setup-analyzer-test.js';
import {
FindExportsAnalyzer,
@ -18,6 +17,7 @@ import MatchSubclassesAnalyzer from '../../../../src/program/analyzers/match-sub
import MatchPathsAnalyzer from '../../../../src/program/analyzers/match-paths.js';
import FindCustomelementsAnalyzer from '../../../../src/program/analyzers/find-customelements.js';
import FindClassesAnalyzer from '../../../../src/program/analyzers/find-classes.js';
import { fsAdapter } from '../../../../src/program/utils/fs-adapter.js';
/**
* @typedef {import('../../../../types/index.js').ProvidenceConfig} ProvidenceConfig
@ -48,13 +48,16 @@ describe('Analyzers file-system integration', () => {
const originalGetResultFileNameAndPath = ReportService._getResultFileNameAndPath;
const originalOutputPath = ReportService.outputPath;
const memoizeCacheDisabledInitial = memoizeConfig.isCacheDisabled;
memoizeConfig.isCacheDisabled = true;
const originalGetCachedResult = ReportService.getCachedResult;
const memoizeCacheEnabledInitial = memoize.isCacheEnabled;
memoize.disableCaching();
after(() => {
ReportService._getResultFileNameAndPath = originalGetResultFileNameAndPath;
ReportService.getCachedResult = originalGetCachedResult;
ReportService.outputPath = originalOutputPath;
memoizeConfig.isCacheDisabled = memoizeCacheDisabledInitial;
memoize.restoreCaching(memoizeCacheEnabledInitial);
});
if (generateE2eMode) {
@ -69,6 +72,8 @@ describe('Analyzers file-system integration', () => {
};
} else {
ReportService.outputPath = __dirname; // prevents cache to fail the test
// @ts-ignore
ReportService.getCachedResult = () => undefined;
}
const analyzers = [
{
@ -120,9 +125,9 @@ describe('Analyzers file-system integration', () => {
for (const { ctor, providenceConfig } of analyzers) {
it(`"${ctor.analyzerName}" analyzer`, async () => {
const findExportsQueryConfig = await QueryService.getQueryConfigFromAnalyzer(ctor);
const currentQueryConfig = await QueryService.getQueryConfigFromAnalyzer(ctor);
const queryResults = await providence(
findExportsQueryConfig,
currentQueryConfig,
/** @type {ProvidenceConfig} */ (providenceConfig),
);
if (generateE2eMode) {
@ -132,7 +137,7 @@ describe('Analyzers file-system integration', () => {
return;
}
const expectedOutput = JSON.parse(
fs.readFileSync(
fsAdapter.fs.readFileSync(
pathLib.resolve(
__dirname,
`../../../../test-helpers/project-mocks-analyzer-outputs/${ctor.analyzerName}.json`,
@ -141,8 +146,8 @@ describe('Analyzers file-system integration', () => {
),
);
const { queryOutput } = JSON.parse(JSON.stringify(queryResults[0]));
expect(queryOutput).not.to.eql([]);
expect(queryOutput).to.eql(expectedOutput.queryOutput);
// expect(queryOutput).not.to.deep.equal([]);
expect(queryOutput).to.deep.equal(expectedOutput.queryOutput);
});
}
});

View file

@ -3,7 +3,7 @@
"searchType": "ast-analyzer",
"analyzerMeta": {
"name": "find-exports",
"requiredAst": "swc-to-babel",
"requiredAst": "swc",
"identifier": "exporting-ref-project_1.0.0__-42206859",
"targetProject": {
"mainEntry": "./index.js",

View file

@ -3,7 +3,7 @@
"searchType": "ast-analyzer",
"analyzerMeta": {
"name": "find-imports",
"requiredAst": "swc-to-babel",
"requiredAst": "swc",
"identifier": "importing-target-project_0.0.2-target-mock__349742630",
"targetProject": {
"mainEntry": "./target-src/match-imports/root-level-imports.js",

View file

@ -25,7 +25,7 @@ describe('Analyzer "find-classes"', async () => {
mockProject([`class EmptyClass {}`]);
const queryResults = await providence(findClassesQueryConfig, _providenceCfg);
const firstEntry = getEntry(queryResults[0]);
expect(firstEntry.result).to.eql([
expect(firstEntry.result).to.deep.equal([
{
name: 'EmptyClass',
isMixin: false,
@ -41,7 +41,7 @@ describe('Analyzer "find-classes"', async () => {
mockProject([`const m = superclass => class MyMixin extends superclass {}`]);
const queryResults = await providence(findClassesQueryConfig, _providenceCfg);
const firstEntry = getEntry(queryResults[0]);
expect(firstEntry.result).to.eql([
expect(firstEntry.result).to.deep.equal([
{
name: 'MyMixin',
superClasses: [
@ -72,7 +72,7 @@ describe('Analyzer "find-classes"', async () => {
});
const queryResults = await providence(findClassesQueryConfig, _providenceCfg);
const firstEntry = getEntry(queryResults[0]);
expect(firstEntry.result[1].superClasses).to.eql([
expect(firstEntry.result[1].superClasses).to.deep.equal([
{
isMixin: true,
name: 'Mixin',
@ -109,7 +109,7 @@ describe('Analyzer "find-classes"', async () => {
]);
const queryResults = await providence(findClassesQueryConfig, _providenceCfg);
const firstEntry = getEntry(queryResults[0]);
expect(firstEntry.result[0].members.methods).to.eql([
expect(firstEntry.result[0].members.methods).to.deep.equal([
{
accessType: 'public',
name: 'method',
@ -145,7 +145,7 @@ describe('Analyzer "find-classes"', async () => {
]);
const queryResults = await providence(findClassesQueryConfig, _providenceCfg);
const firstEntry = getEntry(queryResults[0]);
expect(firstEntry.result[0].members.props).to.eql([
expect(firstEntry.result[0].members.props).to.deep.equal([
{
accessType: 'public',
kind: ['get', 'set'],

View file

@ -58,7 +58,7 @@ describe('Analyzer "find-customelements"', async () => {
const queryResults = await providence(findCustomelementsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
const firstEntry = getEntry(queryResult);
expect(firstEntry.result[0].rootFile).to.eql({
expect(firstEntry.result[0].rootFile).to.deep.equal({
file: './src/CustomEl.js',
specifier: 'CustomEl',
});

View file

@ -26,7 +26,7 @@ describe('Analyzer "find-exports"', async () => {
const queryResults = await providence(findExportsQueryConfig, _providenceCfg);
const firstResult = getEntry(queryResults[0]).result[0];
expect(firstResult.exportSpecifiers).to.eql(['x']);
expect(firstResult.exportSpecifiers).to.deep.equal(['x']);
expect(firstResult.source).to.be.undefined;
});
@ -34,7 +34,7 @@ describe('Analyzer "find-exports"', async () => {
mockProject([`export default class X {}`]);
const queryResults = await providence(findExportsQueryConfig, _providenceCfg);
const firstResult = getEntry(queryResults[0]).result[0];
expect(firstResult.exportSpecifiers).to.eql(['[default]']);
expect(firstResult.exportSpecifiers).to.deep.equal(['[default]']);
expect(firstResult.source).to.be.undefined;
});
@ -43,7 +43,7 @@ describe('Analyzer "find-exports"', async () => {
const queryResults = await providence(findExportsQueryConfig, _providenceCfg);
const firstResult = getEntry(queryResults[0]).result[0];
expect(firstResult.exportSpecifiers).to.eql(['[default]']);
expect(firstResult.exportSpecifiers).to.deep.equal(['[default]']);
expect(firstResult.source).to.be.undefined;
});
@ -56,7 +56,7 @@ describe('Analyzer "find-exports"', async () => {
const queryResults = await providence(findExportsQueryConfig, _providenceCfg);
const firstResult = getEntry(queryResults[0]).result[0];
expect(firstResult).to.eql({
expect(firstResult).to.deep.equal({
exportSpecifiers: ['[default]'],
source: undefined,
rootFileMap: [
@ -68,7 +68,7 @@ describe('Analyzer "find-exports"', async () => {
});
const secondEntry = getEntry(queryResults[0], 1);
expect(secondEntry.result[0]).to.eql({
expect(secondEntry.result[0]).to.deep.equal({
exportSpecifiers: ['namedExport'],
source: './file-with-default-export.js',
localMap: [{ exported: 'namedExport', local: '[default]' }],
@ -128,7 +128,7 @@ describe('Analyzer "find-exports"', async () => {
expect(firstEntry.result[0].exportSpecifiers.length).to.equal(1);
expect(firstEntry.result[0].exportSpecifiers[0]).to.equal('[default]');
expect(firstEntry.result[0].source).to.equal('./styles.css');
expect(firstEntry.result[0].rootFileMap[0]).to.eql({
expect(firstEntry.result[0].rootFileMap[0]).to.deep.equal({
currentFileSpecifier: '[default]',
rootFile: {
file: './styles.css',
@ -147,7 +147,7 @@ describe('Analyzer "find-exports"', async () => {
expect(firstEntry.result[0].exportSpecifiers.length).to.equal(1);
expect(firstEntry.result[0].exportSpecifiers[0]).to.equal('[default]');
expect(firstEntry.result[0].source).to.equal('./styles.css');
expect(firstEntry.result[0].rootFileMap[0]).to.eql({
expect(firstEntry.result[0].rootFileMap[0]).to.deep.equal({
currentFileSpecifier: '[default]',
rootFile: {
file: './styles.css',
@ -161,7 +161,7 @@ describe('Analyzer "find-exports"', async () => {
const queryResults = await providence(findExportsQueryConfig, _providenceCfg);
const firstEntry = getEntry(queryResults[0]);
// This info will be relevant later to identify 'transitive' relations
expect(firstEntry.result[0].localMap).to.eql([
expect(firstEntry.result[0].localMap).to.deep.equal([
{
local: 'x',
exported: 'y',
@ -174,7 +174,7 @@ describe('Analyzer "find-exports"', async () => {
const queryResults = await providence(findExportsQueryConfig, _providenceCfg);
const firstEntry = getEntry(queryResults[0]);
expect(firstEntry.result[0].exportSpecifiers.length).to.equal(2);
expect(firstEntry.result[0].exportSpecifiers).to.eql(['x', 'y']);
expect(firstEntry.result[0].exportSpecifiers).to.deep.equal(['x', 'y']);
expect(firstEntry.result[0].source).to.equal('my/source');
});
@ -190,7 +190,7 @@ describe('Analyzer "find-exports"', async () => {
const secondEntry = getEntry(queryResults[0], 1);
const thirdEntry = getEntry(queryResults[0], 2);
expect(firstEntry.result[0].rootFileMap).to.eql([
expect(firstEntry.result[0].rootFileMap).to.deep.equal([
{
currentFileSpecifier: 'MyComp', // this is the local name in the file we track from
rootFile: {
@ -199,7 +199,7 @@ describe('Analyzer "find-exports"', async () => {
},
},
]);
expect(secondEntry.result[0].rootFileMap).to.eql([
expect(secondEntry.result[0].rootFileMap).to.deep.equal([
{
currentFileSpecifier: 'InBetweenComp',
rootFile: {
@ -208,7 +208,7 @@ describe('Analyzer "find-exports"', async () => {
},
},
]);
expect(thirdEntry.result[0].rootFileMap).to.eql([
expect(thirdEntry.result[0].rootFileMap).to.deep.equal([
{
currentFileSpecifier: 'OriginalComp',
rootFile: {
@ -236,7 +236,7 @@ describe('Analyzer "find-exports"', async () => {
const queryResults = await providence(findExportsQueryConfig, _providenceCfg);
const firstEntry = getEntry(queryResults[0]);
expect(firstEntry.result[0].rootFileMap).to.eql([
expect(firstEntry.result[0].rootFileMap).to.deep.equal([
{
currentFileSpecifier: '[default]',
rootFile: {
@ -252,7 +252,7 @@ describe('Analyzer "find-exports"', async () => {
mockProject([`// some comment here...`]);
const queryResults = await providence(findExportsQueryConfig, _providenceCfg);
const firstEntry = getEntry(queryResults[0]);
expect(firstEntry.result[0].exportSpecifiers).to.eql(['[file]']);
expect(firstEntry.result[0].exportSpecifiers).to.deep.equal(['[file]']);
expect(firstEntry.result[0].source).to.equal(undefined);
});
});
@ -322,10 +322,10 @@ describe('Analyzer "find-exports"', async () => {
const queryResults = await providence(findExportsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
const [firstEntry, secondEntry, thirdEntry] = getEntries(queryResult);
expect(firstEntry.meta.categories).to.eql(['fooCategory']);
expect(firstEntry.meta.categories).to.deep.equal(['fooCategory']);
// not mutually exclusive...
expect(secondEntry.meta.categories).to.eql(['barCategory', 'testCategory']);
expect(thirdEntry.meta.categories).to.eql([]);
expect(secondEntry.meta.categories).to.deep.equal(['barCategory', 'testCategory']);
expect(thirdEntry.meta.categories).to.deep.equal([]);
});
});
});

View file

@ -25,7 +25,7 @@ describe('Analyzer "find-imports"', async () => {
const queryResults = await providence(findImportsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
const firstEntry = getEntry(queryResult);
expect(firstEntry.result[0].importSpecifiers).to.eql(['[file]']);
expect(firstEntry.result[0].importSpecifiers).to.deep.equal(['[file]']);
expect(firstEntry.result[0].source).to.equal('imported/source');
});
@ -128,7 +128,7 @@ describe('Analyzer "find-imports"', async () => {
const queryResult = queryResults[0];
const firstEntry = getEntry(queryResult);
// This info will be relevant later to identify transitive relations
expect(firstEntry.result[0].localMap[0]).to.eql({
expect(firstEntry.result[0].localMap[0]).to.deep.equal({
local: 'y',
imported: 'x',
});
@ -332,7 +332,7 @@ describe('Analyzer "find-imports"', async () => {
// Should be normalized source...?
expect(queryResult.queryOutput[0].source).to.equal('@external/source.js');
expect(queryResult.queryOutput[0].id).to.equal('x::@external/source.js');
expect(queryResult.queryOutput[0].dependents).to.eql([
expect(queryResult.queryOutput[0].dependents).to.deep.equal([
'fictional-project/file1.js',
'fictional-project/file2.js',
]);

View file

@ -1,13 +1,14 @@
import { expect } from 'chai';
import { it } from 'mocha';
import { setupAnalyzerTest } from '../../../../test-helpers/setup-analyzer-test.js';
import { mockProject } from '../../../../test-helpers/mock-project-helpers.js';
import { swcTraverse } from '../../../../src/program/utils/swc-traverse.js';
import { AstService } from '../../../../src/program/core/AstService.js';
import {
trackDownIdentifier,
trackDownIdentifierFromScope,
} from '../../../../src/program/analyzers/helpers/track-down-identifier.js';
import { AstService } from '../../../../src/program/core/AstService.js';
import { mockProject } from '../../../../test-helpers/mock-project-helpers.js';
import { setupAnalyzerTest } from '../../../../test-helpers/setup-analyzer-test.js';
} from '../../../../src/program/utils/track-down-identifier.js';
/**
* @typedef {import('@babel/traverse').NodePath} NodePath
@ -39,7 +40,7 @@ describe('trackdownIdentifier', () => {
const rootPath = '/my/project';
const rootFile = await trackDownIdentifier(source, identifierName, currentFilePath, rootPath);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: './src/declarationOfMyClass.js',
specifier: 'MyClass',
});
@ -71,7 +72,7 @@ describe('trackdownIdentifier', () => {
const rootPath = '/my/project';
const rootFile = await trackDownIdentifier(source, identifierName, currentFilePath, rootPath);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: './src/declarationOfMyClass.js',
specifier: 'MyClass',
});
@ -105,7 +106,7 @@ describe('trackdownIdentifier', () => {
const rootPath = '/my/project';
const rootFile = await trackDownIdentifier(source, identifierName, currentFilePath, rootPath);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: './src/declarationOfMyClass.js',
specifier: '[default]',
});
@ -131,7 +132,7 @@ describe('trackdownIdentifier', () => {
const rootPath = '/my/project';
const rootFile = await trackDownIdentifier(source, identifierName, currentFilePath, rootPath);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: '@external/source',
specifier: '[default]',
});
@ -162,7 +163,7 @@ describe('trackdownIdentifier', () => {
const rootPath = '/my/project';
const rootFile = await trackDownIdentifier(source, identifierName, currentFilePath, rootPath);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: './MyClass.js',
specifier: '[default]',
});
@ -201,7 +202,7 @@ describe('trackdownIdentifier', () => {
rootPath,
projectName,
);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: './MyClass.js',
specifier: '[default]',
});
@ -232,7 +233,7 @@ describe('trackdownIdentifier', () => {
const rootPath = '/my/project';
const rootFile = await trackDownIdentifier(source, identifierName, currentFilePath, rootPath);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: './src/declarationOfMyNumber.js',
specifier: 'myNumber',
});
@ -260,7 +261,7 @@ describe('trackdownIdentifier', () => {
const rootPath = '/my/project';
const rootFile = await trackDownIdentifier(source, identifierName, currentFilePath, rootPath);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: './packages/accordion/IngAccordionContent.js',
specifier: 'IngAccordionContent',
});
@ -277,7 +278,7 @@ describe('trackdownIdentifier', () => {
currentFilePath2,
rootPath2,
);
expect(rootFile2).to.eql({
expect(rootFile2).to.deep.equal({
file: './packages/accordion/IngAccordionInvokerButton.js',
specifier: 'IngAccordionInvokerButton',
});
@ -321,7 +322,7 @@ describe('trackDownIdentifierFromScope', () => {
fullCurrentFilePath,
projectPath,
);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: '[current]',
specifier: 'MyClass',
});
@ -372,7 +373,7 @@ describe('trackDownIdentifierFromScope', () => {
fullCurrentFilePath,
projectPath,
);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: './src/declarationOfMyClass.js',
specifier: 'MyClass',
});
@ -420,7 +421,7 @@ describe('trackDownIdentifierFromScope', () => {
fullCurrentFilePath,
projectPath,
);
expect(rootFile).to.eql({
expect(rootFile).to.deep.equal({
file: './src/classes.js',
specifier: 'El1',
});
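The call shape exercised throughout these tests resolves an imported identifier to its root file: the file where it was originally declared, plus the specifier it is exported under there. A compact usage sketch, assuming the four-argument signature shown above is the full public one (the import path is the new utils location used by this test file):

import { trackDownIdentifier } from './src/program/utils/track-down-identifier.js';

// Given a project where ./src/declarationOfMyClass.js declares and exports MyClass,
// and the current file imports it from there, the identifier resolves to the
// declaring file and the specifier it is exported under:
const rootFile = await trackDownIdentifier(
  './src/declarationOfMyClass.js', // source the import points at
  'MyClass',                       // identifier being tracked
  '/my/project/index.js',          // file currently being analyzed
  '/my/project',                   // project root
);
// -> { file: './src/declarationOfMyClass.js', specifier: 'MyClass' }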

View file

@ -215,14 +215,14 @@ describe('Analyzer "match-imports"', async () => {
);
const [name, filePath, project] = targetExportedId.split('::');
expect(matchedEntry.exportSpecifier).to.eql({
expect(matchedEntry.exportSpecifier).to.deep.equal({
name,
filePath,
project,
id: targetExportedId,
});
expect(matchedEntry.matchesPerProject[0].project).to.equal('importing-target-project');
expect(matchedEntry.matchesPerProject[0].files).to.eql(importedByFiles);
expect(matchedEntry.matchesPerProject[0].files).to.deep.equal(importedByFiles);
}
describe('Extracting exports', () => {
@ -435,7 +435,7 @@ describe('Analyzer "match-imports"', async () => {
});
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].exportSpecifier.name).to.equal('[default]');
expect(queryResult.queryOutput[0].matchesPerProject).to.eql([
expect(queryResult.queryOutput[0].matchesPerProject).to.deep.equal([
{ files: ['./importDefault1.js', './importDefault2.js'], project: 'target' },
]);
});
@ -476,11 +476,11 @@ describe('Analyzer "match-imports"', async () => {
});
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].exportSpecifier.name).to.equal('[default]');
expect(queryResult.queryOutput[0].matchesPerProject).to.eql([
expect(queryResult.queryOutput[0].matchesPerProject).to.deep.equal([
{ files: ['./deep-imports.js'], project: 'target' },
]);
expect(queryResult.queryOutput[1].exportSpecifier.name).to.equal('RefClass');
expect(queryResult.queryOutput[1].matchesPerProject).to.eql([
expect(queryResult.queryOutput[1].matchesPerProject).to.deep.equal([
{ files: ['./deep-imports.js'], project: 'target' },
]);
});

View file

@ -188,7 +188,7 @@ describe('Analyzer "match-paths"', async () => {
mockTargetAndReferenceProject(searchTargetProject, referenceProject);
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput).to.eql(expectedMatches);
expect(queryResult.queryOutput).to.deep.equal(expectedMatches);
});
describe('Features', () => {
@ -239,7 +239,7 @@ describe('Analyzer "match-paths"', async () => {
mockTargetAndReferenceProject(targetProj, refProj);
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].variable.paths[0]).to.eql({
expect(queryResult.queryOutput[0].variable.paths[0]).to.deep.equal({
from: './index.js',
to: './target-src/TargetClass.js',
});
@ -263,7 +263,7 @@ describe('Analyzer "match-paths"', async () => {
mockTargetAndReferenceProject(targetProjWithMultipleExports, refProj);
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].variable.paths[0]).to.eql({
expect(queryResult.queryOutput[0].variable.paths[0]).to.deep.equal({
from: './index.js',
to: './reexportFromRoot.js',
});
@ -296,7 +296,7 @@ describe('Analyzer "match-paths"', async () => {
mockTargetAndReferenceProject(targetProjWithMultipleExportsAndMainEntry, refProj);
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].variable.paths[0]).to.eql({
expect(queryResult.queryOutput[0].variable.paths[0]).to.deep.equal({
from: './index.js',
to: './target-src/mainEntry.js',
});
@ -308,8 +308,11 @@ describe('Analyzer "match-paths"', async () => {
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
const unprefixedPaths = queryResult.queryOutput[0].variable.paths[0];
expect(unprefixedPaths).to.eql({ from: './index.js', to: './target-src/TargetClass.js' });
expect(queryResult.queryOutput[0].variable.paths[1]).to.eql({
expect(unprefixedPaths).to.deep.equal({
from: './index.js',
to: './target-src/TargetClass.js',
});
expect(queryResult.queryOutput[0].variable.paths[1]).to.deep.equal({
from: `${refProj.name}/${unprefixedPaths.from.slice(2)}`,
to: unprefixedPaths.to,
});
@ -336,11 +339,11 @@ describe('Analyzer "match-paths"', async () => {
mockTargetAndReferenceProject(targetProjMultipleTargetExtensions, refProj);
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].variable.paths[0]).to.eql({
expect(queryResult.queryOutput[0].variable.paths[0]).to.deep.equal({
from: './index.js',
to: './target-src/TargetClass.js',
});
expect(queryResult.queryOutput[1].variable.paths[0]).to.eql({
expect(queryResult.queryOutput[1].variable.paths[0]).to.deep.equal({
from: './index.js',
to: './target-src/TargetSomething.js',
});
@ -410,7 +413,7 @@ describe('Analyzer "match-paths"', async () => {
);
const queryResults = await providence(matchPathsQueryConfigFilter, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].variable.paths[0]).to.eql({
expect(queryResult.queryOutput[0].variable.paths[0]).to.deep.equal({
from: './index.js',
to: './target-src/TargetClass.js',
});
@ -515,8 +518,8 @@ describe('Analyzer "match-paths"', async () => {
mockTargetAndReferenceProject(searchTargetProject, referenceProject);
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].tag).to.eql(expectedMatches[0]);
expect(queryResult.queryOutput[1].tag).to.eql(expectedMatches[1]);
expect(queryResult.queryOutput[0].tag).to.deep.equal(expectedMatches[0]);
expect(queryResult.queryOutput[1].tag).to.deep.equal(expectedMatches[1]);
});
// TODO: test works in isolation, but some side effects occur when run in suite
@ -578,7 +581,7 @@ describe('Analyzer "match-paths"', async () => {
providenceCfg,
);
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].tag).to.eql({
expect(queryResult.queryOutput[0].tag).to.deep.equal({
from: 'their-button',
to: 'my-button',
paths: [
@ -607,7 +610,7 @@ describe('Analyzer "match-paths"', async () => {
mockTargetAndReferenceProject(searchTargetProject, referenceProject);
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].tag.paths[0]).to.eql({
expect(queryResult.queryOutput[0].tag.paths[0]).to.deep.equal({
from: './customelementDefinitions.js',
to: './extendedCustomelementDefinitions.js',
});
@ -617,7 +620,7 @@ describe('Analyzer "match-paths"', async () => {
mockTargetAndReferenceProject(searchTargetProject, referenceProject);
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput[0].tag.paths[1]).to.eql({
expect(queryResult.queryOutput[0].tag.paths[1]).to.deep.equal({
from: 'reference-project/customelementDefinitions.js',
to: './extendedCustomelementDefinitions.js',
});
@ -736,7 +739,7 @@ describe('Analyzer "match-paths"', async () => {
mockTargetAndReferenceProject(searchTargetProjectFull, referenceProjectFull);
const queryResults = await providence(matchPathsQueryConfig, _providenceCfg);
const queryResult = queryResults[0];
expect(queryResult.queryOutput).to.eql(expectedMatchesFull);
expect(queryResult.queryOutput).to.deep.equal(expectedMatchesFull);
});
});
});

View file

@ -332,14 +332,14 @@ describe('Analyzer "match-subclasses"', async () => {
);
const [name, filePath, project] = targetExportedId.split('::');
expect(matchedEntry.exportSpecifier).to.eql({
expect(matchedEntry.exportSpecifier).to.deep.equal({
name,
filePath,
project,
id: targetExportedId,
});
expect(matchedEntry.matchesPerProject[0].project).to.equal('importing-target-project');
expect(matchedEntry.matchesPerProject[0].files).to.eql(importedByFiles);
expect(matchedEntry.matchesPerProject[0].files).to.deep.equal(importedByFiles);
}
mockTargetAndReferenceProject(searchTargetProject, referenceProject);

View file

@ -81,19 +81,19 @@ describe('Analyzer', async () => {
const queryResult = queryResults[0];
const { queryOutput, meta } = queryResult;
expect(queryOutput[0]).to.eql({
expect(queryOutput[0]).to.deep.equal({
file: './test-file-0.js',
meta: {},
result: [{ matched: 'entry' }],
});
expect(queryOutput[1]).to.eql({
expect(queryOutput[1]).to.deep.equal({
file: './test-file2.js',
meta: {},
result: [{ matched: 'entry' }],
});
// Local machine info needs to be deleted, so that results are always 'machine agnostic'
// (which is needed to share cached json results via git)
expect(meta).to.eql({
expect(meta).to.deep.equal({
searchType: 'ast-analyzer',
analyzerMeta: {
name: 'my-analyzer',
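The comment above states why the result meta is normalized: machine-specific info (such as absolute system paths) has to be stripped so that cached JSON results can be shared via git and compared across machines. A small sketch of that normalization idea; apart from the fields visible in the assertion, the shape is assumed.

// Sketch only: drop anything that differs per machine before caching a result.
// Field names beyond those asserted above are assumptions for illustration.
function toMachineAgnosticMeta(meta) {
  const { targetProject = {}, ...restAnalyzerMeta } = meta.analyzerMeta;
  // Strip absolute paths; keep portable identifiers like name/version/commitHash.
  const { path, ...portableTargetProject } = targetProject;
  return {
    ...meta,
    analyzerMeta: { ...restAnalyzerMeta, targetProject: portableTargetProject },
  };
}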

View file

@ -1,9 +1,7 @@
import { expect } from 'chai';
import { it } from 'mocha';
import pathLib from 'path';
import { InputDataService } from '../../../src/program/core/InputDataService.js';
import { memoizeConfig } from '../../../src/program/utils/memoize.js';
import { getCurrentDir } from '../../../src/program/utils/get-current-dir.js';
import { memoize } from '../../../src/program/utils/memoize.js';
import {
restoreMockedProjects,
mockProject,
@ -24,13 +22,13 @@ describe('InputDataService', () => {
}
beforeEach(() => {
memoizeConfig.isCacheDisabled = true;
memoize.disableCaching();
});
afterEach(() => {
restoreOriginalInputDataPaths();
restoreMockedProjects();
memoizeConfig.isCacheDisabled = false;
memoize.restoreCaching();
});
describe('Configuration', () => {
@ -50,36 +48,43 @@ describe('InputDataService', () => {
});
describe('Methods', () => {
// TODO: mock file system...
it('"createDataObject"', async () => {
/** @type {* & PathFromSystemRoot} */
const projectPath = pathLib.resolve(
getCurrentDir(import.meta.url),
'../../../test-helpers/project-mocks/importing-target-project',
);
mockProject({
'./package.json': JSON.stringify({
name: 'fictional-project',
main: 'my/index.js',
version: '1.0.0',
}),
'./src/file.js': '// bla',
'./src/file2.js': '// bla',
});
const inputDataPerProject = InputDataService.createDataObject([projectPath]);
expect(Object.keys(inputDataPerProject[0].project)).to.eql([
'path',
'mainEntry',
'name',
'version',
'commitHash',
const inputDataPerProject = await InputDataService.createDataObject(['/fictional/project']);
expect(inputDataPerProject).to.deep.equal([
{
project: {
path: '/fictional/project',
mainEntry: './my/index.js',
name: 'fictional-project',
version: '1.0.0',
commitHash: '[not-a-git-root]',
},
entries: [
{
file: './src/file.js',
context: {
code: '// bla',
},
},
{
file: './src/file2.js',
context: {
code: '// bla',
},
},
],
},
]);
expect(inputDataPerProject[0].project.name).to.equal('importing-target-project');
expect(inputDataPerProject[0].project.mainEntry).to.equal(
'./target-src/match-imports/root-level-imports.js',
);
expect(
inputDataPerProject[0].project.path.endsWith(
'/test-helpers/project-mocks/importing-target-project',
),
).to.equal(true);
expect(inputDataPerProject[0].entries.length).to.equal(6);
expect(inputDataPerProject[0].entries[0].context.code).to.not.be.undefined;
expect(inputDataPerProject[0].entries[0].file).to.equal(
'./target-src/find-customelements/multiple.js',
);
});
it('"targetProjectPaths"', async () => {});
@ -99,11 +104,11 @@ describe('InputDataService', () => {
'{ "name": "@another-scope/another-package" }',
});
expect(InputDataService.getMonoRepoPackages('/fictional/project')).to.eql([
{ path: 'packages/pkg1/', name: 'package1' },
{ path: 'packages/pkg2/', name: 'pkg2' }, // fallback when no package.json
{ path: 'packages/pkg3/', name: '@scope/pkg3' },
{ path: 'another-folder/another-package/', name: '@another-scope/another-package' },
expect(await InputDataService.getMonoRepoPackages('/fictional/project')).to.deep.equal([
{ path: 'packages/pkg1', name: 'package1' },
{ path: 'packages/pkg2', name: 'pkg2' }, // fallback when no package.json
{ path: 'packages/pkg3', name: '@scope/pkg3' },
{ path: 'another-folder/another-package', name: '@another-scope/another-package' },
]);
});
@ -120,11 +125,11 @@ describe('InputDataService', () => {
'{ "name": "@another-scope/another-package" }',
});
expect(InputDataService.getMonoRepoPackages('/fictional/project')).to.eql([
{ path: 'packages/pkg1/', name: 'package1' },
{ path: 'packages/pkg2/', name: 'pkg2' }, // fallback when no package.json
{ path: 'packages/pkg3/', name: '@scope/pkg3' },
{ path: 'another-folder/another-package/', name: '@another-scope/another-package' },
expect(await InputDataService.getMonoRepoPackages('/fictional/project')).to.deep.equal([
{ path: 'packages/pkg1', name: 'package1' },
{ path: 'packages/pkg2', name: 'pkg2' }, // fallback when no package.json
{ path: 'packages/pkg3', name: '@scope/pkg3' },
{ path: 'another-folder/another-package', name: '@another-scope/another-package' },
]);
});
});
@ -143,19 +148,21 @@ describe('InputDataService', () => {
});
it('gathers a list of files', async () => {
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.eql([
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.deep.equal([
'/fictional/project/index.js',
'/fictional/project/internal.js',
'/fictional/project/something.test.js',
'/fictional/project/nested/index.js',
'/fictional/project/nested/nested-two/index.test.js',
'/fictional/project/something.test.js',
]);
});
it('allows passing a depth which stops at nested depth', async () => {
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', { depth: 0 });
expect(globOutput).to.eql([
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
depth: 0,
});
expect(globOutput).to.deep.equal([
'/fictional/project/index.js',
'/fictional/project/internal.js',
'/fictional/project/something.test.js',
@ -163,26 +170,26 @@ describe('InputDataService', () => {
});
it('allows passing extensions', async () => {
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
extensions: ['.html', '.js'],
});
expect(globOutput).to.eql([
expect(globOutput).to.deep.equal([
'/fictional/project/index.html',
'/fictional/project/index.js',
'/fictional/project/internal.js',
'/fictional/project/nested/index.js',
'/fictional/project/nested/nested-two/index.test.js',
'/fictional/project/something.test.html',
'/fictional/project/something.test.js',
'/fictional/project/nested/index.js',
'/fictional/project/nested/nested-two/index.test.js',
]);
});
it('allows passing excluded folders', async () => {
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
extensions: ['.html', '.js'],
allowlist: ['!nested/**'],
});
expect(globOutput).to.eql([
expect(globOutput).to.deep.equal([
'/fictional/project/index.html',
'/fictional/project/index.js',
'/fictional/project/internal.js',
@ -192,25 +199,25 @@ describe('InputDataService', () => {
});
it('allows passing excluded files', async () => {
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
extensions: ['.html', '.js'],
allowlist: ['!index.js', '!**/*/index.js'],
});
expect(globOutput).to.eql([
expect(globOutput).to.deep.equal([
'/fictional/project/index.html',
'/fictional/project/internal.js',
'/fictional/project/nested/nested-two/index.test.js',
'/fictional/project/something.test.html',
'/fictional/project/something.test.js',
'/fictional/project/nested/nested-two/index.test.js',
]);
});
it('allows passing exclude globs', async () => {
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
extensions: ['.html', '.js'],
allowlist: ['!**/*.test.{html,js}'],
});
expect(globOutput).to.eql([
expect(globOutput).to.deep.equal([
'/fictional/project/index.html',
'/fictional/project/index.js',
'/fictional/project/internal.js',
@ -219,11 +226,11 @@ describe('InputDataService', () => {
});
it('does not support non globs in "allowlist"', async () => {
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
extensions: ['.html', '.js'],
allowlist: ['nested'],
});
expect(globOutput).to.eql([]);
expect(globOutput).to.deep.equal([]);
});
it('omits node_modules and bower_components at root level by default', async () => {
@ -235,8 +242,8 @@ describe('InputDataService', () => {
'./nested/bower_components/pkg/y.js': '',
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.eql([
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.deep.equal([
'/fictional/project/index.js',
'/fictional/project/nested/bower_components/pkg/y.js',
'/fictional/project/nested/node_modules/pkg/x.js',
@ -249,12 +256,12 @@ describe('InputDataService', () => {
'./omitted/file.js': '',
'./added/file.js': '',
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
allowlist: ['*', 'added/**/*'],
});
expect(globOutput).to.eql([
'/fictional/project/added/file.js',
expect(globOutput).to.deep.equal([
'/fictional/project/root-lvl.js',
'/fictional/project/added/file.js',
]);
});
@ -265,10 +272,10 @@ describe('InputDataService', () => {
'./deeper/glob/file.js': '',
'./deeper/file.js': '',
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
allowlist: ['deeper/**/*'],
});
expect(globOutput).to.eql([
expect(globOutput).to.deep.equal([
'/fictional/project/deeper/file.js',
'/fictional/project/deeper/glob/file.js',
'/fictional/project/deeper/glob/structure/file.js',
@ -285,8 +292,8 @@ describe('InputDataService', () => {
'./some-other-pkg/commitlint.conf.js': '',
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.eql(['/fictional/project/index.js']);
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.deep.equal(['/fictional/project/index.js']);
});
it('omits hidden files by default', async () => {
@ -295,8 +302,8 @@ describe('InputDataService', () => {
'./index.js': '',
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.eql(['/fictional/project/index.js']);
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.deep.equal(['/fictional/project/index.js']);
});
describe('AllowlistMode', () => {
@ -308,8 +315,8 @@ describe('InputDataService', () => {
}),
'.gitignore': '/dist',
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.eql([
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput).to.deep.equal([
// This means allowlistMode is 'git'
]);
@ -322,8 +329,8 @@ describe('InputDataService', () => {
files: ['dist'],
}),
});
const globOutput2 = InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput2).to.eql([
const globOutput2 = await InputDataService.gatherFilesFromDir('/fictional/project');
expect(globOutput2).to.deep.equal([
// This means allowlistMode is 'npm'
'/fictional/project/dist/bundle.js',
]);
@ -335,10 +342,10 @@ describe('InputDataService', () => {
projectPath: '/inside/proj/with/node_modules/detect-as-npm',
},
);
const globOutput3 = InputDataService.gatherFilesFromDir(
const globOutput3 = await InputDataService.gatherFilesFromDir(
'/inside/proj/with/node_modules/detect-as-npm',
);
expect(globOutput3).to.eql([
expect(globOutput3).to.deep.equal([
// This means allowlistMode is 'npm' (even though we found .gitignore)
'/inside/proj/with/node_modules/detect-as-npm/dist/bundle.js',
]);
@ -350,10 +357,10 @@ describe('InputDataService', () => {
projectPath: '/inside/proj/with/node_modules/@scoped/detect-as-npm',
},
);
const globOutput4 = InputDataService.gatherFilesFromDir(
const globOutput4 = await InputDataService.gatherFilesFromDir(
'/inside/proj/with/node_modules/@scoped/detect-as-npm',
);
expect(globOutput4).to.eql([
expect(globOutput4).to.deep.equal([
// This means allowlistMode is 'npm' (even though we found .gitignore)
'/inside/proj/with/node_modules/@scoped/detect-as-npm/dist/bundle.js',
]);
@ -369,12 +376,12 @@ describe('InputDataService', () => {
files: ['*.add.js', 'docs', 'src'],
}),
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
allowlistMode: 'npm',
});
expect(globOutput).to.eql([
'/fictional/project/docs/x.js',
expect(globOutput).to.deep.equal([
'/fictional/project/file.add.js',
'/fictional/project/docs/x.js',
'/fictional/project/src/y.js',
]);
});
@ -395,10 +402,10 @@ build/
!keep/
`,
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
allowlistMode: 'git',
});
expect(globOutput).to.eql([
expect(globOutput).to.deep.equal([
'/fictional/project/keep/it.js',
'/fictional/project/shall/pass.js',
]);
@ -415,10 +422,10 @@ build/
/dist
`,
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
allowlistMode: 'all',
});
expect(globOutput).to.eql([
expect(globOutput).to.deep.equal([
'/fictional/project/dist/bundle.js',
'/fictional/project/src/file.js',
]);
@ -434,10 +441,10 @@ build/
},
}),
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
allowlistMode: 'export-map',
});
expect(globOutput).to.eql(['./internal/file.js']);
expect(globOutput).to.deep.equal(['./internal/file.js']);
});
});
@ -451,11 +458,11 @@ build/
files: ['dist'], // This will not be considered by default, unless explicitly configured in allowlist
}),
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
allowlist: ['dist/**'],
allowlistMode: 'git', // for clarity (would also be auto-detected if not provided)
});
expect(globOutput).to.eql(['/fictional/project/dist/bundle.js']);
expect(globOutput).to.deep.equal(['/fictional/project/dist/bundle.js']);
});
describe('Default allowlist', () => {
@ -466,10 +473,10 @@ build/
'./added.js': '',
'./omit.js': '',
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
allowlist: ['added*'],
});
expect(globOutput).to.eql(['/fictional/project/added.js']);
expect(globOutput).to.deep.equal(['/fictional/project/added.js']);
});
it('allows to omit default config filter', async () => {
@ -481,16 +488,16 @@ build/
'./added.js': '',
'./omit.js': '',
});
const globOutput = InputDataService.gatherFilesFromDir('/fictional/project', {
const globOutput = await InputDataService.gatherFilesFromDir('/fictional/project', {
allowlist: ['!omit*'],
omitDefaultAllowlist: true,
});
expect(globOutput).to.eql([
expect(globOutput).to.deep.equal([
'/fictional/project/abc.config.js',
'/fictional/project/added.js',
'/fictional/project/bower_components/omitted/file.js',
'/fictional/project/node_modules/root-lvl.js',
'/fictional/project/xyz.conf.js',
'/fictional/project/node_modules/root-lvl.js',
'/fictional/project/bower_components/omitted/file.js',
]);
});
});
@ -514,7 +521,7 @@ build/
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './internal-path.js', exposed: './exposed-path.js' },
{ internal: './internal/folder-a/path.js', exposed: './external/folder-a/path.js' },
{ internal: './internal/folder-b/path.js', exposed: './external/folder-b/path.js' },
@ -532,7 +539,7 @@ build/
const exportMapPaths = await InputDataService.getPathsFromExportMap(exports, {
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './internal-path.js', exposed: './exposed-path.js' },
]);
});
@ -550,7 +557,7 @@ build/
const exportMapPaths = await InputDataService.getPathsFromExportMap(exports, {
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './internal-exports-folder/file-a.js', exposed: './file-a.js' },
{ internal: './internal-exports-folder/file-b.js', exposed: './file-b.js' },
{ internal: './internal-exports-folder/file-c.js', exposed: './file-c.js' },
@ -569,12 +576,12 @@ build/
const exportMapPaths = await InputDataService.getPathsFromExportMap(exports, {
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './internal-folder/file-a.js', exposed: './exposed-folder/file-a.js' },
{
internal: './internal-folder/another-folder/file-b.js',
exposed: './exposed-folder/another-folder/file-b.js',
},
{ internal: './internal-folder/file-a.js', exposed: './exposed-folder/file-a.js' },
]);
});
@ -591,9 +598,9 @@ build/
const exportMapPaths = await InputDataService.getPathsFromExportMap(exports, {
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
{ internal: './folder-a/file.js', exposed: './exposed-folder/folder-a/file.js' },
{ internal: './folder-b/file.js', exposed: './exposed-folder/folder-b/file.js' },
expect(exportMapPaths).to.deep.equal([
{ exposed: './exposed-folder/folder-a/file.js', internal: './folder-a/file.js' },
{ exposed: './exposed-folder/folder-b/file.js', internal: './folder-b/file.js' },
]);
});
@ -611,7 +618,7 @@ build/
const exportMapPaths = await InputDataService.getPathsFromExportMap(exports, {
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './internal-folder/file-a.js', exposed: './exposed-folder/file-a.js' },
]);
});
@ -631,7 +638,7 @@ build/
const exportMapPaths = await InputDataService.getPathsFromExportMap(exports, {
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './esm-exports/file.js', exposed: './file.js' },
]);
});
@ -650,7 +657,7 @@ build/
packageRootPath: '/my/proj',
nodeResolveMode: 'require',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './cjs-exports/file.cjs', exposed: './file.cjs' },
]);
});
@ -669,7 +676,7 @@ build/
packageRootPath: '/my/proj',
nodeResolveMode: 'develop',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './develop-exports/file.js', exposed: './file.js' },
]);
});
@ -693,7 +700,7 @@ build/
const exportMapPaths = await InputDataService.getPathsFromExportMap(exports, {
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './index.js', exposed: '.' },
{ internal: './file.js', exposed: './exposed-file.js' },
]);
@ -714,7 +721,7 @@ build/
const exportMapPaths = await InputDataService.getPathsFromExportMap(exports, {
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './internal-folder/file-a.js', exposed: './exposed-folder/file-a.js' },
{ internal: './internal-folder/file-b.js', exposed: './exposed-folder/file-b.js' },
]);
@ -733,7 +740,7 @@ build/
const exportMapPaths = await InputDataService.getPathsFromExportMap(exports, {
packageRootPath: '/my/proj',
});
expect(exportMapPaths).to.eql([
expect(exportMapPaths).to.deep.equal([
{ internal: './internal-folder/file-a.js', exposed: './exposed-folder/file-a.js' },
{ internal: './internal-folder/file-b.js', exposed: './exposed-folder/file-b.js' },
]);
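Several assertions above cover getPathsFromExportMap, which turns a package.json exports map (including * wildcards) into { internal, exposed } pairs. The sketch below shows the core of that wildcard expansion for a single entry; it is a simplification and ignores the conditional entries (import/require/develop) handled via nodeResolveMode.

// A minimal sketch of expanding one export-map entry with a `*` wildcard
// against files found on disk (illustrative, not the real implementation).
function expandExportMapEntry(exposedPattern, internalPattern, filesInInternalFolder) {
  if (!internalPattern.includes('*')) {
    return [{ internal: internalPattern, exposed: exposedPattern }];
  }
  const [before, after] = internalPattern.split('*');
  return filesInInternalFolder
    .filter(f => f.startsWith(before) && f.endsWith(after))
    .map(f => {
      const wildcardMatch = f.slice(before.length, f.length - after.length);
      return { internal: f, exposed: exposedPattern.replace('*', wildcardMatch) };
    });
}

// expandExportMapEntry('./exposed-folder/*.js', './internal-folder/*.js',
//   ['./internal-folder/file-a.js', './internal-folder/file-b.js']);
// -> [ { internal: './internal-folder/file-a.js', exposed: './exposed-folder/file-a.js' },
//      { internal: './internal-folder/file-b.js', exposed: './exposed-folder/file-b.js' } ]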

View file

@ -1,156 +1,18 @@
import { expect } from 'chai';
import { it } from 'mocha';
import { QueryService } from '../../../src/program/core/QueryService.js';
import { DummyAnalyzer } from '../../../test-helpers/templates/DummyAnalyzer.js';
import FindImportsAnalyzer from '../../../src/program/analyzers/find-imports.js';
import { QueryService } from '../../../src/program/core/QueryService.js';
/**
* @typedef {import('../../../types/index.js').Analyzer} Analyzer
* @typedef {import('../../../types/index.js').PathFromSystemRoot} PathFromSystemRoot
* @typedef {import('../../../types/index.js').Analyzer} Analyzer
*/
describe('QueryService', () => {
describe('Methods', () => {
describe('Retrieving QueryConfig', () => {
it('"getQueryConfigFromRegexSearchString"', async () => {
const result = QueryService.getQueryConfigFromRegexSearchString('x');
expect(result).to.eql({ type: 'search', regexString: 'x' });
expect(() => {
// @ts-expect-error
QueryService.getQueryConfigFromRegexSearchString();
}).to.throw('[QueryService.getQueryConfigFromRegexSearchString]: provide a string');
});
describe('"getQueryConfigFromFeatureString"', () => {
it('with tag, attr-key and attr-value', async () => {
const result = QueryService.getQueryConfigFromFeatureString('tg-icon[size=xs]');
expect(result).to.eql({
type: 'feature',
feature: {
name: 'size',
value: 'xs',
tag: 'tg-icon',
isAttribute: true,
usesValueContains: false,
usesValuePartialMatch: false,
usesTagPartialMatch: false,
},
});
});
it('with only tag', async () => {
const result = QueryService.getQueryConfigFromFeatureString('tg-icon');
expect(result).to.eql({
type: 'feature',
feature: {
tag: 'tg-icon',
usesTagPartialMatch: false,
},
});
});
it('with only attr-key', async () => {
const result = QueryService.getQueryConfigFromFeatureString('[attr]');
expect(result).to.eql({
type: 'feature',
feature: {
name: 'attr',
value: undefined,
tag: '',
isAttribute: true,
usesValueContains: false,
usesValuePartialMatch: false,
usesTagPartialMatch: false,
},
});
});
it('with only attr-key and attr-value', async () => {
const result = QueryService.getQueryConfigFromFeatureString('[attr=x]');
expect(result).to.eql({
type: 'feature',
feature: {
name: 'attr',
value: 'x',
tag: '',
isAttribute: true,
usesValueContains: false,
usesValuePartialMatch: false,
usesTagPartialMatch: false,
},
});
});
describe('With partial value', async () => {
it('with tag, attr-key and attr-value', async () => {
const result = QueryService.getQueryConfigFromFeatureString('tg-icon*[size*=xs*]');
expect(result).to.eql({
type: 'feature',
feature: {
name: 'size',
value: 'xs',
tag: 'tg-icon',
isAttribute: true,
usesValueContains: true,
usesValuePartialMatch: true,
usesTagPartialMatch: true,
},
});
});
it('with only tag', async () => {
const result = QueryService.getQueryConfigFromFeatureString('tg-icon*');
expect(result).to.eql({
type: 'feature',
feature: {
tag: 'tg-icon',
usesTagPartialMatch: true,
},
});
});
it('with only attr-key', async () => {
const result = QueryService.getQueryConfigFromFeatureString('[attr*]');
expect(result).to.eql({
type: 'feature',
feature: {
name: 'attr',
value: undefined,
tag: '',
isAttribute: true,
usesValueContains: true,
usesValuePartialMatch: false,
usesTagPartialMatch: false,
},
});
});
it('with only attr-key and attr-value', async () => {
const result = QueryService.getQueryConfigFromFeatureString('[attr*=x*]');
expect(result).to.eql({
type: 'feature',
feature: {
name: 'attr',
value: 'x',
tag: '',
isAttribute: true,
usesValueContains: true,
usesValuePartialMatch: true,
usesTagPartialMatch: false,
},
});
});
});
it('throws when no string provided', async () => {
expect(() => {
// @ts-ignore
QueryService.getQueryConfigFromFeatureString();
}).to.throw('[QueryService.getQueryConfigFromFeatureString]: provide a string');
});
});
describe('"getQueryConfigFromAnalyzer"', () => {
const myAnalyzerCfg = { targetProjectPath: /** @type {PathFromSystemRoot} */ ('/my/path') };
it('accepts a constructor as first argument', async () => {
@ -158,7 +20,7 @@ describe('QueryService', () => {
'find-imports',
myAnalyzerCfg,
);
expect(result).to.eql({
expect(result).to.deep.equal({
type: 'ast-analyzer',
analyzerName: 'find-imports',
analyzerConfig: myAnalyzerCfg,
@ -171,7 +33,7 @@ describe('QueryService', () => {
/** @type {* & Analyzer} */ (DummyAnalyzer),
myAnalyzerCfg,
);
expect(result).to.eql({
expect(result).to.deep.equal({
type: 'ast-analyzer',
analyzerName: 'find-dummy-analyzer',
analyzerConfig: myAnalyzerCfg,
@ -186,7 +48,7 @@ describe('QueryService', () => {
// it('with FeatureConfig', async () => {
// const featureCfg = QueryService.getQueryConfigFromFeatureString('tg-icon[size=xs]');
// const result = QueryService.grepSearch(featureCfg);
// expect(result).to.eql({
// expect(result).to.deep.equal({
// type: 'ast-analyzer',
// analyzerName: 'find-imports',
// analyzerConfig: { x: 'y' },

View file

@ -1,16 +1,17 @@
import { expect } from 'chai';
import { it } from 'mocha';
import { mock } from '../../../test-helpers/mock-project-helpers.js';
import { getSourceCodeFragmentOfDeclaration } from '../../../src/program/utils/index.js';
import { memoizeConfig } from '../../../src/program/utils/memoize.js';
import { mock } from '../../../test-helpers/mock-project-helpers.js';
import { memoize } from '../../../src/program/utils/memoize.js';
describe('getSourceCodeFragmentOfDeclaration', () => {
const initialMemoizeSsCacheDisabled = memoizeConfig.isCacheDisabled;
const initialMemoizeCacheEnabled = memoize.isCacheEnabled;
before(() => {
memoizeConfig.isCacheDisabled = true;
memoize.disableCaching();
});
after(() => {
memoizeConfig.isCacheDisabled = initialMemoizeSsCacheDisabled;
memoize.restoreCaching(initialMemoizeCacheEnabled);
});
describe('Named specifiers', () => {

View file

@ -1,17 +1,11 @@
import { expect } from 'chai';
import { it } from 'mocha';
import { memoize, memoizeConfig } from '../../../src/program/utils/memoize.js';
const cacheDisabledInitialValue = memoizeConfig.isCacheDisabled;
import { memoize } from '../../../src/program/utils/memoize.js';
describe('Memoize', () => {
beforeEach(() => {
// This is important, since memoization only works
memoizeConfig.isCacheDisabled = false;
});
afterEach(() => {
memoizeConfig.isCacheDisabled = cacheDisabledInitialValue;
});
// This is important, since memoization only works when caching is enabled.
// We want to prevent another test from unintentionally leaving caching disabled.
memoize.restoreCaching();
describe('With primitives', () => {
describe('Numbers', () => {
@ -136,15 +130,15 @@ describe('Memoize', () => {
const sumMemoized = memoize(sum);
// Put in cache for args combination
expect(sumMemoized([1], [2])).to.eql([1, 2]);
expect(sumMemoized([1], [2])).to.deep.equal([1, 2]);
expect(sumCalled).to.equal(1);
// Return from cache
expect(sumMemoized([1], [2])).to.eql([1, 2]);
expect(sumMemoized([1], [2])).to.deep.equal([1, 2]);
expect(sumCalled).to.equal(1);
// Put in cache for args combination
expect(sumMemoized([1], [3])).to.eql([1, 3]);
expect(sumMemoized([1], [3])).to.deep.equal([1, 3]);
expect(sumCalled).to.equal(2);
});
@ -162,17 +156,17 @@ describe('Memoize', () => {
}
const sum2Memoized = memoize(sum2);
expect(sumMemoized([1], [2])).to.eql([1, 2]);
expect(sumMemoized([1], [2])).to.deep.equal([1, 2]);
expect(sumCalled).to.equal(1);
expect(sum2Called).to.equal(0);
expect(sum2Memoized([1], [2])).to.eql([1, 2]);
expect(sum2Memoized([1], [2])).to.deep.equal([1, 2]);
expect(sumCalled).to.equal(1);
expect(sum2Called).to.equal(1);
// Both cached
expect(sumMemoized([1], [2])).to.eql([1, 2]);
expect(sum2Memoized([1], [2])).to.eql([1, 2]);
expect(sumMemoized([1], [2])).to.deep.equal([1, 2]);
expect(sum2Memoized([1], [2])).to.deep.equal([1, 2]);
expect(sumCalled).to.equal(1);
expect(sum2Called).to.equal(1);
});
@ -188,15 +182,15 @@ describe('Memoize', () => {
const sumMemoized = memoize(sum, { serializeObjects: true });
// Put in cache for args combination
expect(sumMemoized({ x: 1 }, { y: 2 })).to.eql({ x: 1, y: 2 });
expect(sumMemoized({ x: 1 }, { y: 2 })).to.deep.equal({ x: 1, y: 2 });
expect(sumCalled).to.equal(1);
// Return from cache
expect(sumMemoized({ x: 1 }, { y: 2 })).to.eql({ x: 1, y: 2 });
expect(sumMemoized({ x: 1 }, { y: 2 })).to.deep.equal({ x: 1, y: 2 });
expect(sumCalled).to.equal(1);
// Put in cache for args combination
expect(sumMemoized({ x: 1 }, { y: 3 })).to.eql({ x: 1, y: 3 });
expect(sumMemoized({ x: 1 }, { y: 3 })).to.deep.equal({ x: 1, y: 3 });
expect(sumCalled).to.equal(2);
});
@ -214,17 +208,17 @@ describe('Memoize', () => {
}
const sum2Memoized = memoize(sum2, { serializeObjects: true });
expect(sumMemoized({ x: 1 }, { y: 2 })).to.eql({ x: 1, y: 2 });
expect(sumMemoized({ x: 1 }, { y: 2 })).to.deep.equal({ x: 1, y: 2 });
expect(sumCalled).to.equal(1);
expect(sum2Called).to.equal(0);
expect(sum2Memoized({ x: 1 }, { y: 2 })).to.eql({ x: 1, y: 2 });
expect(sum2Memoized({ x: 1 }, { y: 2 })).to.deep.equal({ x: 1, y: 2 });
expect(sumCalled).to.equal(1);
expect(sum2Called).to.equal(1);
// Both cached
expect(sumMemoized({ x: 1 }, { y: 2 })).to.eql({ x: 1, y: 2 });
expect(sum2Memoized({ x: 1 }, { y: 2 })).to.eql({ x: 1, y: 2 });
expect(sumMemoized({ x: 1 }, { y: 2 })).to.deep.equal({ x: 1, y: 2 });
expect(sum2Memoized({ x: 1 }, { y: 2 })).to.deep.equal({ x: 1, y: 2 });
expect(sumCalled).to.equal(1);
expect(sum2Called).to.equal(1);
});
@ -242,13 +236,13 @@ describe('Memoize', () => {
// Put in cache for args combination
const result = sumMemoized({ x: 1 }, { y: 2 });
expect(result).to.eql({ x: 1, y: 2 });
expect(result).to.deep.equal({ x: 1, y: 2 });
expect(sumCalled).to.equal(1);
// Return from cache
const resultCached = sumMemoized({ x: 1 }, { y: 2 });
expect(resultCached).to.equal(result);
expect(resultCached).to.eql({ x: 1, y: 2 });
expect(resultCached).to.deep.equal({ x: 1, y: 2 });
expect(sumCalled).to.equal(1);
// Outside world can edit returned reference
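These tests rely on an explicit cache toggle on the memoize util: disableCaching(), restoreCaching() (optionally restoring a previously read state) and the isCacheEnabled getter. A minimal sketch of how such a toggle can be attached to a memoize function, purely as an illustration of the pattern rather than the actual implementation:

// Illustrative only: a memoize factory exposing the same toggle API as used above.
function createMemoize() {
  let cacheEnabled = true;

  function memoize(fn, { serializeObjects = false } = {}) {
    const cache = new Map();
    return (...args) => {
      const key = serializeObjects ? JSON.stringify(args) : args.join('\u0000');
      if (cacheEnabled && cache.has(key)) return cache.get(key);
      const result = fn(...args);
      if (cacheEnabled) cache.set(key, result);
      return result;
    };
  }

  memoize.disableCaching = () => {
    cacheEnabled = false;
  };
  // Restores to a previously read state, or re-enables by default, mirroring
  // `memoize.restoreCaching(initialMemoizeCacheEnabled)` in the tests above.
  memoize.restoreCaching = (previousState = true) => {
    cacheEnabled = previousState;
  };
  Object.defineProperty(memoize, 'isCacheEnabled', { get: () => cacheEnabled });

  return memoize;
}

const memoize = createMemoize();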

View file

@ -0,0 +1,279 @@
import path from 'path';
import { globby } from 'globby';
// eslint-disable-next-line import/no-extraneous-dependencies
import { expect } from 'chai';
// eslint-disable-next-line import/no-extraneous-dependencies
import mockFs from 'mock-fs';
import { optimisedGlob } from '../../../src/program/utils/optimised-glob.js';
const measurePerf = process.argv.includes('--measure-perf');
/**
* @param {*} patterns
* @param {*} options
* @returns {Promise<string[]>}
*/
async function runOptimisedGlobAndCheckGlobbyParity(patterns, options) {
performance.mark('start-optimisedGlob');
const optimisedGlobResult = await optimisedGlob(patterns, options);
performance.mark('end-optimisedGlob');
performance.mark('start-globby');
const globbyResult = await globby(patterns, options);
performance.mark('end-globby');
if (measurePerf) {
const optimisedGlobPerf = performance.measure(
'optimisedGlob',
'start-optimisedGlob',
'end-optimisedGlob',
);
const globbyPerf = performance.measure('globby', 'start-globby', 'end-globby');
console.debug(
`optimisedGlob was ${
globbyPerf.duration - optimisedGlobPerf.duration
}ms quicker than globby.`,
);
}
expect(optimisedGlobResult).to.deep.equal(globbyResult);
return optimisedGlobResult;
}
describe('optimisedGlob', () => {
const testCfg = {
cwd: '/fakeFs',
};
beforeEach(() => {
const fakeFs = {
'/fakeFs/my/folder/some/file.js': 'content',
'/fakeFs/my/folder/lvl1/some/file.js': 'content',
'/fakeFs/my/folder/lvl1/lvl2/some/file.js': 'content',
'/fakeFs/my/folder/lvl1/lvl2/lvl3/some/file.js': 'content',
'/fakeFs/my/folder/some/file.d.ts': 'content',
'/fakeFs/my/folder/lvl1/some/file.d.ts': 'content',
'/fakeFs/my/folder/lvl1/lvl2/some/file.d.ts': 'content',
'/fakeFs/my/folder/lvl1/lvl2/lvl3/some/file.d.ts': 'content',
'/fakeFs/my/folder/some/anotherFile.js': 'content',
'/fakeFs/my/folder/lvl1/some/anotherFile.js': 'content',
'/fakeFs/my/folder/lvl1/lvl2/some/anotherFile.js': 'content',
'/fakeFs/my/folder/lvl1/lvl2/lvl3/some/anotherFile.js': 'content',
'/fakeFs/my/folder/some/anotherFile.d.ts': 'content',
'/fakeFs/my/folder/lvl1/some/anotherFile.d.ts': 'content',
'/fakeFs/my/folder/lvl1/lvl2/some/anotherFile.d.ts': 'content',
'/fakeFs/my/folder/lvl1/lvl2/lvl3/some/anotherFile.d.ts': 'content',
'/fakeFs/my/.hiddenFile.js': 'content',
};
mockFs(fakeFs);
});
afterEach(() => {
mockFs.restore();
});
describe('Star patterns', () => {
it('supports double asterisk like "my/folder/**/some/file.js" ', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity(
'my/folder/**/some/file.js',
testCfg,
);
expect(files).to.deep.equal([
'my/folder/some/file.js',
'my/folder/lvl1/some/file.js',
'my/folder/lvl1/lvl2/some/file.js',
'my/folder/lvl1/lvl2/lvl3/some/file.js',
]);
});
it('supports single asterisk like "my/folder/*/some/file.js" ', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity('my/folder/*/some/file.js', testCfg);
expect(files).to.deep.equal(['my/folder/lvl1/some/file.js']);
});
it('supports filenames like "my/folder/lvl1/some/*il*.js" ', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity(
'my/folder/lvl1/some/*il*.js',
testCfg,
);
expect(files).to.deep.equal([
'my/folder/lvl1/some/anotherFile.js',
'my/folder/lvl1/some/file.js',
]);
});
it('supports globs starting with a star like "**/some/file.js" ', async () => {
const filesDoubleStar = await runOptimisedGlobAndCheckGlobbyParity(
'**/some/file.js',
testCfg,
);
expect(filesDoubleStar).to.deep.equal([
'my/folder/some/file.js',
'my/folder/lvl1/some/file.js',
'my/folder/lvl1/lvl2/some/file.js',
'my/folder/lvl1/lvl2/lvl3/some/file.js',
]);
const filesSingleStar = await runOptimisedGlobAndCheckGlobbyParity(
'*/folder/some/file.js',
testCfg,
);
expect(filesSingleStar).to.deep.equal(['my/folder/some/file.js']);
});
it('gives empty output when location does not exist', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity('my/folder/**/some/file.js', {
...testCfg,
cwd: '/nonExisting/path', // this will not exist
});
expect(files).to.deep.equal([]);
});
it('omits hidden files', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity('*/*/*/*', testCfg);
expect(files).to.deep.equal([
'my/folder/some/anotherFile.d.ts',
'my/folder/some/anotherFile.js',
'my/folder/some/file.d.ts',
'my/folder/some/file.js',
]);
});
});
describe('Brace patterns', () => {
it('works with filenames like "my/folder/*/some/file.{js,d.ts}" ', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity(
'my/folder/*/some/file.{js,d.ts}',
testCfg,
);
expect(files).to.deep.equal(['my/folder/lvl1/some/file.d.ts', 'my/folder/lvl1/some/file.js']);
});
});
describe('Multiple globs', () => {
it('accepts an array of globs, like ["my/folder/*/some/file.js", "my/folder/lvl1/*/some/file.js"]', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity(
['my/folder/*/some/file.js', 'my/folder/lvl1/*/some/file.js'],
testCfg,
);
expect(files).to.deep.equal([
'my/folder/lvl1/some/file.js',
'my/folder/lvl1/lvl2/some/file.js',
]);
});
it('accepts negative globs, like ["my/folder/**/some/file.js", "!my/folder/*/some/file.js"]', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity(
['my/folder/**/some/file.js', '!my/folder/*/some/file.js'],
testCfg,
);
expect(files).to.deep.equal([
'my/folder/some/file.js',
'my/folder/lvl1/lvl2/some/file.js',
'my/folder/lvl1/lvl2/lvl3/some/file.js',
]);
});
});
describe('Options', () => {
it('"absolute" returns full system paths', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity('my/folder/*/some/file.{js,d.ts}', {
...testCfg,
absolute: true,
});
if (process.platform === 'win32') {
const driveLetter = path.win32.resolve(testCfg.cwd).slice(0, 1).toUpperCase();
expect(files).to.deep.equal([
`${driveLetter}:/fakeFs/my/folder/lvl1/some/file.d.ts`,
`${driveLetter}:/fakeFs/my/folder/lvl1/some/file.js`,
]);
} else {
expect(files).to.deep.equal([
'/fakeFs/my/folder/lvl1/some/file.d.ts',
'/fakeFs/my/folder/lvl1/some/file.js',
]);
}
});
it('"cwd" changes relative starting point of glob', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity('folder/*/some/file.{js,d.ts}', {
...testCfg,
cwd: '/fakeFs/my',
});
expect(files).to.deep.equal(['folder/lvl1/some/file.d.ts', 'folder/lvl1/some/file.js']);
});
it('"onlyDirectories" returns only directories/folders', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity('my/folder/*/some', {
...testCfg,
onlyDirectories: true,
});
expect(files).to.deep.equal(['my/folder/lvl1/some']);
});
it('"onlyFiles" returns only files', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity('my/folder/*/some', {
...testCfg,
onlyFiles: true,
});
expect(files).to.deep.equal([]);
});
it('"deep" limits the level of results', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity('my/folder/**', {
...testCfg,
onlyDirectories: true,
deep: 1,
});
expect(files).to.deep.equal(['my/folder/lvl1', 'my/folder/some']);
const files2 = await runOptimisedGlobAndCheckGlobbyParity('my/folder/**', {
...testCfg,
onlyDirectories: true,
deep: 2,
});
expect(files2).to.deep.equal([
'my/folder/lvl1',
'my/folder/some',
'my/folder/lvl1/lvl2',
'my/folder/lvl1/some',
]);
});
it('"dot" allows hidden files', async () => {
const files = await runOptimisedGlobAndCheckGlobbyParity('*/*', { ...testCfg, dot: true });
expect(files).to.deep.equal(['my/.hiddenFile.js']);
});
it.skip('throws errors for non-existing paths when "suppressErrors" is false', async () => {
let rejectionError;
try {
await optimisedGlob('my/folder/**/some/file.js', {
...testCfg,
cwd: '/nonExisting/path', // this will not exist
suppressErrors: false,
});
} catch (err) {
rejectionError = err;
}
expect(rejectionError).to.not.be.undefined;
});
});
});
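The suite above exercises optimisedGlob as an async, globby-compatible call: one glob or an array of globs (including '!' negations), an options object with cwd, absolute, onlyDirectories, onlyFiles, deep and dot, and a promise of matching paths as the result. A minimal usage sketch; the import path and option values below are assumptions, not taken from this diff:

// Sketch only: module path and option values are assumptions.
import { optimisedGlob } from 'providence-analytics/src/program/utils/optimised-glob.js';

const files = await optimisedGlob(['my/folder/**/*.js', '!**/lvl2/**'], {
  cwd: '/fakeFs', // relative starting point of the glob
  onlyFiles: true, // skip directories, as globby does by default
  absolute: false, // return paths relative to cwd
});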

View file

@@ -6,14 +6,14 @@ import {
mockTargetAndReferenceProject,
} from '../../../test-helpers/mock-project-helpers.js';
import { resolveImportPath } from '../../../src/program/utils/resolve-import-path.js';
import { memoizeConfig } from '../../../src/program/utils/memoize.js';
import { memoize } from '../../../src/program/utils/memoize.js';
describe('resolveImportPath', () => {
beforeEach(() => {
memoizeConfig.isCacheDisabled = true;
memoize.disableCaching();
});
afterEach(() => {
memoizeConfig.isCacheDisabled = false;
memoize.restoreCaching();
restoreMockedProjects();
});
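The hooks above replace the mutable memoizeConfig.isCacheDisabled flag with methods on the memoize util itself. A minimal sketch of a memoize helper exposing such toggles; the actual implementation in src/program/utils/memoize.js may differ:

// Sketch under assumptions; not the real providence-analytics implementation.
let cachingDisabled = false;

export function memoize(fn) {
  const cache = new Map();
  return function memoized(...args) {
    if (cachingDisabled) return fn.apply(this, args);
    const key = JSON.stringify(args);
    if (!cache.has(key)) cache.set(key, fn.apply(this, args));
    return cache.get(key);
  };
}
memoize.disableCaching = () => { cachingDisabled = true; };
memoize.restoreCaching = () => { cachingDisabled = false; };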

View file

@@ -60,7 +60,7 @@ describe('swcTraverse', () => {
};
swcTraverse(swcAst, visitor);
expect(foundTypes).to.eql([
expect(foundTypes).to.deep.equal([
'Module',
'ImportDeclaration',
'ImportDefaultSpecifier',
@@ -166,7 +166,7 @@ describe('swcTraverse', () => {
expect(declaratorPaths[2].scope.id).to.equal(2);
expect(declaratorPaths[0].node.id.value).to.equal('globalScope');
expect(Object.keys(declaratorPaths[0].scope.bindings)).to.eql([
expect(Object.keys(declaratorPaths[0].scope.bindings)).to.deep.equal([
'globalScope',
'alsoGlobalScope',
]);
@@ -180,8 +180,8 @@ describe('swcTraverse', () => {
declaratorPaths[3].node,
);
expect(Object.keys(declaratorPaths[1].scope.bindings)).to.eql(['middleScope']);
expect(Object.keys(declaratorPaths[2].scope.bindings)).to.eql(['deepestScope']);
expect(Object.keys(declaratorPaths[1].scope.bindings)).to.deep.equal(['middleScope']);
expect(Object.keys(declaratorPaths[2].scope.bindings)).to.deep.equal(['deepestScope']);
});
it('creates scopes for nested FunctionDeclaration', async () => {
@@ -336,7 +336,7 @@ describe('swcTraverse', () => {
};
swcTraverse(swcAst, visitor, { needsAdvancedPaths: true });
expect(Object.keys(declaratorPaths[0].scope.bindings)).to.eql([
expect(Object.keys(declaratorPaths[0].scope.bindings)).to.deep.equal([
'globalScope',
'alsoGlobalScope',
]);
@@ -370,12 +370,12 @@ describe('swcTraverse', () => {
};
swcTraverse(swcAst, visitor, { needsAdvancedPaths: true });
expect(Object.keys(declaratorPaths[0].scope.bindings)).to.eql([
expect(Object.keys(declaratorPaths[0].scope.bindings)).to.deep.equal([
'globalScope',
'stillGlobalScope',
]);
expect(Object.keys(declaratorPaths[1].scope.bindings)).to.eql(['middleScope']);
expect(Object.keys(declaratorPaths[2].scope.bindings)).to.eql(['insideFnScope']);
expect(Object.keys(declaratorPaths[1].scope.bindings)).to.deep.equal(['middleScope']);
expect(Object.keys(declaratorPaths[2].scope.bindings)).to.deep.equal(['insideFnScope']);
});
});
@@ -420,8 +420,10 @@ describe('swcTraverse', () => {
expect(babelScopes.length).to.equal(swcScopes.length);
for (let i = 0; i < babelScopes.length; i += 1) {
expect(babelScopes[i].uid - babelRootScopeIdOffset).to.equal(swcScopes[i].id);
expect(Object.keys(babelScopes[i].bindings)).to.eql(Object.keys(swcScopes[i].bindings));
// expect(babelScopes[i].references).to.eql(swcResults[i].references);
expect(Object.keys(babelScopes[i].bindings)).to.deep.equal(
Object.keys(swcScopes[i].bindings),
);
// expect(babelScopes[i].references).to.deep.equal(swcResults[i].references);
}
}

View file

@@ -38,9 +38,9 @@ describe('traverseHtml', () => {
},
});
expect(foundDivs).to.eql(['a-lvl1', 'b']);
expect(foundSpans).to.eql(['a-lvl2']);
expect(foundMyTags).to.eql(['a-lvl3']);
expect(foundDivs).to.deep.equal(['a-lvl1', 'b']);
expect(foundSpans).to.deep.equal(['a-lvl2']);
expect(foundMyTags).to.deep.equal(['a-lvl3']);
});
it('traverses different levels in DOM order', async () => {
@@ -72,7 +72,7 @@ describe('traverseHtml', () => {
traverseHtml(ast, processObj);
// call order based on dom tree
expect(callOrder).to.eql(['div#a-lvl1', 'span#a-lvl2', 'my-tag#a-lvl3', 'div#b']);
expect(callOrder).to.deep.equal(['div#a-lvl1', 'span#a-lvl2', 'my-tag#a-lvl3', 'div#b']);
});
it('allows to stop traversal (for performance)', async () => {
@@ -104,7 +104,7 @@ describe('traverseHtml', () => {
};
traverseHtml(ast, processObj);
expect(callOrder).to.eql(['div#a-lvl1']);
expect(callOrder).to.deep.equal(['div#a-lvl1']);
});
it('allows to traverse within a path', async () => {
@@ -135,6 +135,6 @@ describe('traverseHtml', () => {
};
traverseHtml(ast, processObj);
expect(callOrder).to.eql(['my-tag#a-lvl3', 'not-found#a-lvl4']);
expect(callOrder).to.deep.equal(['my-tag#a-lvl3', 'not-found#a-lvl4']);
});
});

View file

@@ -4,6 +4,6 @@
"outDir": "./dist-types",
"rootDir": "."
},
"include": ["types"],
"include": ["types", "src", "test-node"],
"exclude": ["dist-types"]
}

View file

@@ -1,4 +1,5 @@
import { File } from '@babel/types';
import Vol from 'memfs';
/**
* The name of a variable in a local context. Examples:
@@ -152,6 +153,8 @@ export interface ProjectInputDataWithAstMeta extends ProjectInputDataWithMeta {
*/
export type AnyMatchString = string;
export type FsAdapter = Vol;
export type ProvidenceConfig = {
/* Whether analyzer should be run or a grep should be performed */
queryMethod: 'ast' | 'grep';
@@ -169,6 +172,7 @@ export type ProvidenceConfig = {
writeLogFile: boolean;
skipCheckMatchCompatibility: boolean;
fallbackToBabel: boolean;
fs: FsAdapter;
};
/**
@@ -182,6 +186,7 @@ export type PackageJson = {
devDependencies?: { [dependency: string]: string };
workspaces?: string[];
main?: string;
exports?: { [key: string]: string };
};
export type LernaJson = {
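The fs option introduced above is typed as an FsAdapter (a memfs volume), so providence can be pointed at a virtual file system. A hedged construction example using memfs; how the adapter is consumed internally is not shown in this diff:

// Assumption: a memfs Volume is what satisfies the FsAdapter type above.
import { Volume } from 'memfs';

const virtualFs = Volume.fromJSON(
  {
    './package.json': '{ "name": "fake-pkg", "main": "./index.js" }',
    './index.js': "export const foo = 'bar';",
  },
  '/fakeFs',
);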

View file

@@ -702,8 +702,10 @@ describe('Ajax', () => {
const errors = [
"Failed to execute 'fetch' on 'Window': The user aborted a request.", // chromium
'signal is aborted without reason', // newer chromium (?)
'The operation was aborted. ', // firefox
'Request signal is aborted', // webkit
'The operation was aborted.', // newer webkit
];
expect(errors.includes(/** @type {Error} */ (err).message)).to.be.true;

View file

@@ -40,8 +40,11 @@ export function runCheckboxIndeterminateSuite(customConfig) {
let childTag;
before(async () => {
// @ts-expect-error
tag = unsafeStatic(cfg.tagString);
// @ts-expect-error
groupTag = unsafeStatic(cfg.groupTagString);
// @ts-expect-error
childTag = unsafeStatic(cfg.childTagString);
});

View file

@@ -788,6 +788,7 @@ export function runValidateMixinSuite(customConfig) {
await fixture(html`
<${tag}
.validators="${[
// @ts-expect-error
new EqualsLength(4, { getMessage: () => html`<div id="test123">test</div>` }),
]}"
.modelValue="${'123'}"

View file

@@ -1,7 +1,8 @@
import { runInputTelSuite } from '@lion/ui/input-tel-test-suites.js';
import { repeat } from 'lit/directives/repeat.js';
import { ref } from 'lit/directives/ref.js';
import { aTimeout, expect, fixture, html } from '@open-wc/testing';
import { html } from 'lit';
import { aTimeout, expect, fixture } from '@open-wc/testing';
import { LionInputTelDropdown } from '@lion/ui/input-tel-dropdown.js';
import { LionOption } from '@lion/ui/listbox.js';
import { LionSelectRich } from '@lion/ui/select-rich.js';

View file

@@ -16,6 +16,7 @@ import {
import { sendKeys } from '@web/test-runner-commands';
import sinon from 'sinon';
import { getListboxMembers } from '../../../exports/listbox-test-helpers.js';
import { browserDetection } from '../../core/src/browserDetection.js';
/**
* @typedef {import('../src/LionListbox.js').LionListbox} LionListbox
@@ -380,7 +381,12 @@ export function runListboxMixinSuite(customConfig = {}) {
await aTimeout(1000);
// top should be offset 2x40px (sticky header elems) instead of 0px
if (browserDetection.isChrome || browserDetection.isChromium) {
// TODO: find out why this is different in recent Chromium
expect(el.scrollTop).to.equal(160);
} else {
expect(el.scrollTop).to.equal(116);
}
});
});

View file

@@ -21,7 +21,6 @@ export function normalizeIntlDate(str, locale = '', { weekday, year, month, day
}
const result = dateString.join('');
// Normalize webkit date formatting without year
if (!year && weekday === 'long' && month === 'long' && day === '2-digit') {
const CHINESE_LOCALES = [
@@ -43,7 +42,7 @@ export function normalizeIntlDate(str, locale = '', { weekday, year, month, day
return result.replace(' ', '');
}
if (result.indexOf(',') === -1 && locale === 'en-GB') {
if ((result.indexOf(',') === -1 && locale === 'en-GB') || locale === 'en-AU') {
// Saturday 12 October -> Saturday, 12 October
const match = result.match(/^(\w*) (\d*) (\w*)$/);
if (match !== null) {
@@ -63,6 +62,18 @@ export function normalizeIntlDate(str, locale = '', { weekday, year, month, day
return `${match[1]}, ${match[3]} ${match[2]}`;
}
}
if (locale === 'uk-UA') {
// суботу => субота
return result.replace('суботу', 'субота');
}
} else if (weekday === 'long' && month === 'long' && day === '2-digit') {
if (result.indexOf(',') === -1 && locale.startsWith('en-')) {
// Saturday 12 October 2023 -> Saturday, 12 October 2023
const [, _weekDayName, _monthDayNumber, _monthName, _year] =
result.match(/^(\w*) (\d*) (\w*) (\d*)$/) || [];
return `${_weekDayName}, ${_monthDayNumber} ${_monthName} ${_year}`;
}
}
return result;
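The added en-* branch rebuilds the date string from an optional regex match. A small self-contained illustration of that match-and-rejoin step (the input value is made up, not a test fixture from this diff):

// Sketch of the destructuring used in the en-* branch above
const sample = 'Saturday 12 October 2023';
const [, weekDayName, monthDayNumber, monthName, year] =
  sample.match(/^(\w*) (\d*) (\w*) (\d*)$/) || [];
console.log(`${weekDayName}, ${monthDayNumber} ${monthName} ${year}`);
// -> 'Saturday, 12 October 2023'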

View file

@@ -1,53 +0,0 @@
diff --git a/node_modules/swc-to-babel/lib/swc/index.js b/node_modules/swc-to-babel/lib/swc/index.js
index ab285b3..a58a61d 100644
--- a/node_modules/swc-to-babel/lib/swc/index.js
+++ b/node_modules/swc-to-babel/lib/swc/index.js
@@ -143,6 +143,15 @@ module.exports.ClassMethod = (path) => {
key,
});
+ if (node.kind === 'getter') {
+ node.kind = 'get';
+ }
+ if (node.kind === 'setter') {
+ node.kind = 'set';
+ }
+
+ node.static = node.isStatic;
+
delete path.node.isStatic;
delete path.node.accessibility;
delete path.node.isAbstract;
@@ -301,7 +310,7 @@ module.exports.TSIndexedAccessType = (path) => {
module.exports.ImportDeclaration = ({node}) => {
const {typeOnly} = node;
- node.assertions = node.asserts || [];
+ node.assertions = node.asserts?.properties || [];
node.importKind = typeOnly ? 'type' : 'value';
delete node.asserts;
@@ -340,9 +349,10 @@ module.exports.convertGetterSetter = ({node}) => {
};
module.exports.ExportDefaultDeclaration = ({node}) => {
- node.declaration = node.decl;
+ // node.declaration may have been already provided by convertExportDefaultExpression
+ node.declaration = node.declaration || node.decl;
node.exportKind = 'value';
- node.assertions = [];
+ node.assertions = node.asserts?.properties || [];
delete node.decl;
};
@@ -350,8 +360,8 @@ module.exports.ExportDefaultDeclaration = ({node}) => {
module.exports.ExportNamedDeclaration = ({node}) => {
const {typeOnly} = node;
- node.assertions = [];
- node.source = null;
+ node.assertions = node.asserts?.properties || [];
+ // node.source = null;
node.specifiers = node.specifiers || [];
node.exportKind = typeOnly ? 'type' : 'value';

View file

@@ -1,16 +0,0 @@
const fs = require('fs');
const path = require('path');
const lockFileContent = fs.readFileSync(path.resolve('./package-lock.json'), 'utf8');
const allowedRegistries = ['registry.yarnpkg.com', 'registry.npmjs.org'];
const resolvedUrls = lockFileContent.match(/"resolved": "https:.*"/g);
resolvedUrls?.forEach(url => {
const [, registry] = url.match(/^"resolved": "https:\/\/(.*?)\/.*"$/) || [];
if (!allowedRegistries.includes(registry)) {
throw new Error(
`Disallowed registries ("${registry}") in your package-lock.json!
Please make sure you are using a public npm registry when downloading your dependencies!`,
);
}
});

17
scripts/lock-scan.mjs Normal file
View file

@@ -0,0 +1,17 @@
import path from 'path';
import fs from 'fs';
function containsDisallowedRegistry(url) {
const [, registry] = url.match(/^"resolved": "https:\/\/(.*?)\/.*"$/) || [];
return !['registry.yarnpkg.com', 'registry.npmjs.org'].includes(registry);
}
const lockFileContent = fs.readFileSync(path.resolve('./package-lock.json'), 'utf8');
const resolvedUrls = lockFileContent.match(/"resolved": "https:.*"/g);
if (resolvedUrls?.some(containsDisallowedRegistry)) {
throw new Error(
`Disallowed registries in your package-lock.json!
Please make sure you are using a public npm registry when downloading your dependencies!`,
);
}
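For reference, the kind of entry the regex above extracts and how the helper classifies it (URLs are illustrative, not taken from this repo's lock file):

// Illustrative calls against single "resolved" entries
containsDisallowedRegistry('"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz"');
// -> false: allowed public registry
containsDisallowedRegistry('"resolved": "https://npm.internal-mirror.example/lodash/-/lodash-4.17.21.tgz"');
// -> true: would make the script throw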

View file

@@ -3,10 +3,8 @@
const { chromium } = require('playwright');
const looksSame = require('looks-same');
const { join } = require('path');
const mkdirp = require('mkdirp-promise');
const fs = require('fs');
const { promisify } = require('es6-promisify');
const minimist = require('minimist');
const { promisify } = require('util');
const { mdjsTransformer } = require('@mdjs/core');
const { createConfig, startServer } = require('es-dev-server');
const nodePath = require('path');
@@ -14,7 +12,6 @@ const nodePath = require('path');
const access = promisify(fs.access);
const compareScreenshots = promisify(looksSame);
const createScreenshotsDiff = promisify(looksSame.createDiff);
const args = minimist(process.argv);
const DIFF_FOLDER_PREFIX = '.diff';
const CURRENT_FOLDER_PREFIX = '.current';
@@ -157,7 +154,7 @@ async function getScreenshot({ root, id, selector, page, clip }) {
const { path, folder } = await buildPath({ root, id, selector });
mkdirp(folder);
await fs.promises.mkdir(folder, { recursive: true });
// Remove caret from screenshots to avoid caret diff
await page.evaluate(() => {
@@ -201,7 +198,7 @@ const screenshotsCompareOptions = {
async function invalidateScreenshots({ diffRoot: root, id, selector, reference, current }) {
const { path, folder } = await buildPath({ root, id, selector });
mkdirp(folder);
await fs.promises.mkdir(folder, { recursive: true });
await createScreenshotsDiff({
...screenshotsCompareOptions,
@@ -238,10 +235,11 @@ async function validateScreenshot(suite) {
}
}
let updateScreenshots = args['update-screenshots'] || process.env.UPDATE_SCREENSHOTS;
let updateScreenshots =
process.argv.includes('--update-screenshots') || process.env.UPDATE_SCREENSHOTS;
try {
const avaConfig = JSON.parse(args._[2]);
const avaConfig = JSON.parse(process.argv[2]);
updateScreenshots = avaConfig.updateScreenshots;
} catch (e) {
log('Could not parse config');

View file

@@ -1,105 +0,0 @@
import { exec } from 'child_process';
import fs from 'fs';
// eslint-disable-next-line import/no-extraneous-dependencies
import lockfile from '@yarnpkg/lockfile';
/**
* === Generic Helpers ===
*/
const execPromise = cmd =>
new Promise(resolve => exec(cmd, { maxBuffer: 200000000 }, (err, stdout) => resolve(stdout)));
const arrDiff = (arrA, arrB, eq = (a, b) => a === b) =>
arrA.filter(a => arrB.every(b => !eq(a, b)));
/**
* === yarn-lock-diff ===
*/
function groupByPackageName(obj) {
const packages = [];
Object.keys(obj.object).forEach(key => {
const names = key.split('@');
let name = names[0];
if (name === '') {
// handle scoped packages
name = `@${names[1]}`;
}
const { version } = obj.object[key];
const found = packages.find(p => p.name === name);
if (found) {
found.versions.push(version);
} else {
packages.push({
name,
versions: [version],
});
}
});
return packages;
}
function yarnLockDiff(prevLockContents, curLockContents) {
const previous = lockfile.parse(prevLockContents);
const current = lockfile.parse(curLockContents);
const previousPackages = groupByPackageName(previous);
const currentPackages = groupByPackageName(current);
const removedResult = [];
const changedResult = [];
previousPackages.forEach(prevPkg => {
const foundCurPkg = currentPackages.find(curPkg => curPkg.name === prevPkg.name);
if (!foundCurPkg) {
removedResult.push(prevPkg);
} else {
const diff = arrDiff(foundCurPkg.versions, prevPkg.versions);
if (diff.length) {
changedResult.push({
name: prevPkg.name,
previousVersions: Array.from(new Set(prevPkg.versions)),
currentVersions: Array.from(new Set(foundCurPkg.versions)),
diff,
});
}
}
});
return { removed: removedResult, changed: changedResult };
}
/**
* === cli ===
*/
function getArgs() {
const idx = process.argv.findIndex(a => a === '--versions-back');
let versionsBack;
if (idx > 0) {
versionsBack = Number(process.argv[idx + 1]);
if (Number.isNaN(versionsBack)) {
throw new Error('Please provide a number for --versions-back');
}
} else {
versionsBack = 1;
}
return { versionsBack };
}
async function main() {
const { versionsBack } = getArgs();
const changeHistory = await execPromise(`git log yarn.lock`);
const commits = changeHistory
.match(/commit (.*)\n/g)
.map(c => c.replace('commit ', '').replace('\n', ''));
// For now, we pick latest commit. When needed in the future, allow '--age 2-months' or smth
const prevLockContents = await execPromise(`git show ${commits[versionsBack - 1]}:yarn.lock`);
const curLockContents = await fs.promises.readFile('yarn.lock', 'utf8');
// eslint-disable-next-line no-console
console.log(JSON.stringify(yarnLockDiff(prevLockContents, curLockContents), null, 2));
}
main();

View file

@@ -1,6 +1,8 @@
import fs from 'fs';
import { playwrightLauncher } from '@web/test-runner-playwright';
const devMode = process.argv.includes('--dev-mode');
const packages = fs
.readdirSync('packages')
.filter(
@@ -33,7 +35,7 @@ const testRunnerHtml = testRunnerImport =>
`;
export default {
nodeResolve: true,
nodeResolve: { exportConditions: [devMode && 'development'] },
coverageConfig: {
report: true,
reportDir: 'coverage',