diff --git a/packages/plugin-lighthouse/src/lib/constants.ts b/packages/plugin-lighthouse/src/lib/constants.ts
index d1ee09fce..9b9462de2 100644
--- a/packages/plugin-lighthouse/src/lib/constants.ts
+++ b/packages/plugin-lighthouse/src/lib/constants.ts
@@ -6,6 +6,8 @@ import { DEFAULT_PERSIST_OUTPUT_DIR } from '@code-pushup/models';
 export const DEFAULT_CHROME_FLAGS = [...DEFAULT_FLAGS, '--headless'];
 
 export const LIGHTHOUSE_PLUGIN_SLUG = 'lighthouse';
+export const LIGHTHOUSE_PLUGIN_TITLE = 'Lighthouse';
+
 export const LIGHTHOUSE_OUTPUT_PATH = path.join(
   DEFAULT_PERSIST_OUTPUT_DIR,
   LIGHTHOUSE_PLUGIN_SLUG,
diff --git a/packages/plugin-lighthouse/src/lib/format.ts b/packages/plugin-lighthouse/src/lib/format.ts
new file mode 100644
index 000000000..da4c9575d
--- /dev/null
+++ b/packages/plugin-lighthouse/src/lib/format.ts
@@ -0,0 +1,4 @@
+import { pluginMetaLogFormatter } from '@code-pushup/utils';
+import { LIGHTHOUSE_PLUGIN_TITLE } from './constants.js';
+
+export const formatMetaLog = pluginMetaLogFormatter(LIGHTHOUSE_PLUGIN_TITLE);
diff --git a/packages/plugin-lighthouse/src/lib/lighthouse-plugin.ts b/packages/plugin-lighthouse/src/lib/lighthouse-plugin.ts
index 8a7c49036..6b33e9841 100644
--- a/packages/plugin-lighthouse/src/lib/lighthouse-plugin.ts
+++ b/packages/plugin-lighthouse/src/lib/lighthouse-plugin.ts
@@ -1,7 +1,10 @@
 import { createRequire } from 'node:module';
 import type { PluginConfig, PluginUrls } from '@code-pushup/models';
 import { normalizeUrlInput } from '@code-pushup/utils';
-import { LIGHTHOUSE_PLUGIN_SLUG } from './constants.js';
+import {
+  LIGHTHOUSE_PLUGIN_SLUG,
+  LIGHTHOUSE_PLUGIN_TITLE,
+} from './constants.js';
 import { normalizeFlags } from './normalize-flags.js';
 import { processAuditsAndGroups } from './processing.js';
 import { createRunnerFunction } from './runner/runner.js';
@@ -33,10 +36,10 @@ export function lighthousePlugin(
 
   return {
     slug: LIGHTHOUSE_PLUGIN_SLUG,
+    title: LIGHTHOUSE_PLUGIN_TITLE,
+    icon: 'lighthouse',
     packageName: packageJson.name,
     version: packageJson.version,
-    title: 'Lighthouse',
-    icon: 'lighthouse',
     audits,
     groups,
     runner: createRunnerFunction(normalizedUrls, {
diff --git a/packages/plugin-lighthouse/src/lib/processing.ts b/packages/plugin-lighthouse/src/lib/processing.ts
index f84a79dc8..14a5e09d1 100644
--- a/packages/plugin-lighthouse/src/lib/processing.ts
+++ b/packages/plugin-lighthouse/src/lib/processing.ts
@@ -1,9 +1,15 @@
+import type { Audit, Group } from '@code-pushup/models';
 import {
   addIndex,
   expandAuditsForUrls,
   expandGroupsForUrls,
+  logger,
+  objectFromEntries,
+  objectToEntries,
+  pluralizeToken,
   shouldExpandForUrls,
 } from '@code-pushup/utils';
+import { formatMetaLog } from './format.js';
 import {
   LIGHTHOUSE_GROUPS,
   LIGHTHOUSE_NAVIGATION_AUDITS,
@@ -14,25 +20,26 @@ export function expandOptionsForUrls(
   options: FilterOptions,
   urlCount: number,
 ): FilterOptions {
-  return Object.fromEntries(
-    Object.entries(options).map(([key, value]) => [
+  return objectFromEntries(
+    objectToEntries(options).map(([key, value = []]) => [
       key,
-      Array.isArray(value)
-        ? value.flatMap(slug =>
-            Array.from({ length: urlCount }, (_, i) => addIndex(slug, i)),
-          )
-        : value,
+      value.flatMap(slug =>
+        Array.from({ length: urlCount }, (_, i) => addIndex(slug, i)),
+      ),
     ]),
   );
 }
 
 export function processAuditsAndGroups(urls: string[], options: FilterOptions) {
+  logTotal();
   if (!shouldExpandForUrls(urls.length)) {
-    return markSkippedAuditsAndGroups(
+    const marked = markSkippedAuditsAndGroups(
       LIGHTHOUSE_NAVIGATION_AUDITS,
       LIGHTHOUSE_GROUPS,
       options,
     );
+    logSkipped(marked);
+    return marked;
   }
   const expandedAudits = expandAuditsForUrls(
     LIGHTHOUSE_NAVIGATION_AUDITS,
@@ -40,9 +47,55 @@ export function processAuditsAndGroups(urls: string[], options: FilterOptions) {
   );
   const expandedGroups = expandGroupsForUrls(LIGHTHOUSE_GROUPS, urls);
   const expandedOptions = expandOptionsForUrls(options, urls.length);
-  return markSkippedAuditsAndGroups(
+  logExpanded(expandedAudits, expandedGroups, urls);
+  const marked = markSkippedAuditsAndGroups(
     expandedAudits,
     expandedGroups,
     expandedOptions,
   );
+  logSkipped(marked);
+  return marked;
+}
+
+function logTotal(): void {
+  logger.info(
+    formatMetaLog(
+      `Created ${pluralizeToken('group', LIGHTHOUSE_GROUPS.length)} and ${pluralizeToken('audit', LIGHTHOUSE_NAVIGATION_AUDITS.length)} from Lighthouse's categories and navigation audits`,
+    ),
+  );
+}
+
+function logExpanded(
+  expandedAudits: Audit[],
+  expandedGroups: Group[],
+  urls: string[],
+): void {
+  logger.info(
+    formatMetaLog(
+      `Expanded audits (${LIGHTHOUSE_NAVIGATION_AUDITS.length} → ${expandedAudits.length}) and groups (${LIGHTHOUSE_GROUPS.length} → ${expandedGroups.length}) for ${pluralizeToken('URL', urls.length)}`,
+    ),
+  );
+}
+
+function logSkipped(marked: { audits: Audit[]; groups: Group[] }): void {
+  const { audits, groups } = marked;
+
+  const formattedCounts = [
+    { name: 'audit', items: audits },
+    { name: 'group', items: groups },
+  ]
+    .map(({ name, items }) => {
+      const skipped = items.filter(({ isSkipped }) => isSkipped);
+      if (skipped.length === 0) {
+        return '';
+      }
+      return `${skipped.length} out of ${pluralizeToken(name, items.length)}`;
+    })
+    .filter(Boolean)
+    .join(' and ');
+
+  if (!formattedCounts) {
+    return;
+  }
+  logger.info(formatMetaLog(`Skipping ${formattedCounts}`));
 }
diff --git a/packages/plugin-lighthouse/src/lib/runner/runner.ts b/packages/plugin-lighthouse/src/lib/runner/runner.ts
index fdb8e0ac5..29c5a9868 100644
--- a/packages/plugin-lighthouse/src/lib/runner/runner.ts
+++ b/packages/plugin-lighthouse/src/lib/runner/runner.ts
@@ -1,11 +1,18 @@
-import type { Config, RunnerResult } from 'lighthouse';
+import ansis from 'ansis';
+import type { Config, Result, RunnerResult } from 'lighthouse';
 import { runLighthouse } from 'lighthouse/cli/run.js';
 import path from 'node:path';
-import type { AuditOutputs, RunnerFunction } from '@code-pushup/models';
+import type {
+  AuditOutputs,
+  RunnerFunction,
+  TableColumnObject,
+} from '@code-pushup/models';
 import {
   addIndex,
+  asyncSequential,
   ensureDirectoryExists,
-  formatAsciiLink,
+  formatAsciiTable,
+  formatReportScore,
   logger,
   shouldExpandForUrls,
   stringifyError,
@@ -15,8 +22,8 @@ import { DEFAULT_CLI_FLAGS } from './constants.js';
 import type { LighthouseCliFlags } from './types.js';
 import {
   enrichFlags,
+  filterAuditOutputs,
   getConfig,
-  normalizeAuditOutputs,
   toAuditOutputs,
   withLocalTmpDir,
 } from './utils.js';
@@ -28,64 +35,118 @@ export function createRunnerFunction(
   return withLocalTmpDir(async (): Promise<AuditOutputs> => {
     const config = await getConfig(flags);
     const normalizationFlags = enrichFlags(flags);
-    const isSingleUrl = !shouldExpandForUrls(urls.length);
+    const urlsCount = urls.length;
+    const isSingleUrl = !shouldExpandForUrls(urlsCount);
 
-    const allResults = await urls.reduce(async (prev, url, index) => {
-      const acc = await prev;
-      try {
-        const enrichedFlags = isSingleUrl
-          ? normalizationFlags
-          : enrichFlags(flags, index + 1);
+    const allResults = await asyncSequential(urls, (url, urlIndex) => {
+      const enrichedFlags = isSingleUrl
+        ? normalizationFlags
+        : enrichFlags(flags, urlIndex + 1);
+      const step = { urlIndex, urlsCount };
+      return runLighthouseForUrl(url, enrichedFlags, config, step);
+    });
 
-        const auditOutputs = await runLighthouseForUrl(
-          url,
-          enrichedFlags,
-          config,
-        );
-
-        const processedOutputs = isSingleUrl
-          ? auditOutputs
-          : auditOutputs.map(audit => ({
-              ...audit,
-              slug: addIndex(audit.slug, index),
-            }));
-
-        return [...acc, ...processedOutputs];
-      } catch (error) {
-        logger.warn(stringifyError(error));
-        return acc;
-      }
-    }, Promise.resolve([]));
-
-    if (allResults.length === 0) {
+    const collectedResults = allResults.filter(res => res != null);
+    if (collectedResults.length === 0) {
       throw new Error(
         isSingleUrl
           ? 'Lighthouse did not produce a result.'
          : 'Lighthouse failed to produce results for all URLs.',
       );
     }
-    return normalizeAuditOutputs(allResults, normalizationFlags);
+
+    logResultsForAllUrls(collectedResults);
+
+    const auditOutputs: AuditOutputs = collectedResults.flatMap(
+      res => res.auditOutputs,
+    );
+    return filterAuditOutputs(auditOutputs, normalizationFlags);
   });
 }
 
+type ResultForUrl = {
+  url: string;
+  lhr: Result;
+  auditOutputs: AuditOutputs;
+};
+
 async function runLighthouseForUrl(
   url: string,
   flags: LighthouseOptions,
   config: Config | undefined,
-): Promise<AuditOutputs> {
-  if (flags.outputPath) {
-    await ensureDirectoryExists(path.dirname(flags.outputPath));
-  }
+  step: { urlIndex: number; urlsCount: number },
+): Promise<ResultForUrl | null> {
+  const { urlIndex, urlsCount } = step;
 
-  const runnerResult: unknown = await runLighthouse(url, flags, config);
+  const prefix = ansis.gray(`[${step.urlIndex + 1}/${step.urlsCount}]`);
 
-  if (runnerResult == null) {
-    throw new Error(
-      `Lighthouse did not produce a result for URL: ${formatAsciiLink(url)}`,
+  try {
+    if (flags.outputPath) {
+      await ensureDirectoryExists(path.dirname(flags.outputPath));
+    }
+
+    const lhr: Result = await logger.task(
+      `${prefix} Running lighthouse on ${url}`,
+      async () => {
+        const runnerResult: RunnerResult | undefined = await runLighthouse(
+          url,
+          flags,
+          config,
+        );
+
+        if (runnerResult == null) {
+          throw new Error('Lighthouse did not produce a result');
+        }
+
+        return {
+          message: `${prefix} Completed lighthouse run on ${url}`,
+          result: runnerResult.lhr,
+        };
+      },
     );
+
+    const auditOutputs = toAuditOutputs(Object.values(lhr.audits), flags);
+    if (shouldExpandForUrls(urlsCount)) {
+      return {
+        url,
+        lhr,
+        auditOutputs: auditOutputs.map(audit => ({
+          ...audit,
+          slug: addIndex(audit.slug, urlIndex),
+        })),
+      };
+    }
+    return { url, lhr, auditOutputs };
+  } catch (error) {
+    logger.warn(`Lighthouse run failed for ${url} - ${stringifyError(error)}`);
+    return null;
   }
+}
 
-  const { lhr } = runnerResult as RunnerResult;
+function logResultsForAllUrls(results: ResultForUrl[]): void {
+  const categoryNames = Object.fromEntries(
+    results
+      .flatMap(res => Object.values(res.lhr.categories))
+      .map(category => [category.id, category.title]),
+  );
 
-  return toAuditOutputs(Object.values(lhr.audits), flags);
+  logger.info(
+    formatAsciiTable({
+      columns: [
+        { key: 'url', label: 'URL', align: 'left' },
+        ...Object.entries(categoryNames).map(
+          ([key, label]): TableColumnObject => ({ key, label, align: 'right' }),
+        ),
+      ],
+      rows: results.map(({ url, lhr }) => ({
+        url,
+        ...Object.fromEntries(
+          Object.values(lhr.categories).map(category => [
+            category.id,
+            category.score == null ? '-' : formatReportScore(category.score),
+          ]),
+        ),
+      })),
+    }),
+  );
 }
diff --git a/packages/plugin-lighthouse/src/lib/runner/runner.unit.test.ts b/packages/plugin-lighthouse/src/lib/runner/runner.unit.test.ts
index c78c0cdb2..9c0ca0a04 100644
--- a/packages/plugin-lighthouse/src/lib/runner/runner.unit.test.ts
+++ b/packages/plugin-lighthouse/src/lib/runner/runner.unit.test.ts
@@ -1,4 +1,3 @@
-import ansis from 'ansis';
 import type { Config } from 'lighthouse';
 import { runLighthouse } from 'lighthouse/cli/run.js';
 import type { Result } from 'lighthouse/types/lhr/audit-result';
@@ -52,6 +51,7 @@ vi.mock('lighthouse/cli/run.js', async () => {
           score: 0.9,
         } satisfies Result,
       },
+      categories: {},
     },
   },
 );
@@ -177,7 +177,7 @@ describe('createRunnerFunction', () => {
   it('should continue with other URLs when one fails in multiple URL scenario', async () => {
     const runner = createRunnerFunction([
       'https://localhost:8080',
-      'fail',
+      'http://fail.com',
       'https://localhost:8082',
     ]);
 
@@ -199,7 +199,7 @@
     );
 
     expect(logger.warn).toHaveBeenCalledWith(
-      `Lighthouse did not produce a result for URL: ${ansis.blueBright('fail')}`,
+      'Lighthouse run failed for http://fail.com - Lighthouse did not produce a result',
     );
   });
 
diff --git a/packages/plugin-lighthouse/src/lib/runner/utils.ts b/packages/plugin-lighthouse/src/lib/runner/utils.ts
index 50a4ed47e..a68ad368e 100644
--- a/packages/plugin-lighthouse/src/lib/runner/utils.ts
+++ b/packages/plugin-lighthouse/src/lib/runner/utils.ts
@@ -22,7 +22,7 @@ import type { LighthouseOptions } from '../types.js';
 import { logUnsupportedDetails, toAuditDetails } from './details/details.js';
 import type { LighthouseCliFlags } from './types.js';
 
-export function normalizeAuditOutputs(
+export function filterAuditOutputs(
   auditOutputs: AuditOutputs,
   flags: LighthouseOptions = { skipAudits: [] },
 ): AuditOutputs {
@@ -33,7 +33,7 @@ export class LighthouseAuditParsingError extends Error {
   constructor(slug: string, error: unknown) {
     super(
-      `\nAudit ${ansis.bold(slug)} failed parsing details: \n${stringifyError(error)}`,
+      `Failed to parse ${ansis.bold(slug)} audit's details - ${stringifyError(error)}`,
     );
   }
 }
 
@@ -99,6 +99,7 @@ export type LighthouseLogLevel =
   | 'silent'
   | 'warn'
   | undefined;
+
 export function determineAndSetLogLevel({
   verbose,
   quiet,
@@ -127,31 +128,52 @@ export type ConfigOptions = Partial<
 
 export async function getConfig(
   options: ConfigOptions = {},
 ): Promise<Config | undefined> {
-  const { configPath: filepath, preset } = options;
-
-  if (filepath != null) {
-    if (filepath.endsWith('.json')) {
-      // Resolve the config file path relative to where cli was called.
-      return readJsonFile(filepath);
-    } else if (/\.(ts|js|mjs)$/.test(filepath)) {
-      return importModule({ filepath, format: 'esm' });
+  const { configPath, preset } = options;
+
+  if (configPath != null) {
+    // Resolve the config file path relative to where cli was called.
+    return logger.task(
+      `Loading lighthouse config from ${configPath}`,
+      async () => {
+        const message = `Loaded lighthouse config from ${configPath}`;
+        if (configPath.endsWith('.json')) {
+          return { message, result: await readJsonFile(configPath) };
+        }
+        if (/\.(ts|js|mjs)$/.test(configPath)) {
+          return {
+            message,
+            result: await importModule({
+              filepath: configPath,
+              format: 'esm',
+            }),
+          };
+        }
+        throw new Error(
+          `Unknown Lighthouse config file extension in ${configPath}`,
+        );
+      },
+    );
+  }
+
+  if (preset != null) {
+    const supportedPresets: Record<
+      NonNullable<ConfigOptions['preset']>,
+      Config
+    > = {
+      desktop: desktopConfig,
+      perf: perfConfig,
+      experimental: experimentalConfig,
+    };
+    // in reality, the preset could be a string not included in the type definition
+    const config: Config | undefined = supportedPresets[preset];
+    if (config) {
+      logger.info(`Loaded config from ${ansis.bold(preset)} preset`);
+      return config;
     } else {
-      logger.warn(`Format of file ${filepath} not supported`);
-    }
-  } else if (preset != null) {
-    switch (preset) {
-      case 'desktop':
-        return desktopConfig;
-      case 'perf':
-        return perfConfig as Config;
-      case 'experimental':
-        return experimentalConfig as Config;
-      default:
-        // as preset is a string literal the default case here is normally caught by TS and not possible to happen. Now in reality it can happen and preset could be a string not included in the literal. Therefore, we have to use `as string`. Otherwise, it will consider preset as type never
-        logger.warn(`Preset "${preset as string}" is not supported`);
+      logger.warn(`Preset "${preset}" is not supported`);
     }
   }
+
   return undefined;
 }
 
@@ -190,10 +212,12 @@ export function withLocalTmpDir<T>(fn: () => Promise<T>): () => Promise<T> {
   return async () => {
     const originalTmpDir = process.env['TEMP'];
 
+    const localPath = path.join(pluginWorkDir(LIGHTHOUSE_PLUGIN_SLUG), 'tmp');
+
     // eslint-disable-next-line functional/immutable-data
-    process.env['TEMP'] = path.join(
-      pluginWorkDir(LIGHTHOUSE_PLUGIN_SLUG),
-      'tmp',
+    process.env['TEMP'] = localPath;
+    logger.debug(
+      `Temporarily overwriting TEMP environment variable with ${localPath} to prevent permissions error on cleanup`,
     );
 
     try {
@@ -201,6 +225,9 @@ export function withLocalTmpDir<T>(fn: () => Promise<T>): () => Promise<T> {
     } finally {
       // eslint-disable-next-line functional/immutable-data
       process.env['TEMP'] = originalTmpDir;
+      logger.debug(
+        `Restored TEMP environment variable to original value ${originalTmpDir}`,
+      );
     }
   };
 }
diff --git a/packages/plugin-lighthouse/src/lib/runner/utils.unit.test.ts b/packages/plugin-lighthouse/src/lib/runner/utils.unit.test.ts
index 536512e74..458efe97d 100644
--- a/packages/plugin-lighthouse/src/lib/runner/utils.unit.test.ts
+++ b/packages/plugin-lighthouse/src/lib/runner/utils.unit.test.ts
@@ -20,8 +20,8 @@ import type { LighthouseCliFlags } from './types.js';
 import {
   determineAndSetLogLevel,
   enrichFlags,
+  filterAuditOutputs,
   getConfig,
-  normalizeAuditOutputs,
   toAuditOutputs,
   withLocalTmpDir,
 } from './utils.js';
@@ -51,10 +51,10 @@ vi.mock('bundle-require', async () => {
   };
 });
 
-describe('normalizeAuditOutputs', () => {
+describe('filterAuditOutputs', () => {
   it('should filter audits listed under skipAudits', () => {
     expect(
-      normalizeAuditOutputs(
+      filterAuditOutputs(
         [
           { slug: 'largest-contentful-paint' } as AuditOutput,
           { slug: 'cumulative-layout-shifts' } as AuditOutput,
@@ -66,7 +66,7 @@
   it('should NOT filter audits if no skipAudits are listed', () => {
     expect(
-      normalizeAuditOutputs([
+      filterAuditOutputs([
         { slug: 'largest-contentful-paint' } as AuditOutput,
         { slug: 'cumulative-layout-shifts' } as AuditOutput,
       ]),
 
@@ -308,7 +308,7 @@ describe('toAuditOutputs', () => {
         { verbose: true },
       ),
     ).toThrow(
-      `Audit ${ansis.bold('cumulative-layout-shift')} failed parsing details:`,
+      `Failed to parse ${ansis.bold('cumulative-layout-shift')} audit's details`,
     );
   });
 });
@@ -378,12 +378,9 @@ describe('getConfig', () => {
     );
   });
 
-  it('should return undefined and log if configPath has wrong extension', async () => {
-    await expect(
-      getConfig({ configPath: path.join('wrong.not') }),
-    ).resolves.toBeUndefined();
-    expect(logger.warn).toHaveBeenCalledWith(
-      'Format of file wrong.not not supported',
+  it('should throw if configPath has wrong extension', async () => {
+    await expect(getConfig({ configPath: 'wrong.not' })).rejects.toThrow(
+      'Unknown Lighthouse config file extension in wrong.not',
     );
   });
 });
diff --git a/packages/plugin-lighthouse/src/lib/utils.ts b/packages/plugin-lighthouse/src/lib/utils.ts
index af3b6b256..4c1605ca2 100644
--- a/packages/plugin-lighthouse/src/lib/utils.ts
+++ b/packages/plugin-lighthouse/src/lib/utils.ts
@@ -25,9 +25,10 @@ export function lighthouseAuditRef(auditSlug: string, weight = 1): CategoryRef {
   };
 }
 
-export class AuditsNotImplementedError extends Error {
-  constructor(auditSlugs: string[]) {
-    super(`audits: "${auditSlugs.join(', ')}" not implemented`);
+class NotImplementedError extends Error {
+  constructor(plural: string, slugs: string[]) {
+    const formattedSlugs = slugs.map(slug => `"${slug}"`).join(', ');
+    super(`${plural} not implemented: ${formattedSlugs}`);
   }
 }
 
@@ -36,17 +37,11 @@ export function validateAudits(audits: Audit[], onlyAudits: string[]): boolean {
     slug => !audits.some(audit => audit.slug === slug),
   );
   if (missingAudtis.length > 0) {
-    throw new AuditsNotImplementedError(missingAudtis);
+    throw new NotImplementedError('Audits', missingAudtis);
   }
   return true;
 }
 
-export class CategoriesNotImplementedError extends Error {
-  constructor(categorySlugs: string[]) {
-    super(`categories: "${categorySlugs.join(', ')}" not implemented`);
-  }
-}
-
 export function validateOnlyCategories(
   groups: Group[],
   onlyCategories: string | string[],
@@ -55,7 +50,7 @@ export function validateOnlyCategories(
     groups.every(group => group.slug !== slug),
   );
   if (missingCategories.length > 0) {
-    throw new CategoriesNotImplementedError(missingCategories);
+    throw new NotImplementedError('Categories', missingCategories);
   }
   return true;
 }
diff --git a/packages/plugin-lighthouse/src/lib/utils.unit.test.ts b/packages/plugin-lighthouse/src/lib/utils.unit.test.ts
index c9d816aed..9a25db34f 100644
--- a/packages/plugin-lighthouse/src/lib/utils.unit.test.ts
+++ b/packages/plugin-lighthouse/src/lib/utils.unit.test.ts
@@ -7,8 +7,6 @@ import {
   pluginConfigSchema,
 } from '@code-pushup/models';
 import {
-  AuditsNotImplementedError,
-  CategoriesNotImplementedError,
   lighthouseAuditRef,
   lighthouseGroupRef,
   markSkippedAuditsAndGroups,
@@ -70,7 +68,7 @@ describe('validateAudits', () => {
         ],
         ['missing-audit'],
       ),
-    ).toThrow(new AuditsNotImplementedError(['missing-audit']));
+    ).toThrow('Audits not implemented: "missing-audit"');
   });
 });
 
@@ -107,7 +105,7 @@ describe('validateOnlyCategories', () => {
         ],
         'missing-category',
       ),
-    ).toThrow(new CategoriesNotImplementedError(['missing-category']));
+    ).toThrow('Categories not implemented: "missing-category"');
   });
 });
 
@@ -209,7 +207,7 @@ describe('markSkippedAuditsAndGroups to be used in plugin config', () => {
       ] as Group[],
       { skipAudits: ['missing-audit'] },
     ),
-  ).toThrow(new AuditsNotImplementedError(['missing-audit']));
+  ).toThrow('Audits not implemented: "missing-audit"');
   });
 
   it('should mark audits as not skipped when onlyAudits is set', () => {
@@ -258,7 +256,7 @@ describe('markSkippedAuditsAndGroups to be used in plugin config', () => {
       ] as Group[],
      { onlyAudits: ['missing-audit'] },
    ),
-  ).toThrow(new AuditsNotImplementedError(['missing-audit']));
+  ).toThrow('Audits not implemented: "missing-audit"');
   });
 
   it('should mark skipped audits and groups when onlyGroups is set', () => {
@@ -396,10 +394,10 @@ describe('markSkippedAuditsAndGroups to be used in plugin config', () => {
           onlyAudits: ['missing-audit'],
         },
       ),
-  ).toThrow(new AuditsNotImplementedError(['missing-audit']));
+  ).toThrow('Audits not implemented: "missing-audit"');
   });
 
-  it('should throw if onlyGroups is set with a group slug that is not implemented', () => {
+  it('should throw if onlyCategories is set with a group slug that is not implemented', () => {
     expect(() =>
       markSkippedAuditsAndGroups(
         [{ slug: 'speed-index' }] as Audit[],
@@ -413,6 +411,6 @@ describe('markSkippedAuditsAndGroups to be used in plugin config', () => {
           onlyCategories: ['missing-group'],
         },
       ),
-  ).toThrow(new CategoriesNotImplementedError(['missing-group']));
+  ).toThrow('Categories not implemented: "missing-group"');
   });
 });
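
Usage sketch (supplementary note, not part of the patch): the changes above mostly affect multi-URL runs — per-URL progress logs, the ASCII category score table, and indexed audit/group slugs. A minimal code-pushup.config.ts along the following lines exercises that path. The default import from @code-pushup/plugin-lighthouse and the onlyCategories option follow the plugin's existing public API; the concrete URLs and category list are placeholder assumptions.

// code-pushup.config.ts — minimal sketch for a multi-URL Lighthouse run
import lighthousePlugin from '@code-pushup/plugin-lighthouse';

export default {
  plugins: [
    lighthousePlugin(['https://example.com', 'https://example.com/pricing'], {
      // with more than one URL, audits and groups are expanded per URL
      // (slugs get an index suffix), as implemented in processAuditsAndGroups
      onlyCategories: ['performance', 'accessibility'],
    }),
  ],
};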