diff --git a/src/lib/chunkify.test.ts b/src/lib/chunkify.test.ts index 6eb69ec..8d686fd 100644 --- a/src/lib/chunkify.test.ts +++ b/src/lib/chunkify.test.ts @@ -5,8 +5,8 @@ test('creates chunks with outer chunks covered', () => { let coverage = { text: 'a { color: red; } b { color: green; } c { color: blue; }', ranges: [ - { start: 0, end: 17 }, - { start: 38, end: 56 }, + { start: 0, end: 17, count: 1 }, + { start: 38, end: 56, count: 1 }, ], url: 'https://example.com', } @@ -19,16 +19,19 @@ test('creates chunks with outer chunks covered', () => { start_offset: 0, end_offset: 17, is_covered: true, + coverage_count: 1, }, { start_offset: 17, end_offset: 38, is_covered: false, + coverage_count: 0, }, { start_offset: 38, end_offset: 56, is_covered: true, + coverage_count: 1, }, ], } satisfies ChunkedCoverage) @@ -37,7 +40,7 @@ test('creates chunks with outer chunks covered', () => { test('creates chunks with only middle chunk covered', () => { let coverage = { text: 'a { color: red; } b { color: green; } c { color: blue; }', - ranges: [{ start: 17, end: 38 }], + ranges: [{ start: 17, end: 38, count: 1 }], url: 'https://example.com', } let result = chunkify(coverage) @@ -49,16 +52,19 @@ test('creates chunks with only middle chunk covered', () => { start_offset: 0, end_offset: 17, is_covered: false, + coverage_count: 0, }, { start_offset: 17, end_offset: 38, is_covered: true, + coverage_count: 1, }, { start_offset: 38, end_offset: 56, is_covered: false, + coverage_count: 0, }, ], } satisfies ChunkedCoverage) @@ -67,7 +73,7 @@ test('creates chunks with only middle chunk covered', () => { test('creates a single chunk when all is covered', () => { let coverage = { text: 'a { color: red; } b { color: green; } c { color: blue; }', - ranges: [{ start: 0, end: 56 }], + ranges: [{ start: 0, end: 56, count: 1 }], url: 'https://example.com', } let result = chunkify(coverage) @@ -79,6 +85,7 @@ test('creates a single chunk when all is covered', () => { start_offset: 0, end_offset: 
56, is_covered: true, + coverage_count: 1, }, ], } satisfies ChunkedCoverage) @@ -99,6 +106,7 @@ test('creates a single chunk when none is covered', () => { start_offset: 0, end_offset: 56, is_covered: false, + coverage_count: 0, }, ], } satisfies ChunkedCoverage) @@ -108,7 +116,7 @@ test('includes a trailing uncovered chunk when the last byte is not covered', () // text length = 4; range covers first 3 bytes, leaving the last byte uncovered let coverage = { text: 'abcd', - ranges: [{ start: 0, end: 3 }], + ranges: [{ start: 0, end: 3, count: 1 }], url: 'https://example.com', } let result = chunkify(coverage) @@ -116,8 +124,8 @@ test('includes a trailing uncovered chunk when the last byte is not covered', () expect(result).toEqual({ ...coverage, chunks: [ - { start_offset: 0, end_offset: 3, is_covered: true }, - { start_offset: 3, end_offset: 4, is_covered: false }, + { start_offset: 0, end_offset: 3, is_covered: true, coverage_count: 1 }, + { start_offset: 3, end_offset: 4, is_covered: false, coverage_count: 0 }, ], } satisfies ChunkedCoverage) }) @@ -126,14 +134,14 @@ test('does not emit a spurious empty chunk when the last byte is covered', () => // range covers the full text — no trailing chunk should appear let coverage = { text: 'abcd', - ranges: [{ start: 0, end: 4 }], + ranges: [{ start: 0, end: 4, count: 1 }], url: 'https://example.com', } let result = chunkify(coverage) delete coverage.ranges expect(result).toEqual({ ...coverage, - chunks: [{ start_offset: 0, end_offset: 4, is_covered: true }], + chunks: [{ start_offset: 0, end_offset: 4, is_covered: true, coverage_count: 1 }], } satisfies ChunkedCoverage) }) @@ -145,8 +153,8 @@ test('merges adjacent same-coverage chunks separated by whitespace-only gap', () text: 'a{color:red}\n\nb{color:blue}', // ^12 ^14 — the \n\n gap is whitespace-only ranges: [ - { start: 0, end: 12 }, - { start: 14, end: 27 }, + { start: 0, end: 12, count: 1 }, + { start: 14, end: 27, count: 1 }, ], url: 'https://example.com', } @@ 
-154,7 +162,7 @@ test('merges adjacent same-coverage chunks separated by whitespace-only gap', () delete coverage.ranges expect(result).toEqual({ ...coverage, - chunks: [{ start_offset: 0, end_offset: 27, is_covered: true }], + chunks: [{ start_offset: 0, end_offset: 27, is_covered: true, coverage_count: 1 }], } satisfies ChunkedCoverage) }) @@ -163,13 +171,30 @@ test('absorbs a zero-length covered chunk into the surrounding uncovered chunk', // The empty chunk should not appear in the output. let coverage = { text: 'a{color:red}', - ranges: [{ start: 5, end: 5 }], + ranges: [{ start: 5, end: 5, count: 1 }], url: 'https://example.com', } let result = chunkify(coverage) delete coverage.ranges expect(result).toEqual({ ...coverage, - chunks: [{ start_offset: 0, end_offset: 12, is_covered: false }], + chunks: [{ start_offset: 0, end_offset: 12, is_covered: false, coverage_count: 0 }], + } satisfies ChunkedCoverage) +}) + +test('merges adjacent covered chunks with different coverage counts, keeping the max', () => { + let coverage = { + text: 'a{color:red}b{color:blue}', + ranges: [ + { start: 0, end: 12, count: 2 }, + { start: 12, end: 25, count: 1 }, + ], + url: 'https://example.com', + } + let result = chunkify(coverage) + delete coverage.ranges + expect(result).toEqual({ + ...coverage, + chunks: [{ start_offset: 0, end_offset: 25, is_covered: true, coverage_count: 2 }], } satisfies ChunkedCoverage) }) diff --git a/src/lib/chunkify.ts b/src/lib/chunkify.ts index c9db891..f5bb342 100644 --- a/src/lib/chunkify.ts +++ b/src/lib/chunkify.ts @@ -1,13 +1,14 @@ import { tokenize } from '@projectwallace/css-parser/tokenizer' -import type { Coverage } from './parse-coverage' +import type { WeightedCoverage } from './decuplicate.js' type Chunk = { start_offset: number end_offset: number + coverage_count: number is_covered: boolean } -export type ChunkedCoverage = Omit<Coverage, 'ranges'> & { +export type ChunkedCoverage = Omit<WeightedCoverage, 'ranges'> & { chunks: Chunk[] } @@ -27,10 +28,14 @@ function merge(stylesheet: 
ChunkedCoverage): ChunkedCoverage { let latest_chunk = new_chunks.at(-1) - // merge current and previous if they are both covered or uncovered + // merge current and previous if they have the same coverage status if (i > 0 && previous_chunk && latest_chunk) { if (previous_chunk.is_covered === chunk.is_covered) { latest_chunk.end_offset = chunk.end_offset + // keep the highest count seen across merged covered chunks + if (chunk.coverage_count > latest_chunk.coverage_count) { + latest_chunk.coverage_count = chunk.coverage_count + } previous_chunk = chunk continue } @@ -80,12 +85,14 @@ export function mark_comments_as_covered(stylesheet: ChunkedCoverage): ChunkedCo start_offset: chunk.start_offset + last_end, end_offset: chunk.start_offset + comment.start, is_covered: false, + coverage_count: 0, }) } new_chunks.push({ start_offset: chunk.start_offset + comment.start, end_offset: chunk.start_offset + comment.end, is_covered: true, + coverage_count: 1, }) last_end = comment.end } @@ -95,6 +102,7 @@ export function mark_comments_as_covered(stylesheet: ChunkedCoverage): ChunkedCo start_offset: chunk.start_offset + last_end, end_offset: chunk.end_offset, is_covered: false, + coverage_count: 0, }) } } @@ -102,8 +110,8 @@ export function mark_comments_as_covered(stylesheet: ChunkedCoverage): ChunkedCo return merge({ ...stylesheet, chunks: new_chunks }) } -export function chunkify(stylesheet: Coverage): ChunkedCoverage { - let chunks = [] +export function chunkify(stylesheet: WeightedCoverage): ChunkedCoverage { + let chunks: Chunk[] = [] let offset = 0 for (let range of stylesheet.ranges) { @@ -113,6 +121,7 @@ export function chunkify(stylesheet: Coverage): ChunkedCoverage { start_offset: offset, end_offset: range.start, is_covered: false, + coverage_count: 0, }) offset = range.start } @@ -121,6 +130,7 @@ export function chunkify(stylesheet: Coverage): ChunkedCoverage { start_offset: range.start, end_offset: range.end, is_covered: true, + coverage_count: range.count, }) 
offset = range.end } @@ -131,6 +141,7 @@ export function chunkify(stylesheet: Coverage): ChunkedCoverage { start_offset: offset, end_offset: stylesheet.text.length, is_covered: false, + coverage_count: 0, }) } } diff --git a/src/lib/decuplicate.ts b/src/lib/decuplicate.ts index d14a1df..1933343 100644 --- a/src/lib/decuplicate.ts +++ b/src/lib/decuplicate.ts @@ -1,28 +1,48 @@ import type { Coverage, Range } from './parse-coverage.js' -// 1. Merge and concatenate ranges -function merge_ranges(ranges: Range[]): Range[] { +export type WeightedRange = Range & { count: number } +export type WeightedCoverage = Omit<Coverage, 'ranges'> & { ranges: WeightedRange[] } + +// 1. Sweep-line merge: produces weighted ranges where count = number of input ranges covering each segment +function merge_ranges_weighted(ranges: Range[]): WeightedRange[] { if (ranges.length === 0) return [] - // sort by start - ranges.sort((a, b) => a.start - b.start) + type Event = { pos: number; delta: number } + let events: Event[] = [] + + for (let r of ranges) { + events.push({ pos: r.start, delta: +1 }) + events.push({ pos: r.end, delta: -1 }) + } + + // sort by position; closes (-1) before opens (+1) at the same position + events.sort((a, b) => a.pos - b.pos || a.delta - b.delta) - let merged: Range[] = [ranges[0]!] + let swept: WeightedRange[] = [] + let depth = 0 + let prev_pos: number | null = null - for (let r of ranges.slice(1)) { - let last = merged.at(-1) + for (let event of events) { + if (prev_pos !== null && event.pos > prev_pos && depth > 0) { + swept.push({ start: prev_pos, end: event.pos, count: depth }) + } + depth += event.delta + prev_pos = event.pos + } - // merge overlapping or adjacent - if (last && r.start <= last.end + 1) { - if (r.end > last.end) { - last.end = r.end - } + // Merge adjacent segments (up to 1-byte gap) with the same count, preserving the + // original behaviour where ranges touching at r.start <= last.end + 1 were merged. + let result: WeightedRange[] = swept.length > 0 ? 
[{ ...swept[0]! }] : [] + for (let r of swept.slice(1)) { + let last = result.at(-1)! + if (r.start <= last.end + 1 && r.count === last.count) { + if (r.end > last.end) last.end = r.end } else { - merged.push({ start: r.start, end: r.end }) + result.push({ ...r }) } } - return merged + return result } // 2. Merge ranges for a single stylesheet entry into an existing grouped sheet @@ -42,7 +62,7 @@ function merge_entry_ranges( } // 3. Main function orchestrating the grouping and range merging -export function deduplicate_entries(entries: Coverage[]): Coverage[] { +export function deduplicate_entries(entries: Coverage[]): WeightedCoverage[] { let grouped = entries.reduce>((acc, entry) => { let key = entry.text acc[key] = merge_entry_ranges(acc[key], entry) @@ -52,6 +72,6 @@ export function deduplicate_entries(entries: Coverage[]): Coverage[] { return Object.entries(grouped).map(([text, { url, ranges }]) => ({ text, url, - ranges: merge_ranges(ranges), + ranges: merge_ranges_weighted(ranges), })) } diff --git a/src/lib/deduplicate.test.ts b/src/lib/deduplicate.test.ts index c5621dc..48aca84 100644 --- a/src/lib/deduplicate.test.ts +++ b/src/lib/deduplicate.test.ts @@ -7,7 +7,9 @@ test('handles a single entry', () => { ranges: [{ start: 0, end: 4 }], url: 'example.com', } - expect(deduplicate_entries([entry])).toEqual([entry]) + expect(deduplicate_entries([entry])).toEqual([ + { text: entry.text, url: entry.url, ranges: [{ start: 0, end: 4, count: 1 }] }, + ]) }) test('deduplicates a simple duplicate entry', () => { @@ -16,7 +18,9 @@ test('deduplicates a simple duplicate entry', () => { ranges: [{ start: 0, end: 4 }], url: 'example.com', } - expect(deduplicate_entries([entry, entry])).toEqual([entry]) + expect(deduplicate_entries([entry, entry])).toEqual([ + { text: entry.text, url: entry.url, ranges: [{ start: 0, end: 4, count: 2 }] }, + ]) }) test('merges two identical texts with different URLs and identical ranges', () => { @@ -34,7 +38,7 @@ test('merges two 
identical texts with different URLs and identical ranges', () = ] let first = entries.at(0)! expect(deduplicate_entries(entries)).toEqual([ - { text: first.text, url: first.url, ranges: first.ranges }, + { text: first.text, url: first.url, ranges: [{ start: 0, end: 4, count: 2 }] }, ]) }) @@ -52,8 +56,9 @@ test('merges different ranges on identical CSS, different URLs', () => { }, ] let first = entries.at(0)! + // [0,4] and [5,9] are adjacent within 1-byte tolerance and same count → merged expect(deduplicate_entries(entries)).toEqual([ - { text: first.text, url: first.url, ranges: [{ start: 0, end: 9 }] }, + { text: first.text, url: first.url, ranges: [{ start: 0, end: 9, count: 1 }] }, ]) }) @@ -70,8 +75,9 @@ test('merges different ranges on identical CSS, identical URLs', () => { url: 'example.com', }, ] + // [0,4] and [5,9] are adjacent within 1-byte tolerance and same count → merged expect(deduplicate_entries(entries)).toEqual([ - { text: entries[0]!.text, url: entries[0]!.url, ranges: [{ start: 0, end: 9 }] }, + { text: entries[0]!.text, url: entries[0]!.url, ranges: [{ start: 0, end: 9, count: 1 }] }, ]) }) @@ -89,8 +95,8 @@ test('does not merge different CSS with different URLs and identical ranges', () }, ] expect(deduplicate_entries(entries)).toEqual([ - { text: entries[0]!.text, url: entries[0]!.url, ranges: entries[0]!.ranges }, - { text: entries[1]!.text, url: entries[1]!.url, ranges: entries[1]!.ranges }, + { text: entries[0]!.text, url: entries[0]!.url, ranges: [{ start: 0, end: 4, count: 1 }] }, + { text: entries[1]!.text, url: entries[1]!.url, ranges: [{ start: 0, end: 4, count: 1 }] }, ]) }) @@ -108,7 +114,33 @@ test('does not merge different CSS with same URLs and identical ranges', () => { }, ] expect(deduplicate_entries(entries)).toEqual([ - { text: entries[0]!.text, url: entries[0]!.url, ranges: entries[0]!.ranges }, - { text: entries[1]!.text, url: entries[1]!.url, ranges: entries[1]!.ranges }, + { text: entries[0]!.text, url: entries[0]!.url, 
ranges: [{ start: 0, end: 4, count: 1 }] }, + { text: entries[1]!.text, url: entries[1]!.url, ranges: [{ start: 0, end: 4, count: 1 }] }, + ]) +}) + +test('produces count: 2 for overlapping ranges', () => { + let entries = [ + { + text: 'a {} b {}', + ranges: [{ start: 0, end: 9 }], + url: 'example.com', + }, + { + text: 'a {} b {}', + ranges: [{ start: 3, end: 6 }], + url: 'example.com', + }, + ] + expect(deduplicate_entries(entries)).toEqual([ + { + text: entries[0]!.text, + url: entries[0]!.url, + ranges: [ + { start: 0, end: 3, count: 1 }, + { start: 3, end: 6, count: 2 }, + { start: 6, end: 9, count: 1 }, + ], + }, ]) }) diff --git a/src/lib/extend-ranges.ts b/src/lib/extend-ranges.ts index c4ddaf8..649763e 100644 --- a/src/lib/extend-ranges.ts +++ b/src/lib/extend-ranges.ts @@ -3,7 +3,7 @@ import type { Coverage } from './parse-coverage' const AT_SIGN = 64 const LONGEST_ATRULE_NAME = '@-webkit-font-feature-values'.length -export function extend_ranges(coverage: Coverage): Coverage { +export function extend_ranges<T extends Coverage>(coverage: T): T { let { ranges, url, text } = coverage // Adjust ranges to include @-rule name (only preludes included) // Cannot reliably include closing } because it may not be the end of the range @@ -52,5 +52,5 @@ export function extend_ranges(coverage: Coverage): Coverage { return range }) - return { text, ranges: new_ranges, url } + return { text, ranges: new_ranges, url } as T } diff --git a/src/lib/index.ts b/src/lib/index.ts index 9f7fc50..198ba19 100644 --- a/src/lib/index.ts +++ b/src/lib/index.ts @@ -14,6 +14,7 @@ export type CoverageData = { total_lines: number covered_lines: number uncovered_lines: number + max_coverage_count: number } export type StylesheetCoverage = CoverageData & { @@ -40,6 +41,7 @@ function calculate_stylesheet_coverage({ text, url, chunks }: PrettifiedCoverage let total_lines = 0 let covered_lines = 0 let uncovered_lines = 0 + let max_coverage_count = 0 for (let chunk of chunks) { let lines = chunk.total_lines @@ 
-51,6 +53,9 @@ function calculate_stylesheet_coverage({ text, url, chunks }: PrettifiedCoverage if (chunk.is_covered) { covered_lines += lines covered_bytes += bytes + if (chunk.coverage_count > max_coverage_count) { + max_coverage_count = chunk.coverage_count + } } else { uncovered_lines += lines uncovered_bytes += bytes @@ -68,6 +73,7 @@ function calculate_stylesheet_coverage({ text, url, chunks }: PrettifiedCoverage total_lines, covered_lines, uncovered_lines, + max_coverage_count, chunks, } } @@ -79,8 +85,8 @@ export function calculate_coverage(coverage: Coverage[]): CoverageResult { (acc, entry) => filter_coverage(acc, entry), [], ) - let deduplicated: Coverage[] = deduplicate_entries(filtered_coverage) - let extended: Coverage[] = deduplicated.map((coverage) => extend_ranges(coverage)) + let deduplicated = deduplicate_entries(filtered_coverage) + let extended = deduplicated.map((coverage) => extend_ranges(coverage)) let chunkified: ChunkedCoverage[] = extended.map((sheet) => mark_comments_as_covered(chunkify(sheet)), ) @@ -97,6 +103,7 @@ export function calculate_coverage(coverage: Coverage[]): CoverageResult { total_bytes, total_used_bytes, total_unused_bytes, + total_max_coverage_count, } = coverage_per_stylesheet.reduce( (totals, sheet) => { totals.total_lines += sheet.total_lines @@ -105,6 +112,9 @@ export function calculate_coverage(coverage: Coverage[]): CoverageResult { totals.total_bytes += sheet.total_bytes totals.total_used_bytes += sheet.covered_bytes totals.total_unused_bytes += sheet.uncovered_bytes + if (sheet.max_coverage_count > totals.total_max_coverage_count) { + totals.total_max_coverage_count = sheet.max_coverage_count + } return totals }, { @@ -114,6 +124,7 @@ export function calculate_coverage(coverage: Coverage[]): CoverageResult { total_bytes: 0, total_used_bytes: 0, total_unused_bytes: 0, + total_max_coverage_count: 0, }, ) @@ -127,6 +138,7 @@ export function calculate_coverage(coverage: Coverage[]): CoverageResult { uncovered_lines: 
total_uncovered_lines, byte_coverage_ratio: ratio(total_used_bytes, total_bytes), line_coverage_ratio: ratio(total_covered_lines, total_lines), + max_coverage_count: total_max_coverage_count, coverage_per_stylesheet, total_stylesheets: coverage_per_stylesheet.length, } diff --git a/src/lib/prettify.test.ts b/src/lib/prettify.test.ts index b0662d7..622cfe5 100644 --- a/src/lib/prettify.test.ts +++ b/src/lib/prettify.test.ts @@ -7,7 +7,7 @@ test('includes the last character of each chunk', () => { // With the off-by-one bug (end_offset - 1) it would be silently dropped. let chunked = chunkify({ text: 'a{color:red}', - ranges: [{ start: 0, end: 12 }], + ranges: [{ start: 0, end: 12, count: 1 }], url: 'https://example.com', }) let result = prettify(chunked) @@ -26,7 +26,7 @@ test('prettified.text is the formatted output, not the original', () => { let original = 'a{color:red}' let chunked = chunkify({ text: original, - ranges: [{ start: 0, end: original.length }], + ranges: [{ start: 0, end: original.length, count: 1 }], url: 'https://example.com', }) let result = prettify(chunked) @@ -41,7 +41,7 @@ test('offsets in prettified result are based on formatted CSS length, not origin let original = 'a{color:red}' let chunked = chunkify({ text: original, - ranges: [{ start: 0, end: original.length }], + ranges: [{ start: 0, end: original.length, count: 1 }], url: 'https://example.com', }) let result = prettify(chunked) @@ -56,7 +56,7 @@ test('offsets index into prettified.text and yield formatted CSS, not original C let original = 'a{color:red}' let chunked = chunkify({ text: original, - ranges: [{ start: 0, end: original.length }], + ranges: [{ start: 0, end: original.length, count: 1 }], url: 'https://example.com', }) let result = prettify(chunked)