|
1
|
"use strict"; |
|
2
|
var __create = Object.create; |
|
3
|
var __defProp = Object.defineProperty; |
|
4
|
var __getOwnPropDesc = Object.getOwnPropertyDescriptor; |
|
5
|
var __getOwnPropNames = Object.getOwnPropertyNames; |
|
6
|
var __getProtoOf = Object.getPrototypeOf; |
|
7
|
var __hasOwnProp = Object.prototype.hasOwnProperty; |
|
8
|
var __export = (target, all) => { |
|
9
|
for (var name in all) |
|
10
|
__defProp(target, name, { get: all[name], enumerable: true }); |
|
11
|
}; |
|
12
|
var __copyProps = (to, from, except, desc) => { |
|
13
|
if (from && typeof from === "object" || typeof from === "function") { |
|
14
|
for (let key of __getOwnPropNames(from)) |
|
15
|
if (!__hasOwnProp.call(to, key) && key !== except) |
|
16
|
__defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); |
|
17
|
} |
|
18
|
return to; |
|
19
|
}; |
|
20
|
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( |
|
21
|
// If the importer is in node compatibility mode or this is not an ESM |
|
22
|
// file that has been converted to a CommonJS file using a Babel- |
|
23
|
// compatible transform (i.e. "__esModule" has not been set), then set |
|
24
|
// "default" to the CommonJS "module.exports" for node compatibility. |
|
25
|
isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target, |
|
26
|
mod |
|
27
|
)); |
|
28
|
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); |
|
29
|
var merge_exports = {}; |
|
30
|
__export(merge_exports, { |
|
31
|
createMergedReport: () => createMergedReport |
|
32
|
}); |
|
33
|
module.exports = __toCommonJS(merge_exports); |
|
34
|
var import_fs = __toESM(require("fs")); |
|
35
|
var import_path = __toESM(require("path")); |
|
36
|
var import_utils = require("playwright-core/lib/utils"); |
|
37
|
var import_blob = require("./blob"); |
|
38
|
var import_multiplexer = require("./multiplexer"); |
|
39
|
var import_stringInternPool = require("../isomorphic/stringInternPool"); |
|
40
|
var import_teleReceiver = require("../isomorphic/teleReceiver"); |
|
41
|
var import_reporters = require("../runner/reporters"); |
|
42
|
var import_util = require("../util"); |
|
43
|
/**
 * Entry point of `merge-reports`: reads blob report zip files from `dir`,
 * merges their event streams, and replays the result into the configured
 * reporters.
 *
 * @param config merge configuration; `config.config` is forwarded to the
 *   receiver as config overrides.
 * @param dir directory containing the shard report `.zip` files.
 * @param reporterDescriptions reporters to instantiate for the merged output.
 * @param rootDirOverride when set, overrides the root dir from the blobs and
 *   forces path-separator patching.
 * @throws Error when `dir` contains no report files.
 */
async function createMergedReport(config, dir, reporterDescriptions, rootDirOverride) {
  const reporters = await (0, import_reporters.createReporters)(config, "merge", reporterDescriptions);
  const multiplexer = new import_multiplexer.Multiplexer(reporters);
  const stringPool = new import_stringInternPool.StringInternPool();
  // Progress printing is a no-op unless stdout is free (no stdio reporters).
  let printStatus = () => {
  };
  if (!multiplexer.printsToStdio()) {
    printStatus = printStatusToStdout;
    printStatus(`merging reports from ${dir}`);
  }
  const shardFiles = await sortedShardFiles(dir);
  if (shardFiles.length === 0)
    throw new Error(`No report files found in ${dir}`);
  const eventData = await mergeEvents(dir, shardFiles, stringPool, printStatus, rootDirOverride);
  // Paths inside the blobs use the separator of the OS that recorded them;
  // fall back to the local separator when the metadata does not say.
  const pathSeparator = rootDirOverride ? import_path.default.sep : eventData.pathSeparatorFromMetadata ?? import_path.default.sep;
  const pathPackage = pathSeparator === "/" ? import_path.default.posix : import_path.default.win32;
  const receiver = new import_teleReceiver.TeleReporterReceiver(multiplexer, {
    mergeProjects: false,
    mergeTestCases: false,
    // When merging on a different OS, an absolute path like `C:\foo\bar` from win may look like
    // a relative path on posix, and vice versa.
    // Therefore, we cannot use `path.resolve()` here - it will resolve relative-looking paths
    // against `process.cwd()`, while we just want to normalize ".." and "." segments.
    resolvePath: (rootDir, relativePath) => stringPool.internString(pathPackage.normalize(pathPackage.join(rootDir, relativePath))),
    configOverrides: config.config
  });
  printStatus(`processing test events`);
  const dispatchEvents = async (events) => {
    for (const event of events) {
      if (event.method === "onEnd")
        printStatus(`building final report`);
      await receiver.dispatch(event);
      if (event.method === "onEnd")
        printStatus(`finished building report`);
    }
  };
  // Replay: prologue (configure/project/begin), then each shard's test
  // events, then the epilogue (end/exit).
  await dispatchEvents(eventData.prologue);
  for (const { reportFile, eventPatchers, metadata, tags, startTime, duration } of eventData.reports) {
    const reportJsonl = await import_fs.default.promises.readFile(reportFile);
    const events = parseTestEvents(reportJsonl);
    new import_stringInternPool.JsonStringInternalizer(stringPool).traverse(events);
    // Rebase attachment paths onto the extraction directory.
    eventPatchers.patchers.push(new AttachmentPathPatcher(dir));
    if (metadata.name)
      eventPatchers.patchers.push(new GlobalErrorPatcher(metadata.name));
    // NOTE(review): tags are surfaced by prefixing global errors via a second
    // GlobalErrorPatcher with the joined tag string — confirm this is intended
    // rather than a dedicated tag patcher.
    if (tags.length)
      eventPatchers.patchers.push(new GlobalErrorPatcher(tags.join(" ")));
    eventPatchers.patchEvents(events);
    await dispatchEvents(events);
    multiplexer.onMachineEnd({
      startTime: new Date(startTime),
      duration,
      tag: tags,
      shardIndex: metadata.shard?.current
    });
  }
  await dispatchEvents(eventData.epilogue);
}
|
100
|
// Event methods that describe the run itself (metadata/config/projects/begin/end)
// rather than individual test activity; they are parsed eagerly per shard.
const commonEventNames = ["onBlobReportMetadata", "onConfigure", "onProject", "onBegin", "onEnd"];
const commonEvents = new Set(commonEventNames);
// Cheap textual pre-filter applied to raw JSONL lines before JSON.parse.
const commonEventRegex = new RegExp(commonEventNames.join("|"));
|
103
|
// Parses only the "common" (run-level) events out of a raw JSONL buffer.
// The regex is a fast pre-filter; the Set check on the parsed method is
// what actually decides membership.
function parseCommonEvents(reportJsonl) {
  const events = [];
  for (const chunk of splitBufferLines(reportJsonl)) {
    const line = chunk.toString("utf8");
    if (!commonEventRegex.test(line))
      continue;
    const event = JSON.parse(line);
    if (commonEvents.has(event.method))
      events.push(event);
  }
  return events;
}
|
106
|
// Parses every non-empty JSONL line and keeps only test-level events,
// i.e. those whose method is not in the commonEvents set.
function parseTestEvents(reportJsonl) {
  const events = [];
  for (const chunk of splitBufferLines(reportJsonl)) {
    const line = chunk.toString("utf8");
    if (!line.length)
      continue;
    const event = JSON.parse(line);
    if (!commonEvents.has(event.method))
      events.push(event);
  }
  return events;
}
|
109
|
// Splits a buffer on "\n" (byte 10) into an array of line buffers without
// decoding. A trailing newline does not produce an empty final entry.
function splitBufferLines(buffer) {
  const lines = [];
  let cursor = 0;
  for (let nl = buffer.indexOf(10, cursor); nl !== -1; nl = buffer.indexOf(10, cursor)) {
    lines.push(buffer.slice(cursor, nl));
    cursor = nl + 1;
  }
  // Remainder after the last newline, if any.
  if (cursor < buffer.length)
    lines.push(buffer.slice(cursor));
  return lines;
}
|
123
|
/**
 * Unzips every shard report into `dir`, eagerly parses the common events from
 * each `.jsonl` entry, and writes all zip entries (reports and attachments)
 * to disk.
 *
 * @returns one record per `.jsonl` report:
 *   `{ file, localPath, metadata, parsedEvents }`.
 */
async function extractAndParseReports(dir, shardFiles, internalizer, printStatus) {
  const shardEvents = [];
  await import_fs.default.promises.mkdir(import_path.default.join(dir, "resources"), { recursive: true });
  // Avoids clobbering when two shards carry identically named report entries.
  const reportNames = new UniqueFileNameGenerator();
  for (const file of shardFiles) {
    const absolutePath = import_path.default.join(dir, file);
    printStatus(`extracting: ${(0, import_util.relativeFilePath)(absolutePath)}`);
    const zipFile = new import_utils.ZipFile(absolutePath);
    // NOTE(review): zipFile is not closed if a read below throws — consider try/finally.
    const entryNames = await zipFile.entries();
    for (const entryName of entryNames.sort()) {
      let fileName = import_path.default.join(dir, entryName);
      const content = await zipFile.read(entryName);
      if (entryName.endsWith(".jsonl")) {
        fileName = reportNames.makeUnique(fileName);
        let parsedEvents = parseCommonEvents(content);
        // Intern strings to dedupe repeated paths/titles across shards.
        internalizer.traverse(parsedEvents);
        const metadata = findMetadata(parsedEvents, file);
        // Upgrade events recorded by an older Playwright to the current blob format.
        parsedEvents = modernizer.modernize(metadata.version, parsedEvents);
        shardEvents.push({
          file,
          localPath: fileName,
          metadata,
          parsedEvents
        });
      }
      // Every entry (report or attachment) is written out for later use.
      await import_fs.default.promises.writeFile(fileName, content);
    }
    zipFile.close();
  }
  return shardEvents;
}
|
154
|
// Returns the blob metadata that must be the first event of every report,
// rejecting reports without metadata or ones written by a newer Playwright.
function findMetadata(events, file) {
  const [first] = events;
  if (first?.method !== "onBlobReportMetadata")
    throw new Error(`No metadata event found in ${file}`);
  const metadata = first.params;
  if (metadata.version > import_blob.currentBlobReportVersion)
    throw new Error(`Blob report ${file} was created with a newer version of Playwright.`);
  return metadata;
}
|
162
|
/**
 * Extracts all shard reports, orders them deterministically, patches test ids
 * (and optionally path separators), and builds the merged event stream:
 * a prologue (onConfigure/onProject/onBegin), one entry per shard report
 * (whose test events the caller parses lazily), and an epilogue (onEnd/onExit).
 */
async function mergeEvents(dir, shardReportFiles, stringPool, printStatus, rootDirOverride) {
  const internalizer = new import_stringInternPool.JsonStringInternalizer(stringPool);
  const configureEvents = [];
  const projectEvents = [];
  const endEvents = [];
  const blobs = await extractAndParseReports(dir, shardReportFiles, internalizer, printStatus);
  // Stable order: by bot name, then shard number, then file name.
  blobs.sort((a, b) => {
    const nameA = a.metadata.name ?? "";
    const nameB = b.metadata.name ?? "";
    if (nameA !== nameB)
      return nameA.localeCompare(nameB);
    const shardA = a.metadata.shard?.current ?? 0;
    const shardB = b.metadata.shard?.current ?? 0;
    if (shardA !== shardB)
      return shardA - shardB;
    return a.file.localeCompare(b.file);
  });
  printStatus(`merging events`);
  const reports = [];
  // Test ids seen across all blobs; used to salt ids that collide between shards.
  const globalTestIdSet = /* @__PURE__ */ new Set();
  for (let i = 0; i < blobs.length; ++i) {
    const { parsedEvents, metadata, localPath } = blobs[i];
    const eventPatchers = new JsonEventPatchers();
    // The blob index doubles as the collision salt so each blob maps ids uniquely.
    eventPatchers.patchers.push(new IdsPatcher(
      stringPool,
      metadata.name,
      String(i),
      globalTestIdSet
    ));
    // Only rewrite path separators when merging against an explicit root dir.
    if (rootDirOverride)
      eventPatchers.patchers.push(new PathSeparatorPatcher(metadata.pathSeparator));
    eventPatchers.patchEvents(parsedEvents);
    let tags = [];
    let startTime = 0;
    let duration = 0;
    for (const event of parsedEvents) {
      if (event.method === "onConfigure") {
        configureEvents.push(event);
        tags = event.params.config.tags || [];
      } else if (event.method === "onProject") {
        projectEvents.push(event);
      } else if (event.method === "onEnd") {
        endEvents.push({ event, metadata, tags });
        startTime = event.params.result.startTime;
        duration = event.params.result.duration;
      }
    }
    reports.push({
      eventPatchers,
      reportFile: localPath,
      metadata,
      tags,
      startTime,
      duration
    });
  }
  return {
    prologue: [
      mergeConfigureEvents(configureEvents, rootDirOverride),
      ...projectEvents,
      // Synthetic onBegin with no payload.
      { method: "onBegin", params: void 0 }
    ],
    reports,
    epilogue: [
      mergeEndEvents(endEvents),
      { method: "onExit", params: void 0 }
    ],
    pathSeparatorFromMetadata: blobs[0]?.metadata.pathSeparator
  };
}
|
232
|
/**
 * Folds all shard onConfigure events into a single merged configure event.
 * Throws when no configure events exist or when the blobs disagree on the
 * root directory and no override was given.
 */
function mergeConfigureEvents(configureEvents, rootDirOverride) {
  if (configureEvents.length === 0)
    throw new Error("No configure events found");
  // Seed config that the shard configs are merged onto, one by one.
  const seed = {
    configFile: void 0,
    globalTimeout: 0,
    maxFailures: 0,
    metadata: {},
    rootDir: "",
    version: "",
    workers: 0,
    globalSetup: null,
    globalTeardown: null
  };
  const config = configureEvents.reduce(
    (merged, event) => mergeConfigs(merged, event.params.config),
    seed
  );
  if (rootDirOverride) {
    config.rootDir = rootDirOverride;
  } else {
    // Without an override every shard must agree on the root directory.
    const rootDirs = new Set(configureEvents.map((e) => e.params.config.rootDir));
    if (rootDirs.size > 1) {
      throw new Error([
        `Blob reports being merged were recorded with different test directories, and`,
        `merging cannot proceed. This may happen if you are merging reports from`,
        `machines with different environments, like different operating systems or`,
        `if the tests ran with different playwright configs.`,
        ``,
        `You can force merge by specifying a merge config file with "-c" option. If`,
        `you'd like all test paths to be correct, make sure 'testDir' in the merge config`,
        `file points to the actual tests location.`,
        ``,
        `Found directories:`,
        ...rootDirs
      ].join("\n"));
    }
  }
  return {
    method: "onConfigure",
    params: {
      config
    }
  };
}
|
275
|
/**
 * Merges two configs: later keys win, while worker counts (and the
 * `actualWorkers` metadata counter) are summed across shards.
 */
function mergeConfigs(to, from) {
  const metadata = { ...to.metadata, ...from.metadata };
  metadata.actualWorkers = (to.metadata.actualWorkers || 0) + (from.metadata.actualWorkers || 0);
  const merged = { ...to, ...from };
  merged.metadata = metadata;
  merged.workers = to.workers + from.workers;
  return merged;
}
|
287
|
/**
 * Combines all shard onEnd events into one: worst status wins
 * (failed > timedout > interrupted > passed), the run spans from the
 * earliest shard start to the latest shard end.
 */
function mergeEndEvents(endEvents) {
  // Status precedence, weakest to strongest; unknown statuses never win.
  const rank = { passed: 0, interrupted: 1, timedout: 2, failed: 3 };
  let status = "passed";
  // Sentinel far in the future so Math.min picks the first real start time.
  let startTime = endEvents.length ? 1e13 : Date.now();
  let endTime = 0;
  for (const { event } of endEvents) {
    const shardResult = event.params.result;
    if ((rank[shardResult.status] ?? -1) > rank[status])
      status = shardResult.status;
    startTime = Math.min(startTime, shardResult.startTime);
    endTime = Math.max(endTime, shardResult.startTime + shardResult.duration);
  }
  return {
    method: "onEnd",
    params: {
      result: {
        status,
        startTime,
        duration: endTime - startTime
      }
    }
  };
}
|
314
|
// Lists the shard report zip files in `dir`, sorted lexicographically so the
// merge order is deterministic across runs.
async function sortedShardFiles(dir) {
  const entries = await import_fs.default.promises.readdir(dir);
  const zipNames = entries.filter((name) => name.endsWith(".zip"));
  return zipNames.sort();
}
|
318
|
// Writes one progress line to stdout; used only when no reporter owns stdio.
function printStatusToStdout(message) {
  process.stdout.write(`${message}\n`);
}
|
322
|
/**
 * Hands out file names that are unique within this generator's lifetime.
 * A repeated name gets a "-1", "-2", … suffix inserted before its extension.
 */
class UniqueFileNameGenerator {
  constructor() {
    // Every name returned so far.
    this._usedNames = /* @__PURE__ */ new Set();
  }
  makeUnique(name) {
    // Fast path: first time we see this name, hand it back unchanged.
    if (!this._usedNames.has(name)) {
      this._usedNames.add(name);
      return name;
    }
    const extension = import_path.default.extname(name);
    const stem = name.substring(0, name.length - extension.length);
    for (let index = 1; ; index++) {
      const candidate = `${stem}-${index}${extension}`;
      if (!this._usedNames.has(candidate)) {
        this._usedNames.add(candidate);
        return candidate;
      }
    }
  }
}
|
343
|
/**
 * Rewrites test ids so they stay unique across merged blobs: an id already
 * claimed by another blob is salted with this blob's index. Also prepends a
 * "@<botName>" tag to every test when a bot name is known.
 */
class IdsPatcher {
  constructor(stringPool, botName, salt, globalTestIdSet) {
    this._stringPool = stringPool;
    this._botName = botName;
    this._salt = salt;
    // Per-blob mapping from original id to (possibly salted) merged id.
    this._testIdsMap = /* @__PURE__ */ new Map();
    // Ids claimed by any blob merged so far (shared across patchers).
    this._globalTestIdSet = globalTestIdSet;
  }
  patchEvent(event) {
    const { method, params } = event;
    if (method === "onProject") {
      this._onProject(params.project);
    } else if (method === "onTestEnd") {
      params.test.testId = this._mapTestId(params.test.testId);
    } else if (method === "onAttach" || method === "onTestBegin" || method === "onStepBegin" || method === "onStepEnd" || method === "onStdIO") {
      // onStdIO may legitimately carry no testId; keep it undefined then.
      params.testId = params.testId ? this._mapTestId(params.testId) : void 0;
    }
  }
  _onProject(project) {
    project.metadata ??= {};
    for (const suite of project.suites)
      this._updateTestIds(suite);
  }
  _updateTestIds(suite) {
    // Entries are either tests (have a testId) or nested suites.
    for (const entry of suite.entries) {
      if ("testId" in entry)
        this._updateTestId(entry);
      else
        this._updateTestIds(entry);
    }
  }
  _updateTestId(test) {
    test.testId = this._mapTestId(test.testId);
    if (this._botName) {
      test.tags = test.tags || [];
      test.tags.unshift("@" + this._botName);
    }
  }
  _mapTestId(testId) {
    const interned = this._stringPool.internString(testId);
    const known = this._testIdsMap.get(interned);
    if (known !== void 0)
      return known;
    // Salt only when another blob already claimed this id.
    const mapped = this._globalTestIdSet.has(interned)
      ? this._stringPool.internString(testId + this._salt)
      : interned;
    this._globalTestIdSet.add(mapped);
    this._testIdsMap.set(interned, mapped);
    return mapped;
  }
}
|
403
|
/**
 * Rebases relative attachment paths recorded inside a blob onto the
 * extraction directory, so they resolve on the merging machine.
 */
class AttachmentPathPatcher {
  constructor(_resourceDir) {
    this._resourceDir = _resourceDir;
  }
  patchEvent(event) {
    switch (event.method) {
      case "onAttach":
        this._patchAttachments(event.params.attachments);
        break;
      case "onTestEnd":
        this._patchAttachments(event.params.result.attachments ?? []);
        break;
    }
  }
  _patchAttachments(attachments) {
    for (const attachment of attachments) {
      // Inline attachments (body only, no path) are left untouched.
      if (attachment.path)
        attachment.path = import_path.default.join(this._resourceDir, attachment.path);
    }
  }
}
|
421
|
/**
 * Rewrites path separators inside events recorded on an OS whose separator
 * (`from`) differs from the local one. A no-op when they already match.
 */
class PathSeparatorPatcher {
  constructor(from) {
    // Default to "the other" separator when the blob metadata did not record one.
    this._from = from ?? (import_path.default.sep === "/" ? "\\" : "/");
    this._to = import_path.default.sep;
  }
  patchEvent(jsonEvent) {
    if (this._from === this._to)
      return;
    if (jsonEvent.method === "onProject") {
      this._updateProject(jsonEvent.params.project);
      return;
    }
    if (jsonEvent.method === "onTestEnd") {
      const test = jsonEvent.params.test;
      test.annotations?.forEach((annotation) => this._updateAnnotationLocation(annotation));
      const testResult = jsonEvent.params.result;
      testResult.annotations?.forEach((annotation) => this._updateAnnotationLocation(annotation));
      testResult.errors.forEach((error) => this._updateErrorLocations(error));
      (testResult.attachments ?? []).forEach((attachment) => {
        if (attachment.path)
          attachment.path = this._updatePath(attachment.path);
      });
      return;
    }
    if (jsonEvent.method === "onStepBegin") {
      const step = jsonEvent.params.step;
      this._updateLocation(step.location);
      return;
    }
    if (jsonEvent.method === "onStepEnd") {
      const step = jsonEvent.params.step;
      this._updateErrorLocations(step.error);
      step.annotations?.forEach((annotation) => this._updateAnnotationLocation(annotation));
      return;
    }
    if (jsonEvent.method === "onAttach") {
      const attach = jsonEvent.params;
      attach.attachments.forEach((attachment) => {
        if (attachment.path)
          attachment.path = this._updatePath(attachment.path);
      });
      return;
    }
  }
  _updateProject(project) {
    project.outputDir = this._updatePath(project.outputDir);
    project.testDir = this._updatePath(project.testDir);
    project.snapshotDir = this._updatePath(project.snapshotDir);
    // Top-level suites are treated as file suites: their title is the file path.
    project.suites.forEach((suite) => this._updateSuite(suite, true));
  }
  _updateSuite(suite, isFileSuite = false) {
    this._updateLocation(suite.location);
    if (isFileSuite)
      suite.title = this._updatePath(suite.title);
    // Entries are either tests (have a testId) or nested suites.
    for (const entry of suite.entries) {
      if ("testId" in entry) {
        this._updateLocation(entry.location);
        entry.annotations?.forEach((annotation) => this._updateAnnotationLocation(annotation));
      } else {
        this._updateSuite(entry);
      }
    }
  }
  _updateErrorLocations(error) {
    // Walk the whole error `cause` chain.
    while (error) {
      this._updateLocation(error.location);
      error = error.cause;
    }
  }
  _updateAnnotationLocation(annotation) {
    this._updateLocation(annotation.location);
  }
  _updateLocation(location) {
    if (location)
      location.file = this._updatePath(location.file);
  }
  _updatePath(text) {
    // split/join replaces every occurrence without regex-escaping concerns.
    return text.split(this._from).join(this._to);
  }
}
|
501
|
/**
 * Prefixes every global error's message and stack with "(<botName>) " so the
 * originating machine remains identifiable in the merged report.
 */
class GlobalErrorPatcher {
  constructor(botName) {
    this._prefix = `(${botName}) `;
  }
  patchEvent(event) {
    if (event.method !== "onError")
      return;
    const error = event.params.error;
    if (error.message !== undefined)
      error.message = this._prefix + error.message;
    if (error.stack !== undefined)
      error.stack = this._prefix + error.stack;
  }
}
|
515
|
/**
 * Applies an ordered list of patchers to every event in a stream.
 * Patchers mutate events in place.
 */
class JsonEventPatchers {
  constructor() {
    // Patchers run in insertion order for each event.
    this.patchers = [];
  }
  patchEvents(events) {
    for (const event of events)
      this.patchers.forEach((patcher) => patcher.patchEvent(event));
  }
}
|
526
|
/**
 * Upgrades events recorded by an older blob-report format to the current
 * version by chaining per-version `_modernize_N_to_M` steps.
 */
class BlobModernizer {
  modernize(fromVersion, events) {
    return events.flatMap((event) => this._modernize(fromVersion, event));
  }
  // Runs every upgrade step from `fromVersion` up to the current version.
  _modernize(fromVersion, event) {
    let events = [event];
    for (let version = fromVersion; version < import_blob.currentBlobReportVersion; ++version)
      events = this[`_modernize_${version}_to_${version + 1}`](events);
    return events;
  }
  // v1 -> v2: suites carried separate `suites`/`tests` arrays; v2 folds both
  // into a single ordered `entries` array.
  _modernize_1_to_2(events) {
    return events.map((event) => {
      if (event.method !== "onProject")
        return event;
      const modernizeSuite = (suite) => {
        const { suites, tests, ...remainder } = suite;
        return { entries: [...suites.map(modernizeSuite), ...tests], ...remainder };
      };
      const project = event.params.project;
      project.suites = project.suites.map(modernizeSuite);
      return event;
    });
  }
}
// Shared singleton used while extracting shard reports.
const modernizer = new BlobModernizer();
|
555
|
// Annotate the CommonJS export names for ESM import in node:
// (dead code: the `0 &&` guard means this never executes; bundlers and Node's
// ESM-CJS interop read the export names statically.)
0 && (module.exports = {
  createMergedReport
});
|
559
|
|