chore: append sources to traces (#9736)
commit 5451808df1
parent 676e72c8e0
@@ -24,7 +24,7 @@ import type { Connection } from './connection';
 import type { ClientSideInstrumentation, Logger } from './types';
 
 export abstract class ChannelOwner<T extends channels.Channel = channels.Channel, Initializer = {}> extends EventEmitter {
-  protected _connection: Connection;
+  readonly _connection: Connection;
   private _parent: ChannelOwner | undefined;
   private _objects = new Map<string, ChannelOwner>();
 
@@ -62,6 +62,7 @@ export class Connection extends EventEmitter {
   private _rootObject: Root;
   private _closedErrorMessage: string | undefined;
   private _isRemote = false;
+  private _sourceCollector: Set<string> | undefined;
 
   constructor() {
     super();

@@ -88,6 +89,10 @@ export class Connection extends EventEmitter {
     return this._objects.get(guid)!;
   }
 
+  setSourceCollector(collector: Set<string> | undefined) {
+    this._sourceCollector = collector;
+  }
+
   async sendMessageToServer(object: ChannelOwner, method: string, params: any, maybeStackTrace: ParsedStackTrace | null): Promise<any> {
     if (this._closedErrorMessage)
       throw new Error(this._closedErrorMessage);

@@ -95,7 +100,8 @@ export class Connection extends EventEmitter {
     const guid = object._guid;
     const stackTrace: ParsedStackTrace = maybeStackTrace || { frameTexts: [], frames: [], apiName: '', allFrames: [] };
     const { frames, apiName } = stackTrace;
-
+    if (this._sourceCollector)
+      frames.forEach(f => this._sourceCollector!.add(f.file));
     const id = ++this._lastId;
     const converted = { id, guid, method, params };
     // Do not include metadata in debug logs to avoid noise.
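The collection mechanism added above is small: the client-side Tracing object hands a Set to the Connection, and every subsequent API call records the source files seen on its stack. A minimal stand-alone sketch of that flow (the Frame shape, class name and file paths here are illustrative, not part of the diff):

type Frame = { file: string };

class SourceCollectingConnection {
  private _sourceCollector: Set<string> | undefined;

  setSourceCollector(collector: Set<string> | undefined) {
    this._sourceCollector = collector;
  }

  // Mirrors sendMessageToServer above: dedupe every file seen on an API-call stack.
  recordApiCall(frames: Frame[]) {
    if (this._sourceCollector)
      frames.forEach(f => this._sourceCollector!.add(f.file));
  }
}

// Tracing installs the set when a chunk starts and detaches it when the chunk stops.
const sources = new Set<string>();
const connection = new SourceCollectingConnection();
connection.setSourceCollector(sources);
connection.recordApiCall([{ file: '/tests/example.spec.ts' }, { file: '/tests/helpers.ts' }]);
connection.setSourceCollector(undefined);
// sources now holds the two file paths and is later written into the trace zip.
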
@@ -18,15 +18,25 @@ import * as api from '../../types/types';
 import * as channels from '../protocol/channels';
 import { Artifact } from './artifact';
 import { BrowserContext } from './browserContext';
+import fs from 'fs';
+import path from 'path';
+import yauzl from 'yauzl';
+import yazl from 'yazl';
+import { assert, calculateSha1 } from '../utils/utils';
+import { ManualPromise } from '../utils/async';
+import EventEmitter from 'events';
 
 export class Tracing implements api.Tracing {
   private _context: BrowserContext;
+  private _sources: Set<string> | undefined;
 
   constructor(channel: BrowserContext) {
     this._context = channel;
   }
 
-  async start(options: { name?: string, snapshots?: boolean, screenshots?: boolean } = {}) {
+  async start(options: { name?: string, snapshots?: boolean, screenshots?: boolean, sources?: boolean } = {}) {
+    this._sources = options.sources ? new Set() : undefined;
+    this._context._connection.setSourceCollector(this._sources);
     await this._context._wrapApiCall(async (channel: channels.BrowserContextChannel) => {
       await channel.tracingStart(options);
       await channel.tracingStartChunk();

@@ -34,6 +44,7 @@ export class Tracing implements api.Tracing {
   }
 
   async startChunk() {
+    this._context._connection.setSourceCollector(this._sources);
     await this._context._wrapApiCall(async (channel: channels.BrowserContextChannel) => {
       await channel.tracingStartChunk();
     });
@@ -52,12 +63,78 @@ export class Tracing implements api.Tracing {
     });
   }
 
-  private async _doStopChunk(channel: channels.BrowserContextChannel, path: string | undefined) {
-    const result = await channel.tracingStopChunk({ save: !!path });
-    if (!result.artifact)
+  private async _doStopChunk(channel: channels.BrowserContextChannel, filePath: string | undefined) {
+    const sources = this._sources;
+    this._context._connection.setSourceCollector(undefined);
+    const skipCompress = !this._context._connection.isRemote();
+    const result = await channel.tracingStopChunk({ save: !!filePath, skipCompress });
+    if (!filePath) {
+      // Not interested in artifacts.
       return;
-    const artifact = Artifact.from(result.artifact);
-    await artifact.saveAs(path!);
     }
 
+    // If we don't have anything locally and we run against remote Playwright, compress on remote side.
+    if (!skipCompress && !sources) {
+      const artifact = Artifact.from(result.artifact!);
+      await artifact.saveAs(filePath);
+      await artifact.delete();
+      return;
+    }
+
+    // We either have sources to append or we were running locally, compress on client side
+
+    const promise = new ManualPromise<void>();
+    const zipFile = new yazl.ZipFile();
+    (zipFile as any as EventEmitter).on('error', error => promise.reject(error));
+
+    // Add sources.
+    if (sources) {
+      for (const source of sources)
+        zipFile.addFile(source, 'resources/src@' + calculateSha1(source) + '.txt');
+    }
+
+    await fs.promises.mkdir(path.dirname(filePath), { recursive: true });
+    if (skipCompress) {
+      // Local scenario, compress the entries.
+      for (const entry of result.entries!)
+        zipFile.addFile(entry.value, entry.name);
+      zipFile.end(undefined, () => {
+        zipFile.outputStream.pipe(fs.createWriteStream(filePath)).on('close', () => promise.resolve());
+      });
+      return promise;
+    }
+
+    // Remote scenario, repack.
+    const artifact = Artifact.from(result.artifact!);
+    const tmpPath = filePath! + '.tmp';
+    await artifact.saveAs(tmpPath);
+    await artifact.delete();
+
+    yauzl.open(tmpPath, (err, inZipFile) => {
+      if (err) {
+        promise.reject(err);
+        return;
+      }
+      assert(inZipFile);
+      let pendingEntries = inZipFile.entryCount;
+      inZipFile.on('entry', entry => {
+        inZipFile.openReadStream(entry, (err, readStream) => {
+          if (err) {
+            promise.reject(err);
+            return;
+          }
+          zipFile.addReadStream(readStream!, entry.fileName);
+          if (--pendingEntries === 0) {
+            zipFile.end();
+            zipFile.outputStream.pipe(fs.createWriteStream(filePath)).on('close', () => {
+              fs.promises.unlink(tmpPath).then(() => {
+                promise.resolve();
+              });
+            });
+          }
+        });
+      });
+    });
+    return promise;
   }
 }
 
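A hypothetical end-to-end use of the new client option, assuming a plain Node script against playwright-core (the browser setup and URL are illustrative; the `as any` cast mirrors the test fixtures further down, since the public typings do not yet declare `sources` in this commit):

import { chromium } from 'playwright-core';

(async () => {
  const browser = await chromium.launch();
  const context = await browser.newContext();

  // sources: true makes the connection collect the source files behind every API call.
  await context.tracing.start({ screenshots: true, snapshots: true, sources: true } as any);

  const page = await context.newPage();
  await page.goto('https://example.com');
  await page.click('body');

  // Stopping with a path runs _doStopChunk above: the collected files are appended
  // to the zip as resources/src@<sha1>.txt entries next to the trace itself.
  await context.tracing.stop({ path: 'trace.zip' });
  await browser.close();
})();
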
@@ -196,8 +196,8 @@ export class BrowserContextDispatcher extends Dispatcher<BrowserContext, channel
   }
 
   async tracingStopChunk(params: channels.BrowserContextTracingStopChunkParams): Promise<channels.BrowserContextTracingStopChunkResult> {
-    const artifact = await this._context.tracing.stopChunk(params.save);
-    return { artifact: artifact ? new ArtifactDispatcher(this._scope, artifact) : undefined };
+    const { artifact, entries } = await this._context.tracing.stopChunk(params.save, params.skipCompress);
+    return { artifact: artifact ? new ArtifactDispatcher(this._scope, artifact) : undefined, entries };
   }
 
   async tracingStop(params: channels.BrowserContextTracingStopParams): Promise<channels.BrowserContextTracingStopResult> {
 
@@ -1116,12 +1116,14 @@ export type BrowserContextTracingStartChunkOptions = {};
 export type BrowserContextTracingStartChunkResult = void;
 export type BrowserContextTracingStopChunkParams = {
   save: boolean,
+  skipCompress: boolean,
 };
 export type BrowserContextTracingStopChunkOptions = {
 
 };
 export type BrowserContextTracingStopChunkResult = {
   artifact?: ArtifactChannel,
+  entries: NameValue[],
 };
 export type BrowserContextTracingStopParams = {};
 export type BrowserContextTracingStopOptions = {};
 
@@ -812,8 +812,12 @@ BrowserContext:
   tracingStopChunk:
     parameters:
       save: boolean
+      skipCompress: boolean
     returns:
       artifact: Artifact?
+      entries:
+        type: array
+        items: NameValue
 
   tracingStop:
 
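For orientation, an illustrative result payload for the extended tracingStopChunk call when the client asked for skipCompress (shape matches the generated BrowserContextTracingStopChunkResult above; all paths and hashes are invented): the server returns no zip artifact, only the local files that the client will compress itself.

// Illustrative payload only.
const exampleResult = {
  artifact: undefined,  // no server-side zip when skipCompress is true
  entries: [
    { name: 'trace.trace', value: '/tmp/playwright-traces/trace1.trace' },
    { name: 'trace.network', value: '/tmp/playwright-traces/8f41c2e0b7d14b6c9a2e5d3f0c1a7b42' },
    { name: 'resources/3a7bd3e2360a3d29eea436fcfb7e44c735d117c4', value: '/tmp/playwright-traces/resources/3a7bd3e2360a3d29eea436fcfb7e44c735d117c4' },
  ],
};
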
@@ -506,6 +506,7 @@ export function createScheme(tChannel: (name: string) => Validator): Scheme {
   scheme.BrowserContextTracingStartChunkParams = tOptional(tObject({}));
   scheme.BrowserContextTracingStopChunkParams = tObject({
     save: tBoolean,
+    skipCompress: tBoolean,
   });
   scheme.BrowserContextTracingStopParams = tOptional(tObject({}));
   scheme.BrowserContextHarExportParams = tOptional(tObject({}));
 
@@ -32,6 +32,8 @@ import { FrameSnapshot } from '../common/snapshotTypes';
 import { HarTracer, HarTracerDelegate } from '../../supplements/har/harTracer';
 import * as har from '../../supplements/har/har';
 import { VERSION } from '../common/traceEvents';
+import { NameValue } from '../../../common/types';
+import { ManualPromise } from '../../../utils/async';
 
 export type TracerOptions = {
   name?: string;
@@ -107,7 +109,7 @@ export class Tracing implements InstrumentationListener, SnapshotterDelegate, Ha
 
   async startChunk() {
     if (this._state && this._state.recording)
-      await this.stopChunk(false);
+      await this.stopChunk(false, false);
 
     if (!this._state)
       throw new Error('Must start tracing before starting a new chunk');
@@ -163,7 +165,7 @@ export class Tracing implements InstrumentationListener, SnapshotterDelegate, Ha
     await this._writeChain;
   }
 
-  async stopChunk(save: boolean): Promise<Artifact | null> {
+  async stopChunk(save: boolean, skipCompress: boolean): Promise<{ artifact: Artifact | null, entries: NameValue[] }> {
     if (this._isStopping)
       throw new Error(`Tracing is already stopping`);
     this._isStopping = true;
@@ -185,7 +187,7 @@ export class Tracing implements InstrumentationListener, SnapshotterDelegate, Ha
       this._isStopping = false;
       if (save)
         throw new Error(`Must start tracing before stopping`);
-      return null;
+      return { artifact: null, entries: [] };
     }
 
     const state = this._state!;
@@ -198,31 +200,41 @@ export class Tracing implements InstrumentationListener, SnapshotterDelegate, Ha
     // Chain the export operation against write operations,
     // so that neither trace files nor sha1s change during the export.
     return await this._appendTraceOperation(async () => {
-      const result = save ? this._export(state) : Promise.resolve(null);
-      return result.finally(async () => {
-        this._isStopping = false;
-        state.recording = false;
-      });
+      this._isStopping = false;
+      state.recording = false;
+
+      if (!save)
+        return { artifact: null, entries: [] };
+
+      // Har file is live, make a snapshot before returning the resulting entries.
+      const networkFile = path.join(state.networkFile, '..', createGuid());
+      await fs.promises.copyFile(state.networkFile, networkFile);
+
+      const entries: NameValue[] = [];
+      entries.push({ name: 'trace.trace', value: state.traceFile });
+      entries.push({ name: 'trace.network', value: networkFile });
+      for (const sha1 of state.sha1s)
+        entries.push({ name: path.join('resources', sha1), value: path.join(this._resourcesDir, sha1) });
+
+      const zipArtifact = skipCompress ? null : await this._exportZip(entries, state).catch(() => null);
+      return { artifact: zipArtifact, entries };
     });
   }
 
-  private async _export(state: RecordingState): Promise<Artifact> {
+  private async _exportZip(entries: NameValue[], state: RecordingState): Promise<Artifact | null> {
     const zipFile = new yazl.ZipFile();
-    const failedPromise = new Promise<Artifact>((_, reject) => (zipFile as any as EventEmitter).on('error', reject));
-    const succeededPromise = new Promise<Artifact>(fulfill => {
-      zipFile.addFile(state.traceFile, 'trace.trace');
-      zipFile.addFile(state.networkFile, 'trace.network');
-      const zipFileName = state.traceFile + '.zip';
-      for (const sha1 of state.sha1s)
-        zipFile.addFile(path.join(this._resourcesDir, sha1), path.join('resources', sha1));
-      zipFile.end();
-      zipFile.outputStream.pipe(fs.createWriteStream(zipFileName)).on('close', () => {
-        const artifact = new Artifact(this._context, zipFileName);
-        artifact.reportFinished();
-        fulfill(artifact);
-      });
-    });
-    return Promise.race([failedPromise, succeededPromise]);
+    const result = new ManualPromise<Artifact | null>();
+    (zipFile as any as EventEmitter).on('error', error => result.reject(error));
+    for (const entry of entries)
+      zipFile.addFile(entry.value, entry.name);
+    zipFile.end();
+    const zipFileName = state.traceFile + '.zip';
+    zipFile.outputStream.pipe(fs.createWriteStream(zipFileName)).on('close', () => {
+      const artifact = new Artifact(this._context, zipFileName);
+      artifact.reportFinished();
+      result.resolve(artifact);
+    });
+    return result;
   }
 
   async _captureSnapshot(name: 'before' | 'after' | 'action' | 'event', sdkObject: SdkObject, metadata: CallMetadata, element?: ElementHandle) {
 
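A condensed sketch of the server-side entry collection introduced above, with simplified names (createGuid is replaced by Node's randomUUID here, and the state object is flattened into plain parameters): the network file is still being appended to by the HAR tracer, so it is snapshotted first, and only file paths are handed back when the client asked to skip server-side compression.

import fs from 'fs';
import path from 'path';
import { randomUUID } from 'crypto';

type NameValue = { name: string, value: string };

async function collectEntries(traceFile: string, networkFile: string, resourcesDir: string, sha1s: string[]): Promise<NameValue[]> {
  // Snapshot the live HAR/network file so later writes do not change the exported copy.
  const networkSnapshot = path.join(path.dirname(networkFile), randomUUID());
  await fs.promises.copyFile(networkFile, networkSnapshot);

  const entries: NameValue[] = [
    { name: 'trace.trace', value: traceFile },
    { name: 'trace.network', value: networkSnapshot },
  ];
  for (const sha1 of sha1s)
    entries.push({ name: path.join('resources', sha1), value: path.join(resourcesDir, sha1) });
  return entries;
}
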
@@ -61,8 +61,10 @@ export const SourceTab: React.FunctionComponent<{
       value = stackInfo;
     } else {
       const filePath = stackInfo.frames[selectedFrame].file;
-      if (!stackInfo.fileContent.has(filePath))
-        stackInfo.fileContent.set(filePath, await fetch(`file?path=${filePath}`).then(response => response.text()).catch(e => `<Unable to read "${filePath}">`));
+      if (!stackInfo.fileContent.has(filePath)) {
+        const sha1 = await calculateSha1(filePath);
+        stackInfo.fileContent.set(filePath, await fetch(`sha1/src@${sha1}.txt`).then(response => response.text()).catch(e => `<Unable to read "${filePath}">`));
+      }
       value = stackInfo.fileContent.get(filePath)!;
     }
     return value;

@@ -83,3 +85,15 @@ export const SourceTab: React.FunctionComponent<{
     <StackTraceView action={action} selectedFrame={selectedFrame} setSelectedFrame={setSelectedFrame}></StackTraceView>
   </SplitView>;
 };
+
+export async function calculateSha1(text: string): Promise<string> {
+  const buffer = new TextEncoder().encode(text);
+  const hash = await crypto.subtle.digest('SHA-1', buffer);
+  const hexCodes = [];
+  const view = new DataView(hash);
+  for (let i = 0; i < view.byteLength; i += 1) {
+    const byte = view.getUint8(i).toString(16).padStart(2, '0');
+    hexCodes.push(byte);
+  }
+  return hexCodes.join('');
+}
 
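The viewer change above works because both ends hash the same string, the absolute source path: the client names the zip entry resources/src@<sha1>.txt and the Source tab requests sha1/src@<sha1>.txt. A Node-side sketch of the equivalent hash, assuming the client's calculateSha1 from utils is a plain hex SHA-1 over that string (the example path is made up):

import crypto from 'crypto';

// Hex SHA-1 of the path string, not of the file contents.
function sha1Hex(text: string): string {
  return crypto.createHash('sha1').update(text).digest('hex');
}

const file = '/home/user/tests/example.spec.ts';
// Entry written into the trace zip by the client tracing code above...
const zipEntryName = 'resources/src@' + sha1Hex(file) + '.txt';
// ...and the URL the Source tab fetches for the same file.
const viewerUrl = `sha1/src@${sha1Hex(file)}.txt`;
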
@@ -288,7 +288,7 @@ export const test = _baseTest.extend<TestFixtures, WorkerAndFileFixtures>({
     context.setDefaultNavigationTimeout(navigationTimeout || actionTimeout || 0);
     if (captureTrace) {
       if (!(context.tracing as any)[kTracingStarted]) {
-        await context.tracing.start({ screenshots: true, snapshots: true });
+        await context.tracing.start({ screenshots: true, snapshots: true, sources: true } as any);
         (context.tracing as any)[kTracingStarted] = true;
       } else {
         await context.tracing.startChunk();
 
@@ -146,7 +146,7 @@ export const playwrightFixtures: Fixtures<PlaywrightTestOptions & PlaywrightTest
       contexts.set(context, { closed: false });
       context.on('close', () => contexts.get(context).closed = true);
       if (trace)
-        await context.tracing.start({ screenshots: true, snapshots: true });
+        await context.tracing.start({ screenshots: true, snapshots: true, sources: true } as any);
       (context as any)._csi = {
         onApiCallBegin: (apiCall: string, stackTrace: ParsedStackTrace | null) => {
           if (apiCall.startsWith('expect.'))
 
@@ -110,7 +110,7 @@ const test = playwrightTest.extend<{ showTraceViewer: (trace: string) => Promise
   runAndTrace: async ({ context, showTraceViewer }, use, testInfo) => {
     await use(async (body: () => Promise<void>) => {
       const traceFile = testInfo.outputPath('trace.zip');
-      await context.tracing.start({ snapshots: true, screenshots: true });
+      await context.tracing.start({ snapshots: true, screenshots: true, sources: true } as any);
       await body();
       await context.tracing.stop({ path: traceFile });
       return showTraceViewer(traceFile);
@@ -124,7 +124,7 @@ let traceFile: string;
 
 test.beforeAll(async function recordTrace({ browser, browserName, browserType, server }, workerInfo) {
   const context = await browser.newContext();
-  await context.tracing.start({ name: 'test', screenshots: true, snapshots: true });
+  await context.tracing.start({ name: 'test', screenshots: true, snapshots: true, sources: true } as any);
   const page = await context.newPage();
   await page.goto('data:text/html,<html>Hello world</html>');
   await page.setContent('<button>Click</button>');