test(screencast): more tests on Chromium, new seek impl (#3699)
yury-s authored Aug 31, 2020
1 parent fcc1680 commit f23dbfb
Showing 2 changed files with 113 additions and 69 deletions.
56 changes: 56 additions & 0 deletions test/assets/player.html
@@ -0,0 +1,56 @@
<script>
async function playToTheEnd() {
  const video = document.querySelector('video');
  const result = new Promise(r => video.onended = r);
  video.play();
  return await result;
}

async function playOneFrame() {
  const video = document.querySelector('video');
  const result = new Promise(r => video.onpause = r);
  video.ontimeupdate = () => {
    video.pause();
    video.ontimeupdate = null;
  };
  video.play();
  return await result;
}

async function playNFrames(n) {
  for (let i = 0; i < n; i++)
    await playOneFrame();
}

async function countFrames() {
  const video = document.querySelector('video');

  if (!video.duration)
    return 0;

  if (video.currentTime)
    await playToTheEnd();

  let count = 0;
  while (true) {
    ++count;
    await playOneFrame();
    if (video.ended)
      break;
  }
  return count;
}

async function seekLastFrame() {
  const frameCount = await countFrames();
  await playNFrames(frameCount);
  return frameCount;
}

</script>
<body>
  <video controls>
    <source src="v.webm" type="video/webm">
    Your browser does not support HTML video.
  </video>
</body>
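
Aside (not part of the commit): player.html carries the new seek implementation. Rather than assigning video.currentTime and waiting for onseeked, as the old VideoPlayer code below did, playOneFrame() plays until the next timeupdate and then pauses, and seekLastFrame() counts the frames and steps through every one of them, leaving the video paused on its last frame. Below is a minimal sketch of how these helpers could be driven from the Node side of a Playwright test; the lastFramePixel helper name and the assumption that `page` has already navigated to player.html are illustrative, not taken from the commit.

import type { Page } from 'playwright';

// Sketch only: sample the top-left pixel of the recording's last frame.
// Assumes `page` has loaded player.html with the recorded v.webm served next to it.
async function lastFramePixel(page: Page): Promise<{ r: number; g: number; b: number }> {
  // seekLastFrame() counts the frames, then plays through all of them,
  // leaving the <video> paused on its final frame.
  const frameCount = await page.evaluate(() => (window as any).seekLastFrame());
  if (!frameCount)
    throw new Error('Video has no frames');
  // Draw the current frame onto a canvas and read one pixel back.
  return await page.$eval('video', (video: HTMLVideoElement) => {
    const canvas = document.createElement('canvas');
    canvas.width = video.videoWidth;
    canvas.height = video.videoHeight;
    const context = canvas.getContext('2d')!;
    context.drawImage(video, 0, 0);
    const [r, g, b] = context.getImageData(0, 0, 1, 1).data;
    return { r, g, b };
  });
}

Stepping through frames with play()/pause() avoids relying on seek precision; the spec change below switches to this approach through the new VideoPlayer.seekLastFrame() wrapper.
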
126 changes: 57 additions & 69 deletions test/screencast.spec.ts
@@ -102,21 +102,7 @@ class VideoPlayer {
      this._server.serveFile(req, response, videoFile);
    });

    await this._page.goto(this._server.PREFIX + servertPath);
    await this._page.$eval('video', (v: HTMLVideoElement) => {
      return new Promise(fulfil => {
        // In case video playback autostarts.
        v.pause();
        v.onplaying = fulfil;
        v.play();
      });
    });
    await this._page.$eval('video', (v: HTMLVideoElement) => {
      v.pause();
      const result = new Promise(f => v.onseeked = f);
      v.currentTime = v.duration;
      return result;
    });
    await this._page.goto(this._server.PREFIX + '/player.html');
  }

  async duration() {
@@ -131,36 +117,31 @@ class VideoPlayer {
    return await this._page.$eval('video', (v: HTMLVideoElement) => v.videoHeight);
  }

  async seek(timestamp) {
    await this._page.$eval('video', (v: HTMLVideoElement, timestamp) => {
      v.pause();
      const result = new Promise(f => v.onseeked = f);
      v.currentTime = timestamp;
      return result;
    }, timestamp);
  }

  async seekFirstNonEmptyFrame() {
    let time = 0;
    for (let i = 0; i < 10; i++) {
      await this.seek(time);
    await this._page.evaluate(async () => await (window as any).playToTheEnd());
    while (true) {
      await this._page.evaluate(async () => await (window as any).playOneFrame());
      const ended = await this._page.$eval('video', (video: HTMLVideoElement) => video.ended);
      if (ended)
        throw new Error('All frames are empty');
      const pixels = await this.pixels();
      if (!pixels.every(p => p === 255))
        return;
      time += 0.1;
    }
  }

  async seekLastNonEmptyFrame() {
    const duration = await this.duration();
    let time = duration - 0.01;
    for (let i = 0; i < 10; i++) {
      await this.seek(time);
      const pixels = await this.pixels();
      if (!pixels.every(p => p === 0))
        return;
      time -= 0.1;
    }
  async countFrames() {
    return await this._page.evaluate(async () => await (window as any).countFrames());
  }
  async currentTime() {
    return await this._page.$eval('video', (v: HTMLVideoElement) => v.currentTime);
  }
  async playOneFrame() {
    return await this._page.evaluate(async () => await (window as any).playOneFrame());
  }

  async seekLastFrame() {
    return await this._page.evaluate(async () => await (window as any).seekLastFrame());
  }

  async pixels(point = {x: 0, y: 0}) {
@@ -179,38 +160,41 @@ class VideoPlayer {
  }
}

it('should capture static page', test => {
  test.skip(options.WIRE);
}, async ({page, tmpDir, videoPlayer, toImpl}) => {
  const videoFile = path.join(tmpDir, 'v.webm');
  await page.evaluate(() => document.body.style.backgroundColor = 'red');
  await toImpl(page)._delegate.startScreencast({outputFile: videoFile, width: 640, height: 480});
  // TODO: in WebKit figure out why video size is not reported correctly for
  // static pictures.
  if (options.HEADLESS && options.WEBKIT)
    await page.setViewportSize({width: 1270, height: 950});
  await new Promise(r => setTimeout(r, 300));
  await toImpl(page)._delegate.stopScreencast();
  expect(fs.existsSync(videoFile)).toBe(true);

  await videoPlayer.load(videoFile);
  const duration = await videoPlayer.duration();
  expect(duration).toBeGreaterThan(0);

  expect(await videoPlayer.videoWidth()).toBe(640);
  expect(await videoPlayer.videoHeight()).toBe(480);

  await videoPlayer.seekLastNonEmptyFrame();
  const pixels = await videoPlayer.pixels();
  expectAll(pixels, almostRed);
});

describe('screencast', suite => {
  suite.skip(options.WIRE || options.CHROMIUM);
  suite.slow();
  suite.skip(options.WIRE);
}, () => {
  it('should capture static page', test => {
    test.flaky(options.CHROMIUM && LINUX && !options.HEADLESS);
    test.flaky(options.WEBKIT && LINUX);
  }, async ({page, tmpDir, videoPlayer, toImpl}) => {
    const videoFile = path.join(tmpDir, 'v.webm');
    await page.evaluate(() => document.body.style.backgroundColor = 'red');
    await toImpl(page)._delegate.startScreencast({outputFile: videoFile, width: 640, height: 480});
    // TODO: in WebKit figure out why video size is not reported correctly for
    // static pictures.
    if (options.HEADLESS && options.WEBKIT)
      await page.setViewportSize({width: 1270, height: 950});
    await new Promise(r => setTimeout(r, 300));
    await toImpl(page)._delegate.stopScreencast();
    expect(fs.existsSync(videoFile)).toBe(true);

    await videoPlayer.load(videoFile);
    const duration = await videoPlayer.duration();
    expect(duration).toBeGreaterThan(0);

    expect(await videoPlayer.videoWidth()).toBe(640);
    expect(await videoPlayer.videoHeight()).toBe(480);

    await videoPlayer.seekLastFrame();
    const pixels = await videoPlayer.pixels();
    expectAll(pixels, almostRed);
  });

  it('should capture navigation', test => {
    test.flaky(options.CHROMIUM && MAC);
    test.flaky(options.FIREFOX && LINUX && !options.HEADLESS);
    test.flaky(options.WEBKIT);
    test.flaky(options.FIREFOX);
  }, async ({page, tmpDir, server, videoPlayer, toImpl}) => {
    const videoFile = path.join(tmpDir, 'v.webm');
    await page.goto(server.PREFIX + '/background-color.html#rgb(0,0,0)');
@@ -236,12 +220,17 @@ describe('screencast', suite => {
    }

    {
      await videoPlayer.seekLastNonEmptyFrame();
      await videoPlayer.seekLastFrame();
      const pixels = await videoPlayer.pixels();
      expectAll(pixels, almostGrey);
    }
  });
});

describe('screencast', suite => {
  suite.slow();
  suite.skip(options.WIRE || options.CHROMIUM);
}, () => {
  it('should capture css transformation', test => {
    test.fixme(options.WEBKIT && WIN, 'Accelerated compositing is disabled in WebKit on Windows.');
    test.flaky(options.WEBKIT && LINUX);
@@ -262,14 +251,13 @@ describe('screencast', suite => {
    expect(duration).toBeGreaterThan(0);

    {
      await videoPlayer.seekLastNonEmptyFrame();
      await videoPlayer.seekLastFrame();
      const pixels = await videoPlayer.pixels({x: 95, y: 45});
      expectAll(pixels, almostRed);
    }
  });

  it('should fire start/stop events when page created/closed', test => {
    test.slow();
    test.flaky(options.FIREFOX, 'Even slow is not slow enough');
  }, async ({browser, tmpDir, toImpl}) => {
    // Use server side of the context. All the code below also uses server side APIs.