refactor(capture): fix delay before completion

wryk 2019-03-15 20:02:20 +01:00
parent b403a78437
commit b741779265

@@ -1,9 +1,6 @@
 import EventEmitter from 'eventemitter3'
-
-import {
-	makeRectangle,
-	crop
-} from '/services/rectangle.js'
+import pEvent from 'p-event'
+import { makeRectangle, crop } from '/services/rectangle.js'
 
 import {
 	GIF_WIDTH,
@@ -11,41 +8,46 @@ import {
 	GIF_FRAME_RATE
 } from '/constants.js'
 
 export function capture (mediaStream, duration) {
 	const emitter = new EventEmitter()
 
-	Promise.resolve().then(() => {
-		const video = document.createElement('video')
-		video.autoplay = true
-		video.setAttribute('playsinline', '')
-		video.setAttribute('webkit-playsinline', '')
+	Promise.resolve().then(async () => {
+		const delayTime = 1000 / GIF_FRAME_RATE
+		const totalFrames = duration / 1000 * GIF_FRAME_RATE
+
+		// Well, this is a very low frame rate or very short duration clip
+		if (totalFrames < 1) {
+			emitter.emit('done', {
+				imageWidth: GIF_WIDTH,
+				imageHeight: GIF_HEIGHT,
+				imageDataList: [],
+				delayTime
+			})
+			return
+		}
+
+		const imageDataList = []
 
 		const canvas = document.createElement('canvas')
 		canvas.width = GIF_WIDTH
 		canvas.height = GIF_HEIGHT
 
 		const canvasContext = canvas.getContext('2d')
 
-		const totalFrames = duration / 1000 * GIF_FRAME_RATE
-
-		if (totalFrames < 1) {
-			resolve([])
-		}
-
-		const delayTime = 1000 / GIF_FRAME_RATE
-
-		video.srcObject = mediaStream
-
-		video.addEventListener('canplay', () => {
-			const soureRectangle = crop(makeRectangle(0, 0, video.videoWidth, video.videoHeight))
 		const destinationRectangle = makeRectangle(0, 0, canvas.width, canvas.height)
 
-			const imageDataList = []
+		const video = document.createElement('video')
+		video.setAttribute('playsinline', '')
+		video.setAttribute('webkit-playsinline', '')
+		video.srcObject = mediaStream
+		video.play()
 
-			const intervalId = setInterval(() => {
-				if (imageDataList.length < totalFrames) {
+		await pEvent(video, 'canplaythrough')
+
+		const soureRectangle = crop(makeRectangle(0, 0, video.videoWidth, video.videoHeight))
+
+		step()
+
+		function step () {
 			canvasContext.drawImage(
 				video,
 				soureRectangle.x,
@@ -68,9 +70,10 @@ export function capture (mediaStream, duration) {
 			imageDataList.push(imageData)
 			emitter.emit('progress', imageDataList.length / totalFrames)
-				} else {
-					clearInterval(intervalId)
+
+			if (imageDataList.length < totalFrames) {
+				setTimeout(step, delayTime)
+			} else {
 				emitter.emit('done', {
 					imageDataList,
 					imageWidth: GIF_WIDTH,
@@ -78,8 +81,7 @@ export function capture (mediaStream, duration) {
 					delayTime
 				})
 			}
-			}, delayTime)
-		})
+		}
 	})
 		.catch(error => emitter.emit('error', error))
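
In the old setInterval loop, 'done' was only emitted on the tick after the last frame had been drawn, so completion always lagged by one extra interval; after this change each draw schedules the next one with setTimeout and the frame count is checked right after the push, so 'done' fires as soon as the final frame is captured. For context, here is a minimal caller-side sketch of the capture() API as it stands after this commit. The import path and the caller names (recordGif, the 3000 ms duration, the getUserMedia source) are assumptions for illustration; only the emitted events ('progress', 'done', 'error') and the shape of the done payload come from the code above.

// Hypothetical caller — not part of this commit.
import { capture } from '/services/capture.js' // path assumed

async function recordGif () {
	// A webcam stream is one possible source for the mediaStream argument.
	const mediaStream = await navigator.mediaDevices.getUserMedia({ video: true })

	const emitter = capture(mediaStream, 3000) // capture roughly 3 seconds of frames

	emitter.on('progress', ratio => {
		console.log(`captured ${Math.round(ratio * 100)}%`)
	})

	emitter.on('error', error => {
		console.error('capture failed', error)
	})

	emitter.on('done', ({ imageDataList, imageWidth, imageHeight, delayTime }) => {
		// One ImageData per frame; delayTime is the per-frame delay in ms
		// (1000 / GIF_FRAME_RATE), ready to hand to a GIF encoder.
		console.log(`${imageDataList.length} frames at ${imageWidth}x${imageHeight}, ${delayTime}ms per frame`)
	})
}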