@@ -20,7 +20,7 @@ export interface PlatformConfigType {
minFrameRate?: number;
}

-export interface InitConfigType extends DisplayMediaStreamConstraints {
+export interface InitConfigType extends MediaStreamConstraints {
uploadUrl?: string;
resolution?: ResolutionType;
autoDownload?: boolean;
@@ -28,6 +28,7 @@ export interface InitConfigType extends DisplayMediaStreamConstraints {
platform?: PlatformType;
config?: PlatformConfigType;
debug?: boolean;
+  disabledAudio?: boolean;
}
export enum RecorderStatusType {
init = 0,
@@ -36,11 +37,19 @@ export enum RecorderStatusType {
end = 3,
}

+interface CustomMediaStreamConstraints extends MediaStreamConstraints {
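+  // Chromium-specific getDisplayMedia hints ('exclude' keeps the current tab and system audio
+  // out of the capture picker); not yet in the standard DOM typings, hence this local interface.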
+  selfBrowserSurface?: string,
+  systemAudio?: string,
+}
export class BasicSimaqRecorder extends EventEmitter {
- displayMediaStreamConstraints: DisplayMediaStreamConstraints = {
+ displayMediaStreamConstraints: CustomMediaStreamConstraints = {
video: videoConstraints.getValue(),
// audio: audioConstraints.getValue(),
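+    // Chromium-only getDisplayMedia hint that offers the current tab as the primary capture choice.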
+    preferCurrentTab: true,
audio: false,
+    // selfBrowserSurface: 'exclude',
+    systemAudio: 'exclude'
+
};
private isStartRecoding = false;
private stream: MediaStream;
@@ -56,18 +65,22 @@ export class BasicSimaqRecorder extends EventEmitter {
private canvasId: string;
private platformConfig: PlatformConfigType;
private chromeMediaSourceId: string | null;
-
+  private disabledAudio = false;
+  private debug = false;
constructor(arg: InitConfigType) {
super();
- console.log('arg', arg);
+
this.autoDownload = arg.autoDownload;
this.platform = arg.platform;
this.platformConfig = arg.config;
this.uploadUrl = arg.uploadUrl;
+    this.disabledAudio = arg.disabledAudio;
+    this.debug = arg.debug;
this.initParams(arg);
videoConstraints.subscribe((value) => {
- console.log('subscribe', value);
+      this.debug && console.log('subscribe', value);
});
+    this.debug && console.log('arg', arg);
}
private sleep = (ms) => new Promise((r) => setTimeout(r, ms));

@@ -107,7 +120,7 @@ export class BasicSimaqRecorder extends EventEmitter {
public async startRecord(): Promise<void> {
try {
if (!this.isStartRecoding) {
- console.log('开始录屏!', isSupport());
+        this.debug && console.log('开始录屏!', isSupport());
if (!isSupport()) {
console.error('当前浏览器不支持录屏或不存在https环境');
return;
@@ -119,7 +132,7 @@ export class BasicSimaqRecorder extends EventEmitter {

const media = await this.getDefaultMedia();

- console.log('media', media);
+      this.debug && console.log('media', media);
if (media) {
this.emit('startRecord');
this.isStartRecoding = true;
@@ -133,7 +146,7 @@ export class BasicSimaqRecorder extends EventEmitter {
await this.createMediaRecoder();
this.mediaRecorder.start();
this.stream.getVideoTracks()[0].onended = () => {
- console.log('stop-share');
+          this.debug && console.log('stop-share');
this.endRecord();
};
}
@@ -170,8 +183,7 @@ export class BasicSimaqRecorder extends EventEmitter {
if (audioInput) {
this.audioInput = audioInput;
}
- console.log('audioInput', audioInput);
- console.log('this.canvasElement', this.canvasElement);
+      this.debug && console.log('audioInput', audioInput);
const stream = this.canvasElement.captureStream(30);
if (stream) {
return resolve(stream);
@@ -206,7 +218,7 @@ export class BasicSimaqRecorder extends EventEmitter {
this.platformConfig.minFrameRate || 30,
},
};
- console.log('videoConfig', videoConfig);
+      this.debug && console.log('videoConfig', videoConfig);
const res = await navigator.mediaDevices.getUserMedia({
audio: false,
video: videoConfig,
@@ -227,9 +239,9 @@ export class BasicSimaqRecorder extends EventEmitter {
if (audioInput) {
this.audioInput = audioInput;
}
- console.log('audioInput', audioInput);
+      this.debug && console.log('audioInput', audioInput);
if (navigator.mediaDevices.getDisplayMedia) {
- console.log(
+      this.debug && console.log(
'displayMediaStreamConstraints',
this.displayMediaStreamConstraints,
);
@@ -313,6 +325,11 @@ export class BasicSimaqRecorder extends EventEmitter {
}
});
}
+
+  public async setMuteMode() {
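+    // Not implemented yet.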
+
+  }
+
private streamStop(): void {
if (this.stream) {
this.stream.getTracks().forEach((track) => track.stop());
@@ -327,44 +344,71 @@ export class BasicSimaqRecorder extends EventEmitter {

private async createMediaRecoder() {
// let mergeSteam: MediaStream;
- let audioTrack: MediaStreamTrack, videoTrack: MediaStreamTrack;
- if (this.audioInput) {
- [videoTrack] = this.stream.getVideoTracks();
- [audioTrack] = this.audioInput.getAudioTracks();
- this.stream = new MediaStream([videoTrack, audioTrack]);
- }
- const globalAudioInstance = (window as any).Howler;
- if (globalAudioInstance?.ctx) {
- const streamDest =
- globalAudioInstance.ctx.createMediaStreamDestination();
- globalAudioInstance.masterGain.connect(streamDest);
- console.log('streamDest', streamDest);
- [videoTrack] = this.stream.getVideoTracks();
- [audioTrack] = (streamDest.stream as MediaStream).getAudioTracks();
- console.log('audioTrack', audioTrack);
- this.stream = new MediaStream([videoTrack, audioTrack]);
+    try {
+      let audioTrack: MediaStreamTrack, videoTrack: MediaStreamTrack;
+      if (this.audioInput) {
+        [videoTrack] = this.stream.getVideoTracks();
+        [audioTrack] = this.audioInput.getAudioTracks();
+        this.stream = new MediaStream([videoTrack, audioTrack]);
+      }
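+      // If a global Howler.js instance is present (and audio capture is not disabled),
+      // route its master output into a MediaStreamDestination so in-page audio is recorded.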
+      const globalAudioInstance = (window as any).Howler;
+      if (globalAudioInstance?.ctx && !this.disabledAudio) {
+        const streamAudio =
+          globalAudioInstance.ctx.createMediaStreamDestination();
+        globalAudioInstance.masterGain.connect(streamAudio);
+        this.debug && console.log('streamAudio', streamAudio);
+        (window as any).streamAudio = streamAudio;
+        [videoTrack] = this.stream.getVideoTracks();
+        [audioTrack] = (streamAudio.stream as MediaStream).getAudioTracks();
+        this.debug && console.log('audioTrack', audioTrack);
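+        // Stop the recorder if the captured audio track ends.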
+        audioTrack.onended = (event) => {
+          this.debug && console.log('audioTrack-end', event);
+          this.mediaRecorder.stop();
+        };
+        this.stream = new MediaStream([videoTrack, audioTrack]);
+      }
+
+      const mediaRecorder = new MediaRecorder(this.stream, {
+        mimeType: 'video/webm;codecs=vp9',
+        audioBitsPerSecond: videoConstraints.value.audioBitsPerSecond,
+        videoBitsPerSecond: videoConstraints.value.videoBitsPerSecond,
+      });
+      this.mediaRecorder = mediaRecorder;
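+      // Expose the recorder on window for manual inspection while debugging.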
+      // if (this.debug) {
+      (window as any).__SIMAQ__ = mediaRecorder;
+      // }
+
+      this.mediaRecorder.ondataavailable = (event) => {
+        this.recordChunks.push(event.data);
+        this.emit(
+          'record',
+          new Blob([event.data], {
+            type: 'video/webm; codecs=webm',
+          }),
+        );
+      };
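+      // Note: stop() is overridden with a no-op; the auto-download on stop stays commented out.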
+      this.mediaRecorder.stop = () => {
+        // setTimeout(() => {
+        // this.handleAutoDownload();
+        // }, 1000);
+      };
+      this.mediaRecorder.onerror = (event: ErrorEvent) => {
+        console.error(`onerror stream: ${event.error.name},`, event);
+      };
+      this.mediaRecorder.onstop = (event) => {
+        console.warn(`onstop stream:`, event);
+      };
+      this.mediaRecorder.onpause = (event) => {
+        console.warn(`onpause stream: `, event);
+      };
+      this.mediaRecorder.onresume = (event) => {
+        console.warn(`onresume stream: `, event);
+      };
+
+    } catch (error) {
+      console.error('error', error);
}

- const mediaRecorder = new MediaRecorder(this.stream, {
- mimeType: 'video/webm;codecs=vp9',
- audioBitsPerSecond: videoConstraints.value.audioBitsPerSecond,
- videoBitsPerSecond: videoConstraints.value.videoBitsPerSecond,
- });
- this.mediaRecorder = mediaRecorder;
- this.mediaRecorder.ondataavailable = (event) => {
- this.recordChunks.push(event.data);
- this.emit(
- 'record',
- new Blob([event.data], {
- type: 'video/webm; codecs=webm',
- }),
- );
- };
- this.mediaRecorder.stop = () => {
- // setTimeout(() => {
- // this.handleAutoDownload();
- // }, 1000);
- };
}

private handleAutoDownload(chunks: Blob[]): void {