@@ -39,7 +39,8 @@ export enum RecorderStatusType {
 export class BasicSimaqRecorder extends EventEmitter {
   displayMediaStreamConstraints: DisplayMediaStreamConstraints = {
     video: videoConstraints.getValue(),
-    audio: audioConstraints.getValue(),
+    // audio: audioConstraints.getValue(),
+    audio: false,
   };
   private isStartRecoding = false;
   private stream: MediaStream;
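Note: with `audio: false`, getDisplayMedia no longer requests tab/system audio; capture audio is assembled later from the mic and the Web Audio graph (see the hunks below). If display-capture audio is ever wanted again, a rough sketch of the constraint shape (reusing `videoConstraints` from this class; not part of this change):

```ts
// Sketch only: getDisplayMedia also accepts audio constraints where the browser
// supports tab/system audio capture (Chromium does; other engines may ignore it).
const displayMediaStreamConstraints: DisplayMediaStreamConstraints = {
  video: videoConstraints.getValue(), // assumed helper, as in the class above
  audio: true, // or a MediaTrackConstraints object; ignored where unsupported
};
```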
@@ -129,7 +130,7 @@ export class BasicSimaqRecorder extends EventEmitter {
     // console.log('video', video);
     video.srcObject = media;
     this.stream = media;
-    this.createMediaRecoder();
+    await this.createMediaRecoder();
     this.mediaRecorder.start();
     this.stream.getVideoTracks()[0].onended = () => {
       console.log('stop-share');
@@ -228,6 +229,10 @@ export class BasicSimaqRecorder extends EventEmitter {
     }
     console.log('audioInput', audioInput);
     if (navigator.mediaDevices.getDisplayMedia) {
+      console.log(
+        'displayMediaStreamConstraints',
+        this.displayMediaStreamConstraints,
+      );
       const res = await navigator.mediaDevices.getDisplayMedia(
         this.displayMediaStreamConstraints,
       );
@@ -243,13 +248,21 @@ export class BasicSimaqRecorder extends EventEmitter {
   private async getDeaultAudio(): Promise<MediaStream> {
     return new Promise(async (resolve) => {
       try {
+        const audioConfig = {
+          echoCancellation: true,
+          autoGainControl: true,
+          noiseSuppression: true,
+          latency: 0,
+        };
+        console.log('audioConfig', audioConfig);
         if (navigator.mediaDevices.getUserMedia) {
           const res = await navigator.mediaDevices.getUserMedia({
-            audio: true,
+            audio: audioConfig,
             video: false,
           });
           return resolve(res);
         }
+
         return resolve(null);
       } catch (error) {
         return resolve(null);
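The `new Promise(async (resolve) => …)` wrapper in `getDeaultAudio` isn't needed for this control flow; a minimal sketch of the same fall-back-to-null behavior as a plain async method (return type widened to `MediaStream | null`, same `audioConfig` as added above):

```ts
// Sketch only: same behavior without wrapping an async callback in a new Promise.
private async getDeaultAudio(): Promise<MediaStream | null> {
  if (!navigator.mediaDevices?.getUserMedia) return null;
  const audioConfig = {
    echoCancellation: true,
    autoGainControl: true,
    noiseSuppression: true,
    latency: 0, // hint only; browsers ignore constraint keys they don't recognize
  };
  try {
    return await navigator.mediaDevices.getUserMedia({
      audio: audioConfig,
      video: false,
    });
  } catch {
    return null;
  }
}
```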
@@ -312,8 +325,7 @@ export class BasicSimaqRecorder extends EventEmitter {
     }
   }
 
-  private createMediaRecoder(): void {
-    console.log('video-flag', videoConstraints.value);
+  private async createMediaRecoder() {
     // let mergeSteam: MediaStream;
     let audioTrack: MediaStreamTrack, videoTrack: MediaStreamTrack;
     if (this.audioInput) {
@@ -321,8 +333,20 @@ export class BasicSimaqRecorder extends EventEmitter {
       [audioTrack] = this.audioInput.getAudioTracks();
       this.stream = new MediaStream([videoTrack, audioTrack]);
     }
+    const globalAudioInstance = (window as any).Howler;
+    if (globalAudioInstance?.ctx) {
+      const streamDest =
+        globalAudioInstance.ctx.createMediaStreamDestination();
+      globalAudioInstance.masterGain.connect(streamDest);
+      console.log('streamDest', streamDest);
+      [videoTrack] = this.stream.getVideoTracks();
+      [audioTrack] = (streamDest.stream as MediaStream).getAudioTracks();
+      console.log('audioTrack', audioTrack);
+      this.stream = new MediaStream([videoTrack, audioTrack]);
+    }
+
     const mediaRecorder = new MediaRecorder(this.stream, {
-      mimeType: 'video/webm;codecs=vp9,opus',
+      mimeType: 'video/webm;codecs=vp9',
       audioBitsPerSecond: videoConstraints.value.audioBitsPerSecond,
       videoBitsPerSecond: videoConstraints.value.videoBitsPerSecond,
     });
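As written, the Howler branch replaces `audioTrack`, so a microphone track picked up just above no longer ends up in the recorded stream. If both sources should be recorded, a hedged sketch that mixes them through Howler's AudioContext (assumes Howler's Web Audio backend, i.e. `Howler.ctx` and `Howler.masterGain` exist):

```ts
// Sketch only: route in-app audio (Howler) and the mic into one destination node
// so the recorded stream keeps a single, mixed audio track.
const ctx: AudioContext = (window as any).Howler.ctx;
const dest = ctx.createMediaStreamDestination();
(window as any).Howler.masterGain.connect(dest); // in-app audio
if (this.audioInput) {
  ctx.createMediaStreamSource(this.audioInput).connect(dest); // microphone
}
this.stream = new MediaStream([
  this.stream.getVideoTracks()[0],
  dest.stream.getAudioTracks()[0],
]);
```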
@@ -332,7 +356,7 @@ export class BasicSimaqRecorder extends EventEmitter {
       this.emit(
         'record',
         new Blob([event.data], {
-          type: 'video/mp4; codecs=h264',
+          type: 'video/webm; codecs=webm',
        }),
       );
     };
@@ -345,7 +369,7 @@ export class BasicSimaqRecorder extends EventEmitter {
 
   private handleAutoDownload(chunks: Blob[]): void {
     const downloadBlob = new Blob(chunks, {
-      type: 'video/mp4; codecs=h264',
+      type: 'video/webm; codecs=webm',
     });
     const url = URL.createObjectURL(downloadBlob);
     const a: HTMLAnchorElement = document.createElement('a');
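`codecs=webm` is not a registered WebM codec string, so most consumers will only honor the `video/webm` container part of these Blob types. A minimal sketch (standard MediaRecorder APIs, not part of this change) that negotiates a supported mimeType once and reuses the recorder's own value when building Blobs:

```ts
// Sketch only: pick a supported container/codec combination up front, then derive
// Blob types from recorder.mimeType instead of hard-coding a codec string.
const preferredTypes = [
  'video/webm;codecs=vp9,opus',
  'video/webm;codecs=vp8,opus',
  'video/webm',
];
const mimeType =
  preferredTypes.find((t) => MediaRecorder.isTypeSupported(t)) ?? 'video/webm';

declare const stream: MediaStream; // stands in for the capture stream assembled above

const recorder = new MediaRecorder(stream, { mimeType });
recorder.ondataavailable = (event) => {
  // recorder.mimeType reflects what is actually being produced.
  const chunk = new Blob([event.data], { type: recorder.mimeType });
  // ...emit or collect `chunk` as the class does with its 'record' event
};
```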