Refactor MixerState (#94)
* Refactor MixerState This extracts the actual audio handling out from MixerState into a separate module with two classes, Player and AudioEngine, because separation of concerns good and 1,000 line file bad. * Remove unnecessary module * Fix mic calibration * rename engine to audioEngine * Fix a butchered merge * remove a bunch of unused imports
This commit is contained in:
parent
c33ca55d48
commit
5ece897b2e
11 changed files with 453 additions and 413 deletions
|
@ -47,6 +47,7 @@
|
|||
"eslint-plugin-jsx-a11y": "6.2.3",
|
||||
"eslint-plugin-react": "7.14.3",
|
||||
"eslint-plugin-react-hooks": "^1.6.1",
|
||||
"eventemitter3": "^4.0.0",
|
||||
"fetch-progress": "github:UniversityRadioYork/fetch-progress",
|
||||
"file-loader": "3.0.1",
|
||||
"fs-extra": "7.0.1",
|
||||
|
@ -91,6 +92,7 @@
|
|||
"sass-loader": "7.2.0",
|
||||
"sdp-transform": "^2.14.0",
|
||||
"semver": "6.3.0",
|
||||
"strict-event-emitter-types": "^2.0.0",
|
||||
"style-loader": "1.0.0",
|
||||
"terser-webpack-plugin": "1.4.1",
|
||||
"ts-pnp": "1.1.4",
|
||||
|
|
|
@ -2,11 +2,11 @@ import SdpTransform from "sdp-transform";
|
|||
import * as later from "later";
|
||||
|
||||
import * as BroadcastState from "./state";
|
||||
import * as MixerState from "../mixer/state";
|
||||
|
||||
import { Streamer, ConnectionStateEnum } from "./streamer";
|
||||
import { Dispatch } from "redux";
|
||||
import { broadcastApiRequest } from "../api";
|
||||
import { audioEngine } from "../mixer/audio";
|
||||
|
||||
type StreamerState = "HELLO" | "OFFER" | "ANSWER" | "CONNECTED";
|
||||
|
||||
|
@ -121,7 +121,7 @@ export class WebRTCStreamer extends Streamer {
|
|||
if (now.getSeconds() < 45) {
|
||||
later.setTimeout(
|
||||
async () => {
|
||||
await MixerState.playNewsIntro();
|
||||
await audioEngine.playNewsIntro();
|
||||
},
|
||||
later.parse
|
||||
.recur()
|
||||
|
@ -134,7 +134,7 @@ export class WebRTCStreamer extends Streamer {
|
|||
if (now.getMinutes() <= 1 && now.getSeconds() < 55) {
|
||||
later.setTimeout(
|
||||
async () => {
|
||||
await MixerState.playNewsEnd();
|
||||
await audioEngine.playNewsEnd();
|
||||
},
|
||||
later.parse
|
||||
.recur()
|
||||
|
|
|
@ -6,6 +6,7 @@ import * as MixerState from "../mixer/state";
|
|||
import * as NavbarState from "../navbar/state";
|
||||
import { ConnectionStateEnum } from "./streamer";
|
||||
import { RecordingStreamer } from "./recording_streamer";
|
||||
import { audioEngine } from "../mixer/audio";
|
||||
|
||||
export let streamer: WebRTCStreamer | null = null;
|
||||
|
||||
|
@ -302,7 +303,10 @@ export const goOnAir = (): AppThunk => async (dispatch, getState) => {
|
|||
return;
|
||||
}
|
||||
console.log("starting streamer.");
|
||||
streamer = new WebRTCStreamer(MixerState.destination.stream, dispatch);
|
||||
streamer = new WebRTCStreamer(
|
||||
audioEngine.streamingDestination.stream,
|
||||
dispatch
|
||||
);
|
||||
streamer.addConnectionStateListener((state) => {
|
||||
dispatch(broadcastState.actions.setConnectionState(state));
|
||||
if (state === "CONNECTION_LOST") {
|
||||
|
@ -328,7 +332,7 @@ export const stopStreaming = (): AppThunk => async (dispatch) => {
|
|||
let recorder: RecordingStreamer;
|
||||
|
||||
export const startRecording = (): AppThunk => async (dispatch) => {
|
||||
recorder = new RecordingStreamer(MixerState.destination.stream);
|
||||
recorder = new RecordingStreamer(audioEngine.streamingDestination.stream);
|
||||
recorder.addConnectionStateListener((state) => {
|
||||
dispatch(broadcastState.actions.setRecordingState(state));
|
||||
});
|
||||
|
|
277
src/mixer/audio.ts
Normal file
277
src/mixer/audio.ts
Normal file
|
@ -0,0 +1,277 @@
|
|||
import EventEmitter from "eventemitter3";
|
||||
import StrictEmitter from "strict-event-emitter-types";
|
||||
|
||||
import WaveSurfer from "wavesurfer.js";
|
||||
import CursorPlugin from "wavesurfer.js/dist/plugin/wavesurfer.cursor.min.js";
|
||||
import RegionsPlugin from "wavesurfer.js/dist/plugin/wavesurfer.regions.min.js";
|
||||
import NewsEndCountdown from "../assets/audio/NewsEndCountdown.wav";
|
||||
import NewsIntro from "../assets/audio/NewsIntro.wav";
|
||||
|
||||
// Events emitted by a Player instance. The StrictEmitter wrapper below gives
// compile-time checking of event names and listener signatures on top of
// eventemitter3 (which is untyped at the event level).
interface PlayerEvents {
  // Fired once the track has finished loading; carries the duration in seconds.
  loadComplete: (duration: number) => void;
  // Fired on seek and during playback when the position changes (seconds).
  timeChange: (time: number) => void;
  play: () => void;
  pause: () => void;
  // Fired when playback reaches the end of the track.
  finish: () => void;
}

// Re-type EventEmitter as a strictly-typed emitter for PlayerEvents.
// The `as any` is required because StrictEmitter is a type-level wrapper
// only — there is no runtime class to extend.
const PlayerEmitter: StrictEmitter<
  EventEmitter,
  PlayerEvents
> = EventEmitter as any;
|
||||
|
||||
class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) {
|
||||
private constructor(
|
||||
private readonly engine: AudioEngine,
|
||||
private wavesurfer: WaveSurfer,
|
||||
private readonly waveform: HTMLElement
|
||||
) {
|
||||
super();
|
||||
}
|
||||
|
||||
get isPlaying() {
|
||||
return this.wavesurfer.isPlaying();
|
||||
}
|
||||
|
||||
get currentTime() {
|
||||
return this.wavesurfer.getCurrentTime();
|
||||
}
|
||||
|
||||
play() {
|
||||
return this.wavesurfer.play();
|
||||
}
|
||||
|
||||
pause() {
|
||||
return this.wavesurfer.pause();
|
||||
}
|
||||
|
||||
stop() {
|
||||
return this.wavesurfer.stop();
|
||||
}
|
||||
|
||||
redraw() {
|
||||
this.wavesurfer.drawBuffer();
|
||||
}
|
||||
|
||||
setIntro(duration: number) {
|
||||
this.wavesurfer.addRegion({
|
||||
id: "intro",
|
||||
resize: false,
|
||||
start: 0,
|
||||
end: duration,
|
||||
color: "rgba(125,0,255, 0.12)",
|
||||
});
|
||||
}
|
||||
|
||||
setVolume(val: number) {
|
||||
this.wavesurfer.setVolume(val);
|
||||
}
|
||||
|
||||
public static create(engine: AudioEngine, player: number, url: string) {
|
||||
let waveform = document.getElementById("waveform-" + player.toString());
|
||||
if (waveform == null) {
|
||||
throw new Error();
|
||||
}
|
||||
waveform.innerHTML = "";
|
||||
const wavesurfer = WaveSurfer.create({
|
||||
audioContext: engine.audioContext,
|
||||
container: "#waveform-" + player.toString(),
|
||||
waveColor: "#CCCCFF",
|
||||
progressColor: "#9999FF",
|
||||
backend: "MediaElementWebAudio",
|
||||
responsive: true,
|
||||
xhr: {
|
||||
credentials: "include",
|
||||
} as any,
|
||||
plugins: [
|
||||
CursorPlugin.create({
|
||||
showTime: true,
|
||||
opacity: 1,
|
||||
customShowTimeStyle: {
|
||||
"background-color": "#000",
|
||||
color: "#fff",
|
||||
padding: "2px",
|
||||
"font-size": "10px",
|
||||
},
|
||||
}),
|
||||
RegionsPlugin.create({}),
|
||||
],
|
||||
});
|
||||
|
||||
const instance = new this(engine, wavesurfer, waveform);
|
||||
|
||||
wavesurfer.on("ready", () => {
|
||||
console.log("ready");
|
||||
instance.emit("loadComplete", wavesurfer.getDuration());
|
||||
});
|
||||
wavesurfer.on("play", () => {
|
||||
instance.emit("play");
|
||||
});
|
||||
wavesurfer.on("pause", () => {
|
||||
instance.emit("pause");
|
||||
});
|
||||
wavesurfer.on("seek", () => {
|
||||
instance.emit("timeChange", wavesurfer.getCurrentTime());
|
||||
});
|
||||
wavesurfer.on("finish", () => {
|
||||
instance.emit("finish");
|
||||
});
|
||||
wavesurfer.on("audioprocess", () => {
|
||||
instance.emit("timeChange", wavesurfer.getCurrentTime());
|
||||
});
|
||||
|
||||
(wavesurfer as any).backend.gainNode.disconnect();
|
||||
(wavesurfer as any).backend.gainNode.connect(engine.finalCompressor);
|
||||
(wavesurfer as any).backend.gainNode.connect(
|
||||
engine.audioContext.destination
|
||||
);
|
||||
|
||||
wavesurfer.load(url);
|
||||
|
||||
return instance;
|
||||
}
|
||||
}
|
||||
|
||||
// Events emitted by the AudioEngine itself.
interface EngineEvents {
  // Fired once openMic() has acquired the device and wired it into the graph.
  micOpen: () => void;
}

// Strictly-typed emitter for EngineEvents; the `as any` is required because
// StrictEmitter is a compile-time-only wrapper around eventemitter3.
const EngineEmitter: StrictEmitter<
  EventEmitter,
  EngineEvents
> = EventEmitter as any;
|
||||
|
||||
/**
 * Owns the application's Web Audio graph: the shared AudioContext, the
 * per-channel Players, the microphone input chain, the streaming output
 * destination, and the news-jingle countdown elements.
 *
 * Graph layout built in the constructor:
 *   players -> finalCompressor -> speakers
 *                              -> streamingDestination (broadcast stream)
 *   mic -> micCalibrationGain -> micAnalyser -> micCompressor -> micMixGain
 *       -> streamingDestination
 * Note the mic chain feeds streamingDestination directly (not via
 * finalCompressor), so the presenter does not hear their own mic locally.
 */
export class AudioEngine extends ((EngineEmitter as unknown) as {
  new (): EventEmitter;
}) {
  public audioContext: AudioContext;
  // Sparse array indexed by player/channel number; entries are created by
  // createPlayer() and may be undefined for channels with nothing loaded.
  public players: (Player | undefined)[] = [];

  // Mic input chain; media/source are null until openMic() succeeds.
  micMedia: MediaStream | null = null;
  micSource: MediaStreamAudioSourceNode | null = null;
  micCalibrationGain: GainNode;
  micAnalyser: AnalyserNode;
  micCompressor: DynamicsCompressorNode;
  micMixGain: GainNode;

  // Master output chain: brickwall compressor feeding both the local
  // speakers and the broadcast stream destination.
  finalCompressor: DynamicsCompressorNode;
  streamingDestination: MediaStreamAudioDestinationNode;

  // "News intro" countdown jingle (local playback only).
  newsStartCountdownEl: HTMLAudioElement;
  newsStartCountdownNode: MediaElementAudioSourceNode;

  // "News end" countdown jingle (local playback only).
  newsEndCountdownEl: HTMLAudioElement;
  newsEndCountdownNode: MediaElementAudioSourceNode;

  // Scratch buffer reused by getMicLevel(); sized to the analyser's fftSize.
  analysisBuffer: Float32Array;

  constructor() {
    super();
    this.audioContext = new AudioContext({
      sampleRate: 44100,
      latencyHint: "interactive",
    });

    this.finalCompressor = this.audioContext.createDynamicsCompressor();
    this.finalCompressor.ratio.value = 20; // brickwall destination compressor
    this.finalCompressor.threshold.value = -0.5;
    this.finalCompressor.attack.value = 0;
    this.finalCompressor.release.value = 0.2;
    this.finalCompressor.connect(this.audioContext.destination);

    // Everything routed through finalCompressor is also captured into the
    // MediaStream consumed by the broadcast streamer.
    this.streamingDestination = this.audioContext.createMediaStreamDestination();
    this.finalCompressor.connect(this.streamingDestination);

    this.micCalibrationGain = this.audioContext.createGain();

    this.micAnalyser = this.audioContext.createAnalyser();
    this.micAnalyser.fftSize = 8192;

    this.analysisBuffer = new Float32Array(this.micAnalyser.fftSize);

    this.micCompressor = this.audioContext.createDynamicsCompressor();
    this.micCompressor.ratio.value = 3; // mic compressor - fairly gentle, can be upped
    this.micCompressor.threshold.value = -18;
    this.micCompressor.attack.value = 0.01;
    this.micCompressor.release.value = 0.1;

    this.micMixGain = this.audioContext.createGain();
    this.micMixGain.gain.value = 1;

    // Mic chain: calibration gain -> analyser (level metering) ->
    // compressor -> mix gain -> stream. The mic source itself is attached
    // later, in openMic().
    this.micCalibrationGain
      .connect(this.micAnalyser)
      .connect(this.micCompressor)
      .connect(this.micMixGain)
      .connect(this.streamingDestination);

    this.newsEndCountdownEl = new Audio(NewsEndCountdown);
    this.newsEndCountdownEl.preload = "auto";
    this.newsEndCountdownEl.volume = 0.5;
    this.newsEndCountdownNode = this.audioContext.createMediaElementSource(
      this.newsEndCountdownEl
    );
    // Jingles go straight to the speakers, bypassing the stream.
    this.newsEndCountdownNode.connect(this.audioContext.destination);

    this.newsStartCountdownEl = new Audio(NewsIntro);
    this.newsStartCountdownEl.preload = "auto";
    this.newsStartCountdownEl.volume = 0.5;
    this.newsStartCountdownNode = this.audioContext.createMediaElementSource(
      this.newsStartCountdownEl
    );
    this.newsStartCountdownNode.connect(this.audioContext.destination);
  }

  /**
   * Create (and register) a Player for channel `number`, loading `url`.
   * Replaces any previous entry for that channel in `this.players`.
   */
  public createPlayer(number: number, url: string) {
    const player = Player.create(this, number, url);
    this.players[number] = player;
    return player;
  }

  /**
   * Acquire the microphone with id `deviceId` (all browser DSP disabled so
   * our own compressor chain does the processing), wire it into the mic
   * chain, and emit "micOpen".
   *
   * @throws propagates getUserMedia rejections (e.g. permission denied) —
   *         callers are expected to catch and map these to UI errors.
   */
  async openMic(deviceId: string) {
    console.log("opening mic", deviceId);
    this.micMedia = await navigator.mediaDevices.getUserMedia({
      audio: {
        deviceId: { exact: deviceId },
        echoCancellation: false,
        autoGainControl: false,
        noiseSuppression: false,
        latency: 0.01,
      },
    });

    this.micSource = this.audioContext.createMediaStreamSource(this.micMedia);

    this.micSource.connect(this.micCalibrationGain);

    this.emit("micOpen");
  }

  // Set the pre-chain calibration gain (linear multiplier).
  setMicCalibrationGain(value: number) {
    this.micCalibrationGain.gain.value = value;
  }

  // Set the post-compressor mic mix gain (linear multiplier).
  setMicVolume(value: number) {
    this.micMixGain.gain.value = value;
  }

  /**
   * Return the current mic peak level in dB (10*log10 of the peak squared
   * sample over the analyser window). -Infinity when the buffer is silent.
   */
  getMicLevel() {
    this.micAnalyser.getFloatTimeDomainData(this.analysisBuffer);
    let peak = 0;
    for (let i = 0; i < this.analysisBuffer.length; i++) {
      peak = Math.max(peak, this.analysisBuffer[i] ** 2);
    }
    return 10 * Math.log10(peak);
  }

  /** Play the "news end" countdown jingle from the start. */
  async playNewsEnd() {
    this.newsEndCountdownEl.currentTime = 0;
    await this.newsEndCountdownEl.play();
  }

  /** Play the "news intro" countdown jingle from the start. */
  async playNewsIntro() {
    this.newsStartCountdownEl.currentTime = 0;
    await this.newsStartCountdownEl.play();
  }
}
|
||||
|
||||
export const audioEngine = new AudioEngine();
|
|
@ -12,66 +12,14 @@ import Keys from "keymaster";
|
|||
import { Track, MYRADIO_NON_API_BASE, AuxItem } from "../api";
|
||||
import { AppThunk } from "../store";
|
||||
import { RootState } from "../rootReducer";
|
||||
import WaveSurfer from "wavesurfer.js";
|
||||
import CursorPlugin from "wavesurfer.js/dist/plugin/wavesurfer.cursor.min.js";
|
||||
import RegionsPlugin from "wavesurfer.js/dist/plugin/wavesurfer.regions.min.js";
|
||||
import * as later from "later";
|
||||
import NewsIntro from "../assets/audio/NewsIntro.wav";
|
||||
import NewsEndCountdown from "../assets/audio/NewsEndCountdown.wav";
|
||||
import { audioEngine } from "./audio";
|
||||
|
||||
const audioContext = new (window.AudioContext ||
|
||||
(window as any).webkitAudioContext)();
|
||||
const wavesurfers: WaveSurfer[] = [];
|
||||
const playerGainTweens: Array<{
|
||||
target: VolumePresetEnum;
|
||||
tweens: Between[];
|
||||
}> = [];
|
||||
const loadAbortControllers: AbortController[] = [];
|
||||
|
||||
let micMedia: MediaStream | null = null;
|
||||
let micSource: MediaStreamAudioSourceNode | null = null;
|
||||
let micCalibrationGain: GainNode | null = null;
|
||||
let micCompressor: DynamicsCompressorNode | null = null;
|
||||
let micMixGain: GainNode | null = null;
|
||||
|
||||
const finalCompressor = audioContext.createDynamicsCompressor();
|
||||
finalCompressor.ratio.value = 20; //brickwall destination comressor
|
||||
finalCompressor.threshold.value = -0.5;
|
||||
finalCompressor.attack.value = 0;
|
||||
finalCompressor.release.value = 0.2;
|
||||
|
||||
export const destination = audioContext.createMediaStreamDestination();
|
||||
console.log("final destination", destination);
|
||||
finalCompressor.connect(destination);
|
||||
|
||||
const newsEndCountdownEl = new Audio(NewsEndCountdown);
|
||||
newsEndCountdownEl.preload = "auto";
|
||||
newsEndCountdownEl.volume = 0.5;
|
||||
const newsEndCountdownNode = audioContext.createMediaElementSource(
|
||||
newsEndCountdownEl
|
||||
);
|
||||
newsEndCountdownNode.connect(audioContext.destination);
|
||||
|
||||
const newsStartCountdownEl = new Audio(NewsIntro);
|
||||
newsStartCountdownEl.preload = "auto";
|
||||
newsStartCountdownEl.volume = 0.5;
|
||||
const newsStartCountdownNode = audioContext.createMediaElementSource(
|
||||
newsStartCountdownEl
|
||||
);
|
||||
newsStartCountdownNode.connect(audioContext.destination);
|
||||
|
||||
export async function playNewsEnd() {
|
||||
newsEndCountdownEl.currentTime = 0;
|
||||
await newsEndCountdownEl.play();
|
||||
}
|
||||
|
||||
export async function playNewsIntro() {
|
||||
newsStartCountdownEl.currentTime = 0;
|
||||
await newsStartCountdownEl.play();
|
||||
}
|
||||
|
||||
let timerInterval: later.Timer;
|
||||
|
||||
type PlayerStateEnum = "playing" | "paused" | "stopped";
|
||||
type PlayerRepeatEnum = "none" | "one" | "all";
|
||||
type VolumePresetEnum = "off" | "bed" | "full";
|
||||
|
@ -100,7 +48,6 @@ interface MicState {
|
|||
volume: 1 | 0;
|
||||
baseGain: number;
|
||||
id: string | null;
|
||||
calibration: boolean;
|
||||
}
|
||||
|
||||
interface MixerState {
|
||||
|
@ -108,56 +55,26 @@ interface MixerState {
|
|||
mic: MicState;
|
||||
}
|
||||
|
||||
const BasePlayerState: PlayerState = {
|
||||
loadedItem: null,
|
||||
loading: -1,
|
||||
state: "stopped",
|
||||
volume: 1,
|
||||
gain: 1,
|
||||
timeCurrent: 0,
|
||||
timeRemaining: 0,
|
||||
timeLength: 0,
|
||||
playOnLoad: false,
|
||||
autoAdvance: true,
|
||||
repeat: "none",
|
||||
tracklistItemID: -1,
|
||||
loadError: false,
|
||||
};
|
||||
|
||||
const mixerState = createSlice({
|
||||
name: "Player",
|
||||
initialState: {
|
||||
players: [
|
||||
{
|
||||
loadedItem: null,
|
||||
loading: -1,
|
||||
state: "stopped",
|
||||
volume: 1,
|
||||
gain: 1,
|
||||
timeCurrent: 0,
|
||||
timeRemaining: 0,
|
||||
timeLength: 0,
|
||||
playOnLoad: false,
|
||||
autoAdvance: true,
|
||||
repeat: "none",
|
||||
tracklistItemID: -1,
|
||||
loadError: false,
|
||||
},
|
||||
{
|
||||
loadedItem: null,
|
||||
loading: -1,
|
||||
state: "stopped",
|
||||
volume: 1,
|
||||
gain: 1,
|
||||
timeCurrent: 0,
|
||||
timeRemaining: 0,
|
||||
timeLength: 0,
|
||||
playOnLoad: false,
|
||||
autoAdvance: true,
|
||||
repeat: "none",
|
||||
tracklistItemID: -1,
|
||||
loadError: false,
|
||||
},
|
||||
{
|
||||
loadedItem: null,
|
||||
loading: -1,
|
||||
state: "stopped",
|
||||
volume: 1,
|
||||
gain: 1,
|
||||
timeCurrent: 0,
|
||||
timeRemaining: 0,
|
||||
timeLength: 0,
|
||||
playOnLoad: false,
|
||||
autoAdvance: true,
|
||||
repeat: "none",
|
||||
tracklistItemID: -1,
|
||||
loadError: false,
|
||||
},
|
||||
],
|
||||
players: [BasePlayerState, BasePlayerState, BasePlayerState],
|
||||
mic: {
|
||||
open: false,
|
||||
volume: 1,
|
||||
|
@ -165,7 +82,6 @@ const mixerState = createSlice({
|
|||
baseGain: 1,
|
||||
openError: null,
|
||||
id: "None",
|
||||
calibration: false,
|
||||
},
|
||||
} as MixerState,
|
||||
reducers: {
|
||||
|
@ -304,12 +220,6 @@ const mixerState = createSlice({
|
|||
) {
|
||||
state.players[action.payload.player].tracklistItemID = action.payload.id;
|
||||
},
|
||||
startMicCalibration(state) {
|
||||
state.mic.calibration = true;
|
||||
},
|
||||
stopMicCalibration(state) {
|
||||
state.mic.calibration = false;
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
|
@ -321,8 +231,8 @@ export const load = (
|
|||
player: number,
|
||||
item: PlanItem | Track | AuxItem
|
||||
): AppThunk => async (dispatch, getState) => {
|
||||
if (typeof wavesurfers[player] !== "undefined") {
|
||||
if (wavesurfers[player].isPlaying()) {
|
||||
if (typeof audioEngine.players[player] !== "undefined") {
|
||||
if (audioEngine.players[player]?.isPlaying) {
|
||||
// already playing, don't kill playback
|
||||
return;
|
||||
}
|
||||
|
@ -363,130 +273,6 @@ export const load = (
|
|||
|
||||
console.log("loading");
|
||||
|
||||
let waveform = document.getElementById("waveform-" + player.toString());
|
||||
if (waveform !== null) {
|
||||
waveform.innerHTML = "";
|
||||
}
|
||||
const wavesurfer = WaveSurfer.create({
|
||||
audioContext,
|
||||
container: "#waveform-" + player.toString(),
|
||||
waveColor: "#CCCCFF",
|
||||
progressColor: "#9999FF",
|
||||
backend: "MediaElementWebAudio",
|
||||
responsive: true,
|
||||
xhr: {
|
||||
credentials: "include",
|
||||
} as any,
|
||||
plugins: [
|
||||
CursorPlugin.create({
|
||||
showTime: true,
|
||||
opacity: 1,
|
||||
customShowTimeStyle: {
|
||||
"background-color": "#000",
|
||||
color: "#fff",
|
||||
padding: "2px",
|
||||
"font-size": "10px",
|
||||
},
|
||||
}),
|
||||
RegionsPlugin.create({}),
|
||||
],
|
||||
});
|
||||
|
||||
wavesurfer.on("ready", () => {
|
||||
dispatch(mixerState.actions.itemLoadComplete({ player }));
|
||||
dispatch(
|
||||
mixerState.actions.setTimeLength({
|
||||
player,
|
||||
time: wavesurfer.getDuration(),
|
||||
})
|
||||
);
|
||||
dispatch(
|
||||
mixerState.actions.setTimeCurrent({
|
||||
player,
|
||||
time: 0,
|
||||
})
|
||||
);
|
||||
const state = getState().mixer.players[player];
|
||||
if (state.playOnLoad) {
|
||||
wavesurfer.play();
|
||||
}
|
||||
if (state.loadedItem && "intro" in state.loadedItem) {
|
||||
wavesurfer.addRegion({
|
||||
id: "intro",
|
||||
resize: false,
|
||||
start: 0,
|
||||
end: state.loadedItem.intro,
|
||||
color: "rgba(125,0,255, 0.12)",
|
||||
});
|
||||
}
|
||||
});
|
||||
wavesurfer.on("play", () => {
|
||||
dispatch(mixerState.actions.setPlayerState({ player, state: "playing" }));
|
||||
});
|
||||
wavesurfer.on("pause", () => {
|
||||
dispatch(
|
||||
mixerState.actions.setPlayerState({
|
||||
player,
|
||||
state: wavesurfer.getCurrentTime() === 0 ? "stopped" : "paused",
|
||||
})
|
||||
);
|
||||
});
|
||||
wavesurfer.on("seek", () => {
|
||||
dispatch(
|
||||
mixerState.actions.setTimeCurrent({
|
||||
player,
|
||||
time: wavesurfer.getCurrentTime(),
|
||||
})
|
||||
);
|
||||
});
|
||||
wavesurfer.on("finish", () => {
|
||||
dispatch(mixerState.actions.setPlayerState({ player, state: "stopped" }));
|
||||
const state = getState().mixer.players[player];
|
||||
if (state.tracklistItemID !== -1) {
|
||||
dispatch(BroadcastState.tracklistEnd(state.tracklistItemID));
|
||||
}
|
||||
if (state.repeat === "one") {
|
||||
wavesurfer.play();
|
||||
} else if (state.repeat === "all") {
|
||||
if ("channel" in item) {
|
||||
// it's not in the CML/libraries "column"
|
||||
const itsChannel = getState()
|
||||
.showplan.plan!.filter((x) => x.channel === item.channel)
|
||||
.sort((x, y) => x.weight - y.weight);
|
||||
const itsIndex = itsChannel.indexOf(item);
|
||||
if (itsIndex === itsChannel.length - 1) {
|
||||
dispatch(load(player, itsChannel[0]));
|
||||
}
|
||||
}
|
||||
} else if (state.autoAdvance) {
|
||||
if ("channel" in item) {
|
||||
// it's not in the CML/libraries "column"
|
||||
const itsChannel = getState()
|
||||
.showplan.plan!.filter((x) => x.channel === item.channel)
|
||||
.sort((x, y) => x.weight - y.weight);
|
||||
const itsIndex = itsChannel.indexOf(item);
|
||||
if (itsIndex > -1 && itsIndex !== itsChannel.length - 1) {
|
||||
dispatch(load(player, itsChannel[itsIndex + 1]));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
wavesurfer.on("audioprocess", () => {
|
||||
if (
|
||||
Math.abs(
|
||||
wavesurfer.getCurrentTime() -
|
||||
getState().mixer.players[player].timeCurrent
|
||||
) > 0.5
|
||||
) {
|
||||
dispatch(
|
||||
mixerState.actions.setTimeCurrent({
|
||||
player,
|
||||
time: wavesurfer.getCurrentTime(),
|
||||
})
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
try {
|
||||
const signal = loadAbortControllers[player].signal; // hang on to the signal, even if its controller gets replaced
|
||||
const result = await fetch(url, {
|
||||
|
@ -509,22 +295,93 @@ export const load = (
|
|||
const blob = new Blob([rawData]);
|
||||
const objectUrl = URL.createObjectURL(blob);
|
||||
|
||||
const audio = new Audio(objectUrl);
|
||||
const playerInstance = await audioEngine.createPlayer(player, objectUrl);
|
||||
|
||||
wavesurfer.load(audio);
|
||||
playerInstance.on("loadComplete", (duration) => {
|
||||
console.log("loadComplete");
|
||||
dispatch(mixerState.actions.itemLoadComplete({ player }));
|
||||
dispatch(
|
||||
mixerState.actions.setTimeLength({
|
||||
player,
|
||||
time: duration,
|
||||
})
|
||||
);
|
||||
dispatch(
|
||||
mixerState.actions.setTimeCurrent({
|
||||
player,
|
||||
time: 0,
|
||||
})
|
||||
);
|
||||
const state = getState().mixer.players[player];
|
||||
if (state.playOnLoad) {
|
||||
playerInstance.play();
|
||||
}
|
||||
if (state.loadedItem && "intro" in state.loadedItem) {
|
||||
playerInstance.setIntro(state.loadedItem.intro);
|
||||
}
|
||||
});
|
||||
|
||||
// THIS IS BAD
|
||||
(wavesurfer as any).backend.gainNode.disconnect();
|
||||
(wavesurfer as any).backend.gainNode.connect(finalCompressor);
|
||||
(wavesurfer as any).backend.gainNode.connect(audioContext.destination);
|
||||
playerInstance.on("play", () => {
|
||||
dispatch(mixerState.actions.setPlayerState({ player, state: "playing" }));
|
||||
});
|
||||
playerInstance.on("pause", () => {
|
||||
dispatch(
|
||||
mixerState.actions.setPlayerState({
|
||||
player,
|
||||
state: playerInstance.currentTime === 0 ? "stopped" : "paused",
|
||||
})
|
||||
);
|
||||
});
|
||||
playerInstance.on("timeChange", (time) => {
|
||||
if (Math.abs(time - getState().mixer.players[player].timeCurrent) > 0.5) {
|
||||
dispatch(
|
||||
mixerState.actions.setTimeCurrent({
|
||||
player,
|
||||
time,
|
||||
})
|
||||
);
|
||||
}
|
||||
});
|
||||
playerInstance.on("finish", () => {
|
||||
dispatch(mixerState.actions.setPlayerState({ player, state: "stopped" }));
|
||||
const state = getState().mixer.players[player];
|
||||
if (state.tracklistItemID !== -1) {
|
||||
dispatch(BroadcastState.tracklistEnd(state.tracklistItemID));
|
||||
}
|
||||
if (state.repeat === "one") {
|
||||
playerInstance.play();
|
||||
} else if (state.repeat === "all") {
|
||||
if ("channel" in item) {
|
||||
// it's not in the CML/libraries "column"
|
||||
const itsChannel = getState()
|
||||
.showplan.plan!.filter((x) => x.channel === item.channel)
|
||||
.sort((x, y) => x.weight - y.weight);
|
||||
const itsIndex = itsChannel.indexOf(item);
|
||||
if (itsIndex === itsChannel.length - 1) {
|
||||
dispatch(load(player, itsChannel[0]));
|
||||
}
|
||||
}
|
||||
} else if (state.autoAdvance) {
|
||||
if ("channel" in item) {
|
||||
// it's not in the CML/libraries "column"
|
||||
const itsChannel = getState()
|
||||
.showplan.plan!.filter((x) => x.channel === item.channel)
|
||||
.sort((x, y) => x.weight - y.weight);
|
||||
const itsIndex = itsChannel.indexOf(item);
|
||||
if (itsIndex > -1 && itsIndex !== itsChannel.length - 1) {
|
||||
dispatch(load(player, itsChannel[itsIndex + 1]));
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Double-check we haven't been aborted since
|
||||
if (signal.aborted) {
|
||||
// noinspection ExceptionCaughtLocallyJS
|
||||
throw new DOMException("abort load", "AbortError");
|
||||
}
|
||||
|
||||
wavesurfer.setVolume(getState().mixer.players[player].gain);
|
||||
wavesurfers[player] = wavesurfer;
|
||||
playerInstance.setVolume(getState().mixer.players[player].gain);
|
||||
delete loadAbortControllers[player];
|
||||
} catch (e) {
|
||||
if ("name" in e && e.name === "AbortError") {
|
||||
|
@ -540,20 +397,20 @@ export const play = (player: number): AppThunk => async (
|
|||
dispatch,
|
||||
getState
|
||||
) => {
|
||||
if (typeof wavesurfers[player] === "undefined") {
|
||||
if (typeof audioEngine.players[player] === "undefined") {
|
||||
console.log("nothing loaded");
|
||||
return;
|
||||
}
|
||||
if (audioContext.state !== "running") {
|
||||
if (audioEngine.audioContext.state !== "running") {
|
||||
console.log("Resuming AudioContext because Chrome bad");
|
||||
await audioContext.resume();
|
||||
await audioEngine.audioContext.resume();
|
||||
}
|
||||
var state = getState().mixer.players[player];
|
||||
const state = getState().mixer.players[player];
|
||||
if (state.loading !== -1) {
|
||||
console.log("not ready");
|
||||
return;
|
||||
}
|
||||
wavesurfers[player].play();
|
||||
audioEngine.players[player]?.play();
|
||||
|
||||
if (state.loadedItem && "album" in state.loadedItem) {
|
||||
//track
|
||||
|
@ -567,7 +424,7 @@ export const play = (player: number): AppThunk => async (
|
|||
};
|
||||
|
||||
export const pause = (player: number): AppThunk => (dispatch, getState) => {
|
||||
if (typeof wavesurfers[player] === "undefined") {
|
||||
if (typeof audioEngine.players[player] === "undefined") {
|
||||
console.log("nothing loaded");
|
||||
return;
|
||||
}
|
||||
|
@ -575,15 +432,15 @@ export const pause = (player: number): AppThunk => (dispatch, getState) => {
|
|||
console.log("not ready");
|
||||
return;
|
||||
}
|
||||
if (wavesurfers[player].isPlaying()) {
|
||||
wavesurfers[player].pause();
|
||||
if (audioEngine.players[player]?.isPlaying) {
|
||||
audioEngine.players[player]?.pause();
|
||||
} else {
|
||||
wavesurfers[player].play();
|
||||
audioEngine.players[player]?.play();
|
||||
}
|
||||
};
|
||||
|
||||
export const stop = (player: number): AppThunk => (dispatch, getState) => {
|
||||
if (typeof wavesurfers[player] === "undefined") {
|
||||
if (typeof audioEngine.players[player] === "undefined") {
|
||||
console.log("nothing loaded");
|
||||
return;
|
||||
}
|
||||
|
@ -592,7 +449,7 @@ export const stop = (player: number): AppThunk => (dispatch, getState) => {
|
|||
console.log("not ready");
|
||||
return;
|
||||
}
|
||||
wavesurfers[player].stop();
|
||||
audioEngine.players[player]?.stop();
|
||||
// Incase wavesurver wasn't playing, it won't 'finish', so just make sure the UI is stopped.
|
||||
dispatch(mixerState.actions.setPlayerState({ player, state: "stopped" }));
|
||||
|
||||
|
@ -608,8 +465,8 @@ export const {
|
|||
} = mixerState.actions;
|
||||
|
||||
export const redrawWavesurfers = (): AppThunk => () => {
|
||||
wavesurfers.forEach(function(item) {
|
||||
item.drawBuffer();
|
||||
audioEngine.players.forEach(function(item) {
|
||||
item?.redraw();
|
||||
});
|
||||
};
|
||||
|
||||
|
@ -668,8 +525,8 @@ export const setVolume = (
|
|||
.time(FADE_TIME_SECONDS * 1000)
|
||||
.easing((Between as any).Easing.Exponential.InOut)
|
||||
.on("update", (val: number) => {
|
||||
if (typeof wavesurfers[player] !== "undefined") {
|
||||
wavesurfers[player].setVolume(val);
|
||||
if (typeof audioEngine.players[player] !== "undefined") {
|
||||
audioEngine.players[player]?.setVolume(val);
|
||||
}
|
||||
})
|
||||
.on("complete", () => {
|
||||
|
@ -693,9 +550,9 @@ export const openMicrophone = (micID: string): AppThunk => async (
|
|||
// if (getState().mixer.mic.open) {
|
||||
// micSource?.disconnect();
|
||||
// }
|
||||
if (audioContext.state !== "running") {
|
||||
if (audioEngine.audioContext.state !== "running") {
|
||||
console.log("Resuming AudioContext because Chrome bad");
|
||||
await audioContext.resume();
|
||||
await audioEngine.audioContext.resume();
|
||||
}
|
||||
dispatch(mixerState.actions.setMicError(null));
|
||||
if (!("mediaDevices" in navigator)) {
|
||||
|
@ -704,15 +561,7 @@ export const openMicrophone = (micID: string): AppThunk => async (
|
|||
return;
|
||||
}
|
||||
try {
|
||||
micMedia = await navigator.mediaDevices.getUserMedia({
|
||||
audio: {
|
||||
deviceId: { exact: micID },
|
||||
echoCancellation: false,
|
||||
autoGainControl: false,
|
||||
noiseSuppression: false,
|
||||
latency: 0.01,
|
||||
},
|
||||
});
|
||||
await audioEngine.openMic(micID);
|
||||
} catch (e) {
|
||||
if (e instanceof DOMException) {
|
||||
switch (e.message) {
|
||||
|
@ -727,33 +576,12 @@ export const openMicrophone = (micID: string): AppThunk => async (
|
|||
}
|
||||
return;
|
||||
}
|
||||
// Okay, we have a mic stream, time to do some audio nonsense
|
||||
|
||||
const state = getState().mixer.mic;
|
||||
micSource = audioContext.createMediaStreamSource(micMedia);
|
||||
audioEngine.setMicCalibrationGain(state.baseGain);
|
||||
audioEngine.setMicVolume(state.volume);
|
||||
|
||||
micCalibrationGain = audioContext.createGain();
|
||||
micCalibrationGain.gain.value = state.baseGain;
|
||||
|
||||
micCompressor = audioContext.createDynamicsCompressor();
|
||||
micCompressor.ratio.value = 3; // mic compressor - fairly gentle, can be upped
|
||||
micCompressor.threshold.value = -18;
|
||||
micCompressor.attack.value = 0.01;
|
||||
micCompressor.release.value = 0.1;
|
||||
|
||||
micMixGain = audioContext.createGain();
|
||||
micMixGain.gain.value = state.volume;
|
||||
|
||||
micSource
|
||||
.connect(micCalibrationGain)
|
||||
.connect(micCompressor)
|
||||
.connect(micMixGain)
|
||||
.connect(finalCompressor);
|
||||
dispatch(mixerState.actions.micOpen(micID));
|
||||
|
||||
const state2 = getState();
|
||||
if (state2.optionsMenu.open && state2.optionsMenu.currentTab === "mic") {
|
||||
dispatch(startMicCalibration());
|
||||
}
|
||||
};
|
||||
|
||||
export const setMicVolume = (level: MicVolumePresetEnum): AppThunk => (
|
||||
|
@ -773,84 +601,28 @@ export const setMicVolume = (level: MicVolumePresetEnum): AppThunk => (
|
|||
mixerState.actions.setMicLevels({ volume: levelVal, gain: levelVal })
|
||||
);
|
||||
// latency, plus a little buffer
|
||||
}, audioContext.baseLatency * 1000 + 150);
|
||||
}, audioEngine.audioContext.baseLatency * 1000 + 150);
|
||||
}
|
||||
};
|
||||
|
||||
let analyser: AnalyserNode | null = null;
|
||||
|
||||
const CALIBRATE_THE_CALIBRATOR = false;
|
||||
|
||||
export const startMicCalibration = (): AppThunk => async (
|
||||
dispatch,
|
||||
getState
|
||||
) => {
|
||||
if (!getState().mixer.mic.open) {
|
||||
return;
|
||||
}
|
||||
dispatch(mixerState.actions.startMicCalibration());
|
||||
let input: AudioNode;
|
||||
if (CALIBRATE_THE_CALIBRATOR) {
|
||||
const sauce = new Audio(
|
||||
"https://ury.org.uk/myradio/NIPSWeb/managed_play/?managedid=6489"
|
||||
); // URY 1K Sine -2.5dbFS PPM5
|
||||
sauce.crossOrigin = "use-credentials";
|
||||
sauce.autoplay = true;
|
||||
sauce.load();
|
||||
input = audioContext.createMediaElementSource(sauce);
|
||||
} else {
|
||||
input = micCalibrationGain!;
|
||||
}
|
||||
analyser = audioContext.createAnalyser();
|
||||
analyser.fftSize = 8192;
|
||||
input.connect(analyser);
|
||||
};
|
||||
|
||||
let float: Float32Array | null = null;
|
||||
|
||||
export function getMicAnalysis() {
|
||||
if (!analyser) {
|
||||
throw new Error();
|
||||
}
|
||||
if (!float) {
|
||||
float = new Float32Array(analyser.fftSize);
|
||||
}
|
||||
analyser.getFloatTimeDomainData(float);
|
||||
let peak = 0;
|
||||
for (let i = 0; i < float.length; i++) {
|
||||
peak = Math.max(peak, float[i] ** 2);
|
||||
}
|
||||
return 10 * Math.log10(peak);
|
||||
}
|
||||
|
||||
export const stopMicCalibration = (): AppThunk => (dispatch, getState) => {
|
||||
if (getState().mixer.mic.calibration === null) {
|
||||
return;
|
||||
}
|
||||
dispatch(mixerState.actions.stopMicCalibration());
|
||||
};
|
||||
|
||||
export const mixerMiddleware: Middleware<{}, RootState, Dispatch<any>> = (
|
||||
store
|
||||
) => (next) => (action) => {
|
||||
const oldState = store.getState().mixer;
|
||||
const result = next(action);
|
||||
const newState = store.getState().mixer;
|
||||
|
||||
newState.players.forEach((state, index) => {
|
||||
if (typeof wavesurfers[index] !== "undefined") {
|
||||
if (oldState.players[index].gain !== newState.players[index].gain) {
|
||||
wavesurfers[index].setVolume(state.gain);
|
||||
}
|
||||
if (oldState.players[index].gain !== newState.players[index].gain) {
|
||||
audioEngine.players[index]?.setVolume(state.gain);
|
||||
}
|
||||
});
|
||||
if (
|
||||
newState.mic.baseGain !== oldState.mic.baseGain &&
|
||||
micCalibrationGain !== null
|
||||
) {
|
||||
micCalibrationGain.gain.value = newState.mic.baseGain;
|
||||
|
||||
if (newState.mic.baseGain !== oldState.mic.baseGain) {
|
||||
audioEngine.setMicCalibrationGain(newState.mic.baseGain);
|
||||
}
|
||||
if (newState.mic.volume !== oldState.mic.volume && micMixGain !== null) {
|
||||
micMixGain.gain.value = newState.mic.volume;
|
||||
if (newState.mic.volume !== oldState.mic.volume) {
|
||||
audioEngine.setMicVolume(newState.mic.volume);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
|
|
@ -29,10 +29,16 @@ export function MicTab() {
|
|||
const [openError, setOpenError] = useState<null | MicErrorEnum>(null);
|
||||
|
||||
async function fetchMicNames() {
|
||||
console.log("start fetchNames");
|
||||
if (!("getUserMedia" in navigator.mediaDevices)) {
|
||||
setOpenError("NOT_SECURE_CONTEXT");
|
||||
return;
|
||||
}
|
||||
// Because Chrome, we have to call getUserMedia() before enumerateDevices()
|
||||
try {
|
||||
await navigator.mediaDevices.getUserMedia({ audio: true });
|
||||
} catch (e) {
|
||||
console.warn(e);
|
||||
if (e instanceof DOMException) {
|
||||
switch (e.message) {
|
||||
case "Permission denied":
|
||||
|
@ -46,8 +52,11 @@ export function MicTab() {
|
|||
}
|
||||
return;
|
||||
}
|
||||
console.log("done");
|
||||
try {
|
||||
console.log("gUM");
|
||||
const devices = await navigator.mediaDevices.enumerateDevices();
|
||||
console.log(devices);
|
||||
setMicList(reduceToInputs(devices));
|
||||
} catch (e) {
|
||||
setOpenError("UNKNOWN_ENUM");
|
||||
|
@ -61,7 +70,11 @@ export function MicTab() {
|
|||
|
||||
return (
|
||||
<>
|
||||
<button onClick={fetchMicNames} disabled={micList !== null}>
|
||||
<button
|
||||
onClick={fetchMicNames}
|
||||
disabled={micList !== null}
|
||||
className="btn btn-outline-dark"
|
||||
>
|
||||
Open
|
||||
</button>
|
||||
<select
|
||||
|
|
|
@ -7,8 +7,8 @@ import React, {
|
|||
HTMLProps,
|
||||
} from "react";
|
||||
import { useSelector } from "react-redux";
|
||||
import * as MixerState from "../../mixer/state";
|
||||
import { RootState } from "../../rootReducer";
|
||||
import { audioEngine } from "../../mixer/audio";
|
||||
|
||||
interface VUMeterProps extends HTMLProps<HTMLCanvasElement> {
|
||||
range: [number, number];
|
||||
|
@ -23,24 +23,24 @@ export function VUMeter(props: VUMeterProps) {
|
|||
const rafRef = useRef<number | null>(null);
|
||||
const [peak, setPeak] = useState(-Infinity);
|
||||
const animate = useCallback(() => {
|
||||
if (state.calibration) {
|
||||
const result = MixerState.getMicAnalysis();
|
||||
if (state.open) {
|
||||
const result = audioEngine.getMicLevel();
|
||||
setPeak(result);
|
||||
rafRef.current = requestAnimationFrame(animate);
|
||||
} else if (rafRef.current !== null) {
|
||||
cancelAnimationFrame(rafRef.current);
|
||||
rafRef.current = null;
|
||||
}
|
||||
}, [state.calibration]);
|
||||
}, [state.open]);
|
||||
|
||||
useEffect(() => {
|
||||
if (state.calibration) {
|
||||
if (state.open) {
|
||||
rafRef.current = requestAnimationFrame(animate);
|
||||
} else if (rafRef.current !== null) {
|
||||
cancelAnimationFrame(rafRef.current);
|
||||
rafRef.current = null;
|
||||
}
|
||||
}, [animate, state.calibration]);
|
||||
return () => {
|
||||
if (rafRef.current !== null) {
|
||||
cancelAnimationFrame(rafRef.current);
|
||||
rafRef.current = null;
|
||||
}
|
||||
};
|
||||
}, [animate, state.open]);
|
||||
|
||||
useLayoutEffect(() => {
|
||||
if (canvasRef.current) {
|
||||
|
|
|
@ -1,12 +1,4 @@
|
|||
import {
|
||||
createSlice,
|
||||
PayloadAction,
|
||||
Middleware,
|
||||
Dispatch,
|
||||
} from "@reduxjs/toolkit";
|
||||
import { RootState } from "../rootReducer";
|
||||
|
||||
import * as MixerState from "../mixer/state";
|
||||
import { createSlice, PayloadAction } from "@reduxjs/toolkit";
|
||||
|
||||
export type OptionsTabIDsEnum = "mic" | "about" | "advanced" | "stats";
|
||||
|
||||
|
@ -36,25 +28,3 @@ const optionsMenuState = createSlice({
|
|||
export default optionsMenuState.reducer;
|
||||
|
||||
export const { open, openToTab, close, changeTab } = optionsMenuState.actions;
|
||||
|
||||
export const tabSyncMiddleware: Middleware<{}, RootState, Dispatch> = (
|
||||
store
|
||||
) => (next) => (action) => {
|
||||
const oldState = store.getState();
|
||||
const result = next(action);
|
||||
const newState = store.getState();
|
||||
if (newState.optionsMenu.currentTab === "mic") {
|
||||
if (
|
||||
oldState.optionsMenu.currentTab !== "mic" &&
|
||||
newState.optionsMenu.open
|
||||
) {
|
||||
store.dispatch(MixerState.startMicCalibration() as any);
|
||||
}
|
||||
} else if (
|
||||
oldState.optionsMenu.currentTab === "mic" ||
|
||||
oldState.optionsMenu.open !== newState.optionsMenu.open
|
||||
) {
|
||||
store.dispatch(MixerState.stopMicCalibration() as any);
|
||||
}
|
||||
return result;
|
||||
};
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
import React, { useCallback, useEffect, useRef, useState } from "react";
|
||||
import React, { useEffect, useRef, useState } from "react";
|
||||
import { useSelector, useDispatch } from "react-redux";
|
||||
import {
|
||||
FaLevelDownAlt,
|
||||
|
@ -8,7 +8,6 @@ import {
|
|||
FaPause,
|
||||
FaStop,
|
||||
} from "react-icons/fa";
|
||||
import { add, format } from "date-fns";
|
||||
import { RootState } from "../rootReducer";
|
||||
import * as MixerState from "../mixer/state";
|
||||
import { secToHHMM, timestampToHHMM } from "../lib/utils";
|
||||
|
|
|
@ -5,7 +5,6 @@ import {
|
|||
mixerMiddleware,
|
||||
mixerKeyboardShortcutsMiddleware,
|
||||
} from "./mixer/state";
|
||||
import { tabSyncMiddleware } from "./optionsMenu/state";
|
||||
import { persistStore } from "redux-persist";
|
||||
|
||||
const store = configureStore({
|
||||
|
@ -13,7 +12,6 @@ const store = configureStore({
|
|||
middleware: [
|
||||
mixerMiddleware,
|
||||
mixerKeyboardShortcutsMiddleware,
|
||||
tabSyncMiddleware,
|
||||
...getDefaultMiddleware(),
|
||||
],
|
||||
});
|
||||
|
|
11
yarn.lock
11
yarn.lock
|
@ -1643,9 +1643,9 @@
|
|||
tsutils "^3.17.1"
|
||||
|
||||
"@ury1350/prettier-config@^1.0.0":
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/@ury1350/prettier-config/-/prettier-config-1.0.0.tgz#c5c81d187c016a9692edbd25b3964b23132c9b06"
|
||||
integrity sha512-U36sZLI1vf9BiVB9P1CRYgBCoS0lYqzpUb37Cy8sgjjQvg8XwfjA75fMjgIBS99uYRN/VtnRrNIm93OsvetVJw==
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@ury1350/prettier-config/-/prettier-config-1.0.1.tgz#024c80d050f16149bd3928a139fec86d26e10b3d"
|
||||
integrity sha512-G6zwRgCZ5jkjn180AovRQN6TGqVhkdNoShpxOFF7QjHYkVrPLb4ig26yMpS94EqeyzL/KDYIwrS/n7Tkgqw07A==
|
||||
|
||||
"@webassemblyjs/ast@1.8.5":
|
||||
version "1.8.5"
|
||||
|
@ -10084,6 +10084,11 @@ stream-shift@^1.0.0:
|
|||
resolved "https://registry.yarnpkg.com/stream-shift/-/stream-shift-1.0.1.tgz#d7088281559ab2778424279b0877da3c392d5a3d"
|
||||
integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ==
|
||||
|
||||
strict-event-emitter-types@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/strict-event-emitter-types/-/strict-event-emitter-types-2.0.0.tgz#05e15549cb4da1694478a53543e4e2f4abcf277f"
|
||||
integrity sha512-Nk/brWYpD85WlOgzw5h173aci0Teyv8YdIAEtV+N88nDB0dLlazZyJMIsN6eo1/AR61l+p6CJTG1JIyFaoNEEA==
|
||||
|
||||
strict-uri-encode@^1.0.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713"
|
||||
|
|
Loading…
Reference in a new issue