diff --git a/package.json b/package.json index fe2e1ce..f775227 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "webstudio", - "version": "1.3.0", + "version": "1.4.0", "private": true, "dependencies": { "@babel/core": "7.6.0", diff --git a/src/App.scss b/src/App.scss index eae3db8..6f8ad62 100644 --- a/src/App.scss +++ b/src/App.scss @@ -72,8 +72,12 @@ $number-of-channels: 3; padding: 10px; .channel-vu { + height: 40px; text-align: center; background: black; + span { + font-size: 0.7em; + } canvas { max-width: 100%; border-left: 1px solid gray; diff --git a/src/broadcast/state.ts b/src/broadcast/state.ts index e4a94ac..ed86b3c 100644 --- a/src/broadcast/state.ts +++ b/src/broadcast/state.ts @@ -7,7 +7,6 @@ import * as NavbarState from "../navbar/state"; import { ConnectionStateEnum } from "./streamer"; import { RecordingStreamer } from "./recording_streamer"; import { audioEngine } from "../mixer/audio"; -import { setItemPlayed } from "../showplanner/state"; export let streamer: WebRTCStreamer | null = null; @@ -316,8 +315,6 @@ export const goOnAir = (): AppThunk => async (dispatch, getState) => { } else if (state === "CONNECTED") { // okay, we've connected dispatch(registerForShow()); - } else if (state === "LIVE") { - dispatch(setItemPlayed({ itemId: "all", played: false })); } }); await streamer.start(); diff --git a/src/mixer/audio.ts b/src/mixer/audio.ts index a29b716..14c25a4 100644 --- a/src/mixer/audio.ts +++ b/src/mixer/audio.ts @@ -25,10 +25,12 @@ const PlayerEmitter: StrictEmitter< class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) { private volume = 0; private trim = 0; + private pfl = false; private constructor( private readonly engine: AudioEngine, private wavesurfer: WaveSurfer, - private readonly waveform: HTMLElement + private readonly waveform: HTMLElement, + private readonly customOutput: boolean ) { super(); } @@ -129,6 +131,10 @@ class Player extends ((PlayerEmitter as unknown) as { new (): 
EventEmitter }) { return this.volume; } + getPFL() { + return this.pfl; + } + setVolume(val: number) { this.volume = val; this._applyVolume(); @@ -139,19 +145,63 @@ class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) { this._applyVolume(); } + setPFL(enabled: boolean) { + this.pfl = enabled; + this._connectPFL(); + } + + setOutputDevice(sinkId: string) { + if (!this.customOutput) { + throw Error( + "Can't set sinkId when player is not in customOutput mode. Please reinit player." + ); + } + try { + (this.wavesurfer as any).setSinkId(sinkId); + } catch (e) { + throw Error("Tried to setSinkId " + sinkId + ", failed due to: " + e); + } + } + _applyVolume() { const level = this.volume + this.trim; const linear = Math.pow(10, level / 20); - if (linear < 1) { - this.wavesurfer.setVolume(linear); - (this.wavesurfer as any).backend.gainNode.gain.value = 1; - } else { - this.wavesurfer.setVolume(1); + + // Actually adjust the wavesurfer gain node gain instead, so we can tap off analyser for PFL. + this.wavesurfer.setVolume(1); + if (!this.customOutput) { (this.wavesurfer as any).backend.gainNode.gain.value = linear; } } - public static create(engine: AudioEngine, player: number, url: string) { + _connectPFL() { + if (this.pfl) { + // In this case, we just want to route the player output to the headphones direct. + // Tap it from analyser to avoid the player volume. + (this.wavesurfer as any).backend.analyser.connect( + this.engine.headphonesNode + ); + } else { + try { + (this.wavesurfer as any).backend.analyser.disconnect( + this.engine.headphonesNode + ); + } catch (e) { + // This connection wasn't connected anyway, ignore. + } + } + } + + public static create( + engine: AudioEngine, + player: number, + outputId: string, + pfl: boolean, + url: string + ) { + // If we want to output to a custom audio device, we're gonna need to do things differently. 
+ const customOutput = outputId !== "internal"; + let waveform = document.getElementById("waveform-" + player.toString()); if (waveform == null) { throw new Error(); @@ -165,7 +215,7 @@ class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) { waveColor: "#CCCCFF", backgroundColor: "#FFFFFF", progressColor: "#9999FF", - backend: "MediaElementWebAudio", + backend: customOutput ? "MediaElement" : "MediaElementWebAudio", barWidth: 2, responsive: true, xhr: { @@ -186,7 +236,7 @@ class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) { ], }); - const instance = new this(engine, wavesurfer, waveform); + const instance = new this(engine, wavesurfer, waveform, customOutput); wavesurfer.on("ready", () => { console.log("ready"); @@ -208,14 +258,23 @@ class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) { instance.emit("timeChange", wavesurfer.getCurrentTime()); }); - (wavesurfer as any).backend.gainNode.disconnect(); - (wavesurfer as any).backend.gainNode.connect(engine.finalCompressor); - (wavesurfer as any).backend.gainNode.connect( - engine.playerAnalysers[player] - ); - wavesurfer.load(url); + if (customOutput) { + try { + instance.setOutputDevice(outputId); + } catch (e) { + console.error("Failed to set channel " + player + " output. " + e); + } + } else { + (wavesurfer as any).backend.gainNode.disconnect(); + (wavesurfer as any).backend.gainNode.connect(engine.finalCompressor); + (wavesurfer as any).backend.gainNode.connect( + engine.playerAnalysers[player] + ); + instance.setPFL(pfl); + } + return instance; } @@ -233,10 +292,18 @@ export type LevelsSource = | "mic-precomp" | "mic-final" | "master" + | "pfl" | "player-0" | "player-1" | "player-2"; +export type ChannelMapping = + | "stereo-normal" + | "stereo-flipped" + | "mono-left" + | "mono-right" + | "mono-both"; + // Setting this directly affects the performance of .getFloatTimeDomainData() // Must be a power of 2. 
const ANALYSIS_FFT_SIZE = 2048; @@ -253,8 +320,12 @@ const EngineEmitter: StrictEmitter< export class AudioEngine extends ((EngineEmitter as unknown) as { new (): EventEmitter; }) { + // Multipurpose Bits public audioContext: AudioContext; - public players: (Player | undefined)[] = []; + analysisBuffer: Float32Array; + analysisBuffer2: Float32Array; + + // Mic Input micMedia: MediaStream | null = null; micSource: MediaStreamAudioSourceNode | null = null; @@ -264,54 +335,41 @@ export class AudioEngine extends ((EngineEmitter as unknown) as { micMixGain: GainNode; micFinalAnalyser: typeof StereoAnalyserNode; - finalCompressor: DynamicsCompressorNode; - streamingDestination: MediaStreamAudioDestinationNode; - + // Player Inputs + public players: (Player | undefined)[] = []; playerAnalysers: typeof StereoAnalyserNode[]; - streamingAnalyser: typeof StereoAnalyserNode; + // Final Processing + finalCompressor: DynamicsCompressorNode; + // Streaming / Recording + streamingAnalyser: typeof StereoAnalyserNode; + streamingDestination: MediaStreamAudioDestinationNode; + + // News In/Out Reminders newsStartCountdownEl: HTMLAudioElement; newsStartCountdownNode: MediaElementAudioSourceNode; newsEndCountdownEl: HTMLAudioElement; newsEndCountdownNode: MediaElementAudioSourceNode; - analysisBuffer: Float32Array; - analysisBuffer2: Float32Array; + + // Headphones + headphonesNode: GainNode; + pflAnalyser: typeof StereoAnalyserNode; constructor() { super(); + + // Multipurpose Bits this.audioContext = new AudioContext({ sampleRate: 44100, latencyHint: "interactive", }); - this.finalCompressor = this.audioContext.createDynamicsCompressor(); - this.finalCompressor.ratio.value = 20; //brickwall destination comressor - this.finalCompressor.threshold.value = -0.5; - this.finalCompressor.attack.value = 0; - this.finalCompressor.release.value = 0.2; - this.finalCompressor.knee.value = 0; + this.analysisBuffer = new Float32Array(ANALYSIS_FFT_SIZE); + this.analysisBuffer2 = new 
Float32Array(ANALYSIS_FFT_SIZE); - this.playerAnalysers = []; - for (let i = 0; i < 3; i++) { - let analyser = new StereoAnalyserNode(this.audioContext); - analyser.fftSize = ANALYSIS_FFT_SIZE; - this.playerAnalysers.push(analyser); - } - - this.streamingAnalyser = new StereoAnalyserNode(this.audioContext); - this.streamingAnalyser.fftSize = ANALYSIS_FFT_SIZE; - - // this.streamingAnalyser.maxDecibels = 0; - - this.streamingDestination = this.audioContext.createMediaStreamDestination(); - - this.finalCompressor.connect(this.audioContext.destination); - - this.finalCompressor - .connect(this.streamingAnalyser) - .connect(this.streamingDestination); + // Mic Input this.micCalibrationGain = this.audioContext.createGain(); @@ -319,10 +377,6 @@ export class AudioEngine extends ((EngineEmitter as unknown) as { this.micPrecompAnalyser.fftSize = ANALYSIS_FFT_SIZE; this.micPrecompAnalyser.maxDecibels = 0; - this.micFinalAnalyser = new StereoAnalyserNode(this.audioContext); - this.micFinalAnalyser.fftSize = ANALYSIS_FFT_SIZE; - this.micFinalAnalyser.maxDecibels = 0; - this.micCompressor = this.audioContext.createDynamicsCompressor(); this.micCompressor.ratio.value = 3; // mic compressor - fairly gentle, can be upped this.micCompressor.threshold.value = -18; @@ -333,13 +387,36 @@ export class AudioEngine extends ((EngineEmitter as unknown) as { this.micMixGain = this.audioContext.createGain(); this.micMixGain.gain.value = 1; - this.micCalibrationGain.connect(this.micPrecompAnalyser); - this.micCalibrationGain - .connect(this.micCompressor) - .connect(this.micMixGain) - .connect(this.micFinalAnalyser) - // we don't run the mic into masterAnalyser to ensure it doesn't go to audioContext.destination - .connect(this.streamingAnalyser); + this.micFinalAnalyser = new StereoAnalyserNode(this.audioContext); + this.micFinalAnalyser.fftSize = ANALYSIS_FFT_SIZE; + this.micFinalAnalyser.maxDecibels = 0; + + // Player Input + + this.playerAnalysers = []; + for (let i = 0; i < 3; i++) { + 
let analyser = new StereoAnalyserNode(this.audioContext); + analyser.fftSize = ANALYSIS_FFT_SIZE; + this.playerAnalysers.push(analyser); + } + + // Final Processing + + this.finalCompressor = this.audioContext.createDynamicsCompressor(); + this.finalCompressor.ratio.value = 20; //brickwall destination compressor + this.finalCompressor.threshold.value = -0.5; + this.finalCompressor.attack.value = 0; + this.finalCompressor.release.value = 0.2; + this.finalCompressor.knee.value = 0; + + // Streaming/Recording + + this.streamingAnalyser = new StereoAnalyserNode(this.audioContext); + this.streamingAnalyser.fftSize = ANALYSIS_FFT_SIZE; + + this.streamingDestination = this.audioContext.createMediaStreamDestination(); + + // News In/Out Reminders this.newsEndCountdownEl = new Audio(NewsEndCountdown); this.newsEndCountdownEl.preload = "auto"; @@ -347,7 +424,6 @@ export class AudioEngine extends ((EngineEmitter as unknown) as { this.newsEndCountdownNode = this.audioContext.createMediaElementSource( this.newsEndCountdownEl ); - this.newsEndCountdownNode.connect(this.audioContext.destination); this.newsStartCountdownEl = new Audio(NewsIntro); this.newsStartCountdownEl.preload = "auto"; @@ -355,14 +431,62 @@ export class AudioEngine extends ((EngineEmitter as unknown) as { this.newsStartCountdownNode = this.audioContext.createMediaElementSource( this.newsStartCountdownEl ); - this.newsStartCountdownNode.connect(this.audioContext.destination); - this.analysisBuffer = new Float32Array(ANALYSIS_FFT_SIZE); - this.analysisBuffer2 = new Float32Array(ANALYSIS_FFT_SIZE); + // Headphones (for PFL / Monitoring) + this.headphonesNode = this.audioContext.createGain(); + this.pflAnalyser = new StereoAnalyserNode(this.audioContext); + this.pflAnalyser.fftSize = ANALYSIS_FFT_SIZE; + this.pflAnalyser.maxDecibels = 0; + + // Routing the above bits together + + // Mic Source gets routed to micCompressor or micMixGain. 
+ // We run setMicProcessingEnabled() later to either patch to the compressor, or bypass it to the mixGain node. + this.micCompressor.connect(this.micMixGain); + + // Send the final mic feed to the VU meter and Stream. + // We bypass the finalCompressor to ensure it doesn't go to audioContext.destination + // since this will cause delayed mic monitoring. Speech jam central! + this.micMixGain + .connect(this.micFinalAnalyser) + .connect(this.streamingAnalyser); + + this._connectFinalCompressor(true); + + // Send the streaming analyser to the Streamer! + this.streamingAnalyser.connect(this.streamingDestination); + + // Feed the news in/out reminders to the headphones too. + this.newsStartCountdownNode.connect(this.audioContext.destination); + this.newsEndCountdownNode.connect(this.audioContext.destination); + + // Send the headphones feed to the headphones. + const db = -12; // DB gain on headphones (-6 to match default trim) + this.headphonesNode.gain.value = Math.pow(10, db / 20); + this.headphonesNode.connect(this.audioContext.destination); + this.headphonesNode.connect(this.pflAnalyser); } - public createPlayer(number: number, url: string) { - const player = Player.create(this, number, url); + // Routes the final compressor (all players) to the stream, and optionally headphones. + _connectFinalCompressor(masterToHeadphones: boolean) { + this.finalCompressor.disconnect(); + + if (masterToHeadphones) { + // Send the final compressor (all players and guests) to the headphones. + this.finalCompressor.connect(this.headphonesNode); + } + + // Also send the final compressor to the streaming analyser on to the stream. 
+ this.finalCompressor.connect(this.streamingAnalyser); + } + + public createPlayer( + number: number, + outputId: string, + pfl: boolean, + url: string + ) { + const player = Player.create(this, number, outputId, pfl, url); this.players[number] = player; return player; } @@ -384,7 +508,30 @@ export class AudioEngine extends ((EngineEmitter as unknown) as { this.players[number] = undefined; } - async openMic(deviceId: string) { + public setPFL(number: number, enabled: boolean) { + var routeMainOut = true; + var player = this.getPlayer(number); + + if (player) { + player.setPFL(enabled); + } + + for (let i = 0; i < this.players.length; i++) { + player = this.getPlayer(i); + if (player?.getPFL()) { + // PFL is enabled on this channel, so we're not routing the regular output to H/Ps. + routeMainOut = false; + console.log("Player", i, "is PFL'd."); + } else { + console.log("Player", i, "isn't PFL'd."); + } + } + console.log("Routing main out?", routeMainOut); + + this._connectFinalCompressor(routeMainOut); + } + + async openMic(deviceId: string, channelMapping: ChannelMapping) { if (this.micSource !== null && this.micMedia !== null) { this.micMedia.getAudioTracks()[0].stop(); this.micSource.disconnect(); @@ -404,8 +551,36 @@ export class AudioEngine extends ((EngineEmitter as unknown) as { this.micSource = this.audioContext.createMediaStreamSource(this.micMedia); - this.micSource.connect(this.micCalibrationGain); + // Handle stereo mic sources. 
+ const splitterNode = this.audioContext.createChannelSplitter(2); + const mergerNode = this.audioContext.createChannelMerger(2); + this.micSource.connect(splitterNode); + switch (channelMapping) { + case "stereo-normal": + splitterNode.connect(mergerNode, 0, 0); + splitterNode.connect(mergerNode, 1, 1); + break; + case "stereo-flipped": + splitterNode.connect(mergerNode, 1, 0); + splitterNode.connect(mergerNode, 0, 1); + break; + case "mono-left": + splitterNode.connect(mergerNode, 0, 0); + splitterNode.connect(mergerNode, 0, 1); + break; + case "mono-right": + splitterNode.connect(mergerNode, 1, 0); + splitterNode.connect(mergerNode, 1, 1); + break; + case "mono-both": + default: + splitterNode.connect(mergerNode, 0, 0); + splitterNode.connect(mergerNode, 1, 0); + splitterNode.connect(mergerNode, 0, 1); + splitterNode.connect(mergerNode, 1, 1); + } + mergerNode.connect(this.micCalibrationGain); this.emit("micOpen"); } @@ -418,6 +593,24 @@ export class AudioEngine extends ((EngineEmitter as unknown) as { this.micMixGain.gain.value = value; } + setMicProcessingEnabled(value: boolean) { + /* + * Disconnect whatever was connected before. + * It's either connected to micCompressor or micMixGain + * (depending on if we're going from enabled to disabled or vice versa). + * Also connected is the micPrecompAnalyser, but you can't disconnect only one node, + * so you have to disconnect all anyway. 
+ */ + this.micCalibrationGain.disconnect(); + this.micCalibrationGain.connect(this.micPrecompAnalyser); + console.log("Setting mic processing to: ", value); + if (value) { + this.micCalibrationGain.connect(this.micCompressor); + } else { + this.micCalibrationGain.connect(this.micMixGain); + } + } + getLevels(source: LevelsSource, stereo: boolean): [number, number] { switch (source) { case "mic-precomp": @@ -438,6 +631,12 @@ export class AudioEngine extends ((EngineEmitter as unknown) as { this.analysisBuffer2 ); break; + case "pfl": + this.pflAnalyser.getFloatTimeDomainData( + this.analysisBuffer, + this.analysisBuffer2 + ); + break; case "player-0": this.playerAnalysers[0].getFloatTimeDomainData( this.analysisBuffer, diff --git a/src/mixer/state.ts b/src/mixer/state.ts index 7c46756..3d18357 100644 --- a/src/mixer/state.ts +++ b/src/mixer/state.ts @@ -12,7 +12,7 @@ import Keys from "keymaster"; import { Track, MYRADIO_NON_API_BASE, AuxItem } from "../api"; import { AppThunk } from "../store"; import { RootState } from "../rootReducer"; -import { audioEngine } from "./audio"; +import { audioEngine, ChannelMapping } from "./audio"; import * as TheNews from "./the_news"; const playerGainTweens: Array<{ @@ -36,8 +36,11 @@ interface PlayerState { loadError: boolean; state: PlayerStateEnum; volume: number; + volumeEnum: VolumePresetEnum; gain: number; trim: number; + micAutoDuck: boolean; + pfl: boolean; timeCurrent: number; timeRemaining: number; timeLength: number; @@ -53,6 +56,7 @@ interface MicState { volume: 1 | 0; baseGain: number; id: string | null; + processing: boolean; } interface MixerState { @@ -65,8 +69,11 @@ const BasePlayerState: PlayerState = { loading: -1, state: "stopped", volume: 1, + volumeEnum: "full", gain: 0, + micAutoDuck: false, trim: defaultTrimDB, + pfl: false, timeCurrent: 0, timeRemaining: 0, timeLength: 0, @@ -84,10 +91,12 @@ const mixerState = createSlice({ mic: { open: false, volume: 1, + volumeEnum: "full", gain: 1, baseGain: 0, 
openError: null, id: "None", + processing: true, }, } as MixerState, reducers: { @@ -96,6 +105,7 @@ const mixerState = createSlice({ action: PayloadAction<{ player: number; item: PlanItem | Track | AuxItem | null; + customOutput: boolean; resetTrim?: boolean; }> ) { @@ -111,7 +121,10 @@ const mixerState = createSlice({ state.players[action.payload.player].timeLength = 0; state.players[action.payload.player].tracklistItemID = -1; state.players[action.payload.player].loadError = false; - if (action.payload.resetTrim) { + + if (action.payload.customOutput) { + state.players[action.payload.player].trim = 0; + } else if (action.payload.resetTrim) { state.players[action.payload.player].trim = defaultTrimDB; } }, @@ -139,9 +152,12 @@ const mixerState = createSlice({ action: PayloadAction<{ player: number; volume: number; + volumeEnum: VolumePresetEnum; }> ) { state.players[action.payload.player].volume = action.payload.volume; + state.players[action.payload.player].volumeEnum = + action.payload.volumeEnum; }, setPlayerGain( state, @@ -161,6 +177,24 @@ const mixerState = createSlice({ ) { state.players[action.payload.player].trim = action.payload.trim; }, + setPlayerMicAutoDuck( + state, + action: PayloadAction<{ + player: number; + enabled: boolean; + }> + ) { + state.players[action.payload.player].micAutoDuck = action.payload.enabled; + }, + setPlayerPFL( + state, + action: PayloadAction<{ + player: number; + enabled: boolean; + }> + ) { + state.players[action.payload.player].pfl = action.payload.enabled; + }, setLoadedItemIntro( state, action: PayloadAction<{ @@ -210,6 +244,9 @@ const mixerState = createSlice({ setMicBaseGain(state, action: PayloadAction) { state.mic.baseGain = action.payload; }, + setMicProcessingEnabled(state, action: PayloadAction) { + state.mic.processing = action.payload; + }, setTimeCurrent( state, action: PayloadAction<{ @@ -348,9 +385,17 @@ export const load = ( loadAbortControllers[player] = new AbortController(); const shouldResetTrim = 
getState().settings.resetTrimOnLoad; + const customOutput = + getState().settings.channelOutputIds[player] !== "internal"; + const isPFL = getState().mixer.players[player].pfl; dispatch( - mixerState.actions.loadItem({ player, item, resetTrim: shouldResetTrim }) + mixerState.actions.loadItem({ + player, + item, + customOutput, + resetTrim: shouldResetTrim, + }) ); let url; @@ -406,7 +451,14 @@ export const load = ( const blob = new Blob([rawData]); const objectUrl = URL.createObjectURL(blob); - const playerInstance = await audioEngine.createPlayer(player, objectUrl); + const channelOutputId = getState().settings.channelOutputIds[player]; + + const playerInstance = await audioEngine.createPlayer( + player, + channelOutputId, + isPFL, + objectUrl + ); // Clear the last one out from memory if (typeof lastObjectURLs[player] === "string") { @@ -549,10 +601,25 @@ export const play = (player: number): AppThunk => async ( } audioEngine.players[player]?.play(); - if (state.loadedItem && state.loadedItem.type === "central") { + // If we're starting off audible, try and tracklist. 
+ if (state.volume > 0) { + dispatch(attemptTracklist(player)); + } +}; + +const attemptTracklist = (player: number): AppThunk => async ( + dispatch, + getState +) => { + const state = getState().mixer.players[player]; + if ( + state.loadedItem && + state.loadedItem.type === "central" && + audioEngine.players[player]?.isPlaying + ) { //track console.log("potentially tracklisting", state.loadedItem); - if (getState().mixer.players[player].tracklistItemID === -1) { + if (state.tracklistItemID === -1) { dispatch(BroadcastState.tracklistStart(player, state.loadedItem.trackid)); } else { console.log("not tracklisting because already tracklisted"); @@ -615,6 +682,8 @@ export const { toggleAutoAdvance, togglePlayOnLoad, toggleRepeat, + setTracklistItemID, + setPlayerMicAutoDuck, } = mixerState.actions; export const redrawWavesurfers = (): AppThunk => () => { @@ -623,12 +692,11 @@ export const redrawWavesurfers = (): AppThunk => () => { }); }; -export const { setTracklistItemID } = mixerState.actions; - const FADE_TIME_SECONDS = 1; export const setVolume = ( player: number, - level: VolumePresetEnum + level: VolumePresetEnum, + fade: boolean = true ): AppThunk => (dispatch, getState) => { let volume: number; let uiLevel: number; @@ -660,12 +728,38 @@ export const setVolume = ( playerGainTweens[player].tweens.forEach((tween) => tween.pause()); if (playerGainTweens[player].target === level) { delete playerGainTweens[player]; - dispatch(mixerState.actions.setPlayerVolume({ player, volume: uiLevel })); + dispatch( + mixerState.actions.setPlayerVolume({ + player, + volume: uiLevel, + volumeEnum: level, + }) + ); dispatch(mixerState.actions.setPlayerGain({ player, gain: volume })); return; } } + if (level !== "off") { + // If we're fading up the volume, disable the PFL. + dispatch(setChannelPFL(player, false)); + // Also catch a tracklist if we started with the channel off. + dispatch(attemptTracklist(player)); + } + + // If not fading, just do it. 
+ if (!fade) { + dispatch( + mixerState.actions.setPlayerVolume({ + player, + volume: uiLevel, + volumeEnum: level, + }) + ); + dispatch(mixerState.actions.setPlayerGain({ player, gain: volume })); + return; + } + const state = getState().mixer.players[player]; const currentLevel = state.volume; @@ -680,7 +774,13 @@ export const setVolume = ( const volumeTween = new Between(currentLevel, uiLevel) .time(FADE_TIME_SECONDS * 1000) .on("update", (val: number) => { - dispatch(mixerState.actions.setPlayerVolume({ player, volume: val })); + dispatch( + mixerState.actions.setPlayerVolume({ + player, + volume: val, + volumeEnum: level, + }) + ); }); const gainTween = new Between(currentGain, volume) .time(FADE_TIME_SECONDS * 1000) @@ -708,15 +808,34 @@ export const setChannelTrim = (player: number, val: number): AppThunk => async ( audioEngine.players[player]?.setTrim(val); }; -export const openMicrophone = (micID: string): AppThunk => async ( - dispatch, - getState -) => { - // TODO: not sure why this is here, and I have a hunch it may break shit, so disabling - // File a ticket if it breaks stuff. -Marks - // if (getState().mixer.mic.open) { - // micSource?.disconnect(); - // } +export const setChannelPFL = ( + player: number, + enabled: boolean +): AppThunk => async (dispatch) => { + if ( + enabled && + typeof audioEngine.players[player] !== "undefined" && + !audioEngine.players[player]?.isPlaying + ) { + dispatch(setVolume(player, "off", false)); + dispatch(play(player)); + } + // If the player number is -1, do all channels. 
+ if (player === -1) { + for (let i = 0; i < audioEngine.players.length; i++) { + dispatch(mixerState.actions.setPlayerPFL({ player: i, enabled: false })); + audioEngine.setPFL(i, false); + } + } else { + dispatch(mixerState.actions.setPlayerPFL({ player, enabled })); + audioEngine.setPFL(player, enabled); + } +}; + +export const openMicrophone = ( + micID: string, + micMapping: ChannelMapping +): AppThunk => async (dispatch, getState) => { if (audioEngine.audioContext.state !== "running") { console.log("Resuming AudioContext because Chrome bad"); await audioEngine.audioContext.resume(); @@ -728,7 +847,7 @@ export const openMicrophone = (micID: string): AppThunk => async ( return; } try { - await audioEngine.openMic(micID); + await audioEngine.openMic(micID, micMapping); } catch (e) { if (e instanceof DOMException) { switch (e.message) { @@ -747,20 +866,47 @@ export const openMicrophone = (micID: string): AppThunk => async ( const state = getState().mixer.mic; audioEngine.setMicCalibrationGain(state.baseGain); audioEngine.setMicVolume(state.volume); - + // Now to patch in the Mic to the Compressor, or Bypass it. + audioEngine.setMicProcessingEnabled(state.processing); dispatch(mixerState.actions.micOpen(micID)); }; -export const setMicVolume = (level: MicVolumePresetEnum): AppThunk => ( +export const setMicProcessingEnabled = (enabled: boolean): AppThunk => async ( dispatch ) => { + dispatch(mixerState.actions.setMicProcessingEnabled(enabled)); + audioEngine.setMicProcessingEnabled(enabled); +}; + +export const setMicVolume = (level: MicVolumePresetEnum): AppThunk => ( + dispatch, + getState +) => { + const players = getState().mixer.players; + // no tween fuckery here, just cut the level const levelVal = level === "full" ? 
1 : 0; // actually, that's a lie - if we're turning it off we delay it a little to compensate for // processing latency + if (levelVal !== 0) { dispatch(mixerState.actions.setMicLevels({ volume: levelVal })); + for (let player = 0; player < players.length; player++) { + // If we have auto duck enabled on this channel player, tell it to fade down. + if ( + players[player].micAutoDuck && + players[player].volumeEnum === "full" + ) { + dispatch(setVolume(player, "bed")); + } + } } else { + for (let player = 0; player < players.length; player++) { + // If we have auto duck enabled on this channel player, tell it to fade back up. + if (players[player].micAutoDuck && players[player].volumeEnum === "bed") { + dispatch(setVolume(player, "full")); + } + } window.setTimeout(() => { dispatch(mixerState.actions.setMicLevels({ volume: levelVal })); // latency, plus a little buffer diff --git a/src/navbar/index.tsx b/src/navbar/index.tsx index 3348aa8..dcaa96b 100644 --- a/src/navbar/index.tsx +++ b/src/navbar/index.tsx @@ -1,5 +1,5 @@ import React, { useRef, useEffect, useState } from "react"; -import { useDispatch, useSelector } from "react-redux"; +import { shallowEqual, useDispatch, useSelector } from "react-redux"; import Clock from "react-live-clock"; import Stopwatch from "react-stopwatch"; @@ -11,6 +11,7 @@ import { FaSpinner, FaExclamationTriangle, FaCog, + FaHeadphonesAlt, } from "react-icons/fa"; import { RootState } from "../rootReducer"; @@ -26,6 +27,7 @@ import { VUMeter } from "../optionsMenu/helpers/VUMeter"; import { getShowplan, setItemPlayed } from "../showplanner/state"; import * as OptionsMenuState from "../optionsMenu/state"; +import { setChannelPFL } from "../mixer/state"; function nicifyConnectionState(state: ConnectionStateEnum): string { switch (state) { @@ -159,24 +161,6 @@ export function NavBarMyRadio() { } export function NavBarMain() { - const dispatch = useDispatch(); - const broadcastState = useSelector((state: RootState) => state.broadcast); - 
const settings = useSelector((state: RootState) => state.settings); - - const [connectButtonAnimating, setConnectButtonAnimating] = useState(false); - - const prevRegistrationStage = useRef(broadcastState.stage); - useEffect(() => { - if (broadcastState.stage !== prevRegistrationStage.current) { - setConnectButtonAnimating(false); - } - prevRegistrationStage.current = broadcastState.stage; - }, [broadcastState.stage]); - - const { planSaveError, planSaving } = useSelector( - (state: RootState) => state.showplan - ); - return ( <> - + + ); +} +function SavingAlert() { + const { planSaveError, planSaving } = useSelector( + (state: RootState) => state.showplan + ); + return ( + <> + {planSaving && ( +
  • + Saving show plan...
  • + )} + {planSaveError && ( +
  • + + {planSaveError} +
  • + )} + + ); +} +function RegisterButton() { + const dispatch = useDispatch(); + const broadcastState = useSelector((state: RootState) => state.broadcast); + const [connectButtonAnimating, setConnectButtonAnimating] = useState(false); + + const prevRegistrationStage = useRef(broadcastState.stage); + useEffect(() => { + if (broadcastState.stage !== prevRegistrationStage.current) { + setConnectButtonAnimating(false); + } + prevRegistrationStage.current = broadcastState.stage; + }, [broadcastState.stage]); + + return ( + <> +
  • +
    + {nicifyConnectionState(broadcastState.connectionState)} +
    +
  • +
  • { + setConnectButtonAnimating(true); + switch (broadcastState.stage) { + case "NOT_REGISTERED": + dispatch(BroadcastState.goOnAir()); + break; + case "REGISTERED": + dispatch(BroadcastState.cancelTimeslot()); + break; + } + }} + > + {connectButtonAnimating ? ( + <> + + + + ) : ( + <> + + {broadcastState.stage === "NOT_REGISTERED" && "Register"} + {broadcastState.stage === "REGISTERED" && "Stop"} + + )} +
  • + + ); +} +function RecordingButton() { + const recordingState = useSelector( + (state: RootState) => state.broadcast.recordingState + ); + const enableRecording = useSelector( + (state: RootState) => state.settings.enableRecording + ); + const dispatch = useDispatch(); + return ( + <> + {enableRecording && (
  • { - setConnectButtonAnimating(true); - switch (broadcastState.stage) { - case "NOT_REGISTERED": - dispatch(BroadcastState.goOnAir()); - break; - case "REGISTERED": - dispatch(BroadcastState.cancelTimeslot()); - break; - } - }} + className={ + "btn rounded-0 pt-2 pb-1 nav-item nav-link " + + (recordingState === "CONNECTED" + ? "btn-outline-danger active" + : "btn-outline-light") + } + onClick={() => + dispatch( + recordingState === "NOT_CONNECTED" + ? BroadcastState.startRecording() + : BroadcastState.stopRecording() + ) + } > - {connectButtonAnimating ? ( - <> - - - + {" "} + {recordingState === "CONNECTED" ? ( + { + return {formatted}; + }} + /> ) : ( - <> - - {broadcastState.stage === "NOT_REGISTERED" && "Register"} - {broadcastState.stage === "REGISTERED" && "Stop"} - + "Record" )}
  • - {settings.enableRecording && ( -
  • - dispatch( - broadcastState.recordingState === "NOT_CONNECTED" - ? BroadcastState.startRecording() - : BroadcastState.stopRecording() - ) - } - > - {" "} - {broadcastState.recordingState === "CONNECTED" ? ( - { - return {formatted}; - }} - /> - ) : ( - "Record" - )} -
  • - )} -
  • dispatch(OptionsMenuState.open())} - > - Options -
  • + )} + + ); +} +function OptionsButton() { + const dispatch = useDispatch(); + return ( +
  • dispatch(OptionsMenuState.open())} + > + Options +
  • + ); +} -
  • +function MeterBridge() { + const dispatch = useDispatch(); + const proMode = useSelector((state: RootState) => state.settings.proMode); + const playerPFLs = useSelector( + (state: RootState) => state.mixer.players.map((x) => x.pfl), + shallowEqual + ); + const isPFL = useSelector((state) => playerPFLs).some((x) => x === true); + + return ( + <> + {proMode && isPFL && ( +
  • dispatch(setChannelPFL(-1, false))} + > + Clear PFL +
  • + )} + +
  • + {isPFL && ( + + )} + {!isPFL && ( -
  • - + )} + ); } diff --git a/src/navbar/navbar.scss b/src/navbar/navbar.scss index f1b08df..27c087a 100644 --- a/src/navbar/navbar.scss +++ b/src/navbar/navbar.scss @@ -305,3 +305,8 @@ .nav-link.connect { min-width: 90px; } + +.pfl-live.nav-vu { + box-shadow: inset 0 0 3px 6px #dc3545; + padding-top: 6px; +} diff --git a/src/optionsMenu/AdvancedTab.tsx b/src/optionsMenu/AdvancedTab.tsx index 7bc12e5..879cce4 100644 --- a/src/optionsMenu/AdvancedTab.tsx +++ b/src/optionsMenu/AdvancedTab.tsx @@ -1,14 +1,116 @@ -import React from "react"; +import React, { useEffect, useState } from "react"; import { RootState } from "../rootReducer"; import { useSelector, useDispatch } from "react-redux"; import { changeSetting } from "./settingsState"; import { changeBroadcastSetting } from "../broadcast/state"; +type ErrorEnum = + | "NO_PERMISSION" + | "NOT_SECURE_CONTEXT" + | "UNKNOWN" + | "UNKNOWN_ENUM"; + +function reduceToOutputs(devices: MediaDeviceInfo[]) { + var temp: MediaDeviceInfo[] = []; + devices.forEach((device) => { + if (device.kind === "audiooutput") { + temp.push(device); + } + }); + return temp; +} + +function ChannelOutputSelect({ + outputList, + channel, +}: { + outputList: MediaDeviceInfo[] | null; + channel: number; +}) { + const outputIds = useSelector( + (state: RootState) => state.settings.channelOutputIds + ); + const outputId = outputIds[channel]; + const dispatch = useDispatch(); + return ( +
    + + +
    + ); +} export function AdvancedTab() { const settings = useSelector((state: RootState) => state.settings); + const [outputList, setOutputList] = useState(null); const broadcastState = useSelector((state: RootState) => state.broadcast); + const [openError, setOpenError] = useState(null); + const dispatch = useDispatch(); + async function fetchOutputNames() { + if (!("mediaDevices" in navigator)) { + setOpenError("NOT_SECURE_CONTEXT"); + return; + } + // Because Chrome, we have to call getUserMedia() before enumerateDevices() + try { + await navigator.mediaDevices.getUserMedia({ audio: true }); + } catch (e) { + if (e instanceof DOMException) { + switch (e.message) { + case "Permission denied": + setOpenError("NO_PERMISSION"); + break; + default: + setOpenError("UNKNOWN"); + } + } else { + setOpenError("UNKNOWN"); + } + return; + } + try { + const devices = await navigator.mediaDevices.enumerateDevices(); + setOutputList(reduceToOutputs(devices)); + } catch (e) { + setOpenError("UNKNOWN_ENUM"); + } + } + + useEffect(() => { + fetchOutputNames(); + }, []); + // @ts-ignore return ( <> @@ -71,6 +173,30 @@ export function AdvancedTab() { /> + +
    +

    Channel Outputs

    +

    + Select a sound output for each channel. Internal routes + directly to the WebStudio stream/recorder. Other outputs will disable + ProMode ™ features.{" "} + Routing will apply upon loading a new item. +

    + {openError !== null && ( +
    + {openError === "NO_PERMISSION" + ? "Please grant this page permission to use your outputs/microphone and try again." + : openError === "NOT_SECURE_CONTEXT" + ? "We can't open the outputs. Please make sure the address bar has a https:// at the start and try again." + : openError === "UNKNOWN_ENUM" + ? "An error occurred when enumerating output devices. Please try again." + : "An error occurred when opening the output devices. Please try again."} +
    + )} + + + +

    Misc

    diff --git a/src/optionsMenu/MicTab.tsx b/src/optionsMenu/MicTab.tsx index 0d2040f..1702345 100644 --- a/src/optionsMenu/MicTab.tsx +++ b/src/optionsMenu/MicTab.tsx @@ -4,6 +4,7 @@ import { RootState } from "../rootReducer"; import * as MixerState from "../mixer/state"; import { VUMeter } from "./helpers/VUMeter"; +import { ChannelMapping } from "../mixer/audio"; type MicErrorEnum = | "NO_PERMISSION" @@ -26,10 +27,12 @@ export function MicTab() { const [micList, setMicList] = useState(null); const dispatch = useDispatch(); const [nextMicSource, setNextMicSource] = useState("$NONE"); + const [nextMicMapping, setNextMicMapping] = useState( + "mono-both" + ); const [openError, setOpenError] = useState(null); async function fetchMicNames() { - console.log("start fetchNames"); if (!("mediaDevices" in navigator)) { setOpenError("NOT_SECURE_CONTEXT"); return; @@ -52,9 +55,7 @@ export function MicTab() { } return; } - console.log("done"); try { - console.log("gUM"); const devices = await navigator.mediaDevices.enumerateDevices(); console.log(devices); setMicList(reduceToInputs(devices)); @@ -65,7 +66,12 @@ export function MicTab() { function setMicSource(sourceId: string) { setNextMicSource(sourceId); - dispatch(MixerState.openMicrophone(sourceId)); + dispatch(MixerState.openMicrophone(sourceId, nextMicMapping)); + } + + function setMicMapping(mapping: ChannelMapping) { + setNextMicMapping(mapping); + dispatch(MixerState.openMicrophone(nextMicSource, mapping)); } return ( @@ -110,6 +116,32 @@ export function MicTab() { : "An error occurred when opening the microphone. Please try again."}
    )} + + +
    + { + dispatch(MixerState.setMicProcessingEnabled(e.target.checked)); + }} + /> + +

    Calibration

    @@ -124,7 +156,7 @@ export function MicTab() { height={40} source="mic-precomp" range={[-70, 0]} - greenRange={[-14, -10]} + greenRange={state.processing ? [-16, -6] : [-32, -5]} stereo={true} />
    diff --git a/src/optionsMenu/helpers/VUMeter.tsx b/src/optionsMenu/helpers/VUMeter.tsx index 97c07d1..0e7791b 100644 --- a/src/optionsMenu/helpers/VUMeter.tsx +++ b/src/optionsMenu/helpers/VUMeter.tsx @@ -27,6 +27,8 @@ export function VUMeter(props: VUMeterProps) { const isMic = props.source.substr(0, 3) === "mic"; + const FPS = 30; // Limit the FPS so that lower spec machines have a better time juggling CPU. + useEffect(() => { const animate = () => { if (!isMic || isMicOpen) { @@ -38,7 +40,9 @@ export function VUMeter(props: VUMeterProps) { if (props.stereo) { setPeakR(result[1]); } - rafRef.current = requestAnimationFrame(animate); + setTimeout((current = rafRef.current, a = animate) => { + current = requestAnimationFrame(a); + }, 1000 / FPS); } }; if (!isMic || isMicOpen) { diff --git a/src/optionsMenu/settingsState.ts b/src/optionsMenu/settingsState.ts index 05d4c48..62da17d 100644 --- a/src/optionsMenu/settingsState.ts +++ b/src/optionsMenu/settingsState.ts @@ -8,6 +8,7 @@ interface Settings { proMode: boolean; channelVUs: boolean; channelVUsStereo: boolean; + channelOutputIds: string[]; resetTrimOnLoad: boolean; saveShowPlanChanges: boolean; } @@ -22,6 +23,7 @@ const settingsState = createSlice({ proMode: false, channelVUs: true, channelVUsStereo: true, + channelOutputIds: ["internal", "internal", "internal"], resetTrimOnLoad: true, saveShowPlanChanges: false, } as Settings, diff --git a/src/session/index.tsx b/src/session/index.tsx index 7e52f66..caf0136 100644 --- a/src/session/index.tsx +++ b/src/session/index.tsx @@ -78,7 +78,7 @@ const SessionHandler: React.FC = function() { ); } - return

    ; + return <>; }; export default SessionHandler; diff --git a/src/showplanner/Item.tsx b/src/showplanner/Item.tsx index 5b8e9b5..ec82638 100644 --- a/src/showplanner/Item.tsx +++ b/src/showplanner/Item.tsx @@ -25,14 +25,15 @@ export const Item = memo(function Item({ const id = itemId(x); const isGhost = "ghostid" in x; - const playerState = useSelector((state: RootState) => - column > -1 ? state.mixer.players[column] : undefined + const loadedItem = useSelector( + (state: RootState) => + column > -1 ? state.mixer.players[column]?.loadedItem : null, + (a, b) => + (a === null && b === null) || + (a !== null && b !== null && itemId(a) === itemId(b)) ); - const isLoaded = - playerState && - playerState.loadedItem !== null && - itemId(playerState.loadedItem) === id; + const isLoaded = loadedItem !== null ? itemId(loadedItem) === id : false; const showDebug = useSelector( (state: RootState) => state.settings.showDebugInfo @@ -84,14 +85,7 @@ export const Item = memo(function Item({ "item " + ("played" in x ? (x.played ? "played " : "") : "") + x.type + - `${ - column >= 0 && - playerState && - playerState.loadedItem !== null && - itemId(playerState.loadedItem) === id - ? " active" - : "" - }` + `${column >= 0 && isLoaded ? " active" : ""}` } onClick={triggerClick} onContextMenu={openContextMenu} diff --git a/src/showplanner/Player.tsx b/src/showplanner/Player.tsx index e1ed3e0..7cc9a2e 100644 --- a/src/showplanner/Player.tsx +++ b/src/showplanner/Player.tsx @@ -197,6 +197,46 @@ function TimingButtons({ id }: { id: number }) { ); } +function LoadedTrackInfo({ id }: { id: number }) { + const dispatch = useDispatch(); + const loadedItem = useSelector( + (state: RootState) => state.mixer.players[id].loadedItem + ); + const loading = useSelector( + (state: RootState) => state.mixer.players[id].loading + ); + const loadError = useSelector( + (state: RootState) => state.mixer.players[id].loadError + ); + + return ( + + + {loadedItem !== null && loading === -1 + ? 
loadedItem.title + : loading !== -1 + ? `LOADING` + : loadError + ? "LOAD FAILED" + : "No Media Selected"} + + + Explicit + + + ); +} + export function Player({ id }: { id: number }) { // Define time remaining (secs) when the play icon should flash. const SECS_REMAINING_WARNING = 20; @@ -214,13 +254,8 @@ export function Player({ id }: { id: number }) { omit(b, "timeCurrent", "timeRemaining") ) ); - const proMode = useSelector((state: RootState) => state.settings.proMode); - const vuEnabled = useSelector( - (state: RootState) => state.settings.channelVUs - ); - const vuStereo = useSelector( - (state: RootState) => state.settings.channelVUsStereo - ); + const settings = useSelector((state: RootState) => state.settings); + const customOutput = settings.channelOutputIds[id] !== "internal"; const dispatch = useDispatch(); const VUsource = (id: number) => { @@ -305,32 +340,9 @@ export function Player({ id }: { id: number }) {   Repeat {playerState.repeat} - {proMode && } + {settings.proMode && !customOutput && }
    - - - {playerState.loadedItem !== null && playerState.loading === -1 - ? playerState.loadedItem.title - : playerState.loading !== -1 - ? `LOADING` - : playerState.loadError - ? "LOAD FAILED" - : "No Media Selected"} - - - Explicit - - +
    {playerState.loadedItem !== null && playerState.loading === -1 @@ -432,15 +444,21 @@ export function Player({ id }: { id: number }) {
    - {proMode && vuEnabled && ( + {settings.proMode && settings.channelVUs && (
    - + {customOutput ? ( + + Custom audio output disables VU meters. + + ) : ( + + )}
    )} diff --git a/src/showplanner/ProModeButtons.tsx b/src/showplanner/ProModeButtons.tsx index 1303d6a..81ee246 100644 --- a/src/showplanner/ProModeButtons.tsx +++ b/src/showplanner/ProModeButtons.tsx @@ -1,33 +1,73 @@ import React, { useState } from "react"; -import { FaTachometerAlt } from "react-icons/fa"; +import { + FaHeadphonesAlt, + FaMicrophoneAlt, + FaTachometerAlt, +} from "react-icons/fa"; import { useDispatch, useSelector } from "react-redux"; import { RootState } from "../rootReducer"; -import { setChannelTrim } from "../mixer/state"; +import { + setChannelPFL, + setChannelTrim, + setPlayerMicAutoDuck, +} from "../mixer/state"; -type ButtonIds = "trim"; +type ButtonIds = "trim" | "pfl" | "autoDuck"; export default function ProModeButtons({ channel }: { channel: number }) { const [activeButton, setActiveButton] = useState(null); const trimVal = useSelector( (state: RootState) => state.mixer.players[channel]?.trim ); + + const micAutoDuck = useSelector( + (state: RootState) => state.mixer.players[channel]?.micAutoDuck + ); + + const pflState = useSelector( + (state: RootState) => state.mixer.players[channel]?.pfl + ); const dispatch = useDispatch(); return ( <>
    Pro Mode™ - {(activeButton === null || activeButton === "trim") && ( - - )} + + + {activeButton === "trim" && ( <> - {trimVal} dB + {trimVal} dB )} + {activeButton === "pfl" && ( + + Pre Fader Listen: {pflState ? "Yes" : "No"} + + )} + {activeButton === "autoDuck" && ( + + Duck on Mic: {micAutoDuck ? "Yes" : "No"} + + )}
    ); diff --git a/src/showplanner/index.tsx b/src/showplanner/index.tsx index c42843f..3b89d27 100644 --- a/src/showplanner/index.tsx +++ b/src/showplanner/index.tsx @@ -25,7 +25,7 @@ import { ResponderProvided, } from "react-beautiful-dnd"; -import { useSelector, useDispatch } from "react-redux"; +import { useSelector, useDispatch, shallowEqual } from "react-redux"; import { RootState } from "../rootReducer"; import { PlanItem, @@ -199,6 +199,9 @@ function LibraryColumn() { function MicControl() { const state = useSelector((state: RootState) => state.mixer.mic); const proMode = useSelector((state: RootState) => state.settings.proMode); + const stereo = useSelector( + (state: RootState) => state.settings.channelVUsStereo + ); const dispatch = useDispatch(); return ( @@ -253,8 +256,8 @@ function MicControl() { height={40} source="mic-final" range={[-40, 3]} - greenRange={[-10, -5]} - stereo={proMode} + greenRange={[-16, -6]} + stereo={proMode && stereo} />
    @@ -291,8 +294,9 @@ function incrReducer(state: number, action: any) { } const Showplanner: React.FC<{ timeslotId: number }> = function({ timeslotId }) { - const { plan: showplan, planLoadError, planLoading } = useSelector( - (state: RootState) => state.showplan + const isShowplan = useSelector( + (state: RootState) => state.showplan.plan !== null, + shallowEqual ); // Tell Modals that #root is the main page content, for accessability reasons. @@ -400,26 +404,15 @@ const Showplanner: React.FC<{ timeslotId: number }> = function({ timeslotId }) { }; }, [dispatch, session.currentTimeslot]); - if (showplan === null) { - return ( - - ); + if (!isShowplan) { + return ; } return (
    -
    - - - -
    + = function({ timeslotId }) { ); }; +function GettingShowPlanScreen() { + const { planLoading, planLoadError } = useSelector( + (state: RootState) => state.showplan + ); + return ( + + ); +} + export function LoadingDialogue({ title, subtitle, @@ -526,4 +533,16 @@ export function LoadingDialogue({ ); } +function ChannelStrips() { + const showplan = useSelector((state: RootState) => state.showplan.plan!); + + return ( +
    + + + +
    + ); +} + export default Showplanner; diff --git a/yarn.lock b/yarn.lock index 1d95290..a9c0751 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6228,9 +6228,9 @@ inherits@2.0.3: integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= ini@^1.3.5: - version "1.3.5" - resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927" - integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw== + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== inquirer@6.5.0: version "6.5.0"