Merge branch 'master' into marks-migrate-contextmenu

This commit is contained in:
Matthew Stratford 2021-01-25 00:57:12 +00:00
commit 2b06751b52
17 changed files with 957 additions and 296 deletions

View file

@ -1,6 +1,6 @@
{
"name": "webstudio",
"version": "1.3.0",
"version": "1.4.0",
"private": true,
"dependencies": {
"@babel/core": "7.6.0",

View file

@ -72,8 +72,12 @@ $number-of-channels: 3;
padding: 10px;
.channel-vu {
height: 40px;
text-align: center;
background: black;
span {
font-size: 0.7em;
}
canvas {
max-width: 100%;
border-left: 1px solid gray;

View file

@ -7,7 +7,6 @@ import * as NavbarState from "../navbar/state";
import { ConnectionStateEnum } from "./streamer";
import { RecordingStreamer } from "./recording_streamer";
import { audioEngine } from "../mixer/audio";
import { setItemPlayed } from "../showplanner/state";
export let streamer: WebRTCStreamer | null = null;
@ -316,8 +315,6 @@ export const goOnAir = (): AppThunk => async (dispatch, getState) => {
} else if (state === "CONNECTED") {
// okay, we've connected
dispatch(registerForShow());
} else if (state === "LIVE") {
dispatch(setItemPlayed({ itemId: "all", played: false }));
}
});
await streamer.start();

View file

@ -25,10 +25,12 @@ const PlayerEmitter: StrictEmitter<
class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) {
private volume = 0;
private trim = 0;
private pfl = false;
private constructor(
private readonly engine: AudioEngine,
private wavesurfer: WaveSurfer,
private readonly waveform: HTMLElement
private readonly waveform: HTMLElement,
private readonly customOutput: boolean
) {
super();
}
@ -129,6 +131,10 @@ class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) {
return this.volume;
}
// Whether pre-fade listen (PFL) is currently enabled on this player.
getPFL() {
  return this.pfl;
}
setVolume(val: number) {
this.volume = val;
this._applyVolume();
@ -139,19 +145,63 @@ class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) {
this._applyVolume();
}
// Turn pre-fade listen on/off, then re-patch the headphone routing to match.
// Note: _connectPFL() reads this.pfl, so the assignment must come first.
setPFL(enabled: boolean) {
  this.pfl = enabled;
  this._connectPFL();
}
/**
 * Route this player's audio to a specific output device (sink).
 *
 * Only valid when the player was created in customOutput mode; otherwise
 * the sink cannot be changed without re-initialising the player.
 *
 * @param sinkId - media device id of the desired audio output
 * @throws Error if the player is not in customOutput mode, or if the
 *         underlying wavesurfer setSinkId call fails.
 */
setOutputDevice(sinkId: string) {
  if (!this.customOutput) {
    // Use `new Error` rather than the bare `Error(...)` call form.
    throw new Error(
      "Can't set sinkId when player is not in customOutput mode. Please reinit player."
    );
  }
  try {
    // setSinkId isn't in wavesurfer's public typings, hence the cast.
    (this.wavesurfer as any).setSinkId(sinkId);
  } catch (e) {
    throw new Error("Tried to setSinkId " + sinkId + ", failed due to: " + e);
  }
}
_applyVolume() {
const level = this.volume + this.trim;
const linear = Math.pow(10, level / 20);
if (linear < 1) {
this.wavesurfer.setVolume(linear);
(this.wavesurfer as any).backend.gainNode.gain.value = 1;
} else {
this.wavesurfer.setVolume(1);
// Actually adjust the wavesurfer gain node gain instead, so we can tap off analyser for PFL.
this.wavesurfer.setVolume(1);
if (!this.customOutput) {
(this.wavesurfer as any).backend.gainNode.gain.value = linear;
}
}
public static create(engine: AudioEngine, player: number, url: string) {
/**
 * Patch (or unpatch) this player's output into the headphones bus,
 * according to the current PFL flag.
 */
_connectPFL() {
  // Tap the signal at the analyser (upstream of the fader), so the
  // presenter can audition the source regardless of channel volume.
  const analyser = (this.wavesurfer as any).backend.analyser;
  const headphones = this.engine.headphonesNode;
  if (!this.pfl) {
    try {
      analyser.disconnect(headphones);
    } catch (e) {
      // Wasn't connected in the first place - nothing to undo.
    }
    return;
  }
  analyser.connect(headphones);
}
public static create(
engine: AudioEngine,
player: number,
outputId: string,
pfl: boolean,
url: string
) {
// If we want to output to a custom audio device, we're gonna need to do things differently.
const customOutput = outputId !== "internal";
let waveform = document.getElementById("waveform-" + player.toString());
if (waveform == null) {
throw new Error();
@ -165,7 +215,7 @@ class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) {
waveColor: "#CCCCFF",
backgroundColor: "#FFFFFF",
progressColor: "#9999FF",
backend: "MediaElementWebAudio",
backend: customOutput ? "MediaElement" : "MediaElementWebAudio",
barWidth: 2,
responsive: true,
xhr: {
@ -186,7 +236,7 @@ class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) {
],
});
const instance = new this(engine, wavesurfer, waveform);
const instance = new this(engine, wavesurfer, waveform, customOutput);
wavesurfer.on("ready", () => {
console.log("ready");
@ -208,14 +258,23 @@ class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) {
instance.emit("timeChange", wavesurfer.getCurrentTime());
});
(wavesurfer as any).backend.gainNode.disconnect();
(wavesurfer as any).backend.gainNode.connect(engine.finalCompressor);
(wavesurfer as any).backend.gainNode.connect(
engine.playerAnalysers[player]
);
wavesurfer.load(url);
if (customOutput) {
try {
instance.setOutputDevice(outputId);
} catch (e) {
console.error("Failed to set channel " + player + " output. " + e);
}
} else {
(wavesurfer as any).backend.gainNode.disconnect();
(wavesurfer as any).backend.gainNode.connect(engine.finalCompressor);
(wavesurfer as any).backend.gainNode.connect(
engine.playerAnalysers[player]
);
instance.setPFL(pfl);
}
return instance;
}
@ -233,10 +292,18 @@ export type LevelsSource =
| "mic-precomp"
| "mic-final"
| "master"
| "pfl"
| "player-0"
| "player-1"
| "player-2";
export type ChannelMapping =
| "stereo-normal"
| "stereo-flipped"
| "mono-left"
| "mono-right"
| "mono-both";
// Setting this directly affects the performance of .getFloatTimeDomainData()
// Must be a power of 2.
const ANALYSIS_FFT_SIZE = 2048;
@ -253,8 +320,12 @@ const EngineEmitter: StrictEmitter<
export class AudioEngine extends ((EngineEmitter as unknown) as {
new (): EventEmitter;
}) {
// Multipurpose Bits
public audioContext: AudioContext;
public players: (Player | undefined)[] = [];
analysisBuffer: Float32Array;
analysisBuffer2: Float32Array;
// Mic Input
micMedia: MediaStream | null = null;
micSource: MediaStreamAudioSourceNode | null = null;
@ -264,54 +335,41 @@ export class AudioEngine extends ((EngineEmitter as unknown) as {
micMixGain: GainNode;
micFinalAnalyser: typeof StereoAnalyserNode;
finalCompressor: DynamicsCompressorNode;
streamingDestination: MediaStreamAudioDestinationNode;
// Player Inputs
public players: (Player | undefined)[] = [];
playerAnalysers: typeof StereoAnalyserNode[];
streamingAnalyser: typeof StereoAnalyserNode;
// Final Processing
finalCompressor: DynamicsCompressorNode;
// Streaming / Recording
streamingAnalyser: typeof StereoAnalyserNode;
streamingDestination: MediaStreamAudioDestinationNode;
// News In/Out Reminders
newsStartCountdownEl: HTMLAudioElement;
newsStartCountdownNode: MediaElementAudioSourceNode;
newsEndCountdownEl: HTMLAudioElement;
newsEndCountdownNode: MediaElementAudioSourceNode;
analysisBuffer: Float32Array;
analysisBuffer2: Float32Array;
// Headphones
headphonesNode: GainNode;
pflAnalyser: typeof StereoAnalyserNode;
constructor() {
super();
// Multipurpose Bits
this.audioContext = new AudioContext({
sampleRate: 44100,
latencyHint: "interactive",
});
this.finalCompressor = this.audioContext.createDynamicsCompressor();
this.finalCompressor.ratio.value = 20; //brickwall destination compressor
this.finalCompressor.threshold.value = -0.5;
this.finalCompressor.attack.value = 0;
this.finalCompressor.release.value = 0.2;
this.finalCompressor.knee.value = 0;
this.analysisBuffer = new Float32Array(ANALYSIS_FFT_SIZE);
this.analysisBuffer2 = new Float32Array(ANALYSIS_FFT_SIZE);
this.playerAnalysers = [];
for (let i = 0; i < 3; i++) {
let analyser = new StereoAnalyserNode(this.audioContext);
analyser.fftSize = ANALYSIS_FFT_SIZE;
this.playerAnalysers.push(analyser);
}
this.streamingAnalyser = new StereoAnalyserNode(this.audioContext);
this.streamingAnalyser.fftSize = ANALYSIS_FFT_SIZE;
// this.streamingAnalyser.maxDecibels = 0;
this.streamingDestination = this.audioContext.createMediaStreamDestination();
this.finalCompressor.connect(this.audioContext.destination);
this.finalCompressor
.connect(this.streamingAnalyser)
.connect(this.streamingDestination);
// Mic Input
this.micCalibrationGain = this.audioContext.createGain();
@ -319,10 +377,6 @@ export class AudioEngine extends ((EngineEmitter as unknown) as {
this.micPrecompAnalyser.fftSize = ANALYSIS_FFT_SIZE;
this.micPrecompAnalyser.maxDecibels = 0;
this.micFinalAnalyser = new StereoAnalyserNode(this.audioContext);
this.micFinalAnalyser.fftSize = ANALYSIS_FFT_SIZE;
this.micFinalAnalyser.maxDecibels = 0;
this.micCompressor = this.audioContext.createDynamicsCompressor();
this.micCompressor.ratio.value = 3; // mic compressor - fairly gentle, can be upped
this.micCompressor.threshold.value = -18;
@ -333,13 +387,36 @@ export class AudioEngine extends ((EngineEmitter as unknown) as {
this.micMixGain = this.audioContext.createGain();
this.micMixGain.gain.value = 1;
this.micCalibrationGain.connect(this.micPrecompAnalyser);
this.micCalibrationGain
.connect(this.micCompressor)
.connect(this.micMixGain)
.connect(this.micFinalAnalyser)
// we don't run the mic into masterAnalyser to ensure it doesn't go to audioContext.destination
.connect(this.streamingAnalyser);
this.micFinalAnalyser = new StereoAnalyserNode(this.audioContext);
this.micFinalAnalyser.fftSize = ANALYSIS_FFT_SIZE;
this.micFinalAnalyser.maxDecibels = 0;
// Player Input
this.playerAnalysers = [];
for (let i = 0; i < 3; i++) {
let analyser = new StereoAnalyserNode(this.audioContext);
analyser.fftSize = ANALYSIS_FFT_SIZE;
this.playerAnalysers.push(analyser);
}
// Final Processing
this.finalCompressor = this.audioContext.createDynamicsCompressor();
this.finalCompressor.ratio.value = 20; //brickwall destination compressor
this.finalCompressor.threshold.value = -0.5;
this.finalCompressor.attack.value = 0;
this.finalCompressor.release.value = 0.2;
this.finalCompressor.knee.value = 0;
// Streaming/Recording
this.streamingAnalyser = new StereoAnalyserNode(this.audioContext);
this.streamingAnalyser.fftSize = ANALYSIS_FFT_SIZE;
this.streamingDestination = this.audioContext.createMediaStreamDestination();
// News In/Out Reminders
this.newsEndCountdownEl = new Audio(NewsEndCountdown);
this.newsEndCountdownEl.preload = "auto";
@ -347,7 +424,6 @@ export class AudioEngine extends ((EngineEmitter as unknown) as {
this.newsEndCountdownNode = this.audioContext.createMediaElementSource(
this.newsEndCountdownEl
);
this.newsEndCountdownNode.connect(this.audioContext.destination);
this.newsStartCountdownEl = new Audio(NewsIntro);
this.newsStartCountdownEl.preload = "auto";
@ -355,14 +431,62 @@ export class AudioEngine extends ((EngineEmitter as unknown) as {
this.newsStartCountdownNode = this.audioContext.createMediaElementSource(
this.newsStartCountdownEl
);
this.newsStartCountdownNode.connect(this.audioContext.destination);
this.analysisBuffer = new Float32Array(ANALYSIS_FFT_SIZE);
this.analysisBuffer2 = new Float32Array(ANALYSIS_FFT_SIZE);
// Headphones (for PFL / Monitoring)
this.headphonesNode = this.audioContext.createGain();
this.pflAnalyser = new StereoAnalyserNode(this.audioContext);
this.pflAnalyser.fftSize = ANALYSIS_FFT_SIZE;
this.pflAnalyser.maxDecibels = 0;
// Routing the above bits together
// Mic Source gets routed to micCompressor or micMixGain.
// We run setMicProcessingEnabled() later to either patch to the compressor, or bypass it to the mixGain node.
this.micCompressor.connect(this.micMixGain);
// Send the final mic feed to the VU meter and Stream.
// We bypass the finalCompressor to ensure it doesn't go to audioContext.destination
// since this will cause delayed mic monitoring. Speech jam central!
this.micMixGain
.connect(this.micFinalAnalyser)
.connect(this.streamingAnalyser);
this._connectFinalCompressor(true);
// Send the streaming analyser to the Streamer!
this.streamingAnalyser.connect(this.streamingDestination);
// Feed the news in/out reminders to the headphones too.
this.newsStartCountdownNode.connect(this.audioContext.destination);
this.newsEndCountdownNode.connect(this.audioContext.destination);
// Send the headphones feed to the headphones.
const db = -12; // DB gain on headphones (-6 to match default trim)
this.headphonesNode.gain.value = Math.pow(10, db / 20);
this.headphonesNode.connect(this.audioContext.destination);
this.headphonesNode.connect(this.pflAnalyser);
}
public createPlayer(number: number, url: string) {
const player = Player.create(this, number, url);
/**
 * Re-patch the master (final compressor) output.
 * The compressor always feeds the streaming analyser (and on to the
 * stream); routing it to the headphones bus is optional, since a PFL'd
 * channel takes over the headphones instead.
 */
_connectFinalCompressor(masterToHeadphones: boolean) {
  const master = this.finalCompressor;
  // Drop every existing connection before re-wiring.
  master.disconnect();
  if (masterToHeadphones) {
    // Send the full mix (all players and guests) to the headphones.
    master.connect(this.headphonesNode);
  }
  master.connect(this.streamingAnalyser);
}
/**
 * Create (or replace) the Player instance for a given channel.
 *
 * @param number   channel index
 * @param outputId output device id, or "internal" for the default output
 * @param pfl      whether pre-fade listen starts enabled
 * @param url      audio source URL to load
 * @returns the newly created Player
 */
public createPlayer(
  number: number,
  outputId: string,
  pfl: boolean,
  url: string
) {
  // Register the instance on the engine before handing it back.
  return (this.players[number] = Player.create(
    this,
    number,
    outputId,
    pfl,
    url
  ));
}
@ -384,7 +508,30 @@ export class AudioEngine extends ((EngineEmitter as unknown) as {
this.players[number] = undefined;
}
async openMic(deviceId: string) {
/**
 * Set pre-fade listen on a channel, then recompute headphone routing.
 * While any channel is PFL'd, the master mix is kept off the headphones
 * so the presenter hears only the PFL'd source(s).
 *
 * @param number  channel index to change
 * @param enabled whether PFL should be on for that channel
 */
public setPFL(number: number, enabled: boolean) {
  const player = this.getPlayer(number);
  if (player) {
    player.setPFL(enabled);
  }
  // Route the master mix to the headphones only if no channel is PFL'd.
  // (Replaces var-declared flag and leftover debug logging; bails out of
  // the scan as soon as one PFL'd channel is found.)
  let routeMainOut = true;
  for (let i = 0; i < this.players.length; i++) {
    if (this.getPlayer(i)?.getPFL()) {
      routeMainOut = false;
      break;
    }
  }
  this._connectFinalCompressor(routeMainOut);
}
async openMic(deviceId: string, channelMapping: ChannelMapping) {
if (this.micSource !== null && this.micMedia !== null) {
this.micMedia.getAudioTracks()[0].stop();
this.micSource.disconnect();
@ -404,8 +551,36 @@ export class AudioEngine extends ((EngineEmitter as unknown) as {
this.micSource = this.audioContext.createMediaStreamSource(this.micMedia);
this.micSource.connect(this.micCalibrationGain);
// Handle stereo mic sources.
const splitterNode = this.audioContext.createChannelSplitter(2);
const mergerNode = this.audioContext.createChannelMerger(2);
this.micSource.connect(splitterNode);
switch (channelMapping) {
case "stereo-normal":
splitterNode.connect(mergerNode, 0, 0);
splitterNode.connect(mergerNode, 1, 1);
break;
case "stereo-flipped":
splitterNode.connect(mergerNode, 1, 0);
splitterNode.connect(mergerNode, 0, 1);
break;
case "mono-left":
splitterNode.connect(mergerNode, 0, 0);
splitterNode.connect(mergerNode, 0, 1);
break;
case "mono-right":
splitterNode.connect(mergerNode, 1, 0);
splitterNode.connect(mergerNode, 1, 1);
break;
case "mono-both":
default:
splitterNode.connect(mergerNode, 0, 0);
splitterNode.connect(mergerNode, 1, 0);
splitterNode.connect(mergerNode, 0, 1);
splitterNode.connect(mergerNode, 1, 1);
}
mergerNode.connect(this.micCalibrationGain);
this.emit("micOpen");
}
@ -418,6 +593,24 @@ export class AudioEngine extends ((EngineEmitter as unknown) as {
this.micMixGain.gain.value = value;
}
/**
 * Patch the mic either through the compressor (processing on) or straight
 * to the mix gain (processing bypassed).
 *
 * The calibration gain may currently feed micCompressor or micMixGain
 * (depending on the previous setting), plus the micPrecompAnalyser - and
 * we can't selectively disconnect just one downstream node here, so drop
 * every connection and rebuild them.
 */
setMicProcessingEnabled(value: boolean) {
  const source = this.micCalibrationGain;
  source.disconnect();
  // The pre-compressor VU tap is always reconnected.
  source.connect(this.micPrecompAnalyser);
  console.log("Setting mic processing to: ", value);
  source.connect(value ? this.micCompressor : this.micMixGain);
}
getLevels(source: LevelsSource, stereo: boolean): [number, number] {
switch (source) {
case "mic-precomp":
@ -438,6 +631,12 @@ export class AudioEngine extends ((EngineEmitter as unknown) as {
this.analysisBuffer2
);
break;
case "pfl":
this.pflAnalyser.getFloatTimeDomainData(
this.analysisBuffer,
this.analysisBuffer2
);
break;
case "player-0":
this.playerAnalysers[0].getFloatTimeDomainData(
this.analysisBuffer,

View file

@ -12,7 +12,7 @@ import Keys from "keymaster";
import { Track, MYRADIO_NON_API_BASE, AuxItem } from "../api";
import { AppThunk } from "../store";
import { RootState } from "../rootReducer";
import { audioEngine } from "./audio";
import { audioEngine, ChannelMapping } from "./audio";
import * as TheNews from "./the_news";
const playerGainTweens: Array<{
@ -36,8 +36,11 @@ interface PlayerState {
loadError: boolean;
state: PlayerStateEnum;
volume: number;
volumeEnum: VolumePresetEnum;
gain: number;
trim: number;
micAutoDuck: boolean;
pfl: boolean;
timeCurrent: number;
timeRemaining: number;
timeLength: number;
@ -53,6 +56,7 @@ interface MicState {
volume: 1 | 0;
baseGain: number;
id: string | null;
processing: boolean;
}
interface MixerState {
@ -65,8 +69,11 @@ const BasePlayerState: PlayerState = {
loading: -1,
state: "stopped",
volume: 1,
volumeEnum: "full",
gain: 0,
micAutoDuck: false,
trim: defaultTrimDB,
pfl: false,
timeCurrent: 0,
timeRemaining: 0,
timeLength: 0,
@ -84,10 +91,12 @@ const mixerState = createSlice({
mic: {
open: false,
volume: 1,
volumeEnum: "full",
gain: 1,
baseGain: 0,
openError: null,
id: "None",
processing: true,
},
} as MixerState,
reducers: {
@ -96,6 +105,7 @@ const mixerState = createSlice({
action: PayloadAction<{
player: number;
item: PlanItem | Track | AuxItem | null;
customOutput: boolean;
resetTrim?: boolean;
}>
) {
@ -111,7 +121,10 @@ const mixerState = createSlice({
state.players[action.payload.player].timeLength = 0;
state.players[action.payload.player].tracklistItemID = -1;
state.players[action.payload.player].loadError = false;
if (action.payload.resetTrim) {
if (action.payload.customOutput) {
state.players[action.payload.player].trim = 0;
} else if (action.payload.resetTrim) {
state.players[action.payload.player].trim = defaultTrimDB;
}
},
@ -139,9 +152,12 @@ const mixerState = createSlice({
action: PayloadAction<{
player: number;
volume: number;
volumeEnum: VolumePresetEnum;
}>
) {
state.players[action.payload.player].volume = action.payload.volume;
state.players[action.payload.player].volumeEnum =
action.payload.volumeEnum;
},
setPlayerGain(
state,
@ -161,6 +177,24 @@ const mixerState = createSlice({
) {
state.players[action.payload.player].trim = action.payload.trim;
},
setPlayerMicAutoDuck(
state,
action: PayloadAction<{
player: number;
enabled: boolean;
}>
) {
state.players[action.payload.player].micAutoDuck = action.payload.enabled;
},
setPlayerPFL(
state,
action: PayloadAction<{
player: number;
enabled: boolean;
}>
) {
state.players[action.payload.player].pfl = action.payload.enabled;
},
setLoadedItemIntro(
state,
action: PayloadAction<{
@ -210,6 +244,9 @@ const mixerState = createSlice({
setMicBaseGain(state, action: PayloadAction<number>) {
state.mic.baseGain = action.payload;
},
setMicProcessingEnabled(state, action: PayloadAction<boolean>) {
state.mic.processing = action.payload;
},
setTimeCurrent(
state,
action: PayloadAction<{
@ -348,9 +385,17 @@ export const load = (
loadAbortControllers[player] = new AbortController();
const shouldResetTrim = getState().settings.resetTrimOnLoad;
const customOutput =
getState().settings.channelOutputIds[player] !== "internal";
const isPFL = getState().mixer.players[player].pfl;
dispatch(
mixerState.actions.loadItem({ player, item, resetTrim: shouldResetTrim })
mixerState.actions.loadItem({
player,
item,
customOutput,
resetTrim: shouldResetTrim,
})
);
let url;
@ -406,7 +451,14 @@ export const load = (
const blob = new Blob([rawData]);
const objectUrl = URL.createObjectURL(blob);
const playerInstance = await audioEngine.createPlayer(player, objectUrl);
const channelOutputId = getState().settings.channelOutputIds[player];
const playerInstance = await audioEngine.createPlayer(
player,
channelOutputId,
isPFL,
objectUrl
);
// Clear the last one out from memory
if (typeof lastObjectURLs[player] === "string") {
@ -549,10 +601,25 @@ export const play = (player: number): AppThunk => async (
}
audioEngine.players[player]?.play();
if (state.loadedItem && state.loadedItem.type === "central") {
// If we're starting off audible, try and tracklist.
if (state.volume > 0) {
dispatch(attemptTracklist(player));
}
};
const attemptTracklist = (player: number): AppThunk => async (
dispatch,
getState
) => {
const state = getState().mixer.players[player];
if (
state.loadedItem &&
state.loadedItem.type === "central" &&
audioEngine.players[player]?.isPlaying
) {
//track
console.log("potentially tracklisting", state.loadedItem);
if (getState().mixer.players[player].tracklistItemID === -1) {
if (state.tracklistItemID === -1) {
dispatch(BroadcastState.tracklistStart(player, state.loadedItem.trackid));
} else {
console.log("not tracklisting because already tracklisted");
@ -615,6 +682,8 @@ export const {
toggleAutoAdvance,
togglePlayOnLoad,
toggleRepeat,
setTracklistItemID,
setPlayerMicAutoDuck,
} = mixerState.actions;
export const redrawWavesurfers = (): AppThunk => () => {
@ -623,12 +692,11 @@ export const redrawWavesurfers = (): AppThunk => () => {
});
};
export const { setTracklistItemID } = mixerState.actions;
const FADE_TIME_SECONDS = 1;
export const setVolume = (
player: number,
level: VolumePresetEnum
level: VolumePresetEnum,
fade: boolean = true
): AppThunk => (dispatch, getState) => {
let volume: number;
let uiLevel: number;
@ -660,12 +728,38 @@ export const setVolume = (
playerGainTweens[player].tweens.forEach((tween) => tween.pause());
if (playerGainTweens[player].target === level) {
delete playerGainTweens[player];
dispatch(mixerState.actions.setPlayerVolume({ player, volume: uiLevel }));
dispatch(
mixerState.actions.setPlayerVolume({
player,
volume: uiLevel,
volumeEnum: level,
})
);
dispatch(mixerState.actions.setPlayerGain({ player, gain: volume }));
return;
}
}
if (level !== "off") {
// If we're fading up the volume, disable the PFL.
dispatch(setChannelPFL(player, false));
// Also catch a tracklist if we started with the channel off.
dispatch(attemptTracklist(player));
}
// If not fading, just do it.
if (!fade) {
dispatch(
mixerState.actions.setPlayerVolume({
player,
volume: uiLevel,
volumeEnum: level,
})
);
dispatch(mixerState.actions.setPlayerGain({ player, gain: volume }));
return;
}
const state = getState().mixer.players[player];
const currentLevel = state.volume;
@ -680,7 +774,13 @@ export const setVolume = (
const volumeTween = new Between(currentLevel, uiLevel)
.time(FADE_TIME_SECONDS * 1000)
.on("update", (val: number) => {
dispatch(mixerState.actions.setPlayerVolume({ player, volume: val }));
dispatch(
mixerState.actions.setPlayerVolume({
player,
volume: val,
volumeEnum: level,
})
);
});
const gainTween = new Between(currentGain, volume)
.time(FADE_TIME_SECONDS * 1000)
@ -708,15 +808,34 @@ export const setChannelTrim = (player: number, val: number): AppThunk => async (
audioEngine.players[player]?.setTrim(val);
};
export const openMicrophone = (micID: string): AppThunk => async (
dispatch,
getState
) => {
// TODO: not sure why this is here, and I have a hunch it may break shit, so disabling
// File a ticket if it breaks stuff. -Marks
// if (getState().mixer.mic.open) {
// micSource?.disconnect();
// }
/**
 * Thunk: toggle pre-fade listen on a channel.
 * Pass player = -1 to switch PFL off on every channel at once.
 */
export const setChannelPFL = (
  player: number,
  enabled: boolean
): AppThunk => async (dispatch) => {
  const target = audioEngine.players[player];
  // Engaging PFL on a loaded-but-stopped player: start it silently so
  // there's actually something to listen to.
  if (enabled && target !== undefined && !target.isPlaying) {
    dispatch(setVolume(player, "off", false));
    dispatch(play(player));
  }
  if (player === -1) {
    // Clear PFL across the board.
    for (let i = 0; i < audioEngine.players.length; i++) {
      dispatch(mixerState.actions.setPlayerPFL({ player: i, enabled: false }));
      audioEngine.setPFL(i, false);
    }
    return;
  }
  dispatch(mixerState.actions.setPlayerPFL({ player, enabled }));
  audioEngine.setPFL(player, enabled);
};
export const openMicrophone = (
micID: string,
micMapping: ChannelMapping
): AppThunk => async (dispatch, getState) => {
if (audioEngine.audioContext.state !== "running") {
console.log("Resuming AudioContext because Chrome bad");
await audioEngine.audioContext.resume();
@ -728,7 +847,7 @@ export const openMicrophone = (micID: string): AppThunk => async (
return;
}
try {
await audioEngine.openMic(micID);
await audioEngine.openMic(micID, micMapping);
} catch (e) {
if (e instanceof DOMException) {
switch (e.message) {
@ -747,20 +866,47 @@ export const openMicrophone = (micID: string): AppThunk => async (
const state = getState().mixer.mic;
audioEngine.setMicCalibrationGain(state.baseGain);
audioEngine.setMicVolume(state.volume);
// Now to patch in the Mic to the Compressor, or Bypass it.
audioEngine.setMicProcessingEnabled(state.processing);
dispatch(mixerState.actions.micOpen(micID));
};
export const setMicVolume = (level: MicVolumePresetEnum): AppThunk => (
/**
 * Thunk: switch mic processing (the compressor chain) on or off, updating
 * both the live audio engine routing and the redux state.
 */
export const setMicProcessingEnabled = (enabled: boolean): AppThunk => async (
  dispatch
) => {
  // Re-patch the live audio graph...
  audioEngine.setMicProcessingEnabled(enabled);
  // ...and mirror the setting into state so the UI reflects it.
  dispatch(mixerState.actions.setMicProcessingEnabled(enabled));
};
export const setMicVolume = (level: MicVolumePresetEnum): AppThunk => (
dispatch,
getState
) => {
const players = getState().mixer.players;
// no tween fuckery here, just cut the level
const levelVal = level === "full" ? 1 : 0;
// actually, that's a lie - if we're turning it off we delay it a little to compensate for
// processing latency
if (levelVal !== 0) {
dispatch(mixerState.actions.setMicLevels({ volume: levelVal }));
for (let player = 0; player < players.length; player++) {
// If we have auto duck enabled on this channel player, tell it to fade down.
if (
players[player].micAutoDuck &&
players[player].volumeEnum === "full"
) {
dispatch(setVolume(player, "bed"));
}
}
} else {
for (let player = 0; player < players.length; player++) {
// If we have auto duck enabled on this channel player, tell it to fade back up.
if (players[player].micAutoDuck && players[player].volumeEnum === "bed") {
dispatch(setVolume(player, "full"));
}
}
window.setTimeout(() => {
dispatch(mixerState.actions.setMicLevels({ volume: levelVal }));
// latency, plus a little buffer

View file

@ -1,5 +1,5 @@
import React, { useRef, useEffect, useState } from "react";
import { useDispatch, useSelector } from "react-redux";
import { shallowEqual, useDispatch, useSelector } from "react-redux";
import Clock from "react-live-clock";
import Stopwatch from "react-stopwatch";
@ -11,6 +11,7 @@ import {
FaSpinner,
FaExclamationTriangle,
FaCog,
FaHeadphonesAlt,
} from "react-icons/fa";
import { RootState } from "../rootReducer";
@ -26,6 +27,7 @@ import { VUMeter } from "../optionsMenu/helpers/VUMeter";
import { getShowplan, setItemPlayed } from "../showplanner/state";
import * as OptionsMenuState from "../optionsMenu/state";
import { setChannelPFL } from "../mixer/state";
function nicifyConnectionState(state: ConnectionStateEnum): string {
switch (state) {
@ -159,24 +161,6 @@ export function NavBarMyRadio() {
}
export function NavBarMain() {
const dispatch = useDispatch();
const broadcastState = useSelector((state: RootState) => state.broadcast);
const settings = useSelector((state: RootState) => state.settings);
const [connectButtonAnimating, setConnectButtonAnimating] = useState(false);
const prevRegistrationStage = useRef(broadcastState.stage);
useEffect(() => {
if (broadcastState.stage !== prevRegistrationStage.current) {
setConnectButtonAnimating(false);
}
prevRegistrationStage.current = broadcastState.stage;
}, [broadcastState.stage]);
const { planSaveError, planSaving } = useSelector(
(state: RootState) => state.showplan
);
return (
<>
<ul className="nav navbar-nav navbar-left">
@ -198,98 +182,179 @@ export function NavBarMain() {
timezone={"europe/london"}
/>
</li>
{planSaving && (
<li className="btn rounded-0 py-2 nav-item alert-info">
<FaSpinner className="nav-spin mb-1" /> Saving show plan...
</li>
)}
{planSaveError && (
<li className="btn rounded-0 py-2 nav-item alert-danger">
<FaExclamationTriangle className="p-0 mr-1" />
{planSaveError}
</li>
)}
<SavingAlert />
</ul>
<ul className="nav navbar-nav navbar-right mr-0 pr-0">
<li className="nav-item" style={{ color: "white" }}>
<div className="nav-link">
<b>{nicifyConnectionState(broadcastState.connectionState)}</b>
</div>
<RegisterButton />
<RecordingButton />
<OptionsButton />
<MeterBridge />
</ul>
</>
);
}
// Navbar alerts for show-plan persistence: an info item with a spinner while
// a save is in flight, and a danger item carrying the error message if the
// last save failed. Renders nothing when idle.
function SavingAlert() {
  // Both flags live in the showplan slice.
  const { planSaveError, planSaving } = useSelector(
    (state: RootState) => state.showplan
  );
  return (
    <>
      {planSaving && (
        <li className="btn rounded-0 py-2 nav-item alert-info">
          <FaSpinner className="nav-spin mb-1" /> Saving show plan...
        </li>
      )}
      {planSaveError && (
        <li className="btn rounded-0 py-2 nav-item alert-danger">
          <FaExclamationTriangle className="p-0 mr-1" />
          {planSaveError}
        </li>
      )}
    </>
  );
}
// Connection-state indicator plus the Register/Stop button that registers
// (or cancels) the timeslot for going on air.
function RegisterButton() {
  const dispatch = useDispatch();
  const broadcastState = useSelector((state: RootState) => state.broadcast);
  // Whether the button is showing its "working" spinner after a click.
  const [connectButtonAnimating, setConnectButtonAnimating] = useState(false);
  const prevRegistrationStage = useRef(broadcastState.stage);
  // Stop the spinner once the registration stage actually changes - at that
  // point the click has taken effect.
  useEffect(() => {
    if (broadcastState.stage !== prevRegistrationStage.current) {
      setConnectButtonAnimating(false);
    }
    prevRegistrationStage.current = broadcastState.stage;
  }, [broadcastState.stage]);
  return (
    <>
      <li className="nav-item" style={{ color: "white" }}>
        <div className="nav-link">
          <b>{nicifyConnectionState(broadcastState.connectionState)}</b>
        </div>
      </li>
      <li
        className="btn btn-outline-light rounded-0 pt-2 pb-1 nav-item nav-link connect"
        onClick={() => {
          setConnectButtonAnimating(true);
          switch (broadcastState.stage) {
            case "NOT_REGISTERED":
              dispatch(BroadcastState.goOnAir());
              break;
            case "REGISTERED":
              dispatch(BroadcastState.cancelTimeslot());
              break;
          }
        }}
      >
        {connectButtonAnimating ? (
          <>
            <FaBroadcastTower size={17} className="mr-2" />
            <FaSpinner size={17} className="nav-spin mr-2" />
          </>
        ) : (
          <>
            <FaBroadcastTower size={17} className="mr-2" />
            {broadcastState.stage === "NOT_REGISTERED" && "Register"}
            {broadcastState.stage === "REGISTERED" && "Stop"}
          </>
        )}
      </li>
    </>
  );
}
function RecordingButton() {
const recordingState = useSelector(
(state: RootState) => state.broadcast.recordingState
);
const enableRecording = useSelector(
(state: RootState) => state.settings.enableRecording
);
const dispatch = useDispatch();
return (
<>
{enableRecording && (
<li
className="btn btn-outline-light rounded-0 pt-2 pb-1 nav-item nav-link connect"
onClick={() => {
setConnectButtonAnimating(true);
switch (broadcastState.stage) {
case "NOT_REGISTERED":
dispatch(BroadcastState.goOnAir());
break;
case "REGISTERED":
dispatch(BroadcastState.cancelTimeslot());
break;
}
}}
className={
"btn rounded-0 pt-2 pb-1 nav-item nav-link " +
(recordingState === "CONNECTED"
? "btn-outline-danger active"
: "btn-outline-light")
}
onClick={() =>
dispatch(
recordingState === "NOT_CONNECTED"
? BroadcastState.startRecording()
: BroadcastState.stopRecording()
)
}
>
{connectButtonAnimating ? (
<>
<FaBroadcastTower size={17} className="mr-2" />
<FaSpinner size={17} className="nav-spin mr-2" />
</>
<FaCircle
size={17}
className={
recordingState === "CONNECTED" ? "rec-blink" : "rec-stop"
}
/>{" "}
{recordingState === "CONNECTED" ? (
<Stopwatch
seconds={0}
minutes={0}
hours={0}
render={({ formatted }) => {
return <span>{formatted}</span>;
}}
/>
) : (
<>
<FaBroadcastTower size={17} className="mr-2" />
{broadcastState.stage === "NOT_REGISTERED" && "Register"}
{broadcastState.stage === "REGISTERED" && "Stop"}
</>
"Record"
)}
</li>
{settings.enableRecording && (
<li
className={
"btn rounded-0 pt-2 pb-1 nav-item nav-link " +
(broadcastState.recordingState === "CONNECTED"
? "btn-outline-danger active"
: "btn-outline-light")
}
onClick={() =>
dispatch(
broadcastState.recordingState === "NOT_CONNECTED"
? BroadcastState.startRecording()
: BroadcastState.stopRecording()
)
}
>
<FaCircle
size={17}
className={
broadcastState.recordingState === "CONNECTED"
? "rec-blink"
: "rec-stop"
}
/>{" "}
{broadcastState.recordingState === "CONNECTED" ? (
<Stopwatch
seconds={0}
minutes={0}
hours={0}
render={({ formatted }) => {
return <span>{formatted}</span>;
}}
/>
) : (
"Record"
)}
</li>
)}
<li
className="btn btn-outline-light rounded-0 pt-2 pb-1 nav-item nav-link"
onClick={() => dispatch(OptionsMenuState.open())}
>
<FaCog size={17} /> Options
</li>
)}
</>
);
}
/**
 * Navbar entry that opens the WebStudio options menu overlay.
 */
function OptionsButton() {
  const dispatch = useDispatch();
  const handleClick = () => {
    dispatch(OptionsMenuState.open());
  };
  return (
    <li
      className="btn btn-outline-light rounded-0 pt-2 pb-1 nav-item nav-link"
      onClick={handleClick}
    >
      <FaCog size={17} /> Options
    </li>
  );
}
<li className="nav-item px-2 nav-vu">
function MeterBridge() {
const dispatch = useDispatch();
const proMode = useSelector((state: RootState) => state.settings.proMode);
const playerPFLs = useSelector(
(state: RootState) => state.mixer.players.map((x) => x.pfl),
shallowEqual
);
const isPFL = useSelector((state) => playerPFLs).some((x) => x === true);
return (
<>
{proMode && isPFL && (
<li
className="btn btn-danger rounded-0 pt-2 pb-1 nav-item nav-link clear-pfl"
onClick={() => dispatch(setChannelPFL(-1, false))}
>
<FaHeadphonesAlt size={17} /> Clear PFL
</li>
)}
<li className={"nav-item px-2 nav-vu" + (isPFL ? " pfl-live" : "")}>
{isPFL && (
<VUMeter
width={235}
height={34}
source="pfl"
range={[-40, 3]}
stereo={true}
/>
)}
{!isPFL && (
<VUMeter
width={235}
height={40}
@ -297,8 +362,8 @@ export function NavBarMain() {
range={[-40, 3]}
stereo={true}
/>
</li>
</ul>
)}
</li>
</>
);
}

View file

@ -305,3 +305,8 @@
.nav-link.connect {
min-width: 90px;
}
.pfl-live.nav-vu {
box-shadow: inset 0 0 3px 6px #dc3545;
padding-top: 6px;
}

View file

@ -1,14 +1,116 @@
import React from "react";
import React, { useEffect, useState } from "react";
import { RootState } from "../rootReducer";
import { useSelector, useDispatch } from "react-redux";
import { changeSetting } from "./settingsState";
import { changeBroadcastSetting } from "../broadcast/state";
// Reasons that opening/enumerating audio output devices can fail;
// used to select a user-facing error message in the Advanced settings tab.
type ErrorEnum =
  | "NO_PERMISSION"
  | "NOT_SECURE_CONTEXT"
  | "UNKNOWN"
  | "UNKNOWN_ENUM";
/**
 * Filters a device list down to audio outputs only.
 *
 * @param devices - the full list from `navigator.mediaDevices.enumerateDevices()`.
 * @returns only the entries whose `kind` is `"audiooutput"`.
 */
function reduceToOutputs(devices: MediaDeviceInfo[]) {
  // Idiomatic filter instead of a manual forEach/push accumulator.
  return devices.filter((device) => device.kind === "audiooutput");
}
/**
 * Dropdown that routes one mixer channel to a specific audio output device.
 * "internal" routes straight to the stream/headphones; if the currently saved
 * device is no longer present, a disabled "Missing Device" entry is shown so
 * the selection is not silently lost.
 */
function ChannelOutputSelect({
  outputList,
  channel,
}: {
  outputList: MediaDeviceInfo[] | null;
  channel: number;
}) {
  const dispatch = useDispatch();
  const outputIds = useSelector(
    (state: RootState) => state.settings.channelOutputIds
  );
  const currentId = outputIds[channel];

  const handleChange = (e: React.ChangeEvent<HTMLSelectElement>) => {
    // Copy-on-write: never mutate the redux-held settings object.
    const updated = { ...outputIds };
    updated[channel] = e.target.value;
    dispatch(
      changeSetting({
        key: "channelOutputIds",
        // @ts-ignore
        val: updated,
      })
    );
  };

  // When outputList is still null (not yet enumerated) the device is treated
  // as unknown, matching the original optional-chaining behavior.
  const deviceIsKnown = outputList?.some((d) => d.deviceId === currentId);

  return (
    <div className="form-group">
      <label>Channel {channel + 1}</label>
      <select
        className="form-control"
        id="broadcastSourceSelect"
        value={currentId}
        onChange={handleChange}
      >
        {currentId !== "internal" && !deviceIsKnown && (
          <option value={currentId} disabled>
            Missing Device ({currentId})
          </option>
        )}
        <option value="internal">Internal (Direct to Stream/Headphones)</option>
        {(outputList || []).map((device, i) => (
          <option value={device.deviceId} key={i}>
            {device.label !== "" ? device.label : device.deviceId}
          </option>
        ))}
      </select>
    </div>
  );
}
export function AdvancedTab() {
const settings = useSelector((state: RootState) => state.settings);
const [outputList, setOutputList] = useState<null | MediaDeviceInfo[]>(null);
const broadcastState = useSelector((state: RootState) => state.broadcast);
const [openError, setOpenError] = useState<null | ErrorEnum>(null);
const dispatch = useDispatch();
// Enumerates the available audio output devices and stores them via
// setOutputList, or records an ErrorEnum via setOpenError on failure.
// Chrome only exposes device labels once getUserMedia() has been granted,
// so we request (and immediately release) a microphone stream first.
async function fetchOutputNames() {
  if (!("mediaDevices" in navigator)) {
    // mediaDevices is only exposed in secure (https) contexts.
    setOpenError("NOT_SECURE_CONTEXT");
    return;
  }
  // Because Chrome, we have to call getUserMedia() before enumerateDevices()
  try {
    const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
    // Release the mic straight away - we only needed the permission grant.
    // Leaving the tracks live would keep the browser's recording indicator on.
    stream.getTracks().forEach((track) => track.stop());
  } catch (e) {
    if (e instanceof DOMException) {
      // Prefer the standards-defined error name; keep the legacy Chrome
      // message match for backwards compatibility.
      if (e.name === "NotAllowedError" || e.message === "Permission denied") {
        setOpenError("NO_PERMISSION");
      } else {
        setOpenError("UNKNOWN");
      }
    } else {
      setOpenError("UNKNOWN");
    }
    return;
  }
  try {
    const devices = await navigator.mediaDevices.enumerateDevices();
    setOutputList(reduceToOutputs(devices));
  } catch (e) {
    setOpenError("UNKNOWN_ENUM");
  }
}
useEffect(() => {
fetchOutputNames();
}, []);
// @ts-ignore
return (
<>
@ -71,6 +173,30 @@ export function AdvancedTab() {
/>
<label className="form-check-label">End of Show</label>
</div>
<hr />
<h2>Channel Outputs</h2>
<p>
Select a sound output for each channel. <code>Internal</code> routes
directly to the WebStudio stream/recorder. Other outputs will disable
ProMode &trade; features.{" "}
<strong>Routing will apply upon loading a new item.</strong>
</p>
{openError !== null && (
<div className="sp-alert">
{openError === "NO_PERMISSION"
? "Please grant this page permission to use your outputs/microphone and try again."
: openError === "NOT_SECURE_CONTEXT"
? "We can't open the outputs. Please make sure the address bar has a https:// at the start and try again."
: openError === "UNKNOWN_ENUM"
? "An error occurred when enumerating output devices. Please try again."
: "An error occurred when opening the output devices. Please try again."}
</div>
)}
<ChannelOutputSelect outputList={outputList} channel={0} />
<ChannelOutputSelect outputList={outputList} channel={1} />
<ChannelOutputSelect outputList={outputList} channel={2} />
<hr />
<h2>Misc</h2>
<div className="form-check">

View file

@ -4,6 +4,7 @@ import { RootState } from "../rootReducer";
import * as MixerState from "../mixer/state";
import { VUMeter } from "./helpers/VUMeter";
import { ChannelMapping } from "../mixer/audio";
type MicErrorEnum =
| "NO_PERMISSION"
@ -26,10 +27,12 @@ export function MicTab() {
const [micList, setMicList] = useState<null | MediaDeviceInfo[]>(null);
const dispatch = useDispatch();
const [nextMicSource, setNextMicSource] = useState("$NONE");
const [nextMicMapping, setNextMicMapping] = useState<ChannelMapping>(
"mono-both"
);
const [openError, setOpenError] = useState<null | MicErrorEnum>(null);
async function fetchMicNames() {
console.log("start fetchNames");
if (!("mediaDevices" in navigator)) {
setOpenError("NOT_SECURE_CONTEXT");
return;
@ -52,9 +55,7 @@ export function MicTab() {
}
return;
}
console.log("done");
try {
console.log("gUM");
const devices = await navigator.mediaDevices.enumerateDevices();
console.log(devices);
setMicList(reduceToInputs(devices));
@ -65,7 +66,12 @@ export function MicTab() {
function setMicSource(sourceId: string) {
setNextMicSource(sourceId);
dispatch(MixerState.openMicrophone(sourceId));
dispatch(MixerState.openMicrophone(sourceId, nextMicMapping));
}
function setMicMapping(mapping: ChannelMapping) {
setNextMicMapping(mapping);
dispatch(MixerState.openMicrophone(nextMicSource, mapping));
}
return (
@ -110,6 +116,32 @@ export function MicTab() {
: "An error occurred when opening the microphone. Please try again."}
</div>
)}
<select
className="form-control my-2"
value={nextMicMapping}
onChange={(e) => setMicMapping(e.target.value as ChannelMapping)}
disabled={nextMicSource === "$NONE"}
>
<option value={"mono-both"} label="Mono (Default)" />
<option value={"mono-left"} label="Mono - Left Channel" />
<option value={"mono-right"} label="Mono - Right Channel" />
<option value={"stereo-normal"} label="Stereo" />
<option value={"stereo-flipped"} label="Stereo - Flipped" />
</select>
<div className="form-check">
<input
className="form-check-input"
type="checkbox"
checked={state.processing}
onChange={(e) => {
dispatch(MixerState.setMicProcessingEnabled(e.target.checked));
}}
/>
<label className="form-check-label">
Apply Mic Processing (Default: On)
</label>
</div>
<hr />
<div style={{ opacity: state.open ? 1 : 0.5 }}>
<h3>Calibration</h3>
@ -124,7 +156,7 @@ export function MicTab() {
height={40}
source="mic-precomp"
range={[-70, 0]}
greenRange={[-14, -10]}
greenRange={state.processing ? [-16, -6] : [-32, -5]}
stereo={true}
/>
</div>

View file

@ -27,6 +27,8 @@ export function VUMeter(props: VUMeterProps) {
const isMic = props.source.substr(0, 3) === "mic";
const FPS = 30; // Limit the FPS so that lower spec machines have a better time juggling CPU.
useEffect(() => {
const animate = () => {
if (!isMic || isMicOpen) {
@ -38,7 +40,9 @@ export function VUMeter(props: VUMeterProps) {
if (props.stereo) {
setPeakR(result[1]);
}
rafRef.current = requestAnimationFrame(animate);
setTimeout((current = rafRef.current, a = animate) => {
current = requestAnimationFrame(a);
}, 1000 / FPS);
}
};
if (!isMic || isMicOpen) {

View file

@ -8,6 +8,7 @@ interface Settings {
proMode: boolean;
channelVUs: boolean;
channelVUsStereo: boolean;
channelOutputIds: string[];
resetTrimOnLoad: boolean;
saveShowPlanChanges: boolean;
}
@ -22,6 +23,7 @@ const settingsState = createSlice({
proMode: false,
channelVUs: true,
channelVUsStereo: true,
channelOutputIds: ["internal", "internal", "internal"],
resetTrimOnLoad: true,
saveShowPlanChanges: false,
} as Settings,

View file

@ -78,7 +78,7 @@ const SessionHandler: React.FC = function() {
);
}
return <p></p>;
return <></>;
};
export default SessionHandler;

View file

@ -25,14 +25,15 @@ export const Item = memo(function Item({
const id = itemId(x);
const isGhost = "ghostid" in x;
const playerState = useSelector((state: RootState) =>
column > -1 ? state.mixer.players[column] : undefined
const loadedItem = useSelector(
(state: RootState) =>
column > -1 ? state.mixer.players[column]?.loadedItem : null,
(a, b) =>
(a === null && b === null) ||
(a !== null && b !== null && itemId(a) === itemId(b))
);
const isLoaded =
playerState &&
playerState.loadedItem !== null &&
itemId(playerState.loadedItem) === id;
const isLoaded = loadedItem !== null ? itemId(loadedItem) === id : false;
const showDebug = useSelector(
(state: RootState) => state.settings.showDebugInfo
@ -84,14 +85,7 @@ export const Item = memo(function Item({
"item " +
("played" in x ? (x.played ? "played " : "") : "") +
x.type +
`${
column >= 0 &&
playerState &&
playerState.loadedItem !== null &&
itemId(playerState.loadedItem) === id
? " active"
: ""
}`
`${column >= 0 && isLoaded ? " active" : ""}`
}
onClick={triggerClick}
onContextMenu={openContextMenu}

View file

@ -197,6 +197,46 @@ function TimingButtons({ id }: { id: number }) {
);
}
/**
 * Shows the title of the track loaded into player `id`, or a status string
 * ("LOADING" / "LOAD FAILED" / "No Media Selected"), plus an "Explicit"
 * badge when the loaded item is flagged as not clean.
 *
 * Fix: removed an unused `const dispatch = useDispatch()` - the hook's
 * result was never used.
 */
function LoadedTrackInfo({ id }: { id: number }) {
  const loadedItem = useSelector(
    (state: RootState) => state.mixer.players[id].loadedItem
  );
  // `loading` of -1 means "not currently loading" (see the ternaries below).
  const loading = useSelector(
    (state: RootState) => state.mixer.players[id].loading
  );
  const loadError = useSelector(
    (state: RootState) => state.mixer.players[id].loadError
  );
  return (
    <span className="card-title">
      <strong>
        {loadedItem !== null && loading === -1
          ? loadedItem.title
          : loading !== -1
          ? `LOADING`
          : loadError
          ? "LOAD FAILED"
          : "No Media Selected"}
      </strong>
      <small
        className={
          "border rounded border-danger text-danger p-1 m-1" +
          // Badge is hidden (d-none) unless the item is loaded and unclean.
          (loadedItem !== null &&
          loading === -1 &&
          "clean" in loadedItem &&
          !loadedItem.clean
            ? ""
            : " d-none")
        }
      >
        Explicit
      </small>
    </span>
  );
}
export function Player({ id }: { id: number }) {
// Define time remaining (secs) when the play icon should flash.
const SECS_REMAINING_WARNING = 20;
@ -214,13 +254,8 @@ export function Player({ id }: { id: number }) {
omit(b, "timeCurrent", "timeRemaining")
)
);
const proMode = useSelector((state: RootState) => state.settings.proMode);
const vuEnabled = useSelector(
(state: RootState) => state.settings.channelVUs
);
const vuStereo = useSelector(
(state: RootState) => state.settings.channelVUsStereo
);
const settings = useSelector((state: RootState) => state.settings);
const customOutput = settings.channelOutputIds[id] !== "internal";
const dispatch = useDispatch();
const VUsource = (id: number) => {
@ -305,32 +340,9 @@ export function Player({ id }: { id: number }) {
&nbsp; Repeat {playerState.repeat}
</button>
</div>
{proMode && <ProModeButtons channel={id} />}
{settings.proMode && !customOutput && <ProModeButtons channel={id} />}
<div className="card-body p-0">
<span className="card-title">
<strong>
{playerState.loadedItem !== null && playerState.loading === -1
? playerState.loadedItem.title
: playerState.loading !== -1
? `LOADING`
: playerState.loadError
? "LOAD FAILED"
: "No Media Selected"}
</strong>
<small
className={
"border rounded border-danger text-danger p-1 m-1" +
(playerState.loadedItem !== null &&
playerState.loading === -1 &&
"clean" in playerState.loadedItem &&
!playerState.loadedItem.clean
? ""
: " d-none")
}
>
Explicit
</small>
</span>
<LoadedTrackInfo id={id} />
<br />
<span className="text-muted">
{playerState.loadedItem !== null && playerState.loading === -1
@ -432,15 +444,21 @@ export function Player({ id }: { id: number }) {
</button>
</div>
{proMode && vuEnabled && (
{settings.proMode && settings.channelVUs && (
<div className="channel-vu">
<VUMeter
width={300}
height={40}
source={VUsource(id)}
range={[-40, 0]}
stereo={vuStereo}
/>
{customOutput ? (
<span className="text-muted">
Custom audio output disables VU meters.
</span>
) : (
<VUMeter
width={300}
height={40}
source={VUsource(id)}
range={[-40, 0]}
stereo={settings.channelVUsStereo}
/>
)}
</div>
)}
</div>

View file

@ -1,33 +1,73 @@
import React, { useState } from "react";
import { FaTachometerAlt } from "react-icons/fa";
import {
FaHeadphonesAlt,
FaMicrophoneAlt,
FaTachometerAlt,
} from "react-icons/fa";
import { useDispatch, useSelector } from "react-redux";
import { RootState } from "../rootReducer";
import { setChannelTrim } from "../mixer/state";
import {
setChannelPFL,
setChannelTrim,
setPlayerMicAutoDuck,
} from "../mixer/state";
type ButtonIds = "trim";
type ButtonIds = "trim" | "pfl" | "autoDuck";
export default function ProModeButtons({ channel }: { channel: number }) {
const [activeButton, setActiveButton] = useState<ButtonIds | null>(null);
const trimVal = useSelector(
(state: RootState) => state.mixer.players[channel]?.trim
);
const micAutoDuck = useSelector(
(state: RootState) => state.mixer.players[channel]?.micAutoDuck
);
const pflState = useSelector(
(state: RootState) => state.mixer.players[channel]?.pfl
);
const dispatch = useDispatch();
return (
<>
<div className="row m-0 p-1 card-header channelButtons proMode hover-menu">
<span className="hover-label">Pro Mode&trade;</span>
{(activeButton === null || activeButton === "trim") && (
<button
className="btn btn-warning"
title="Trim"
onClick={() =>
setActiveButton(activeButton === "trim" ? null : "trim")
}
>
<FaTachometerAlt />
</button>
)}
<button
className="mr-1 btn btn-warning"
title="Trim"
onClick={() =>
setActiveButton(activeButton === "trim" ? null : "trim")
}
>
<FaTachometerAlt />
</button>
<button
className={
"mr-1 btn " + (pflState ? "btn-danger" : "btn-outline-dark")
}
title="PFL Channel"
onClick={() => {
dispatch(setChannelPFL(channel, !pflState));
setActiveButton("pfl");
}}
>
<FaHeadphonesAlt />
</button>
<button
className={
"mr-1 btn " + (micAutoDuck ? "btn-info" : "btn-outline-dark")
}
title="Auto Duck on Mic Live"
onClick={() => {
dispatch(
setPlayerMicAutoDuck({ player: channel, enabled: !micAutoDuck })
);
setActiveButton("autoDuck");
}}
>
<FaMicrophoneAlt />
</button>
{activeButton === "trim" && (
<>
<input
@ -42,9 +82,19 @@ export default function ProModeButtons({ channel }: { channel: number }) {
e.target.blur(); // Stop dragging from disabling the keyboard triggers.
}}
/>
<b>{trimVal} dB</b>
<strong className="mt-2">{trimVal} dB</strong>
</>
)}
{activeButton === "pfl" && (
<span className="mt-2 ml-2">
Pre Fader Listen:&nbsp;<strong>{pflState ? "Yes" : "No"}</strong>
</span>
)}
{activeButton === "autoDuck" && (
<span className="mt-2 ml-2">
Duck on Mic:&nbsp;<strong>{micAutoDuck ? "Yes" : "No"}</strong>
</span>
)}
</div>
</>
);

View file

@ -25,7 +25,7 @@ import {
ResponderProvided,
} from "react-beautiful-dnd";
import { useSelector, useDispatch } from "react-redux";
import { useSelector, useDispatch, shallowEqual } from "react-redux";
import { RootState } from "../rootReducer";
import {
PlanItem,
@ -199,6 +199,9 @@ function LibraryColumn() {
function MicControl() {
const state = useSelector((state: RootState) => state.mixer.mic);
const proMode = useSelector((state: RootState) => state.settings.proMode);
const stereo = useSelector(
(state: RootState) => state.settings.channelVUsStereo
);
const dispatch = useDispatch();
return (
@ -253,8 +256,8 @@ function MicControl() {
height={40}
source="mic-final"
range={[-40, 3]}
greenRange={[-10, -5]}
stereo={proMode}
greenRange={[-16, -6]}
stereo={proMode && stereo}
/>
</div>
<div className={`mixer-buttons ${!state.open && "disabled"}`}>
@ -291,8 +294,9 @@ function incrReducer(state: number, action: any) {
}
const Showplanner: React.FC<{ timeslotId: number }> = function({ timeslotId }) {
const { plan: showplan, planLoadError, planLoading } = useSelector(
(state: RootState) => state.showplan
const isShowplan = useSelector(
(state: RootState) => state.showplan.plan !== null,
shallowEqual
);
// Tell Modals that #root is the main page content, for accessability reasons.
@ -400,26 +404,15 @@ const Showplanner: React.FC<{ timeslotId: number }> = function({ timeslotId }) {
};
}, [dispatch, session.currentTimeslot]);
if (showplan === null) {
return (
<LoadingDialogue
title="Getting Show Plan..."
subtitle={planLoading ? "Hang on a sec..." : ""}
error={planLoadError}
percent={100}
/>
);
if (!isShowplan) {
return <GettingShowPlanScreen />;
}
return (
<div className="sp-container m-0">
<CombinedNavAlertBar />
<div className="sp">
<DragDropContext onDragEnd={onDragEnd}>
<div className="channels">
<Channel id={0} data={showplan} />
<Channel id={1} data={showplan} />
<Channel id={2} data={showplan} />
</div>
<ChannelStrips />
<span
id="sidebar-toggle"
className="btn btn-outline-dark btn-sm mb-0"
@ -479,6 +472,20 @@ const Showplanner: React.FC<{ timeslotId: number }> = function({ timeslotId }) {
);
};
/**
 * Full-screen placeholder rendered while the show plan is being fetched,
 * delegating progress/error display to LoadingDialogue.
 */
function GettingShowPlanScreen() {
  const showplanState = useSelector((state: RootState) => state.showplan);
  const subtitle = showplanState.planLoading ? "Hang on a sec..." : "";
  return (
    <LoadingDialogue
      title="Getting Show Plan..."
      subtitle={subtitle}
      error={showplanState.planLoadError}
      percent={100}
    />
  );
}
export function LoadingDialogue({
title,
subtitle,
@ -526,4 +533,16 @@ export function LoadingDialogue({
);
}
function ChannelStrips() {
const showplan = useSelector((state: RootState) => state.showplan.plan!);
return (
<div className="channels">
<Channel id={0} data={showplan} />
<Channel id={1} data={showplan} />
<Channel id={2} data={showplan} />
</div>
);
}
export default Showplanner;

View file

@ -6228,9 +6228,9 @@ inherits@2.0.3:
integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
ini@^1.3.5:
version "1.3.5"
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927"
integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==
version "1.3.8"
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c"
integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==
inquirer@6.5.0:
version "6.5.0"