From 8822ccb72b83832f4f331f5e4611be281eadc274 Mon Sep 17 00:00:00 2001
From: Marks Polakovs
Date: Mon, 20 Apr 2020 16:08:12 +0200
Subject: [PATCH 01/10] Ignore double loads (fixes #84)
---
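A minimal sketch of the guard's intent, using hypothetical stand-ins for the item types and for the itemId helper imported from ../showplanner/state (its real signature is not part of this patch):

// Hypothetical stand-in types and helper, for illustration only; the real
// itemId lives in src/showplanner/state.ts.
type LoadableItem = { timeslotitemid?: string; trackid?: number };

const itemId = (item: LoadableItem): string =>
  item.timeslotitemid ?? `track-${item.trackid}`;

// The guard this patch adds: if the requested item is already loaded on the
// player, a repeated load() becomes a no-op instead of tearing down the
// waveform and re-fetching the audio.
export function isDoubleLoad(
  loaded: LoadableItem | null,
  requested: LoadableItem
): boolean {
  return loaded !== null && itemId(loaded) === itemId(requested);
}
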
src/mixer/state.ts | 7 ++++++-
1 file changed, 6 insertions(+), 1 deletion(-)
diff --git a/src/mixer/state.ts b/src/mixer/state.ts
index dd822f3..4699c8d 100644
--- a/src/mixer/state.ts
+++ b/src/mixer/state.ts
@@ -6,7 +6,7 @@ import {
} from "@reduxjs/toolkit";
import fetchProgress, { FetchProgressData } from "fetch-progress";
import Between from "between.js";
-import { PlanItem } from "../showplanner/state";
+import { itemId, PlanItem } from "../showplanner/state";
import * as BroadcastState from "../broadcast/state";
import Keys from "keymaster";
import { Track, MYRADIO_NON_API_BASE, AuxItem } from "../api";
@@ -327,6 +327,11 @@ export const load = (
return;
}
}
+ // If this is already the currently loaded item, don't bother
+ const currentItem = getState().mixer.players[player].loadedItem;
+ if (currentItem !== null && itemId(currentItem) === itemId(item)) {
+ return;
+ }
// If we're already loading something, abort it
if (typeof loadAbortControllers[player] !== "undefined") {
loadAbortControllers[player].abort();
From 556ef1b91670b263590d6d8e92c79f3ce8c2e22f Mon Sep 17 00:00:00 2001
From: michael-grace
Date: Wed, 22 Apr 2020 21:31:32 +0100
Subject: [PATCH 02/10] generalise vumeter and add to navbar
---
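The MicTab hunk below removes the requestAnimationFrame metering loop that used to drive the calibration read-out; the generalised VUMeter is assumed to own that loop itself so it can be dropped anywhere (navbar, options menu). A sketch of that pattern, with hypothetical props and a placeholder render rather than the real VUMeter API:

// Sketch only: a meter component that owns its own rAF loop. Props and the
// level source are assumptions, not the real VUMeter interface.
import React, { useEffect, useRef, useState } from "react";

interface MeterSketchProps {
  width: number;
  height: number;
  getLevel: () => number; // e.g. something like MixerState.getMicAnalysis
}

export function MeterSketch({ width, height, getLevel }: MeterSketchProps) {
  const [peak, setPeak] = useState(-Infinity);
  const rafRef = useRef<number | null>(null);

  useEffect(() => {
    const tick = () => {
      setPeak(getLevel());
      rafRef.current = requestAnimationFrame(tick);
    };
    rafRef.current = requestAnimationFrame(tick);
    return () => {
      // stop polling when the meter unmounts
      if (rafRef.current !== null) cancelAnimationFrame(rafRef.current);
    };
  }, [getLevel]);

  // A real meter would draw to a canvas; this sketch just exposes the peak.
  return <div style={{ width, height }} title={`peak: ${peak.toFixed(1)} dB`} />;
}
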
src/navbar/index.tsx | 13 +-
src/navbar/navbar.scss | 386 +++++++++++++++-------------
src/optionsMenu/MicTab.tsx | 26 +-
src/optionsMenu/helpers/VUMeter.tsx | 39 ++-
4 files changed, 256 insertions(+), 208 deletions(-)
diff --git a/src/navbar/index.tsx b/src/navbar/index.tsx
index feb8c5b..ff53254 100644
--- a/src/navbar/index.tsx
+++ b/src/navbar/index.tsx
@@ -1,6 +1,7 @@
import React, { useRef, useEffect } from "react";
import { useDispatch, useSelector } from "react-redux";
import Clock from "react-live-clock";
+import { VUMeter } from "../optionsMenu/helpers/VUMeter";
import { FaRegClock, FaRegUser } from "react-icons/fa";
@@ -68,6 +69,16 @@ export function NavBar() {
/>
+
+
+
+
+
@@ -195,7 +206,7 @@ function AlertBar() {
{state?.content}
{state?.closure !== null && (
diff --git a/src/navbar/navbar.scss b/src/navbar/navbar.scss
index fc5a20a..a8abce2 100644
--- a/src/navbar/navbar.scss
+++ b/src/navbar/navbar.scss
@@ -1,222 +1,258 @@
/* Stuff to get the nav to collapse at 991px (when the menu collapses)
* Taken from http://stackoverflow.com/a/36289507/995325 */
+
@media (max-width: 991px) {
- .navbar-header {
- float: none;
- }
- .navbar-left,
- .navbar-right {
- float: none !important;
- }
- .navbar-toggle {
- display: block;
- }
- .navbar-collapse {
- border-top: 1px solid transparent;
- box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1);
- }
- .navbar-fixed-top {
- top: 0;
- border-width: 0 0 1px;
- }
- .navbar-collapse.collapse {
- display: none !important;
- }
- .navbar-nav {
- float: none !important;
- margin-top: 7.5px;
- }
- .navbar-nav > li {
- float: none;
- }
- .navbar-nav > li > a {
- padding-top: 10px;
- padding-bottom: 10px;
- }
- .collapse.in {
- display: block !important;
- }
-}
-/** MyRadio bootstrap navbar overides taken from http://work.smarchal.com/twbscolor/css/2D425F2D425Fffffffe1e1e11 **/
-.navbar-ury {
- background-color: #2d333c;
- border-color: #2d425f;
- border: none;
- max-height: 7vh;
- flex-shrink: 0;
-}
-.navbar-ury .navbar-brand {
- color: #ffffff;
- padding: 10px 15px; /** added to center the logo **/
- line-height: 30px; /** added to center the logo **/
- margin-right: 0;
-}
-.navbar-ury .navbar-brand img {
- max-height: 100%;
-}
-.navbar-ury .navbar-brand:hover,
-.navbar-ury .navbar-brand:focus {
- color: #e1e1e1;
-}
-.navbar-ury .navbar-brand.divider {
- border-right: 1px white solid;
- margin: 14px 0;
- height: 22px;
- padding: 0;
-}
-.navbar-ury .navbar-text {
- color: #ffffff;
-}
-.navbar-ury .navbar-nav > li > a {
- color: #ffffff;
-}
-.navbar-ury .navbar-nav > li.nav-img {
- height: 50px;
-}
-.navbar-ury .navbar-nav > li.nav-img > a {
- height: 100%;
- padding: 10px 10px;
-}
-.navbar-ury .navbar-nav > li.nav-img > a > img {
- height: 100%;
-}
-.navbar-ury .navbar-nav .caret {
- margin-left: 5px;
-}
-.navbar-ury .navbar-nav .glyphicon {
- margin-right: 5px;
+ .navbar-header {
+ float: none;
+ }
+ .navbar-left,
+ .navbar-right {
+ float: none !important;
+ }
+ .navbar-toggle {
+ display: block;
+ }
+ .navbar-collapse {
+ border-top: 1px solid transparent;
+ box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1);
+ }
+ .navbar-fixed-top {
+ top: 0;
+ border-width: 0 0 1px;
+ }
+ .navbar-collapse.collapse {
+ display: none !important;
+ }
+ .navbar-nav {
+ float: none !important;
+ margin-top: 7.5px;
+ }
+ .navbar-nav>li {
+ float: none;
+ }
+ .navbar-nav>li>a {
+ padding-top: 10px;
+ padding-bottom: 10px;
+ }
+ .collapse.in {
+ display: block !important;
+ }
}
-.navbar-ury .navbar-nav > li > a:hover,
-.navbar-ury .navbar-nav > li > a:focus {
- color: #e1e1e1;
- background-color: #2d425f;
+
+/** MyRadio bootstrap navbar overrides taken from http://work.smarchal.com/twbscolor/css/2D425F2D425Fffffffe1e1e11 **/
+
+.navbar-ury {
+ background-color: #2d333c;
+ border-color: #2d425f;
+ border: none;
+ max-height: 7vh;
+ flex-shrink: 0;
}
-.navbar-ury .navbar-nav > li > .dropdown-menu {
- background-color: #2d333c;
- border-top: none;
+
+.navbar-ury .navbar-brand {
+ color: #ffffff;
+ padding: 10px 15px;
+ /** added to center the logo **/
+ line-height: 30px;
+ /** added to center the logo **/
+ margin-right: 0;
}
-.navbar-ury .navbar-nav > li > .dropdown-menu > a {
- color: #ffffff;
+
+.navbar-ury .navbar-brand img {
+ max-height: 100%;
}
-.navbar-ury .navbar-nav > li > .dropdown-menu > a:hover,
-.navbar-ury .navbar-nav > li > .dropdown-menu > a:focus {
- color: #e1e1e1;
- background-color: #2d425f;
+
+.navbar-ury .navbar-brand:hover,
+.navbar-ury .navbar-brand:focus {
+ color: #e1e1e1;
}
-.navbar-ury .navbar-nav > li > .dropdown-menu > .divider {
- background-color: #2d425f;
+
+.navbar-ury .navbar-brand.divider {
+ border-right: 1px white solid;
+ margin: 14px 0;
+ height: 22px;
+ padding: 0;
}
-.navbar-ury .navbar-nav .open .dropdown-menu > .active > a,
-.navbar-ury .navbar-nav .open .dropdown-menu > .active > a:hover,
-.navbar-ury .navbar-nav .open .dropdown-menu > .active > a:focus {
- color: #e1e1e1;
- background-color: #2d333c;
+
+.navbar-ury .navbar-text {
+ color: #ffffff;
}
-.navbar-ury .navbar-nav > .active > a,
-.navbar-ury .navbar-nav > .active > a:hover,
-.navbar-ury .navbar-nav > .active > a:focus {
- color: #e1e1e1;
- background-color: #2d333c;
+
+.navbar-ury .navbar-nav>li>a {
+ color: #ffffff;
}
-.navbar-ury .navbar-nav > .open > a,
-.navbar-ury .navbar-nav > .open > a:hover,
-.navbar-ury .navbar-nav > .open > a:focus {
- color: #e1e1e1;
- background-color: #2d425f !important;
+
+.navbar-ury .navbar-nav>li.nav-img {
+ height: 50px;
}
+
+.navbar-ury .navbar-nav>li.nav-img>a {
+ height: 100%;
+ padding: 10px 10px;
+}
+
+.navbar-ury .navbar-nav>li.nav-img>a>img {
+ height: 100%;
+}
+
+.navbar-ury .navbar-nav .caret {
+ margin-left: 5px;
+}
+
+.navbar-ury .navbar-nav .glyphicon {
+ margin-right: 5px;
+}
+
+.navbar-ury .navbar-nav>li>a:hover,
+.navbar-ury .navbar-nav>li>a:focus {
+ color: #e1e1e1;
+ background-color: #2d425f;
+}
+
+.navbar-ury .navbar-nav>li>.dropdown-menu {
+ background-color: #2d333c;
+ border-top: none;
+}
+
+.navbar-ury .navbar-nav>li>.dropdown-menu>a {
+ color: #ffffff;
+}
+
+.navbar-ury .navbar-nav>li>.dropdown-menu>a:hover,
+.navbar-ury .navbar-nav>li>.dropdown-menu>a:focus {
+ color: #e1e1e1;
+ background-color: #2d425f;
+}
+
+.navbar-ury .navbar-nav>li>.dropdown-menu>.divider {
+ background-color: #2d425f;
+}
+
+.navbar-ury .navbar-nav .open .dropdown-menu>.active>a,
+.navbar-ury .navbar-nav .open .dropdown-menu>.active>a:hover,
+.navbar-ury .navbar-nav .open .dropdown-menu>.active>a:focus {
+ color: #e1e1e1;
+ background-color: #2d333c;
+}
+
+.navbar-ury .navbar-nav>.active>a,
+.navbar-ury .navbar-nav>.active>a:hover,
+.navbar-ury .navbar-nav>.active>a:focus {
+ color: #e1e1e1;
+ background-color: #2d333c;
+}
+
+.navbar-ury .navbar-nav>.open>a,
+.navbar-ury .navbar-nav>.open>a:hover,
+.navbar-ury .navbar-nav>.open>a:focus {
+ color: #e1e1e1;
+ background-color: #2d425f !important;
+}
+
.navbar-ury .navbar-toggle {
- border-color: #2d425f;
+ border-color: #2d425f;
}
+
.navbar-ury .navbar-toggle:hover,
.navbar-ury .navbar-toggle:focus {
- background-color: #2d425f;
+ background-color: #2d425f;
}
+
.navbar-ury .navbar-toggle .icon-bar {
- background-color: #ffffff;
+ background-color: #ffffff;
}
+
.navbar-ury .navbar-collapse,
.navbar-ury .navbar-form {
- border-color: #ffffff;
+ border-color: #ffffff;
}
+
.navbar-ury .navbar-link {
- color: #ffffff;
+ color: #ffffff;
}
+
.navbar-ury .navbar-link:hover {
- color: #e1e1e1;
+ color: #e1e1e1;
}
+
.navbar-ury .navbar-brand:hover {
- color: #e1e1e1;
- background-color: #2d425f !important;
+ color: #e1e1e1;
+ background-color: #2d425f !important;
}
@media (max-width: 767px) {
- .navbar-ury .navbar-nav .open .dropdown-menu > li > a {
- color: #ffffff;
- }
- .navbar-ury .navbar-nav .open .dropdown-menu > li > a:hover,
- .navbar-ury .navbar-nav .open .dropdown-menu > li > a:focus {
- color: #e1e1e1;
- }
- .navbar-ury .navbar-nav .open .dropdown-menu > .active > a,
- .navbar-ury .navbar-nav .open .dropdown-menu > .active > a:hover,
- .navbar-ury .navbar-nav .open .dropdown-menu > .active > a:focus {
- color: #e1e1e1;
- background-color: #2d425f;
- }
+ .navbar-ury .navbar-nav .open .dropdown-menu>li>a {
+ color: #ffffff;
+ }
+ .navbar-ury .navbar-nav .open .dropdown-menu>li>a:hover,
+ .navbar-ury .navbar-nav .open .dropdown-menu>li>a:focus {
+ color: #e1e1e1;
+ }
+ .navbar-ury .navbar-nav .open .dropdown-menu>.active>a,
+ .navbar-ury .navbar-nav .open .dropdown-menu>.active>a:hover,
+ .navbar-ury .navbar-nav .open .dropdown-menu>.active>a:focus {
+ color: #e1e1e1;
+ background-color: #2d425f;
+ }
}
.alertbar {
- position: fixed !important;
- top: 0;
- left: 0;
- width: 100vw;
- min-height: 38.5px;
- z-index: 99999;
- transform: translateY(-50px);
- transition: 400ms transform;
- &.visible {
- transform: translateY(0);
- }
+ position: fixed !important;
+ top: 0;
+ left: 0;
+ width: 100vw;
+ min-height: 38.5px;
+ z-index: 99999;
+ transform: translateY(-50px);
+ transition: 400ms transform;
+ &.visible {
+ transform: translateY(0);
+ }
}
.logo-hover {
- position: relative;
- .logo-myradio {
- opacity: 0;
- position: absolute;
- left: 25px;
- transition: 0.4s;
- height: 32px;
- }
- .logo-webstudio {
- opacity: 1;
- transition: 0.4s;
- height: 28px;
- margin-top: -5px;
- }
- &:hover {
+ position: relative;
.logo-myradio {
- opacity: 1;
+ opacity: 0;
+ position: absolute;
+ left: 25px;
+ transition: 0.4s;
+ height: 32px;
}
.logo-webstudio {
- opacity: 0;
+ opacity: 1;
+ transition: 0.4s;
+ height: 28px;
+ margin-top: -5px;
+ }
+ &:hover {
+ .logo-myradio {
+ opacity: 1;
+ }
+ .logo-webstudio {
+ opacity: 0;
+ }
}
- }
}
#timelord {
- background: black;
- border: red 1px solid;
- padding: 0.2rem;
- margin: auto 0;
- color: white;
- width: 100%;
- max-width: 40vw;
- height: 100%;
- max-height: 36.5px;
+ background: black;
+ border: red 1px solid;
+ padding: 0.2rem;
+ margin: auto 0;
+ color: white;
+ width: 100%;
+ max-width: 40vw;
+ height: 100%;
+ max-height: 36.5px;
}
+
#timelord .time {
- font-weight: bold;
- font-size: 1.1em;
+ font-weight: bold;
+ font-size: 1.1em;
}
+
+#navMeter {
+ margin: 0.4rem 0 0 1rem;
+}
\ No newline at end of file
diff --git a/src/optionsMenu/MicTab.tsx b/src/optionsMenu/MicTab.tsx
index 4399597..e4626c3 100644
--- a/src/optionsMenu/MicTab.tsx
+++ b/src/optionsMenu/MicTab.tsx
@@ -1,4 +1,4 @@
-import React, { useState, useEffect, useRef, useCallback } from "react";
+import React, { useState } from "react";
import { useSelector, useDispatch } from "react-redux";
import { RootState } from "../rootReducer";
@@ -59,29 +59,6 @@ export function MicTab() {
dispatch(MixerState.openMicrophone(sourceId));
}
- const rafRef = useRef(null);
- const [peak, setPeak] = useState(-Infinity);
-
- const animate = useCallback(() => {
- if (state.calibration) {
- const result = MixerState.getMicAnalysis();
- setPeak(result);
- rafRef.current = requestAnimationFrame(animate);
- } else if (rafRef.current !== null) {
- cancelAnimationFrame(rafRef.current);
- rafRef.current = null;
- }
- }, [state.calibration]);
-
- useEffect(() => {
- if (state.calibration) {
- rafRef.current = requestAnimationFrame(animate);
- } else if (rafRef.current !== null) {
- cancelAnimationFrame(rafRef.current);
- rafRef.current = null;
- }
- }, [animate, state.calibration]);
-
return (
<>
-
+
-
@@ -206,7 +205,7 @@ function AlertBar() {
{state?.content}
{state?.closure !== null && (
From 6ee50b198e9e3c4288b0454fdad65d5530902280 Mon Sep 17 00:00:00 2001
From: michael-grace
Date: Wed, 22 Apr 2020 22:33:41 +0100
Subject: [PATCH 06/10] move mic meter
---
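The gist of the move: the meter leaves the navbar (hence the deleted #navMeter rule) and is rendered inside the show planner's MicControl, picking up the new #micMeter margin in App.css. A rough sketch of the destination markup, with hypothetical VUMeter props (the real props are not shown in this hunk):

// Sketch only: approximate shape of the MicControl markup after this patch.
// The width/height props are hypothetical.
import React from "react";
import { VUMeter } from "../optionsMenu/helpers/VUMeter";

export function MicMeterSketch() {
  return (
    <div id="micMeter">
      {/* styled by the new #micMeter rule added to App.css */}
      <VUMeter width={250} height={40} />
    </div>
  );
}
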
src/App.css | 13 +++++++++++++
src/navbar/index.tsx | 10 ----------
src/navbar/navbar.scss | 4 ----
src/showplanner/index.tsx | 9 +++++++++
4 files changed, 22 insertions(+), 14 deletions(-)
diff --git a/src/App.css b/src/App.css
index f659e04..1109cde 100644
--- a/src/App.css
+++ b/src/App.css
@@ -94,38 +94,46 @@ button {
z-index: 5;
/* padding: 1px; */
}
+
.waveform .current,
.waveform .remaining,
.waveform .length {
font-weight: 800;
}
+
.waveform .remaining,
.waveform .outro {
right: 0;
position: absolute;
}
+
.waveform .intro,
.waveform .outro,
.waveform .length {
bottom: 0;
position: absolute;
}
+
.waveform .length {
text-align: center;
width: 100%;
}
+
.waveform {
height: 6vh;
}
+
.waveform .graph {
position: absolute;
height: 6vh;
width: 100%;
overflow: hidden;
}
+
.waveform .graph wave {
height: 6vh !important;
}
+
.waveform .loading {
background: #ccccff;
}
@@ -135,6 +143,7 @@ button {
}
/* Flash class and keyframe animation */
+
.sp-ending-soon {
-webkit-animation: green-flash steps(1) 0.5s infinite;
animation: green-flash steps(1) 0.5s infinite;
@@ -154,3 +163,7 @@ button {
pointer-events: none;
box-shadow: inset 0 0 3px 6px red;
}
+
+#micMeter {
+ margin: 0.4rem 0 0 0.6rem;
+}
diff --git a/src/navbar/index.tsx b/src/navbar/index.tsx
index d3748a2..feb8c5b 100644
--- a/src/navbar/index.tsx
+++ b/src/navbar/index.tsx
@@ -1,7 +1,6 @@
import React, { useRef, useEffect } from "react";
import { useDispatch, useSelector } from "react-redux";
import Clock from "react-live-clock";
-import { VUMeter } from "../optionsMenu/helpers/VUMeter";
import { FaRegClock, FaRegUser } from "react-icons/fa";
@@ -69,15 +68,6 @@ export function NavBar() {
/>
-
-
-
-
diff --git a/src/navbar/navbar.scss b/src/navbar/navbar.scss
index 36c4434..b21e6cd 100644
--- a/src/navbar/navbar.scss
+++ b/src/navbar/navbar.scss
@@ -251,7 +251,3 @@
font-weight: bold;
font-size: 1.1em;
}
-
-#navMeter {
- margin: 0.4rem 0 0 1rem;
-}
diff --git a/src/showplanner/index.tsx b/src/showplanner/index.tsx
index d066a87..3be6c23 100644
--- a/src/showplanner/index.tsx
+++ b/src/showplanner/index.tsx
@@ -2,6 +2,7 @@ import React, { useState, useReducer, useEffect } from "react";
import { ContextMenu, MenuItem } from "react-contextmenu";
import { useBeforeunload } from "react-beforeunload";
import { FaAlignJustify, FaBookOpen, FaMicrophone } from "react-icons/fa";
+import { VUMeter } from "../optionsMenu/helpers/VUMeter";
import { TimeslotItem } from "../api";
import appLogo from "../assets/images/webstudio.svg";
@@ -145,6 +146,14 @@ function MicControl() {
The microphone has not been setup. Go to options.
)}
+
+
+
Date: Thu, 23 Apr 2020 10:31:02 +0200
Subject: [PATCH 07/10] Fix SEL None
---
stateserver.py | 9 +++++----
1 file changed, 5 insertions(+), 4 deletions(-)
diff --git a/stateserver.py b/stateserver.py
index f3bcdd7..4d10e59 100755
--- a/stateserver.py
+++ b/stateserver.py
@@ -303,10 +303,11 @@ def post_registerCheck() -> Any:
}
if start_time + datetime.timedelta(minutes=2) < now_time:
- # they're late, bring them live now
- print("({}, {}) late, bringing on air now".format(connection["connid"], connection["wsid"]))
- do_ws_srv_telnet(connection["wsid"])
- subprocess.Popen(['sel', '5'])
+ if connection["wsid"] is not None:
+ # they're late, bring them live now
+ print("({}, {}) late, bringing on air now".format(connection["connid"], connection["wsid"]))
+ do_ws_srv_telnet(connection["wsid"])
+ subprocess.Popen(['sel', '5'])
assert connection is not None
if new_connection:
From df20df4ee6d520f9209858fd464e89bab5b7964a Mon Sep 17 00:00:00 2001
From: Marks Polakovs
Date: Thu, 23 Apr 2020 12:19:17 +0200
Subject: [PATCH 08/10] Try and fix unexpected jukeboxing, and add extra debug
logs
---
stateserver.py | 17 ++++++++++++-----
1 file changed, 12 insertions(+), 5 deletions(-)
diff --git a/stateserver.py b/stateserver.py
index 4d10e59..73b58e3 100755
--- a/stateserver.py
+++ b/stateserver.py
@@ -155,6 +155,7 @@ def stateDecider() -> Dict[str, Any]:
if currentConnection:
print("We're currently doing a show, so check if they want middle news.")
willRunAutoNews = currentConnection["autoNewsMiddle"]
+ print("(conclusion: {})".format("yes" if willRunAutoNews else "no"))
newSelSource = currentConnection["sourceid"]
newWSSource = currentConnection["wsid"]
elif SUSTAINER_AUTONEWS:
@@ -211,9 +212,10 @@ def post_cancelCheck() -> Any:
if currentShow and currentShow["connid"] == content["connid"]:
# this show is (at least supposed to be) live now.
# kill their show
- # but don't kill it during the news, to avoid unexpected jukeboxing
+ # but don't kill it during the news, or after the end time, to avoid unexpected jukeboxing
now = datetime.datetime.now().timestamp()
if now < (currentShow["endTimestamp"] - 15):
+ print("Jukeboxing due to {}'s ({}, {}) cancellation".format(currentShow["connid"], currentShow["timeslotid"], currentShow["wsid"]))
do_ws_srv_telnet("NUL")
subprocess.Popen(["sel", str(SOURCE_JUKEBOX)])
@@ -380,10 +382,15 @@ def post_wsSessions() -> Any:
if conn["wsid"] in wsids_to_remove:
print("({}, {}) gone".format(conn["connid"], conn["wsid"]))
conn["wsid"] = None
- # TODO Make this actually do a disconnect sequence if this is the current show.
- # time.sleep(5)
- subprocess.Popen(['sel', str(SOURCE_JUKEBOX)])
- do_ws_srv_telnet("NUL")
+ currentShow = getCurrentShowConnection()
+ if currentShow and currentShow["connid"] == conn["connid"]:
+ # they should be on air now, but they've just died. go to jukebox.
+ # but don't kill it during the news, or after the end time, to avoid unexpected jukeboxing
+ now = datetime.datetime.now().timestamp()
+ if now < (currentShow["endTimestamp"] - 15):
+ print("jukeboxing due to their disappearance...")
+ subprocess.Popen(['sel', str(SOURCE_JUKEBOX)])
+ do_ws_srv_telnet("NUL")
return genPayload("Thx, K, bye.")
From c5da634eaae175ac94f4736c8b096f421279463e Mon Sep 17 00:00:00 2001
From: Marks Polakovs
Date: Thu, 23 Apr 2020 22:10:44 +0200
Subject: [PATCH 09/10] Refactor MixerState (#94)
* Refactor MixerState
This extracts the actual audio handling out of MixerState into a separate module with two classes, Player and AudioEngine, because separation of concerns is good and a 1,000-line file is bad.
* Remove unnecessary module
* Fix mic calibration
* rename engine to audioEngine
* Fix a butchered merge
* remove a bunch of unused imports
---
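For orientation, a condensed sketch of how the new module is consumed from the Redux thunks, based on the API visible in the diff (createPlayer, the Player events, openMic and the gain setters). The class and method names are real; the wrapper functions here are invented for illustration and the types are simplified:

// Usage sketch of the new src/mixer/audio.ts surface, as driven from state.ts.
import { audioEngine } from "./audio";

export function loadAndPlaySketch(playerIdx: number, objectUrl: string) {
  const player = audioEngine.createPlayer(playerIdx, objectUrl);

  // Player is a (strictly typed) EventEmitter: the Redux thunk now reacts to
  // these events instead of wiring wavesurfer callbacks inline.
  player.on("loadComplete", (duration: number) => {
    console.log("loaded, duration =", duration);
    player.play();
  });
  player.on("timeChange", (time: number) => {
    // the real thunk only dispatches when the time moves by more than 0.5s
    console.log("time", time);
  });
  player.on("finish", () => {
    player.stop();
  });
}

export async function openMicSketch(deviceId: string) {
  await audioEngine.openMic(deviceId); // getUserMedia + audio-graph wiring
  audioEngine.setMicCalibrationGain(1.0); // pre-compressor trim
  audioEngine.setMicVolume(1.0); // post-compressor mix gain
  // getMicLevel() reads the analyser and returns 10 * log10(peak sample
  // power), i.e. an approximate dBFS peak for the VU meter.
  console.log("mic level", audioEngine.getMicLevel());
}
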
package.json | 2 +
src/broadcast/rtc_streamer.ts | 6 +-
src/broadcast/state.ts | 8 +-
src/mixer/audio.ts | 277 ++++++++++++++++
src/mixer/state.ts | 486 ++++++++--------------------
src/optionsMenu/MicTab.tsx | 15 +-
src/optionsMenu/helpers/VUMeter.tsx | 24 +-
src/optionsMenu/state.ts | 32 +-
src/showplanner/Player.tsx | 3 +-
src/store.ts | 2 -
yarn.lock | 11 +-
11 files changed, 453 insertions(+), 413 deletions(-)
create mode 100644 src/mixer/audio.ts
diff --git a/package.json b/package.json
index ed36018..1fd50b4 100644
--- a/package.json
+++ b/package.json
@@ -47,6 +47,7 @@
"eslint-plugin-jsx-a11y": "6.2.3",
"eslint-plugin-react": "7.14.3",
"eslint-plugin-react-hooks": "^1.6.1",
+ "eventemitter3": "^4.0.0",
"fetch-progress": "github:UniversityRadioYork/fetch-progress",
"file-loader": "3.0.1",
"fs-extra": "7.0.1",
@@ -91,6 +92,7 @@
"sass-loader": "7.2.0",
"sdp-transform": "^2.14.0",
"semver": "6.3.0",
+ "strict-event-emitter-types": "^2.0.0",
"style-loader": "1.0.0",
"terser-webpack-plugin": "1.4.1",
"ts-pnp": "1.1.4",
diff --git a/src/broadcast/rtc_streamer.ts b/src/broadcast/rtc_streamer.ts
index 89404e9..088feed 100644
--- a/src/broadcast/rtc_streamer.ts
+++ b/src/broadcast/rtc_streamer.ts
@@ -2,11 +2,11 @@ import SdpTransform from "sdp-transform";
import * as later from "later";
import * as BroadcastState from "./state";
-import * as MixerState from "../mixer/state";
import { Streamer, ConnectionStateEnum } from "./streamer";
import { Dispatch } from "redux";
import { broadcastApiRequest } from "../api";
+import { audioEngine } from "../mixer/audio";
type StreamerState = "HELLO" | "OFFER" | "ANSWER" | "CONNECTED";
@@ -112,7 +112,7 @@ export class WebRTCStreamer extends Streamer {
if (now.getSeconds() < 45) {
later.setTimeout(
async () => {
- await MixerState.playNewsIntro();
+ await audioEngine.playNewsIntro();
},
later.parse
.recur()
@@ -125,7 +125,7 @@ export class WebRTCStreamer extends Streamer {
if (now.getMinutes() <= 1 && now.getSeconds() < 55) {
later.setTimeout(
async () => {
- await MixerState.playNewsEnd();
+ await audioEngine.playNewsEnd();
},
later.parse
.recur()
diff --git a/src/broadcast/state.ts b/src/broadcast/state.ts
index 2902395..78d2431 100644
--- a/src/broadcast/state.ts
+++ b/src/broadcast/state.ts
@@ -6,6 +6,7 @@ import * as MixerState from "../mixer/state";
import * as NavbarState from "../navbar/state";
import { ConnectionStateEnum } from "./streamer";
import { RecordingStreamer } from "./recording_streamer";
+import { audioEngine } from "../mixer/audio";
export let streamer: WebRTCStreamer | null = null;
@@ -302,7 +303,10 @@ export const goOnAir = (): AppThunk => async (dispatch, getState) => {
return;
}
console.log("starting streamer.");
- streamer = new WebRTCStreamer(MixerState.destination.stream, dispatch);
+ streamer = new WebRTCStreamer(
+ audioEngine.streamingDestination.stream,
+ dispatch
+ );
streamer.addConnectionStateListener((state) => {
dispatch(broadcastState.actions.setConnectionState(state));
if (state === "CONNECTION_LOST") {
@@ -328,7 +332,7 @@ export const stopStreaming = (): AppThunk => async (dispatch) => {
let recorder: RecordingStreamer;
export const startRecording = (): AppThunk => async (dispatch) => {
- recorder = new RecordingStreamer(MixerState.destination.stream);
+ recorder = new RecordingStreamer(audioEngine.streamingDestination.stream);
recorder.addConnectionStateListener((state) => {
dispatch(broadcastState.actions.setRecordingState(state));
});
diff --git a/src/mixer/audio.ts b/src/mixer/audio.ts
new file mode 100644
index 0000000..18a51e3
--- /dev/null
+++ b/src/mixer/audio.ts
@@ -0,0 +1,277 @@
+import EventEmitter from "eventemitter3";
+import StrictEmitter from "strict-event-emitter-types";
+
+import WaveSurfer from "wavesurfer.js";
+import CursorPlugin from "wavesurfer.js/dist/plugin/wavesurfer.cursor.min.js";
+import RegionsPlugin from "wavesurfer.js/dist/plugin/wavesurfer.regions.min.js";
+import NewsEndCountdown from "../assets/audio/NewsEndCountdown.wav";
+import NewsIntro from "../assets/audio/NewsIntro.wav";
+
+interface PlayerEvents {
+ loadComplete: (duration: number) => void;
+ timeChange: (time: number) => void;
+ play: () => void;
+ pause: () => void;
+ finish: () => void;
+}
+
+const PlayerEmitter: StrictEmitter<
+ EventEmitter,
+ PlayerEvents
+> = EventEmitter as any;
+
+class Player extends ((PlayerEmitter as unknown) as { new (): EventEmitter }) {
+ private constructor(
+ private readonly engine: AudioEngine,
+ private wavesurfer: WaveSurfer,
+ private readonly waveform: HTMLElement
+ ) {
+ super();
+ }
+
+ get isPlaying() {
+ return this.wavesurfer.isPlaying();
+ }
+
+ get currentTime() {
+ return this.wavesurfer.getCurrentTime();
+ }
+
+ play() {
+ return this.wavesurfer.play();
+ }
+
+ pause() {
+ return this.wavesurfer.pause();
+ }
+
+ stop() {
+ return this.wavesurfer.stop();
+ }
+
+ redraw() {
+ this.wavesurfer.drawBuffer();
+ }
+
+ setIntro(duration: number) {
+ this.wavesurfer.addRegion({
+ id: "intro",
+ resize: false,
+ start: 0,
+ end: duration,
+ color: "rgba(125,0,255, 0.12)",
+ });
+ }
+
+ setVolume(val: number) {
+ this.wavesurfer.setVolume(val);
+ }
+
+ public static create(engine: AudioEngine, player: number, url: string) {
+ let waveform = document.getElementById("waveform-" + player.toString());
+ if (waveform == null) {
+ throw new Error();
+ }
+ waveform.innerHTML = "";
+ const wavesurfer = WaveSurfer.create({
+ audioContext: engine.audioContext,
+ container: "#waveform-" + player.toString(),
+ waveColor: "#CCCCFF",
+ progressColor: "#9999FF",
+ backend: "MediaElementWebAudio",
+ responsive: true,
+ xhr: {
+ credentials: "include",
+ } as any,
+ plugins: [
+ CursorPlugin.create({
+ showTime: true,
+ opacity: 1,
+ customShowTimeStyle: {
+ "background-color": "#000",
+ color: "#fff",
+ padding: "2px",
+ "font-size": "10px",
+ },
+ }),
+ RegionsPlugin.create({}),
+ ],
+ });
+
+ const instance = new this(engine, wavesurfer, waveform);
+
+ wavesurfer.on("ready", () => {
+ console.log("ready");
+ instance.emit("loadComplete", wavesurfer.getDuration());
+ });
+ wavesurfer.on("play", () => {
+ instance.emit("play");
+ });
+ wavesurfer.on("pause", () => {
+ instance.emit("pause");
+ });
+ wavesurfer.on("seek", () => {
+ instance.emit("timeChange", wavesurfer.getCurrentTime());
+ });
+ wavesurfer.on("finish", () => {
+ instance.emit("finish");
+ });
+ wavesurfer.on("audioprocess", () => {
+ instance.emit("timeChange", wavesurfer.getCurrentTime());
+ });
+
+ (wavesurfer as any).backend.gainNode.disconnect();
+ (wavesurfer as any).backend.gainNode.connect(engine.finalCompressor);
+ (wavesurfer as any).backend.gainNode.connect(
+ engine.audioContext.destination
+ );
+
+ wavesurfer.load(url);
+
+ return instance;
+ }
+}
+
+interface EngineEvents {
+ micOpen: () => void;
+}
+
+const EngineEmitter: StrictEmitter<
+ EventEmitter,
+ EngineEvents
+> = EventEmitter as any;
+
+export class AudioEngine extends ((EngineEmitter as unknown) as {
+ new (): EventEmitter;
+}) {
+ public audioContext: AudioContext;
+ public players: (Player | undefined)[] = [];
+
+ micMedia: MediaStream | null = null;
+ micSource: MediaStreamAudioSourceNode | null = null;
+ micCalibrationGain: GainNode;
+ micAnalyser: AnalyserNode;
+ micCompressor: DynamicsCompressorNode;
+ micMixGain: GainNode;
+
+ finalCompressor: DynamicsCompressorNode;
+ streamingDestination: MediaStreamAudioDestinationNode;
+
+ newsStartCountdownEl: HTMLAudioElement;
+ newsStartCountdownNode: MediaElementAudioSourceNode;
+
+ newsEndCountdownEl: HTMLAudioElement;
+ newsEndCountdownNode: MediaElementAudioSourceNode;
+
+ analysisBuffer: Float32Array;
+
+ constructor() {
+ super();
+ this.audioContext = new AudioContext({
+ sampleRate: 44100,
+ latencyHint: "interactive",
+ });
+
+ this.finalCompressor = this.audioContext.createDynamicsCompressor();
+ this.finalCompressor.ratio.value = 20; // brickwall destination compressor
+ this.finalCompressor.threshold.value = -0.5;
+ this.finalCompressor.attack.value = 0;
+ this.finalCompressor.release.value = 0.2;
+ this.finalCompressor.connect(this.audioContext.destination);
+
+ this.streamingDestination = this.audioContext.createMediaStreamDestination();
+ this.finalCompressor.connect(this.streamingDestination);
+
+ this.micCalibrationGain = this.audioContext.createGain();
+
+ this.micAnalyser = this.audioContext.createAnalyser();
+ this.micAnalyser.fftSize = 8192;
+
+ this.analysisBuffer = new Float32Array(this.micAnalyser.fftSize);
+
+ this.micCompressor = this.audioContext.createDynamicsCompressor();
+ this.micCompressor.ratio.value = 3; // mic compressor - fairly gentle, can be upped
+ this.micCompressor.threshold.value = -18;
+ this.micCompressor.attack.value = 0.01;
+ this.micCompressor.release.value = 0.1;
+
+ this.micMixGain = this.audioContext.createGain();
+ this.micMixGain.gain.value = 1;
+
+ this.micCalibrationGain
+ .connect(this.micAnalyser)
+ .connect(this.micCompressor)
+ .connect(this.micMixGain)
+ .connect(this.streamingDestination);
+
+ this.newsEndCountdownEl = new Audio(NewsEndCountdown);
+ this.newsEndCountdownEl.preload = "auto";
+ this.newsEndCountdownEl.volume = 0.5;
+ this.newsEndCountdownNode = this.audioContext.createMediaElementSource(
+ this.newsEndCountdownEl
+ );
+ this.newsEndCountdownNode.connect(this.audioContext.destination);
+
+ this.newsStartCountdownEl = new Audio(NewsIntro);
+ this.newsStartCountdownEl.preload = "auto";
+ this.newsStartCountdownEl.volume = 0.5;
+ this.newsStartCountdownNode = this.audioContext.createMediaElementSource(
+ this.newsStartCountdownEl
+ );
+ this.newsStartCountdownNode.connect(this.audioContext.destination);
+ }
+
+ public createPlayer(number: number, url: string) {
+ const player = Player.create(this, number, url);
+ this.players[number] = player;
+ return player;
+ }
+
+ async openMic(deviceId: string) {
+ console.log("opening mic", deviceId);
+ this.micMedia = await navigator.mediaDevices.getUserMedia({
+ audio: {
+ deviceId: { exact: deviceId },
+ echoCancellation: false,
+ autoGainControl: false,
+ noiseSuppression: false,
+ latency: 0.01,
+ },
+ });
+
+ this.micSource = this.audioContext.createMediaStreamSource(this.micMedia);
+
+ this.micSource.connect(this.micCalibrationGain);
+
+ this.emit("micOpen");
+ }
+
+ setMicCalibrationGain(value: number) {
+ this.micCalibrationGain.gain.value = value;
+ }
+
+ setMicVolume(value: number) {
+ this.micMixGain.gain.value = value;
+ }
+
+ getMicLevel() {
+ this.micAnalyser.getFloatTimeDomainData(this.analysisBuffer);
+ let peak = 0;
+ for (let i = 0; i < this.analysisBuffer.length; i++) {
+ peak = Math.max(peak, this.analysisBuffer[i] ** 2);
+ }
+ return 10 * Math.log10(peak);
+ }
+
+ async playNewsEnd() {
+ this.newsEndCountdownEl.currentTime = 0;
+ await this.newsEndCountdownEl.play();
+ }
+
+ async playNewsIntro() {
+ this.newsStartCountdownEl.currentTime = 0;
+ await this.newsStartCountdownEl.play();
+ }
+}
+
+export const audioEngine = new AudioEngine();
diff --git a/src/mixer/state.ts b/src/mixer/state.ts
index 4699c8d..aa895ad 100644
--- a/src/mixer/state.ts
+++ b/src/mixer/state.ts
@@ -12,66 +12,14 @@ import Keys from "keymaster";
import { Track, MYRADIO_NON_API_BASE, AuxItem } from "../api";
import { AppThunk } from "../store";
import { RootState } from "../rootReducer";
-import WaveSurfer from "wavesurfer.js";
-import CursorPlugin from "wavesurfer.js/dist/plugin/wavesurfer.cursor.min.js";
-import RegionsPlugin from "wavesurfer.js/dist/plugin/wavesurfer.regions.min.js";
-import * as later from "later";
-import NewsIntro from "../assets/audio/NewsIntro.wav";
-import NewsEndCountdown from "../assets/audio/NewsEndCountdown.wav";
+import { audioEngine } from "./audio";
-const audioContext = new (window.AudioContext ||
- (window as any).webkitAudioContext)();
-const wavesurfers: WaveSurfer[] = [];
const playerGainTweens: Array<{
target: VolumePresetEnum;
tweens: Between[];
}> = [];
const loadAbortControllers: AbortController[] = [];
-let micMedia: MediaStream | null = null;
-let micSource: MediaStreamAudioSourceNode | null = null;
-let micCalibrationGain: GainNode | null = null;
-let micCompressor: DynamicsCompressorNode | null = null;
-let micMixGain: GainNode | null = null;
-
-const finalCompressor = audioContext.createDynamicsCompressor();
-finalCompressor.ratio.value = 20; //brickwall destination comressor
-finalCompressor.threshold.value = -0.5;
-finalCompressor.attack.value = 0;
-finalCompressor.release.value = 0.2;
-
-export const destination = audioContext.createMediaStreamDestination();
-console.log("final destination", destination);
-finalCompressor.connect(destination);
-
-const newsEndCountdownEl = new Audio(NewsEndCountdown);
-newsEndCountdownEl.preload = "auto";
-newsEndCountdownEl.volume = 0.5;
-const newsEndCountdownNode = audioContext.createMediaElementSource(
- newsEndCountdownEl
-);
-newsEndCountdownNode.connect(audioContext.destination);
-
-const newsStartCountdownEl = new Audio(NewsIntro);
-newsStartCountdownEl.preload = "auto";
-newsStartCountdownEl.volume = 0.5;
-const newsStartCountdownNode = audioContext.createMediaElementSource(
- newsStartCountdownEl
-);
-newsStartCountdownNode.connect(audioContext.destination);
-
-export async function playNewsEnd() {
- newsEndCountdownEl.currentTime = 0;
- await newsEndCountdownEl.play();
-}
-
-export async function playNewsIntro() {
- newsStartCountdownEl.currentTime = 0;
- await newsStartCountdownEl.play();
-}
-
-let timerInterval: later.Timer;
-
type PlayerStateEnum = "playing" | "paused" | "stopped";
type PlayerRepeatEnum = "none" | "one" | "all";
type VolumePresetEnum = "off" | "bed" | "full";
@@ -100,7 +48,6 @@ interface MicState {
volume: 1 | 0;
baseGain: number;
id: string | null;
- calibration: boolean;
}
interface MixerState {
@@ -108,56 +55,26 @@ interface MixerState {
mic: MicState;
}
+const BasePlayerState: PlayerState = {
+ loadedItem: null,
+ loading: -1,
+ state: "stopped",
+ volume: 1,
+ gain: 1,
+ timeCurrent: 0,
+ timeRemaining: 0,
+ timeLength: 0,
+ playOnLoad: false,
+ autoAdvance: true,
+ repeat: "none",
+ tracklistItemID: -1,
+ loadError: false,
+};
+
const mixerState = createSlice({
name: "Player",
initialState: {
- players: [
- {
- loadedItem: null,
- loading: -1,
- state: "stopped",
- volume: 1,
- gain: 1,
- timeCurrent: 0,
- timeRemaining: 0,
- timeLength: 0,
- playOnLoad: false,
- autoAdvance: true,
- repeat: "none",
- tracklistItemID: -1,
- loadError: false,
- },
- {
- loadedItem: null,
- loading: -1,
- state: "stopped",
- volume: 1,
- gain: 1,
- timeCurrent: 0,
- timeRemaining: 0,
- timeLength: 0,
- playOnLoad: false,
- autoAdvance: true,
- repeat: "none",
- tracklistItemID: -1,
- loadError: false,
- },
- {
- loadedItem: null,
- loading: -1,
- state: "stopped",
- volume: 1,
- gain: 1,
- timeCurrent: 0,
- timeRemaining: 0,
- timeLength: 0,
- playOnLoad: false,
- autoAdvance: true,
- repeat: "none",
- tracklistItemID: -1,
- loadError: false,
- },
- ],
+ players: [BasePlayerState, BasePlayerState, BasePlayerState],
mic: {
open: false,
volume: 1,
@@ -165,7 +82,6 @@ const mixerState = createSlice({
baseGain: 1,
openError: null,
id: "None",
- calibration: false,
},
} as MixerState,
reducers: {
@@ -304,12 +220,6 @@ const mixerState = createSlice({
) {
state.players[action.payload.player].tracklistItemID = action.payload.id;
},
- startMicCalibration(state) {
- state.mic.calibration = true;
- },
- stopMicCalibration(state) {
- state.mic.calibration = false;
- },
},
});
@@ -321,8 +231,8 @@ export const load = (
player: number,
item: PlanItem | Track | AuxItem
): AppThunk => async (dispatch, getState) => {
- if (typeof wavesurfers[player] !== "undefined") {
- if (wavesurfers[player].isPlaying()) {
+ if (typeof audioEngine.players[player] !== "undefined") {
+ if (audioEngine.players[player]?.isPlaying) {
// already playing, don't kill playback
return;
}
@@ -363,130 +273,6 @@ export const load = (
console.log("loading");
- let waveform = document.getElementById("waveform-" + player.toString());
- if (waveform !== null) {
- waveform.innerHTML = "";
- }
- const wavesurfer = WaveSurfer.create({
- audioContext,
- container: "#waveform-" + player.toString(),
- waveColor: "#CCCCFF",
- progressColor: "#9999FF",
- backend: "MediaElementWebAudio",
- responsive: true,
- xhr: {
- credentials: "include",
- } as any,
- plugins: [
- CursorPlugin.create({
- showTime: true,
- opacity: 1,
- customShowTimeStyle: {
- "background-color": "#000",
- color: "#fff",
- padding: "2px",
- "font-size": "10px",
- },
- }),
- RegionsPlugin.create({}),
- ],
- });
-
- wavesurfer.on("ready", () => {
- dispatch(mixerState.actions.itemLoadComplete({ player }));
- dispatch(
- mixerState.actions.setTimeLength({
- player,
- time: wavesurfer.getDuration(),
- })
- );
- dispatch(
- mixerState.actions.setTimeCurrent({
- player,
- time: 0,
- })
- );
- const state = getState().mixer.players[player];
- if (state.playOnLoad) {
- wavesurfer.play();
- }
- if (state.loadedItem && "intro" in state.loadedItem) {
- wavesurfer.addRegion({
- id: "intro",
- resize: false,
- start: 0,
- end: state.loadedItem.intro,
- color: "rgba(125,0,255, 0.12)",
- });
- }
- });
- wavesurfer.on("play", () => {
- dispatch(mixerState.actions.setPlayerState({ player, state: "playing" }));
- });
- wavesurfer.on("pause", () => {
- dispatch(
- mixerState.actions.setPlayerState({
- player,
- state: wavesurfer.getCurrentTime() === 0 ? "stopped" : "paused",
- })
- );
- });
- wavesurfer.on("seek", () => {
- dispatch(
- mixerState.actions.setTimeCurrent({
- player,
- time: wavesurfer.getCurrentTime(),
- })
- );
- });
- wavesurfer.on("finish", () => {
- dispatch(mixerState.actions.setPlayerState({ player, state: "stopped" }));
- const state = getState().mixer.players[player];
- if (state.tracklistItemID !== -1) {
- dispatch(BroadcastState.tracklistEnd(state.tracklistItemID));
- }
- if (state.repeat === "one") {
- wavesurfer.play();
- } else if (state.repeat === "all") {
- if ("channel" in item) {
- // it's not in the CML/libraries "column"
- const itsChannel = getState()
- .showplan.plan!.filter((x) => x.channel === item.channel)
- .sort((x, y) => x.weight - y.weight);
- const itsIndex = itsChannel.indexOf(item);
- if (itsIndex === itsChannel.length - 1) {
- dispatch(load(player, itsChannel[0]));
- }
- }
- } else if (state.autoAdvance) {
- if ("channel" in item) {
- // it's not in the CML/libraries "column"
- const itsChannel = getState()
- .showplan.plan!.filter((x) => x.channel === item.channel)
- .sort((x, y) => x.weight - y.weight);
- const itsIndex = itsChannel.indexOf(item);
- if (itsIndex > -1 && itsIndex !== itsChannel.length - 1) {
- dispatch(load(player, itsChannel[itsIndex + 1]));
- }
- }
- }
- });
- wavesurfer.on("audioprocess", () => {
- if (
- Math.abs(
- wavesurfer.getCurrentTime() -
- getState().mixer.players[player].timeCurrent
- ) > 0.5
- ) {
- dispatch(
- mixerState.actions.setTimeCurrent({
- player,
- time: wavesurfer.getCurrentTime(),
- })
- );
- }
- });
-
try {
const signal = loadAbortControllers[player].signal; // hang on to the signal, even if its controller gets replaced
const result = await fetch(url, {
@@ -509,22 +295,93 @@ export const load = (
const blob = new Blob([rawData]);
const objectUrl = URL.createObjectURL(blob);
- const audio = new Audio(objectUrl);
+ const playerInstance = await audioEngine.createPlayer(player, objectUrl);
- wavesurfer.load(audio);
+ playerInstance.on("loadComplete", (duration) => {
+ console.log("loadComplete");
+ dispatch(mixerState.actions.itemLoadComplete({ player }));
+ dispatch(
+ mixerState.actions.setTimeLength({
+ player,
+ time: duration,
+ })
+ );
+ dispatch(
+ mixerState.actions.setTimeCurrent({
+ player,
+ time: 0,
+ })
+ );
+ const state = getState().mixer.players[player];
+ if (state.playOnLoad) {
+ playerInstance.play();
+ }
+ if (state.loadedItem && "intro" in state.loadedItem) {
+ playerInstance.setIntro(state.loadedItem.intro);
+ }
+ });
- // THIS IS BAD
- (wavesurfer as any).backend.gainNode.disconnect();
- (wavesurfer as any).backend.gainNode.connect(finalCompressor);
- (wavesurfer as any).backend.gainNode.connect(audioContext.destination);
+ playerInstance.on("play", () => {
+ dispatch(mixerState.actions.setPlayerState({ player, state: "playing" }));
+ });
+ playerInstance.on("pause", () => {
+ dispatch(
+ mixerState.actions.setPlayerState({
+ player,
+ state: playerInstance.currentTime === 0 ? "stopped" : "paused",
+ })
+ );
+ });
+ playerInstance.on("timeChange", (time) => {
+ if (Math.abs(time - getState().mixer.players[player].timeCurrent) > 0.5) {
+ dispatch(
+ mixerState.actions.setTimeCurrent({
+ player,
+ time,
+ })
+ );
+ }
+ });
+ playerInstance.on("finish", () => {
+ dispatch(mixerState.actions.setPlayerState({ player, state: "stopped" }));
+ const state = getState().mixer.players[player];
+ if (state.tracklistItemID !== -1) {
+ dispatch(BroadcastState.tracklistEnd(state.tracklistItemID));
+ }
+ if (state.repeat === "one") {
+ playerInstance.play();
+ } else if (state.repeat === "all") {
+ if ("channel" in item) {
+ // it's not in the CML/libraries "column"
+ const itsChannel = getState()
+ .showplan.plan!.filter((x) => x.channel === item.channel)
+ .sort((x, y) => x.weight - y.weight);
+ const itsIndex = itsChannel.indexOf(item);
+ if (itsIndex === itsChannel.length - 1) {
+ dispatch(load(player, itsChannel[0]));
+ }
+ }
+ } else if (state.autoAdvance) {
+ if ("channel" in item) {
+ // it's not in the CML/libraries "column"
+ const itsChannel = getState()
+ .showplan.plan!.filter((x) => x.channel === item.channel)
+ .sort((x, y) => x.weight - y.weight);
+ const itsIndex = itsChannel.indexOf(item);
+ if (itsIndex > -1 && itsIndex !== itsChannel.length - 1) {
+ dispatch(load(player, itsChannel[itsIndex + 1]));
+ }
+ }
+ }
+ });
// Double-check we haven't been aborted since
if (signal.aborted) {
+ // noinspection ExceptionCaughtLocallyJS
throw new DOMException("abort load", "AbortError");
}
- wavesurfer.setVolume(getState().mixer.players[player].gain);
- wavesurfers[player] = wavesurfer;
+ playerInstance.setVolume(getState().mixer.players[player].gain);
delete loadAbortControllers[player];
} catch (e) {
if ("name" in e && e.name === "AbortError") {
@@ -540,20 +397,20 @@ export const play = (player: number): AppThunk => async (
dispatch,
getState
) => {
- if (typeof wavesurfers[player] === "undefined") {
+ if (typeof audioEngine.players[player] === "undefined") {
console.log("nothing loaded");
return;
}
- if (audioContext.state !== "running") {
+ if (audioEngine.audioContext.state !== "running") {
console.log("Resuming AudioContext because Chrome bad");
- await audioContext.resume();
+ await audioEngine.audioContext.resume();
}
- var state = getState().mixer.players[player];
+ const state = getState().mixer.players[player];
if (state.loading !== -1) {
console.log("not ready");
return;
}
- wavesurfers[player].play();
+ audioEngine.players[player]?.play();
if (state.loadedItem && "album" in state.loadedItem) {
//track
@@ -567,7 +424,7 @@ export const play = (player: number): AppThunk => async (
};
export const pause = (player: number): AppThunk => (dispatch, getState) => {
- if (typeof wavesurfers[player] === "undefined") {
+ if (typeof audioEngine.players[player] === "undefined") {
console.log("nothing loaded");
return;
}
@@ -575,15 +432,15 @@ export const pause = (player: number): AppThunk => (dispatch, getState) => {
console.log("not ready");
return;
}
- if (wavesurfers[player].isPlaying()) {
- wavesurfers[player].pause();
+ if (audioEngine.players[player]?.isPlaying) {
+ audioEngine.players[player]?.pause();
} else {
- wavesurfers[player].play();
+ audioEngine.players[player]?.play();
}
};
export const stop = (player: number): AppThunk => (dispatch, getState) => {
- if (typeof wavesurfers[player] === "undefined") {
+ if (typeof audioEngine.players[player] === "undefined") {
console.log("nothing loaded");
return;
}
@@ -592,7 +449,7 @@ export const stop = (player: number): AppThunk => (dispatch, getState) => {
console.log("not ready");
return;
}
- wavesurfers[player].stop();
+ audioEngine.players[player]?.stop();
// Incase wavesurver wasn't playing, it won't 'finish', so just make sure the UI is stopped.
dispatch(mixerState.actions.setPlayerState({ player, state: "stopped" }));
@@ -608,8 +465,8 @@ export const {
} = mixerState.actions;
export const redrawWavesurfers = (): AppThunk => () => {
- wavesurfers.forEach(function(item) {
- item.drawBuffer();
+ audioEngine.players.forEach(function(item) {
+ item?.redraw();
});
};
@@ -668,8 +525,8 @@ export const setVolume = (
.time(FADE_TIME_SECONDS * 1000)
.easing((Between as any).Easing.Exponential.InOut)
.on("update", (val: number) => {
- if (typeof wavesurfers[player] !== "undefined") {
- wavesurfers[player].setVolume(val);
+ if (typeof audioEngine.players[player] !== "undefined") {
+ audioEngine.players[player]?.setVolume(val);
}
})
.on("complete", () => {
@@ -693,9 +550,9 @@ export const openMicrophone = (micID: string): AppThunk => async (
// if (getState().mixer.mic.open) {
// micSource?.disconnect();
// }
- if (audioContext.state !== "running") {
+ if (audioEngine.audioContext.state !== "running") {
console.log("Resuming AudioContext because Chrome bad");
- await audioContext.resume();
+ await audioEngine.audioContext.resume();
}
dispatch(mixerState.actions.setMicError(null));
if (!("mediaDevices" in navigator)) {
@@ -704,15 +561,7 @@ export const openMicrophone = (micID: string): AppThunk => async (
return;
}
try {
- micMedia = await navigator.mediaDevices.getUserMedia({
- audio: {
- deviceId: { exact: micID },
- echoCancellation: false,
- autoGainControl: false,
- noiseSuppression: false,
- latency: 0.01,
- },
- });
+ await audioEngine.openMic(micID);
} catch (e) {
if (e instanceof DOMException) {
switch (e.message) {
@@ -727,33 +576,12 @@ export const openMicrophone = (micID: string): AppThunk => async (
}
return;
}
- // Okay, we have a mic stream, time to do some audio nonsense
+
const state = getState().mixer.mic;
- micSource = audioContext.createMediaStreamSource(micMedia);
+ audioEngine.setMicCalibrationGain(state.baseGain);
+ audioEngine.setMicVolume(state.volume);
- micCalibrationGain = audioContext.createGain();
- micCalibrationGain.gain.value = state.baseGain;
-
- micCompressor = audioContext.createDynamicsCompressor();
- micCompressor.ratio.value = 3; // mic compressor - fairly gentle, can be upped
- micCompressor.threshold.value = -18;
- micCompressor.attack.value = 0.01;
- micCompressor.release.value = 0.1;
-
- micMixGain = audioContext.createGain();
- micMixGain.gain.value = state.volume;
-
- micSource
- .connect(micCalibrationGain)
- .connect(micCompressor)
- .connect(micMixGain)
- .connect(finalCompressor);
dispatch(mixerState.actions.micOpen(micID));
-
- const state2 = getState();
- if (state2.optionsMenu.open && state2.optionsMenu.currentTab === "mic") {
- dispatch(startMicCalibration());
- }
};
export const setMicVolume = (level: MicVolumePresetEnum): AppThunk => (
@@ -773,84 +601,28 @@ export const setMicVolume = (level: MicVolumePresetEnum): AppThunk => (
mixerState.actions.setMicLevels({ volume: levelVal, gain: levelVal })
);
// latency, plus a little buffer
- }, audioContext.baseLatency * 1000 + 150);
+ }, audioEngine.audioContext.baseLatency * 1000 + 150);
}
};
-let analyser: AnalyserNode | null = null;
-
-const CALIBRATE_THE_CALIBRATOR = false;
-
-export const startMicCalibration = (): AppThunk => async (
- dispatch,
- getState
-) => {
- if (!getState().mixer.mic.open) {
- return;
- }
- dispatch(mixerState.actions.startMicCalibration());
- let input: AudioNode;
- if (CALIBRATE_THE_CALIBRATOR) {
- const sauce = new Audio(
- "https://ury.org.uk/myradio/NIPSWeb/managed_play/?managedid=6489"
- ); // URY 1K Sine -2.5dbFS PPM5
- sauce.crossOrigin = "use-credentials";
- sauce.autoplay = true;
- sauce.load();
- input = audioContext.createMediaElementSource(sauce);
- } else {
- input = micCalibrationGain!;
- }
- analyser = audioContext.createAnalyser();
- analyser.fftSize = 8192;
- input.connect(analyser);
-};
-
-let float: Float32Array | null = null;
-
-export function getMicAnalysis() {
- if (!analyser) {
- throw new Error();
- }
- if (!float) {
- float = new Float32Array(analyser.fftSize);
- }
- analyser.getFloatTimeDomainData(float);
- let peak = 0;
- for (let i = 0; i < float.length; i++) {
- peak = Math.max(peak, float[i] ** 2);
- }
- return 10 * Math.log10(peak);
-}
-
-export const stopMicCalibration = (): AppThunk => (dispatch, getState) => {
- if (getState().mixer.mic.calibration === null) {
- return;
- }
- dispatch(mixerState.actions.stopMicCalibration());
-};
-
export const mixerMiddleware: Middleware<{}, RootState, Dispatch> = (
store
) => (next) => (action) => {
const oldState = store.getState().mixer;
const result = next(action);
const newState = store.getState().mixer;
+
newState.players.forEach((state, index) => {
- if (typeof wavesurfers[index] !== "undefined") {
- if (oldState.players[index].gain !== newState.players[index].gain) {
- wavesurfers[index].setVolume(state.gain);
- }
+ if (oldState.players[index].gain !== newState.players[index].gain) {
+ audioEngine.players[index]?.setVolume(state.gain);
}
});
- if (
- newState.mic.baseGain !== oldState.mic.baseGain &&
- micCalibrationGain !== null
- ) {
- micCalibrationGain.gain.value = newState.mic.baseGain;
+
+ if (newState.mic.baseGain !== oldState.mic.baseGain) {
+ audioEngine.setMicCalibrationGain(newState.mic.baseGain);
}
- if (newState.mic.volume !== oldState.mic.volume && micMixGain !== null) {
- micMixGain.gain.value = newState.mic.volume;
+ if (newState.mic.volume !== oldState.mic.volume) {
+ audioEngine.setMicVolume(newState.mic.volume);
}
return result;
};
diff --git a/src/optionsMenu/MicTab.tsx b/src/optionsMenu/MicTab.tsx
index e4626c3..417b50f 100644
--- a/src/optionsMenu/MicTab.tsx
+++ b/src/optionsMenu/MicTab.tsx
@@ -29,10 +29,16 @@ export function MicTab() {
const [openError, setOpenError] = useState(null);
async function fetchMicNames() {
+ console.log("start fetchNames");
+ if (!("getUserMedia" in navigator.mediaDevices)) {
+ setOpenError("NOT_SECURE_CONTEXT");
+ return;
+ }
// Because Chrome, we have to call getUserMedia() before enumerateDevices()
try {
await navigator.mediaDevices.getUserMedia({ audio: true });
} catch (e) {
+ console.warn(e);
if (e instanceof DOMException) {
switch (e.message) {
case "Permission denied":
@@ -46,8 +52,11 @@ export function MicTab() {
}
return;
}
+ console.log("done");
try {
+ console.log("gUM");
const devices = await navigator.mediaDevices.enumerateDevices();
+ console.log(devices);
setMicList(reduceToInputs(devices));
} catch (e) {
setOpenError("UNKNOWN_ENUM");
@@ -61,7 +70,11 @@ export function MicTab() {
return (
<>
-