Bonnie Kwong <tersewings@protonmail.com>
Bonnie Kwong <tersewings@protonmail.com> Bonnie Kwong <bonniekwong@Bonnies-MBP-2.attlocal.net>
-Omar Costa Hamido <omarcostinha@gmail.com>
+Omar Costa Hamido <ocostaha@uci.edu>
Mike Dickey <mike@mikedickey.com>
Mike Dickey <mike@mikedickey.com> Mike Dickey <mdickey@splunk.com>
if (NOT novs)
set (qjacktrip_SRC ${qjacktrip_SRC}
src/gui/virtualstudio.cpp
+ src/gui/vsApi.cpp
+ src/gui/vsAuth.cpp
+ src/gui/vsDeviceCodeFlow.cpp
src/gui/vsInit.cpp
src/gui/vsQuickView.cpp
src/gui/vsServerInfo.cpp
src/gui/vsWebSocket.cpp
src/gui/vsPermissions.cpp
src/gui/qjacktrip.qrc
+ src/Analyzer.cpp
src/Monitor.cpp
src/Volume.cpp
src/Tone.cpp
"--device=dri",
"--share=network",
"--filesystem=xdg-run/pipewire-0",
- "--env=PIPEWIRE_LATENCY=256/48000"
+ "--env=PIPEWIRE_LATENCY=256/48000",
+ "--env=QT_QUICK_CONTROLS_STYLE=universal"
],
"cleanup": [
"/lib/python3.10",
+- Version: "1.10.0"
+ Date: 2023-06-16
+ Description:
+    - (added) VS Mode automatically mutes when feedback is detected
+ - (added) VS Mode - notify people of network outages
+ - (updated) PLC optimizations for slower hardware devices
+ - (updated) VS Mode volume meter performance improvements
+ - (updated) VS Mode login no longer listens on a local port
+ - (updated) VS Mode improved clarity of startup messages
+ - (fixed) Windows crash caused by Realtek ASIO driver
+ - (fixed) More friendly message for Windows Terminal bug
+ - (fixed) Faster startup by reducing and deferring audio interface scans
+ - (fixed) Switching from VS to Classic mode leaves audio open
- Version: "1.9.0"
Date: 2023-05-05
Description:
application_id = 'org.jacktrip.JackTrip'
name_suffix = ''
}
+QMAKE_CFLAGS_RELEASE += -DNDEBUG
+QMAKE_CXXFLAGS_RELEASE += -DNDEBUG
equals(QT_EDITION, "OpenSource") {
DEFINES += QT_OPENSOURCE
# Input
HEADERS += src/DataProtocol.h \
src/JackTrip.h \
+ src/Analyzer.h \
src/Effects.h \
src/Compressor.h \
src/CompressorPresets.h \
src/Limiter.h \
src/Regulator.h \
+ src/WaitFreeRingBuffer.h \
+ src/WaitFreeFrameBuffer.h \
src/Reverb.h \
src/Meter.h \
src/Monitor.h \
src/gui/vuMeter.h
!novs {
HEADERS += src/gui/virtualstudio.h \
+ src/gui/vsApi.h \
+ src/gui/vsAuth.h \
+ src/gui/vsDeviceCodeFlow.h \
src/gui/vsInit.h \
src/gui/vsDevice.h \
src/gui/vsAudioInterface.h \
SOURCES += src/DataProtocol.cpp \
src/JackTrip.cpp \
+ src/Analyzer.cpp \
src/Compressor.cpp \
src/Limiter.cpp \
src/Regulator.cpp \
src/gui/vuMeter.cpp
!novs {
SOURCES += src/gui/virtualstudio.cpp \
+ src/gui/vsApi.cpp \
+ src/gui/vsAuth.cpp \
+ src/gui/vsDeviceCodeFlow.cpp \
src/gui/vsInit.cpp \
src/gui/vsDevice.cpp \
src/gui/vsAudioInterface.cpp \
update-desktop-database $HOME/.local/share/applications
```
+When using jacktrip with the JACK Audio Connection Kit (or PipeWire), ensure that your user account has permission to schedule realtime processes.
+`ulimit -r` should return a value greater than 40.
+
Further information and instructions are available on https://jacktrip.github.io/jacktrip/.
Please report any security concerns to vulnerabilities@jacktrip.org
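If you prefer to verify the realtime-scheduling note above from code, here is a small standalone sketch (not part of JackTrip, Linux-only assumption) that checks the same limit that `ulimit -r` reports, via `RLIMIT_RTPRIO`; the threshold of 40 simply mirrors the guidance above.

```cpp
// Standalone check of the realtime-priority limit reported by `ulimit -r`.
// Illustrative only; assumes Linux and the guidance above (limit > 40).
#include <sys/resource.h>

#include <cstdio>

int main()
{
    struct rlimit rl;
    if (getrlimit(RLIMIT_RTPRIO, &rl) != 0) {
        std::perror("getrlimit");
        return 1;
    }
    std::printf("max realtime priority: %llu\n",
                static_cast<unsigned long long>(rl.rlim_cur));
    // JackTrip's realtime threads need this limit to be greater than 40
    return (rl.rlim_cur > 40) ? 0 : 1;
}
```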
- -Dprofile=development
sources:
- type: git
- disable-submodules: true
+ disable-submodules: false
url: https://github.com/jacktrip/jacktrip.git
branch: dev
- -Dprofile=development
sources:
- type: git
- disable-submodules: true
+ disable-submodules: false
url: {{ env['REPO'] }}
branch: {{ env['REF'] }}
buildsystem: meson
sources:
- type: git
- disable-submodules: true
+ disable-submodules: false
url: https://github.com/jacktrip/jacktrip.git
branch: main
src = [ 'src/DataProtocol.cpp',
'src/JackTrip.cpp',
'src/ProcessPlugin.cpp',
+ 'src/Analyzer.cpp',
'src/AudioTester.cpp',
'src/jacktrip_globals.cpp',
'src/JackTripWorker.cpp',
moc_h = ['src/DataProtocol.h',
'src/JackTrip.h',
'src/ProcessPlugin.h',
+ 'src/Analyzer.h',
'src/Meter.h',
'src/Monitor.h',
'src/StereoToMono.h',
'src/JackTripWorker.h',
'src/PacketHeader.h',
'src/Regulator.h',
+ 'src/WaitFreeRingBuffer.h',
+ 'src/WaitFreeFrameBuffer.h',
'src/Settings.h',
'src/UdpDataProtocol.h',
'src/UdpHubListener.h',
else
src += [
'src/gui/virtualstudio.cpp',
+ 'src/gui/vsAuth.cpp',
+ 'src/gui/vsApi.cpp',
+ 'src/gui/vsDeviceCodeFlow.cpp',
'src/gui/vsInit.cpp',
'src/gui/vsDevice.cpp',
'src/gui/vsAudioInterface.cpp',
]
moc_h += [
'src/gui/virtualstudio.h',
+ 'src/gui/vsApi.h',
+ 'src/gui/vsAuth.h',
+ 'src/gui/vsDeviceCodeFlow.h',
'src/gui/vsInit.h',
'src/gui/vsDevice.h',
'src/gui/vsAudioInterface.h',
deps += apple_av_dep
endif
-qres_files = qt.compile_resources(sources: qres)
+qres_files = []
+if qres.length() > 0
+ qres_files = qt.compile_resources(sources: qres)
+endif
moc_files = qt.compile_moc(headers: moc_h, extra_args: defines)
-ui_files = qt.compile_ui(sources: ui_h)
+ui_files = []
+if ui_h.length() > 0
+ ui_files = qt.compile_ui(sources: ui_h)
+endif
jacktrip = executable('jacktrip', src, qres_files, ui_files, moc_files, include_directories: incdirs, dependencies: deps, c_args: c_defines, cpp_args: defines, install: true )
{
"app_name": "JackTrip",
"releases": [
+ {
+ "version": "1.9.0",
+ "changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.9.0",
+ "download": {
+ "date": "2023-05-12T00:00:00Z",
+ "url": "https://files.jacktrip.org/app-builds/JackTrip-v1.9.0-macOS-x64-signed-installer.pkg",
+ "downloadSize": "22806741",
+ "sha256": "71544c899c7ed4a6a93a4ee1c2452a895f61e25a000954e0e40584abef488488"
+ }
+ },
{
"version": "1.9.0-beta3",
"changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.9.0-beta3",
{
"app_name": "JackTrip",
"releases": [
+ {
+ "version": "1.9.0",
+ "changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.9.0",
+ "download": {
+ "date": "2023-05-12T00:00:00Z",
+ "url": "https://files.jacktrip.org/app-builds/JackTrip-v1.9.0-Windows-x64-signed-installer.msi",
+ "downloadSize": "46383104",
+ "sha256": "ab73d244c04d7c5a6e553aa69dc989a2dcc519056bc2f7e99126af8a5bdd8f6f"
+ }
+ },
{
"version": "1.9.0-beta3",
"changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.9.0-beta3",
{
"app_name": "JackTrip",
"releases": [
+ {
+ "version": "1.9.0",
+ "changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.9.0",
+ "download": {
+ "date": "2023-05-12T00:00:00Z",
+ "url": "https://files.jacktrip.org/app-builds/JackTrip-v1.9.0-Linux-x64-binary.zip",
+ "downloadSize": "13665582",
+ "sha256": "1949e6ca152b753bc2113a8b663bd0b0aeecf871912245d056e2d28007ec45db"
+ }
+ },
{
"version": "1.8.1",
"changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.8.1",
{
"app_name": "JackTrip",
"releases": [
+ {
+ "version": "1.9.0",
+ "changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.9.0",
+ "download": {
+ "date": "2023-05-12T00:00:00Z",
+ "url": "https://files.jacktrip.org/app-builds/JackTrip-v1.9.0-macOS-x64-signed-installer.pkg",
+ "downloadSize": "22806741",
+ "sha256": "71544c899c7ed4a6a93a4ee1c2452a895f61e25a000954e0e40584abef488488"
+ }
+ },
{
"version": "1.8.1",
"changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.8.1",
{
"app_name": "JackTrip",
"releases": [
+ {
+ "version": "1.9.0",
+ "changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.9.0",
+ "download": {
+ "date": "2023-05-12T00:00:00Z",
+ "url": "https://files.jacktrip.org/app-builds/JackTrip-v1.9.0-Windows-x64-signed-installer.msi",
+ "downloadSize": "46383104",
+ "sha256": "ab73d244c04d7c5a6e553aa69dc989a2dcc519056bc2f7e99126af8a5bdd8f6f"
+ }
+ },
{
"version": "1.8.1",
"changelog": "Full changelog at https://github.com/jacktrip/jacktrip/releases/tag/v1.8.1",
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2020 Julius Smith, Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file Analyzer.cpp
+ * \author Dominick Hing
+ * \date May 2023
+ * \license MIT
+ */
+
+#include "Analyzer.h"
+
+#include <QMutexLocker>
+#include <algorithm>
+#include <functional>
+#include <iostream>
+
+#include "jacktrip_types.h"
+
+//*******************************************************************************
+Analyzer::Analyzer(int numchans, bool verboseFlag)
+ : mNumChannels(numchans), mCircularBufferPtr(nullptr)
+{
+ setVerbose(verboseFlag);
+
+ // size full spectra
+ mCurrentNorms.resize(mFftSize);
+ mCurrentSpectra.resize(mFftSize);
+
+ // allocate buffers for holding on to past spectra
+ int nPositiveFreqs = 0.5 * mFftSize + 1;
+ mSpectra = new float*[mNumSpectra];
+ mSpectraDifferentials = new float*[mNumSpectra];
+ for (int i = 0; i < mNumSpectra; i++) {
+ mSpectra[i] = new float[nPositiveFreqs];
+ mSpectraDifferentials[i] = new float[nPositiveFreqs];
+ }
+}
+
+//*******************************************************************************
+Analyzer::~Analyzer()
+{
+ mTimer.stop();
+ for (int i = 0; i < mNumSpectra; i++) {
+        delete[] mSpectra[i];
+        delete[] mSpectraDifferentials[i];
+ }
+
+ if (mCircularBufferPtr != nullptr) {
+ delete mCircularBufferPtr;
+ }
+
+    delete[] mSpectra;
+    delete[] mSpectraDifferentials;
+}
+
+//*******************************************************************************
+void Analyzer::init(int samplingRate, int bufferSize)
+{
+ ProcessPlugin::init(samplingRate, bufferSize);
+ fs = float(fSamplingFreq);
+
+ mPushBuffer.resize(mBufferSize);
+ mCircularBufferPtr = new WaitFreeFrameBuffer<4096>(mBufferSize * sizeof(float));
+
+ /* Start timer */
+ connect(&mTimer, &QTimer::timeout, this, &Analyzer::onTick);
+ mTimer.setTimerType(Qt::PreciseTimer);
+ mTimer.setInterval(mInterval);
+ mTimer.setSingleShot(false);
+ mTimer.start();
+
+ inited = true;
+}
+
+//*******************************************************************************
+void Analyzer::compute(int nframes, float** inputs, float** outputs)
+{
+ if (not inited) {
+ std::cerr << "*** Analyzer " << this << ": init never called! Doing it now.\n";
+ init(0, 0);
+ }
+
+ // just a sanity check; should never happen
+ if (nframes > mBufferSize)
+ nframes = mBufferSize;
+
+ // sum up all channels and add it to the buffer
+ for (int i = 0; i < nframes; i++) {
+ mPushBuffer[i] = 0;
+ for (int ch = 0; ch < mNumChannels; ch++) {
+ if (!mIsMonitoringAnalyzer) {
+ mPushBuffer[i] += inputs[ch][i];
+ } else {
+ mPushBuffer[i] += outputs[ch][i];
+ }
+ }
+ }
+ int8_t* ptr = reinterpret_cast<int8_t*>(mPushBuffer.data());
+ mCircularBufferPtr->push(ptr);
+
+ hasProcessedAudio = true;
+}
+
+//*******************************************************************************
+void Analyzer::onTick()
+{
+    // cannot process audio if no samples have been added to the ring buffer yet
+ if (!hasProcessedAudio) {
+ return;
+ }
+
+ const uint32_t buffers = mCircularBufferPtr->size();
+ const uint32_t samples = buffers * mBufferSize;
+
+ // require at least mFftSize values to process, otherwise return
+ if (samples < mFftSize) {
+        // shouldn't happen due to 48 kHz sample rate and timing config, but just in case
+ return;
+ }
+
+ mPullBuffer.resize(samples);
+ int8_t* pullPtr = reinterpret_cast<int8_t*>(mPullBuffer.data());
+ for (uint32_t i = 0; i < buffers; i++) {
+ mCircularBufferPtr->pop(pullPtr);
+ pullPtr += mCircularBufferPtr->getBytesPerFrame();
+ }
+
+ const char* err_str = NULL;
+    simple_fft::FFT(&mPullBuffer[mPullBuffer.size() - mFftSize], mCurrentSpectra,
+ mFftSize, err_str);
+ for (uint32_t i = 0; i < mFftSize; i++) {
+ mCurrentNorms[i] = norm(mCurrentSpectra[i]);
+ }
+
+ // update instance spectra and differentials buffers
+ updateSpectra();
+ updateSpectraDifferentials();
+
+ // check for audio feedback loops
+ bool detectedFeedback = checkForAudioFeedback();
+ if (detectedFeedback) {
+ emit signalFeedbackDetected();
+ }
+}
+
+//*******************************************************************************
+void Analyzer::updateSpectra()
+{
+ int nPositiveFreqs = .5 * mFftSize + 1;
+ float* currentSpectra = mSpectra[0];
+ for (int i = 0; i < nPositiveFreqs; i++) {
+ currentSpectra[i] = mCurrentNorms[i];
+ }
+
+ // shift all buffers by 1 forward
+ for (int i = 0; i < mNumSpectra - 1; i++) {
+ mSpectra[i] = mSpectra[i + 1];
+ }
+ mSpectra[mNumSpectra - 1] = currentSpectra;
+}
+
+//*******************************************************************************
+void Analyzer::updateSpectraDifferentials()
+{
+ int nPositiveFreqs = .5 * mFftSize + 1;
+
+ // compute spectra differentials
+ for (int i = 0; i < nPositiveFreqs; i++) {
+ // set the first spectra differential to 0
+ mSpectraDifferentials[0][i] = 0;
+ }
+
+ for (int i = 1; i < mNumSpectra; i++) {
+ for (int j = 0; j < nPositiveFreqs; j++) {
+ mSpectraDifferentials[i][j] = mSpectra[i][j] - mSpectra[i - 1][j];
+ }
+ }
+}
+
+//*******************************************************************************
+bool Analyzer::checkForAudioFeedback()
+{
+ if (!testSpectralPeakAboveThreshold()) {
+ return false;
+ }
+
+ if (!testSpectralPeakAbnormallyHigh()) {
+ return false;
+ }
+
+ if (!testSpectralPeakGrowing()) {
+ return false;
+ }
+
+ return true;
+}
+
+//*******************************************************************************
+bool Analyzer::testSpectralPeakAboveThreshold()
+{
+ // this test checks if the peak of the latest spectra is above a certain threshold
+
+ float* latestSpectra = mSpectra[mNumSpectra - 1];
+ int nPositiveFreqs = .5 * mFftSize + 1;
+
+    // the exact threshold can be adjusted using mPeakThresholdMultiplier;
+    // for a non-clipping signal, we can expect any value to be between 0 and N^2,
+    // with N being the number of FFT points
+    float threshold = 128 * 128 * mPeakThresholdMultiplier;
+
+ float peak = 0.0f;
+ for (int i = 0; i < nPositiveFreqs; i++) {
+ if (latestSpectra[i] > peak) {
+ peak = latestSpectra[i];
+ }
+ }
+ return peak > threshold;
+}
+
+//*******************************************************************************
+bool Analyzer::testSpectralPeakAbnormallyHigh()
+{
+    // this test checks if the peak of the latest spectra is substantially higher than
+    // the other frequencies in the sample. As a heuristic, we check whether the peak
+    // is more than a few orders of magnitude above the median magnitude - in other
+    // words, whether peak / median exceeds a certain threshold
+
+ float* latestSpectra = mSpectra[mNumSpectra - 1];
+ int nPositiveFreqs = .5 * mFftSize + 1;
+
+ std::vector<float> latestSpectraSorted;
+ for (int i = 0; i < nPositiveFreqs; i++) {
+ latestSpectraSorted.push_back(latestSpectra[i]);
+ }
+ std::sort(latestSpectraSorted.begin(), latestSpectraSorted.end(), std::less<float>());
+
+ float threshold = mPeakDeviationThresholdMultiplier * 100 * 100;
+
+ float peak = 0.0f;
+ for (int i = 0; i < nPositiveFreqs; i++) {
+ if (latestSpectra[i] > peak) {
+ peak = latestSpectra[i];
+ }
+ }
+
+ float median = latestSpectraSorted[(int)(nPositiveFreqs / 2)];
+
+ return peak / median > threshold;
+}
+
+//*******************************************************************************
+bool Analyzer::testSpectralPeakGrowing()
+{
+ // this test checks if the peak of the spectra has a history of growth over the last
+ // few samples. This likely indicates a positive feedback loop
+
+ float* latestSpectra = mSpectra[mNumSpectra - 1];
+ int nPositiveFreqs = .5 * mFftSize + 1;
+
+ float peak = 0.0f;
+ int peakIndex = 0;
+ for (int i = 0; i < nPositiveFreqs; i++) {
+ if (latestSpectra[i] > peak) {
+ peak = latestSpectra[i];
+ peakIndex = i;
+ }
+ }
+
+ std::vector<float> valueVsTime;
+ std::vector<float> valueVsTimeSorted;
+ std::vector<float> differentials;
+ for (int i = 0; i < mNumSpectra; i++) {
+ valueVsTime.push_back(mSpectra[i][peakIndex]);
+ valueVsTimeSorted.push_back(mSpectra[i][peakIndex]);
+ differentials.push_back(mSpectraDifferentials[i][peakIndex]);
+ }
+ std::sort(valueVsTimeSorted.begin(), valueVsTimeSorted.end(), std::less<float>());
+
+ // test that the current value is the largest value
+ if (valueVsTimeSorted[mNumSpectra - 1] != valueVsTime[mNumSpectra - 1]) {
+ return false;
+ }
+
+ uint32_t numPositiveDifferentials = 0;
+ uint32_t numLargeDifferentials = 0;
+ for (int i = 0; i < mNumSpectra; i++) {
+ if (differentials[i] > 0) {
+ numPositiveDifferentials++;
+ }
+
+ if (differentials[i] > 10 * 10 * mDifferentialThresholdMultiplier) {
+ numLargeDifferentials++;
+ }
+ }
+
+    if (numPositiveDifferentials >= (uint32_t)(mNumSpectra * 0.8)
+ && numLargeDifferentials >= 1) {
+ return true;
+ }
+
+ if (numPositiveDifferentials >= (uint32_t)(mNumSpectra * 0.6)
+ && numLargeDifferentials >= 2) {
+ return true;
+ }
+
+ return false;
+}
+
+//*******************************************************************************
+void Analyzer::updateNumChannels(int nChansIn, int nChansOut)
+{
+ if (outgoingPluginToNetwork) {
+ mNumChannels = nChansIn;
+ } else {
+ mNumChannels = nChansOut;
+ }
+}
+
+//*******************************************************************************
+void Analyzer::setIsMonitoringAnalyzer(bool isMonitoringAnalyzer)
+{
+ mIsMonitoringAnalyzer = isMonitoringAnalyzer;
+}
\ No newline at end of file
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2020 Julius Smith, Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file Analyzer.h
+ * \author Dominick Hing
+ * \date May 2023
+ * \license MIT
+ */
+
+#ifndef __ANALYZER_H__
+#define __ANALYZER_H__
+
+#include <QMutex>
+#include <QObject>
+#include <QTimer>
+#include <vector>
+
+#include "ProcessPlugin.h"
+#include "WaitFreeFrameBuffer.h"
+#include "externals/Simple-FFT/include/simple_fft/fft.h"
+#include "externals/Simple-FFT/include/simple_fft/fft_settings.h"
+
+typedef std::vector<real_type> RealArray1D;
+typedef std::vector<complex_type> ComplexArray1D;
+
+/** \brief The Analyzer plugin analyzes the audio signal to detect feedback loops
+ */
+class Analyzer : public ProcessPlugin
+{
+ Q_OBJECT;
+
+ public:
+ /// \brief The class constructor sets the number of channels to measure
+ Analyzer(int numchans, bool verboseFlag = false);
+
+ /// \brief The class destructor
+ virtual ~Analyzer();
+
+ void init(int samplingRate, int bufferSize) override;
+ int getNumInputs() override { return (mNumChannels); }
+ int getNumOutputs() override { return (mNumChannels); }
+ void compute(int nframes, float** inputs, float** outputs) override;
+ const char* getName() const override { return "Analyzer"; };
+
+ void updateNumChannels(int nChansIn, int nChansOut) override;
+ void setIsMonitoringAnalyzer(bool isMonitoringAnalyzer);
+
+ private:
+ void addFramesToQueue(int nframes, float* samples);
+ void resizeRingBuffer();
+ void onTick();
+ void updateSpectra();
+ void updateSpectraDifferentials();
+ bool checkForAudioFeedback();
+
+ bool testSpectralPeakAboveThreshold();
+ bool testSpectralPeakAbnormallyHigh();
+ bool testSpectralPeakGrowing();
+
+ int mInterval = 100;
+    float mPeakThresholdMultiplier = 0.5;
+ float mPeakDeviationThresholdMultiplier = 0.4;
+ float mDifferentialThresholdMultiplier = 0.05;
+
+ float fs;
+ int mNumChannels;
+ bool mIsMonitoringAnalyzer = false;
+ bool hasProcessedAudio = false;
+ QTimer mTimer;
+
+ uint32_t mFftSize = 128; // FFT size parameter
+
+ // ring buffer that doesn't require locking
+ WaitFreeFrameBuffer<4096>* mCircularBufferPtr;
+
+ // buffer used to push sums into circular buffer
+ std::vector<float> mPushBuffer;
+
+ // buffer used to pull sums from circular buffer
+ std::vector<float> mPullBuffer;
+
+ // buffers used to store current points of FFT
+ std::vector<complex_type> mCurrentSpectra;
+ std::vector<float> mCurrentNorms;
+
+    // mSpectra and mSpectraDifferentials store a history of the spectral analyses
+ int mNumSpectra = 10;
+ float** mSpectra = nullptr;
+ float** mSpectraDifferentials = nullptr;
+
+ signals:
+ void signalFeedbackDetected();
+};
+
+#endif
\ No newline at end of file
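For orientation, here is a brief hypothetical sketch (not part of this change) of how a host could wire up the new Analyzer; the channel count, rates, function name, and mute handler are illustrative assumptions, and it only uses the Analyzer API shown above.

```cpp
// Hypothetical host-side wiring for the Analyzer plugin (illustrative only).
// Assumes a Qt event loop is running in the Analyzer's thread so its QTimer fires.
#include <QObject>
#include <iostream>

#include "Analyzer.h"

Analyzer* setupFeedbackDetection(QObject* host)
{
    const int numChannels = 2;  // assumed stereo input
    Analyzer* analyzer    = new Analyzer(numChannels);
    analyzer->init(48000, 128);                // sample rate (Hz), buffer size (frames)
    analyzer->setIsMonitoringAnalyzer(false);  // analyze the input (pre-network) signal

    // react to detected feedback, e.g. by muting the input chain
    QObject::connect(analyzer, &Analyzer::signalFeedbackDetected, host, []() {
        std::cout << "feedback detected -- mute input here" << std::endl;
    });

    // the audio callback would then call, once per period:
    //   analyzer->compute(nframes, inputs, outputs);
    return analyzer;
}
```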
for (ProcessPlugin* plugin : qAsConst(mProcessPluginsFromNetwork)) {
plugin->setOutgoingToNetwork(false);
plugin->updateNumChannels(nChansIn, nChansOut);
- plugin->init(mSampleRate);
+ plugin->init(mSampleRate, mBufferSizeInSamples);
}
for (ProcessPlugin* plugin : qAsConst(mProcessPluginsToNetwork)) {
plugin->setOutgoingToNetwork(true);
plugin->updateNumChannels(nChansIn, nChansOut);
- plugin->init(mSampleRate);
+ plugin->init(mSampleRate, mBufferSizeInSamples);
}
for (ProcessPlugin* plugin : qAsConst(mProcessPluginsToMonitor)) {
plugin->setOutgoingToNetwork(false);
plugin->updateNumChannels(nChansMon, nChansMon);
- plugin->init(mSampleRate);
+ plugin->init(mSampleRate, mBufferSizeInSamples);
}
}
}
}
//*******************************************************************************
-void Compressor::init(int samplingRate)
+void Compressor::init(int samplingRate, int bufferSize)
{
- ProcessPlugin::init(samplingRate);
- if (samplingRate != fSamplingFreq) {
- std::cerr << "Sampling rate not set by superclass!\n";
- std::exit(1);
- }
+ ProcessPlugin::init(samplingRate, bufferSize);
fs = float(fSamplingFreq);
for (int i = 0; i < mNumChannels; i++) {
static_cast<compressordsp*>(compressorP[i])
{
if (not inited) {
std::cerr << "*** Compressor " << this << ": init never called! Doing it now.\n";
- if (fSamplingFreq <= 0) {
- fSamplingFreq = 48000;
- std::cout << "Compressor " << this
- << ": *** HAD TO GUESS the sampling rate (chose 48000 Hz) ***\n";
- }
- init(fSamplingFreq);
+ init(0, 0);
}
for (int i = 0; i < mNumChannels; i++) {
static_cast<compressordsp*>(compressorP[i])
// void setParamAllChannels(std::string& pName, float p) {
void setParamAllChannels(const char pName[], float p);
- void init(int samplingRate) override;
+ void init(int samplingRate, int bufferSize) override;
int getNumInputs() override { return (mNumChannels); }
int getNumOutputs() override { return (mNumChannels); }
void compute(int nframes, float** inputs, float** outputs) override;
#endif // endwhere
, mBufferQueueLength(BufferQueueLength)
, mBufferStrategy(1)
+ , mRegulatorThreadPtr(NULL)
, mBroadcastQueueLength(0)
, mSampleRate(gDefaultSampleRate)
, mDeviceID(gDefaultDeviceID)
cout << "Using experimental buffer strategy " << mBufferStrategy
<< "-- Regulator with PLC (worker="
<< (use_worker_thread ? "true" : "false") << ")" << endl;
- mReceiveRingBuffer = new Regulator(mNumAudioChansOut, mAudioBitResolution,
- mAudioBufferSize, mBufferQueueLength,
- use_worker_thread, mBroadcastQueueLength);
+ Regulator* regulator_ptr =
+ new Regulator(mNumAudioChansOut, mAudioBitResolution, mAudioBufferSize,
+ mBufferQueueLength, mBroadcastQueueLength, mSampleRate);
+ mReceiveRingBuffer = regulator_ptr;
+ if (use_worker_thread) {
+#ifdef REGULATOR_SHARED_WORKER_THREAD
+ regulator_ptr->enableWorkerThread(mRegulatorThreadPtr);
+#else
+ regulator_ptr->enableWorkerThread();
+#endif
+ }
// bufStrategy 3 or 4, mBufferQueueLength is in integer msec not packets
mPacketHeader->setBufferRequiresSameSettings(false); // = asym is default
if (!mAudioTesterP.isNull() && mAudioTesterP->getEnabled()) {
mIOStatLogStream << "\n";
}
- if (getBufferStrategy() != 3)
+ if (getBufferStrategy() != 3 && getBufferStrategy() != 4)
mIOStatLogStream << now.toLocal8Bit().constData() << " "
<< getPeerAddress().toLocal8Bit().constData()
<< " send: " << send_io_stat.underruns << "/"
<< recv_io_stat.broadcast_delta
<< " autoq: " << 0.1 * recv_io_stat.autoq_corr << "/"
<< 0.1 * recv_io_stat.autoq_rate << endl;
- else { // bufstrategy 3
+ else { // bufstrategy 3 or 4
mIOStatLogStream
<< now.toLocal8Bit().constData() << " "
<< getPeerAddress().toLocal8Bit().constData()
{
mBufferStrategy = BufferStrategy;
}
+ void setRegulatorThread(QThread* ptr) { mRegulatorThreadPtr = ptr; }
/// \brief Sets (override) Audio Bit Resolution after construction
virtual void setAudioBitResolution(
AudioInterface::audioBitResolutionT AudioBitResolution)
{
return mNumAudioChansOut; /*return mAudioInterface->getNumOutputChannels();*/
}
-#ifndef NO_JACK
QString getAssignedClientName()
{
+#ifndef NO_JACK
if (mAudioInterface && mAudiointerfaceMode == JackTrip::JACK) {
return static_cast<JackAudioInterface*>(mAudioInterface)
->getAssignedClientName();
} else {
return QLatin1String("");
}
- }
+#else
+ return QLatin1String("");
#endif
+ }
virtual bool checkPeerSettings(int8_t* full_packet);
void increaseSequenceNumber() { mPacketHeader->increaseSequenceNumber(); }
int getSequenceNumber() const { return mPacketHeader->getSequenceNumber(); }
#endif // endwhere
int mBufferQueueLength; ///< Audio Buffer from network queue length
int mBufferStrategy;
+ QThread* mRegulatorThreadPtr;
int mBroadcastQueueLength;
uint32_t mSampleRate; ///< Sample Rate
uint32_t mDeviceID; ///< RTAudio DeviceID
mJackTrip->setBindPorts(mServerPort);
// jacktrip.setPeerPorts(mClientPort);
mJackTrip->setBufferStrategy(mBufferStrategy);
+ mJackTrip->setRegulatorThread(mRegulatorThreadPtr);
mJackTrip->setNetIssuesSimulation(mSimulatedLossRate, mSimulatedJitterRate,
mSimulatedDelayRel);
mJackTrip->setBroadcast(mBroadcastQueue);
AudioInterface::audioBitResolutionT AudioBitResolution = AudioInterface::BIT16,
const QString& clientName = QLatin1String(""));
/// \brief The class destructor
- ~JackTripWorker() = default;
+ virtual ~JackTripWorker() { stopThread(); }
/// \brief Starts the jacktrip process
void start();
int getID() { return mID; }
void setBufferStrategy(int BufferStrategy) { mBufferStrategy = BufferStrategy; }
+ void setRegulatorThread(QThread* ptr) { mRegulatorThreadPtr = ptr; }
void setNetIssuesSimulation(double loss, double jitter, double delay_rel)
{
mSimulatedLossRate = loss;
int mID = 0; ///< ID thread number
- int mBufferStrategy = 1;
- int mBroadcastQueue = 0;
- double mSimulatedLossRate = 0.0;
- double mSimulatedJitterRate = 0.0;
- double mSimulatedDelayRel = 0.0;
- bool mUseRtUdpPriority = false;
+ int mBufferStrategy = 1;
+ int mBroadcastQueue = 0;
+ double mSimulatedLossRate = 0.0;
+ double mSimulatedJitterRate = 0.0;
+ double mSimulatedDelayRel = 0.0;
+ bool mUseRtUdpPriority = false;
+ int mIOStatTimeout = 0;
+ QThread* mRegulatorThreadPtr = NULL;
- int mIOStatTimeout = 0;
QSharedPointer<std::ostream> mIOStatStream;
#ifdef WAIR // wair
int mNumNetRevChans = 0; ///< Number of Net Channels = net combs
}
//*******************************************************************************
-void Limiter::init(int samplingRate)
+void Limiter::init(int samplingRate, int bufferSize)
{
- ProcessPlugin::init(samplingRate);
- if (samplingRate != fSamplingFreq) {
- std::cerr << "Sampling rate not set by superclass!\n";
- std::exit(1);
- }
+ ProcessPlugin::init(samplingRate, bufferSize);
fs = float(fSamplingFreq);
for (int i = 0; i < mNumChannels; i++) {
static_cast<limiterdsp*>(limiterP[i])
{
if (not inited) {
std::cerr << "*** Limiter " << this << ": init never called! Doing it now.\n";
- if (fSamplingFreq <= 0) {
- fSamplingFreq = 48000;
- std::cout << "Limiter " << this
- << ": *** HAD TO GUESS the sampling rate (chose 48000 Hz) ***\n";
- }
- init(fSamplingFreq);
+ init(0, 0);
}
#ifdef SINE_TEST
float sineTestOut[nframes];
/// \brief The class destructor
virtual ~Limiter();
- void init(int samplingRate) override;
+ void init(int samplingRate, int bufferSize) override;
int getNumInputs() override { return (mNumChannels); }
int getNumOutputs() override { return (mNumChannels); }
void compute(int nframes, float** inputs, float** outputs) override;
#include "Meter.h"
+#include <algorithm>
#include <iostream>
#include "jacktrip_types.h"
//*******************************************************************************
Meter::~Meter()
{
+ mTimer.stop();
for (int i = 0; i < mNumChannels; i++) {
delete static_cast<meterdsp*>(meterP[i]);
}
}
//*******************************************************************************
-void Meter::init(int samplingRate)
+void Meter::init(int samplingRate, int bufferSize)
{
- ProcessPlugin::init(samplingRate);
- if (samplingRate != fSamplingFreq) {
- std::cerr << "Sampling rate not set by superclass!\n";
- std::exit(1);
- }
+ ProcessPlugin::init(samplingRate, bufferSize);
fs = float(fSamplingFreq);
for (int i = 0; i < mNumChannels; i++) {
{
if (not inited) {
std::cerr << "*** Meter " << this << ": init never called! Doing it now.\n";
- if (fSamplingFreq <= 0) {
- fSamplingFreq = 48000;
- std::cout << "Meter " << this
- << ": *** HAD TO GUESS the sampling rate (chose 48000 Hz) ***\n";
- }
- init(fSamplingFreq);
+ init(0, 0);
}
// Will measure inputs by default unless mMeasureOutputBuffer = true,
/* Use the existing value of mValues[i] as
the threshold - this will be reset to the default floor of -80dB
on each timeout */
- float max = mValues[i];
- for (int j = 0; j < nframes; j++) {
- if (mBuffer[j] > max) {
- max = mBuffer[j];
- }
- }
+ float maxSample = *std::max_element(mBuffer, mBuffer + nframes);
/* Update mValues */
- mValues[i] = max;
+ mValues[i] = std::max(mValues[i], maxSample);
}
/* Set processed audio flag */
/// \brief The class destructor
virtual ~Meter();
- void init(int samplingRate) override;
+ void init(int samplingRate, int bufferSize) override;
int getNumInputs() override { return (mNumChannels); }
int getNumOutputs() override { return (mNumChannels); }
void compute(int nframes, float** inputs, float** outputs) override;
}
//*******************************************************************************
-void Monitor::init(int samplingRate)
+void Monitor::init(int samplingRate, int bufferSize)
{
- ProcessPlugin::init(samplingRate);
- if (samplingRate != fSamplingFreq) {
- std::cerr << "Sampling rate not set by superclass!\n";
- std::exit(1);
- }
+ ProcessPlugin::init(samplingRate, bufferSize);
fs = float(fSamplingFreq);
for (int i = 0; i < mNumChannels; i++) {
{
if (not inited) {
std::cerr << "*** Monitor " << this << ": init never called! Doing it now.\n";
- if (fSamplingFreq <= 0) {
- fSamplingFreq = 48000;
- std::cout << "Monitor " << this
- << ": *** HAD TO GUESS the sampling rate (chose 48000 Hz) ***\n";
- }
- init(fSamplingFreq);
+ init(0, 0);
}
if (mBufSize < nframes) {
/// \brief The class destructor
virtual ~Monitor();
- void init(int samplingRate) override;
+ void init(int samplingRate, int bufferSize) override;
int getNumInputs() override { return (mNumChannels); }
int getNumOutputs() override { return (mNumChannels); }
void compute(int nframes, float** inputs, float** outputs) override;
* initializes the Sampling Frequency. If a class instance depends on the
* sampling frequency, it should be initialize here.
*/
- virtual void init(int samplingRate)
+ virtual void init(int samplingRate, int bufferSize)
{
+ if (samplingRate <= 0) {
+ samplingRate = 48000;
+ printf("%s: *** HAD TO GUESS the sampling rate (chose 48000 Hz) ***\n",
+ getName());
+ }
+ if (bufferSize <= 0) {
+ bufferSize = 128;
+ printf("%s: *** HAD TO GUESS the buffer size (chose 128) ***\n", getName());
+ }
fSamplingFreq = samplingRate;
+ mBufferSize = bufferSize;
if (verbose) {
- printf("%s: init(%d)\n", getName(), samplingRate);
+ printf("%s: init(%d, %d)\n", getName(), samplingRate, bufferSize);
}
}
virtual bool getInited() { return inited; }
protected:
int fSamplingFreq; //< Faust Data member, Sampling Rate
+    int mBufferSize;     //< expected number of samples per compute callback
bool inited = false;
bool verbose = false;
bool outgoingPluginToNetwork = false; //< Tells the plugin if it processes audio
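To make the updated two-argument init() contract concrete, here is a minimal hypothetical subclass sketch (not part of this change set); the class name and pass-through DSP are assumptions, and it relies only on the ProcessPlugin members visible in this diff.

```cpp
// Hypothetical ProcessPlugin subclass illustrating the new init(rate, bufferSize)
// contract: call the base class first (it guesses 48000 Hz / 128 frames when
// given values <= 0), then derive any rate-dependent state from fSamplingFreq.
#include "ProcessPlugin.h"

class PassThrough : public ProcessPlugin
{
   public:
    PassThrough(int numchans) : mNumChannels(numchans) {}
    void init(int samplingRate, int bufferSize) override
    {
        ProcessPlugin::init(samplingRate, bufferSize);
        fs     = float(fSamplingFreq);  // rate-dependent setup would go here
        inited = true;
    }
    int getNumInputs() override { return mNumChannels; }
    int getNumOutputs() override { return mNumChannels; }
    void compute(int nframes, float** inputs, float** outputs) override
    {
        if (not inited)
            init(0, 0);  // falls back to the guessed defaults above
        for (int ch = 0; ch < mNumChannels; ch++)
            for (int i = 0; i < nframes; i++)
                outputs[ch][i] = inputs[ch][i];
    }
    const char* getName() const override { return "PassThrough"; }

   private:
    float fs = 0.0f;
    int mNumChannels;
};
```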
constexpr int WindowDivisor = 8; // for faster auto tracking
constexpr int MaxFPP = 1024; // tested up to this FPP
//*******************************************************************************
-Regulator::Regulator(int rcvChannels, int bit_res, int FPP, int qLen,
- bool use_worker_thread, int bqLen)
+Regulator::Regulator(int rcvChannels, int bit_res, int FPP, int qLen, int bqLen,
+ int sample_rate)
: RingBuffer(0, 0)
, mNumChannels(rcvChannels)
, mAudioBitRes(bit_res)
, mFPP(FPP)
+ , mSampleRate(sample_rate)
, mMsecTolerance((double)qLen) // handle non-auto mode, expects positive qLen
+ , pushStat(NULL)
+ , pullStat(NULL)
, mAuto(false)
- , mUseWorkerThread(use_worker_thread)
+ , mUseWorkerThread(false)
, m_b_BroadcastQueueLength(bqLen)
, mRegulatorThreadPtr(NULL)
, mRegulatorWorkerPtr(NULL)
if (gVerboseFlag)
cout << "mHist = " << mHist << " at " << mFPP << "\n";
- mBytes = mFPP * mNumChannels * mBitResolutionMode;
- mPullQueue = new int8_t[mBytes * 2];
- mXfrBuffer = mPullQueue;
- mPacketCnt = 0; // burg initialization
- mLastPacket = nullptr;
- mNextPacket.store(mLastPacket, std::memory_order_release);
- mWorkerUnderruns = 0;
+ mBytes = mFPP * mNumChannels * mBitResolutionMode;
+ mXfrBuffer = new int8_t[mBytes];
+ mPacketCnt = 0; // burg initialization
mFadeUp.resize(mFPP, 0.0);
mFadeDown.resize(mFPP, 0.0);
for (int i = 0; i < mFPP; i++) {
memcpy(mAssembledPacket, mXfrBuffer, mBytes);
mLastLostCount = 0; // for stats
mIncomingTimer.start();
- mLastSeqNumIn = -1;
+ mLastSeqNumIn.store(-1, std::memory_order_relaxed);
mLastSeqNumOut = -1;
mPhasor.resize(mNumChannels, 0.0);
mIncomingTiming.resize(ModSeqNumInit);
<< m_b_BroadcastQueueLength;
// have not implemented the mJackTrip->queueLengthChanged functionality
}
- if (mUseWorkerThread) {
- mRegulatorThreadPtr = new QThread();
- mRegulatorThreadPtr->setObjectName("RegulatorThread");
- RegulatorWorker* workerPtr = new RegulatorWorker(this);
- workerPtr->moveToThread(mRegulatorThreadPtr);
- mRegulatorThreadPtr->start();
- mRegulatorWorkerPtr = workerPtr;
+}
+
+void Regulator::enableWorkerThread(QThread* thread_ptr)
+{
+ if (thread_ptr == nullptr) {
+ // create owned regulator thread (client mode)
+ if (mRegulatorThreadPtr == nullptr) {
+ mRegulatorThreadPtr = new QThread();
+ mRegulatorThreadPtr->setObjectName("RegulatorThread");
+ mRegulatorThreadPtr->start();
+ }
+ thread_ptr = mRegulatorThreadPtr;
}
+ if (mRegulatorWorkerPtr != nullptr) {
+ delete mRegulatorWorkerPtr;
+ }
+ mRegulatorWorkerPtr = new RegulatorWorker(this);
+ mRegulatorWorkerPtr->moveToThread(thread_ptr);
+ mUseWorkerThread = true;
}
void Regulator::changeGlobal(double x)
Regulator::~Regulator()
{
- delete[] mPullQueue;
+ if (mRegulatorThreadPtr != nullptr) {
+ // Stop the Regulator thread before deleting other things
+ mRegulatorThreadPtr->quit();
+ mRegulatorThreadPtr->wait();
+ delete mRegulatorThreadPtr;
+ }
+ if (mRegulatorWorkerPtr != nullptr)
+ delete mRegulatorWorkerPtr;
+ delete[] mXfrBuffer;
delete[] mZeros;
delete[] mAssembledPacket;
delete pushStat;
};
if (m_b_BroadcastQueueLength)
delete m_b_BroadcastRingBuffer;
- if (mRegulatorWorkerPtr != nullptr)
- delete mRegulatorWorkerPtr;
- if (mRegulatorThreadPtr != nullptr) {
- // Stop the Regulator thread
- mRegulatorThreadPtr->quit();
- mRegulatorThreadPtr->wait();
- delete mRegulatorThreadPtr;
- }
}
void Regulator::setFPPratio()
{
if (m_b_BroadcastQueueLength)
m_b_BroadcastRingBuffer->insertSlotNonBlocking(buf, mBytes, 0, seq_num);
- QMutexLocker locker(&mMutex);
seq_num %= mModSeqNum;
// if (seq_num==0) return; // impose regular loss
mIncomingTiming[seq_num] =
mMsecTolerance + (double)mIncomingTimer.nsecsElapsed() / 1000000.0;
- mLastSeqNumIn = seq_num;
- if (mLastSeqNumIn != -1)
- memcpy(mSlots[mLastSeqNumIn % mNumSlots], buf, mBytes);
+ if (seq_num != -1)
+ memcpy(mSlots[seq_num % mNumSlots], buf, mBytes);
+ mLastSeqNumIn.store(seq_num, std::memory_order_release);
};
-//*******************************************************************************
-void Regulator::pullPacket(int8_t* buf)
-{ // only for mBufferStrategy == 4, not using workerThread
- pullPacket();
- memcpy(buf, mXfrBuffer, mBytes);
-}
-
//*******************************************************************************
void Regulator::pullPacket()
{
- QMutexLocker locker(&mMutex);
- mSkip = 0;
- if ((mLastSeqNumIn == -1) || (!mFPPratioIsSet)) {
+ int lastSeqNumIn = mLastSeqNumIn.load(std::memory_order_acquire);
+ mSkip = 0;
+ if ((lastSeqNumIn == -1) || (!mFPPratioIsSet)) {
goto ZERO_OUTPUT;
} else {
mLastSeqNumOut++;
mLastSeqNumOut %= mModSeqNum;
double now = (double)mIncomingTimer.nsecsElapsed() / 1000000.0;
for (int i = mLostWindow; i >= 0; i--) {
- int next = mLastSeqNumIn - i;
+ int next = lastSeqNumIn - i;
if (next < 0)
next += mModSeqNum;
if (mIncomingTiming[next] < mIncomingTiming[mLastSeqNumOut])
// make this a global value? -- same threshold as
// UdpDataProtocol::printUdpWaitedTooLong
double wait_time = 30; // msec
- if ((mLastSeqNumOut == mLastSeqNumIn)
+ if ((mLastSeqNumOut == lastSeqNumIn)
&& ((now - mIncomingTiming[mLastSeqNumOut]) > wait_time)) {
// std::cout << (mIncomingTiming[mLastSeqNumOut] - now)
- // << "mLastSeqNumIn: " << mLastSeqNumIn <<
+ // << "lastSeqNumIn: " << lastSeqNumIn <<
// "\tmLastSeqNumOut: " << mLastSeqNumOut << std::endl;
goto ZERO_OUTPUT;
} // "good underrun", not a stuck client
- // std::cout << "within window -- mLastSeqNumIn: " <<
- // mLastSeqNumIn <<
+ // std::cout << "within window -- lastSeqNumIn: " <<
+ // lastSeqNumIn <<
// "\tmLastSeqNumOut: " << mLastSeqNumOut << std::endl;
goto UNDERRUN;
}
memcpy(mXfrBuffer, mZeros, mBytes);
OUTPUT:
- // swap positions of mXfrBuffer and mNextPacket
- mNextPacket.store(mXfrBuffer, std::memory_order_release);
- if (mXfrBuffer == mPullQueue) {
- mXfrBuffer = mPullQueue + mBytes;
- } else {
- mXfrBuffer = mPullQueue;
- }
+ return;
};
//*******************************************************************************
return tmp;
}
-void BurgAlgorithm::train(std::vector<long double>& coeffs, const std::vector<float>& x)
+void BurgAlgorithm::train(std::vector<double>& coeffs, const std::vector<double>& x)
{
// GET SIZE FROM INPUT VECTORS
size_t N = x.size() - 1;
// than the AR order is";
// INITIALIZE Ak
- // vector<long double> Ak(m + 1, 0.0);
+ // vector<double> Ak(m + 1, 0.0);
Ak.assign(m + 1, 0.0);
Ak[0] = 1.0;
// INITIALIZE f and b
- // vector<long double> f;
+ // vector<double> f;
f.resize(x.size());
for (unsigned int i = 0; i < x.size(); i++)
f[i] = x[i];
- // vector<long double> b(f);
+ // vector<double> b(f);
b = f;
// INITIALIZE Dk
- long double Dk = 0.0;
+ double Dk = 0.0;
for (size_t j = 0; j <= N; j++) // CC: N is $#x-1 in C++ but $#x in perl
{
Dk += 2.00001 * f[j] * f[j]; // CC: needs more damping than orig 2.0
// BURG RECURSION
for (size_t k = 0; k < m; k++) {
// COMPUTE MU
- long double mu = 0.0;
+ double mu = 0.0;
for (size_t n = 0; n <= N - k - 1; n++) {
mu += f[n + k + 1] * b[n];
}
// UPDATE Ak
for (size_t n = 0; n <= (k + 1) / 2; n++) {
- long double t1 = Ak[n] + mu * Ak[k + 1 - n];
- long double t2 = Ak[k + 1 - n] + mu * Ak[n];
- Ak[n] = t1;
- Ak[k + 1 - n] = t2;
+ double t1 = Ak[n] + mu * Ak[k + 1 - n];
+ double t2 = Ak[k + 1 - n] + mu * Ak[n];
+ Ak[n] = t1;
+ Ak[k + 1 - n] = t2;
}
// UPDATE f and b
for (size_t n = 0; n <= N - k - 1; n++) {
- long double t1 = f[n + k + 1] + mu * b[n]; // were double
- long double t2 = b[n] + mu * f[n + k + 1];
- f[n + k + 1] = t1;
- b[n] = t2;
+ double t1 = f[n + k + 1] + mu * b[n]; // were double
+ double t2 = b[n] + mu * f[n + k + 1];
+ f[n + k + 1] = t1;
+ b[n] = t2;
}
// UPDATE Dk
coeffs.assign(++Ak.begin(), Ak.end());
}
-void BurgAlgorithm::predict(std::vector<long double>& coeffs, std::vector<float>& tail)
+void BurgAlgorithm::predict(std::vector<double>& coeffs, std::vector<double>& tail)
{
size_t m = coeffs.size();
// qDebug() << "tail.at(0)" << tail[0]*32768;
{
if (mUseWorkerThread) {
// use separate worker thread for PLC
- const void* ptrToPacket = mNextPacket.load(std::memory_order_acquire);
- if (ptrToPacket == mLastPacket) {
- mWorkerUnderruns++;
- ::memset(ptrToReadSlot, 0, mBytes);
- if (ptrToPacket == nullptr) {
- // first time run
- mRegulatorWorkerPtr->startPullingNextPacket();
- }
- } else {
- ::memcpy(ptrToReadSlot, ptrToPacket, mBytes);
- mLastPacket = ptrToPacket;
- mRegulatorWorkerPtr->startPullingNextPacket();
- }
- } else {
- // use jack callback thread to perform PLC
- pullPacket(ptrToReadSlot);
+ mRegulatorWorkerPtr->pop(ptrToReadSlot);
+ return;
}
+ // use jack callback thread to perform PLC
+ pullPacket();
+ memcpy(ptrToReadSlot, mXfrBuffer, mBytes);
}
//*******************************************************************************
bool Regulator::getStats(RingBuffer::IOStat* stat, bool reset)
{
- QMutexLocker locker(&mMutex);
if (reset) { // all are unused, this is copied from superclass
mUnderruns = 0;
mOverflows = 0;
mBroadcastSkew = 0;
}
- if (mUseWorkerThread) {
- cout << "PLC worker underruns: " << mWorkerUnderruns << endl;
- mWorkerUnderruns = 0;
+ if (mUseWorkerThread && mRegulatorWorkerPtr != nullptr) {
+ mRegulatorWorkerPtr->getStats();
}
// hijack of struct IOStat {
#ifndef __REGULATOR_H__
#define __REGULATOR_H__
+//#define REGULATOR_SHARED_WORKER_THREAD
+
#include <math.h>
#include <QDebug>
#include "AudioInterface.h"
#include "RingBuffer.h"
+#include "WaitFreeFrameBuffer.h"
#include "jacktrip_globals.h"
// forward declaration
{
public:
bool classify(double d);
- void train(std::vector<long double>& coeffs, const std::vector<float>& x);
- void predict(std::vector<long double>& coeffs, std::vector<float>& tail);
+ void train(std::vector<double>& coeffs, const std::vector<double>& x);
+ void predict(std::vector<double>& coeffs, std::vector<double>& tail);
private:
// the following are class members to minimize heap memory allocations
- std::vector<long double> Ak;
- std::vector<long double> f;
- std::vector<long double> b;
+ std::vector<double> Ak;
+ std::vector<double> f;
+ std::vector<double> b;
};
class ChanData
int ch;
int trainSamps;
std::vector<sample_t> mTruth;
- std::vector<sample_t> mTrain;
- std::vector<sample_t> mTail;
+ std::vector<double> mTrain;
+ std::vector<double> mTail;
std::vector<sample_t> mPrediction; // ORDER
- std::vector<long double> mCoeffs;
+ std::vector<double> mCoeffs;
std::vector<sample_t> mXfadedPred;
std::vector<sample_t> mLastPred;
std::vector<std::vector<sample_t>> mLastPackets;
class Regulator : public RingBuffer
{
public:
- Regulator(int rcvChannels, int bit_res, int FPP, int qLen, bool use_worker_thread,
- int bqLen);
+ /// construct a new regulator
+ Regulator(int rcvChannels, int bit_res, int FPP, int qLen, int bqLen,
+ int sample_rate);
+
+ // virtual destructor
virtual ~Regulator();
- void shimFPP(const int8_t* buf, int len, int seq_num);
- void pushPacket(const int8_t* buf, int seq_num);
+ /// @brief enables use of a separate worker thread for pulling packets
+ /// @param thread_ptr pointer to shared thread; if null, a unique one will be used
+ void enableWorkerThread(QThread* thread_ptr = nullptr);
+
// can hijack unused2 to propagate incoming seq num if needed
// option is in UdpDataProtocol
// if (!mJackTrip->writeAudioBuffer(src, host_buf_size, last_seq_num))
return (true);
}
- void pullPacket(int8_t* buf);
-
- void pullPacket();
-
+ /// @brief called by audio interface to get the next buffer of samples
+ /// @param ptrToReadSlot new samples will be copied to this memory block
virtual void readSlotNonBlocking(int8_t* ptrToReadSlot);
+ /// @brief called by broadcast ports to get the next buffer of samples
+ /// @param ptrToReadSlot new samples will be copied to this memory block
virtual void readBroadcastSlot(int8_t* ptrToReadSlot)
{
m_b_BroadcastRingBuffer->readBroadcastSlot(ptrToReadSlot);
}
+ /// @brief returns sample rate
+ inline int getSampleRate() const { return mSampleRate; }
+
+ /// @brief returns number of bytes in an audio "packet"
+ inline int getPacketSize() const { return mBytes; }
+
+ /// @brief returns number of samples, or frames per callback period
+ inline int getBufferSizeInSamples() const { return mFPP; }
+
+ /// @brief returns time taken for last PLC prediction, in milliseconds
+ inline double getLastDspElapsed() const
+ {
+ return pullStat == nullptr ? 0 : pullStat->lastPLCdspElapsed;
+ }
+
// virtual QString getStats(uint32_t statCount, uint32_t lostCount);
virtual bool getStats(IOStat* stat, bool reset);
private:
+ void shimFPP(const int8_t* buf, int len, int seq_num);
+ void pushPacket(const int8_t* buf, int seq_num);
+ void pullPacket();
void setFPPratio();
bool mFPPratioIsSet;
void processPacket(bool glitch);
int mAudioBitRes;
int mFPP;
int mPeerFPP;
+ int mSampleRate;
uint32_t mLastLostCount;
int mNumSlots;
int mHist;
BurgAlgorithm ba;
int mBytes;
int mBytesPeerPacket;
- int8_t* mPullQueue;
int8_t* mXfrBuffer;
- const void* mLastPacket;
- int mWorkerUnderruns;
- std::atomic<const void*> mNextPacket;
int8_t* mAssembledPacket;
int mPacketCnt;
sample_t bitsToSample(int ch, int frame);
StdDev* pushStat;
StdDev* pullStat;
QElapsedTimer mIncomingTimer;
- int mLastSeqNumIn;
+ std::atomic<int> mLastSeqNumIn;
int mLastSeqNumOut;
std::vector<double> mPhasor;
std::vector<double> mIncomingTiming;
/// thread used to pull packets from Regulator (if mBufferStrategy==3)
QThread* mRegulatorThreadPtr;
+
/// worker used to pull packets from Regulator (if mBufferStrategy==3)
RegulatorWorker* mRegulatorWorkerPtr;
+
+ friend class RegulatorWorker;
};
class RegulatorWorker : public QObject
Q_OBJECT;
public:
- RegulatorWorker(Regulator* rPtr) : mRegulatorPtr(rPtr)
+ RegulatorWorker(Regulator* rPtr)
+ : mRegulatorPtr(rPtr)
+ , mPacketQueue(rPtr->getPacketSize())
+ , mPacketQueueTarget(1)
+ , mLastUnderrun(0)
+ , mUnderrun(false)
+ , mStarted(false)
{
+ // wire up signals
QObject::connect(this, &RegulatorWorker::startup, this,
&RegulatorWorker::setRealtimePriority, Qt::QueuedConnection);
QObject::connect(this, &RegulatorWorker::signalPullPacket, this,
&RegulatorWorker::pullPacket, Qt::QueuedConnection);
+ // set thread to realtime priority
emit startup();
}
+
virtual ~RegulatorWorker() {}
- void startPullingNextPacket() { emit signalPullPacket(); }
+
+ bool pop(int8_t* pktPtr)
+ {
+ // start pulling more packets to maintain target
+ emit signalPullPacket();
+
+ if (mPacketQueue.pop(pktPtr))
+ return true;
+
+ // use silence for underruns
+ ::memset(pktPtr, 0, mPacketQueue.getBytesPerFrame());
+
+ // trigger underrun to re-evaluate queue target
+ mUnderrun.store(true, std::memory_order_relaxed);
+
+ return false;
+ }
+
+ void getStats()
+ {
+ std::cout << "PLC worker queue: size=" << mPacketQueue.size()
+ << " target=" << mPacketQueueTarget
+ << " underruns=" << mPacketQueue.getUnderruns()
+ << " overruns=" << mPacketQueue.getOverruns() << std::endl;
+ mPacketQueue.clearStats();
+ }
signals:
void signalPullPacket();
+ void signalMaxQueueSize();
void startup();
public slots:
void pullPacket()
{
- if (mRegulatorPtr != nullptr) {
+ if (mUnderrun.load(std::memory_order_relaxed)) {
+ if (mStarted) {
+ // allow up to 1 underrun per second before adjusting target
+ double now =
+ (double)mRegulatorPtr->mIncomingTimer.nsecsElapsed() / 1000000.0;
+ if (mLastUnderrun != 0 && now - mLastUnderrun < 1000.0)
+ updateQueueTarget();
+ mLastUnderrun = now;
+ mUnderrun.store(false, std::memory_order_relaxed);
+ } else {
+ mStarted = true;
+ }
+ }
+ std::size_t qSize = mPacketQueue.size();
+ while (qSize < mPacketQueueTarget) {
mRegulatorPtr->pullPacket();
+ qSize = mPacketQueue.push(mRegulatorPtr->mXfrBuffer);
}
}
void setRealtimePriority() { setRealtimeProcessPriority(); }
private:
+ void updateQueueTarget()
+ {
+ // cap queue size at 4x the time it takes to run a prediction
+ double samples =
+ (mRegulatorPtr->getLastDspElapsed() * 4 * mRegulatorPtr->getSampleRate())
+ / 1000;
+ std::size_t maxPackets = (samples / mRegulatorPtr->getBufferSizeInSamples()) + 1;
+ if (maxPackets > mPacketQueue.capacity() / 2)
+ maxPackets = mPacketQueue.capacity() / 2;
+ if (mPacketQueueTarget < maxPackets) {
+ // adjust queue target
+ ++mPacketQueueTarget;
+ std::cout << "PLC worker queue: adjusting target=" << mPacketQueueTarget
+ << " (max=" << maxPackets
+ << ", lastDspElapsed=" << mRegulatorPtr->getLastDspElapsed() << ")"
+ << std::endl;
+ if (mPacketQueueTarget == maxPackets) {
+ emit signalMaxQueueSize();
+ std::cout << "PLC worker queue: reached MAX target!" << std::endl;
+ }
+ }
+ }
+
+ /// pointer to Regulator for pulling packets
Regulator* mRegulatorPtr;
+
+ /// queue of ready packets (if mBufferStrategy==3)
+ WaitFreeFrameBuffer<> mPacketQueue;
+
+ /// target size for the packet queue
+ std::size_t mPacketQueueTarget;
+
+ /// time of last underrun, in milliseconds
+ double mLastUnderrun;
+
+    /// set to true when pop() underruns the packet queue
+ std::atomic<bool> mUnderrun;
+
+ /// will be true after first packet is pushed
+ bool mStarted;
};
#endif //__REGULATOR_H__
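For readers tracing the new worker-thread path end to end, here is a short hypothetical consumer-side sketch (not part of this change); the surrounding callback function and buffer handling are assumptions, and it only uses the Regulator methods added above.

```cpp
// Hypothetical audio-callback excerpt showing the consumer side of the new
// worker-thread path. "regulator" would be created and configured as in
// JackTrip.cpp above (enableWorkerThread() already called).
#include <cstdint>
#include <vector>

#include "Regulator.h"

void audioCallbackExcerpt(Regulator* regulator)
{
    // one "packet" is one callback period of interleaved audio bytes
    std::vector<int8_t> readBuffer(regulator->getPacketSize());

    // With the worker enabled, this copies the next pre-computed packet from the
    // RegulatorWorker's wait-free queue (or silence on underrun) and signals the
    // worker to refill; the PLC prediction runs on the worker thread instead of
    // blocking this audio callback.
    regulator->readSlotNonBlocking(readBuffer.data());

    // ...deinterleave readBuffer into the audio interface's output channels...
}
```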
}
//*******************************************************************************
-void Reverb::init(int samplingRate)
+void Reverb::init(int samplingRate, int bufferSize)
{
- ProcessPlugin::init(samplingRate);
- // std::cout << "Reverb: init(" << samplingRate << ")\n";
- if (samplingRate != fSamplingFreq) {
- std::cerr << "Sampling rate not set by superclass!\n";
- std::exit(1);
- }
+ ProcessPlugin::init(samplingRate, bufferSize);
fs = float(fSamplingFreq);
if (mReverbLevel <= 1.0) { // freeverb:
static_cast<freeverbdsp*>(freeverbStereoP)
{
if (not inited) {
std::cerr << "*** Reverb " << this << ": init never called! Doing it now.\n";
- if (fSamplingFreq <= 0) {
- fSamplingFreq = 48000;
- std::cout << "Reverb " << this
- << ": *** HAD TO GUESS the sampling rate (chose 48000 Hz) ***\n";
- }
- init(fSamplingFreq);
+ init(0, 0);
}
if (mReverbLevel <= 1.0) {
if (mNumInChannels == 1) {
/// \brief The class destructor
virtual ~Reverb();
- void init(int samplingRate) override;
+ void init(int samplingRate, int bufferSize) override;
int getNumInputs() override { return (mNumInChannels); }
int getNumOutputs() override { return (mNumOutChannels); }
void compute(int nframes, float** inputs, float** outputs) override;
using std::cout;
using std::endl;
+//*******************************************************************************
+void RtAudioDevice::print() const
+{
+ std::cout << "[" << RtAudio::getApiDisplayName(this->api) << " - " << this->apiIndex
+ << "]"
+ << ": \"";
+ std::cout << this->name.toStdString() << "\" ";
+ std::cout << "(" << this->inputChannels << " ins, " << this->outputChannels
+ << " outs)" << endl;
+}
+
//*******************************************************************************
RtAudioInterface::RtAudioInterface(QVarLengthArray<int> InputChans,
QVarLengthArray<int> OutputChans,
std::string api_in;
std::string api_out;
- QStringList all_input_devices;
- QStringList all_output_devices;
- getDeviceList(&all_input_devices, NULL, NULL, true);
- getDeviceList(&all_output_devices, NULL, NULL, false);
+ if (mDevices.empty())
+ scanDevices(mDevices);
- unsigned int n_devices_input = all_input_devices.size();
- unsigned int n_devices_output = all_output_devices.size();
+ unsigned int n_devices_input = getNumInputDevices();
+ unsigned int n_devices_output = getNumOutputDevices();
unsigned int n_devices_total = n_devices_input + n_devices_output;
RtAudio* rtAudioIn = NULL;
unsigned int sampleRate = getSampleRate(); // mSamplingRate;
unsigned int bufferFrames = getBufferSizeInSamples(); // mBufferSize;
mStereoToMonoMixer = new StereoToMono();
- mStereoToMonoMixer->init(sampleRate);
+ mStereoToMonoMixer->init(sampleRate, bufferFrames);
// Setup parent class
AudioInterface::setup(verbose);
//*******************************************************************************
void RtAudioInterface::printDevices()
{
- std::vector<RtAudio::Api> apis;
- RtAudio::getCompiledApi(apis);
+ QVector<RtAudioDevice> devices;
+ scanDevices(devices);
+}
- for (uint32_t i = 0; i < apis.size(); i++) {
-#ifdef _WIN32
- if (apis.at(i) == RtAudio::UNIX_JACK) {
- continue;
+//*******************************************************************************
+unsigned int RtAudioInterface::getNumInputDevices() const
+{
+ unsigned int deviceCount = 0;
+ for (int n = 0; n < mDevices.size(); ++n) {
+ if (mDevices[n].inputChannels > 0) {
+ ++deviceCount;
}
-#endif
- RtAudio rtaudio(apis.at(i));
- unsigned int devices = rtaudio.getDeviceCount();
- for (unsigned int j = 0; j < devices; j++) {
- RtAudio::DeviceInfo info = rtaudio.getDeviceInfo(j);
- if (info.probed == true) {
- std::cout << "[" << RtAudio::getApiDisplayName(rtaudio.getCurrentApi())
- << " - " << j << "]"
- << ": \"";
- std::cout << info.name << "\" ";
- std::cout << "(" << info.inputChannels << " ins, " << info.outputChannels
- << " outs)" << endl;
- }
+ }
+ return deviceCount;
+}
+
+//*******************************************************************************
+unsigned int RtAudioInterface::getNumOutputDevices() const
+{
+ unsigned int deviceCount = 0;
+ for (int n = 0; n < mDevices.size(); ++n) {
+ if (mDevices[n].outputChannels > 0) {
+ ++deviceCount;
}
}
+ return deviceCount;
}
//*******************************************************************************
}
//*******************************************************************************
-void RtAudioInterface::getDeviceList(QStringList* list, QStringList* categories,
- QList<int>* channels, bool isInput)
+void RtAudioInterface::getDeviceInfoFromName(std::string deviceName, int* index,
+ std::string* api, bool isInput) const
{
- RtAudio baseRtAudio;
- RtAudio::Api baseRtAudioApi = baseRtAudio.getCurrentApi();
- if (categories != NULL) {
- categories->clear();
- }
- if (channels != NULL) {
- channels->clear();
- }
- list->clear();
-
- // Explicitly add default device
- QString defaultDeviceName = "";
- uint32_t defaultDeviceIdx;
- RtAudio::DeviceInfo defaultDeviceInfo;
- if (isInput) {
- defaultDeviceIdx = baseRtAudio.getDefaultInputDevice();
- } else {
- defaultDeviceIdx = baseRtAudio.getDefaultOutputDevice();
- }
-
- if (defaultDeviceIdx != 0) {
- defaultDeviceInfo = baseRtAudio.getDeviceInfo(defaultDeviceIdx);
- defaultDeviceName = QString::fromStdString(defaultDeviceInfo.name);
- }
-
- if (defaultDeviceName != "") {
- list->append(defaultDeviceName);
- if (categories != NULL) {
-#ifdef _WIN32
- switch (baseRtAudioApi) {
- case RtAudio::WINDOWS_ASIO:
- categories->append(QStringLiteral("Low-Latency (ASIO)"));
- break;
- case RtAudio::WINDOWS_WASAPI:
- categories->append(QStringLiteral("High-Latency (Non-ASIO)"));
- break;
- case RtAudio::WINDOWS_DS:
- categories->append(QStringLiteral("High-Latency (Non-ASIO)"));
- break;
- default:
- categories->append(QStringLiteral(""));
- break;
- }
-#else
- categories->append(QStringLiteral(""));
-#endif
- }
- if (channels != NULL) {
- if (isInput) {
- channels->append(defaultDeviceInfo.inputChannels);
- } else {
- channels->append(defaultDeviceInfo.outputChannels);
+ const QVector<RtAudioDevice>& devices(getDevices());
+ for (int n = 0; n < devices.size(); ++n) {
+ if (deviceName == devices[n].name.toStdString()) {
+ if ((isInput && devices[n].inputChannels > 0)
+ || (!isInput && devices[n].outputChannels > 0)) {
+ *index = devices[n].apiIndex;
+ *api = RtAudio::getApiName(devices[n].api);
+ return;
}
}
}
- std::vector<RtAudio::Api> apis;
- RtAudio::getCompiledApi(apis);
-
- for (uint32_t i = 0; i < apis.size(); i++) {
-#ifdef _WIN32
- if (apis.at(i) == RtAudio::UNIX_JACK) {
- continue;
- }
-#endif
- RtAudio::Api api = apis.at(i);
- RtAudio rtaudio(api);
- unsigned int devices = rtaudio.getDeviceCount();
- for (unsigned int j = 0; j < devices; j++) {
- RtAudio::DeviceInfo info = rtaudio.getDeviceInfo(j);
- if (info.probed == true) {
- // Don't include duplicate entries
- if (list->contains(QString::fromStdString(info.name))) {
- continue;
- }
-
- // Skip the default device, since we already added it
- if (QString::fromStdString(info.name) == defaultDeviceName
- && api == baseRtAudioApi) {
- continue;
- }
-
- if (QString::fromStdString(info.name) == "JackRouter") {
- continue;
- }
-
- if (info.probed == false) {
- continue;
- }
-
- if (isInput && info.inputChannels > 0) {
- list->append(QString::fromStdString(info.name));
- if (channels != NULL) {
- channels->append(info.inputChannels);
- }
- } else if (!isInput && info.outputChannels > 0) {
- list->append(QString::fromStdString(info.name));
- if (channels != NULL) {
- channels->append(info.outputChannels);
- }
- } else {
- continue;
- }
-
- if (categories == NULL) {
- continue;
- }
-
-#ifdef _WIN32
- switch (api) {
- case RtAudio::WINDOWS_ASIO:
- categories->append("Low-Latency (ASIO)");
- break;
- case RtAudio::WINDOWS_WASAPI:
- categories->append("High-Latency (Non-ASIO)");
- break;
- case RtAudio::WINDOWS_DS:
- categories->append("High-Latency (Non-ASIO)");
- break;
- default:
- categories->append("");
- break;
- }
-#else
- categories->append("");
-#endif
- }
- }
- }
+ *index = -1;
+ *api = "";
+ return;
}
//*******************************************************************************
-void RtAudioInterface::getDeviceInfoFromName(std::string deviceName, int* index,
- std::string* api, bool isInput)
+void RtAudioInterface::scanDevices(QVector<RtAudioDevice>& devices)
{
std::vector<RtAudio::Api> apis;
RtAudio::getCompiledApi(apis);
+ devices.clear();
+
+ std::cout << "RTAudio: scanning devices..." << std::endl;
for (uint32_t i = 0; i < apis.size(); i++) {
#ifdef _WIN32
}
#endif
RtAudio rtaudio(apis.at(i));
- unsigned int devices = rtaudio.getDeviceCount();
- for (unsigned int j = 0; j < devices; j++) {
+ unsigned int numDevices = rtaudio.getDeviceCount();
+ for (unsigned int j = 0; j < numDevices; j++) {
RtAudio::DeviceInfo info = rtaudio.getDeviceInfo(j);
- if (info.probed == true
- && deviceName == QString::fromStdString(info.name).toStdString()) {
- if ((isInput && info.inputChannels > 0)
- || (!isInput && info.outputChannels > 0)) {
- *index = j;
- *api = RtAudio::getApiName(rtaudio.getCurrentApi());
- return;
- }
- }
+ if (!info.probed || (info.inputChannels == 0 && info.outputChannels == 0))
+ continue;
+ RtAudioDevice device;
+ device.api = rtaudio.getCurrentApi();
+ device.apiIndex = j;
+ device.name = QString::fromStdString(info.name);
+ device.inputChannels = info.inputChannels;
+ device.outputChannels = info.outputChannels;
+ devices.push_back(device);
+ device.print();
}
}
-
- *index = -1;
- *api = "";
- return;
}
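The scanDevices() flow above replaces the previous per-call probing: every compiled RtAudio API is walked once, each usable device is recorded as an RtAudioDevice, and the resulting list can be cached on the interface so later queries never touch the hardware. A minimal sketch of that intended flow (not part of the patch), assuming the members declared in the header below are publicly accessible and that an RtAudioInterface instance already exists:

```
#include <QVector>

static void refreshDeviceCache(RtAudioInterface& iface)
{
    QVector<RtAudioDevice> devices;
    RtAudioInterface::scanDevices(devices);  // probes every compiled RtAudio API once
    iface.setDevices(devices);               // cache the list for later queries

    // Both counters simply walk the cached list; no hardware access happens here.
    unsigned int numInputs  = iface.getNumInputDevices();   // entries with inputChannels > 0
    unsigned int numOutputs = iface.getNumOutputDevices();  // entries with outputChannels > 0
    (void)numInputs;
    (void)numOutputs;
}
```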
#include <RtAudio.h>
#include <QQueue>
+#include <QString>
+#include <QVector>
#include "AudioInterface.h"
#include "StereoToMono.h"
#include "jacktrip_globals.h"
class JackTrip; // Forward declaration
+/// \brief Simple class that represents an audio device available via RtAudio
+struct RtAudioDevice {
+ RtAudio::Api api;
+ QString name;
+ int apiIndex;
+ int inputChannels;
+ int outputChannels;
+ void print() const;
+};
+
/// \brief Base Class that provides an interface with RtAudio
class RtAudioInterface : public AudioInterface
{
/// \brief This has no effect in RtAudio
virtual void connectDefaultPorts() {}
- static void getDeviceList(QStringList* list, QStringList* categories,
- QList<int>* channels, bool isInput);
- static void getDeviceInfoFromName(std::string deviceName, int* index,
- std::string* api, bool isInput);
+ // returns number of available input audio devices
+ unsigned int getNumInputDevices() const;
+
+ // returns number of available output audio devices
+ unsigned int getNumOutputDevices() const;
+
+ // populates devices with all available audio interfaces
+ static void scanDevices(QVector<RtAudioDevice>& devices);
+
+ // sets the cached list of available audio devices
+ void setDevices(QVector<RtAudioDevice>& devices) { mDevices = devices; }
+
+ // returns all available audio devices
+ inline const QVector<RtAudioDevice>& getDevices() const { return mDevices; }
//--------------SETTERS---------------------------------------------
/// \brief This has no effect in RtAudio
const std::string& errorText);
void printDeviceInfo(std::string api, unsigned int deviceId);
+ // retrieves info about an audio device by searching for its name
+ void getDeviceInfoFromName(std::string deviceName, int* index, std::string* api,
+ bool isInput) const;
+
QVarLengthArray<float*>
mInBuffer; ///< Vector of Input buffers/channel read from JACK
QVarLengthArray<float*>
- mOutBuffer; ///< Vector of Output buffer/channel to write to JACK
+ mOutBuffer; ///< Vector of Output buffer/channel to write to JACK
+ QVector<RtAudioDevice>
+ mDevices; ///< Vector of audio interfaces available via RtAudio
RtAudio* mRtAudio; ///< RtAudio class if the input and output device are the same
unsigned int getDefaultDeviceForLinuxPulseAudio(bool isInput);
}
//*******************************************************************************
-void StereoToMono::init(int samplingRate)
+void StereoToMono::init(int samplingRate, int bufferSize)
{
- ProcessPlugin::init(samplingRate);
- if (samplingRate != fSamplingFreq) {
- std::cerr << "Sampling rate not set by superclass!\n";
- std::exit(1);
- }
+ ProcessPlugin::init(samplingRate, bufferSize);
fs = float(fSamplingFreq);
static_cast<stereotomonodsp*>(stereoToMonoP)->init(fs);
if (not inited) {
std::cerr << "*** Stereo-to-Mono " << this
<< ": init never called! Doing it now.\n";
- if (fSamplingFreq <= 0) {
- fSamplingFreq = 48000;
- std::cout << "Stereo-to-Mono " << this
- << ": *** HAD TO GUESS the sampling rate (chose 48000 Hz) ***\n";
- }
- init(fSamplingFreq);
+ init(0, 0);
}
static_cast<stereotomonodsp*>(stereoToMonoP)->compute(nframes, inputs, outputs);
}
\ No newline at end of file
/// \brief The class destructor
virtual ~StereoToMono();
- void init(int samplingRate) override;
+ void init(int samplingRate, int bufferSize) override;
int getNumInputs() override { return 2; }
int getNumOutputs() override { return 2; }
void compute(int nframes, float** inputs, float** outputs) override;
}
//*******************************************************************************
-void Tone::init(int samplingRate)
+void Tone::init(int samplingRate, int bufferSize)
{
- ProcessPlugin::init(samplingRate);
- if (samplingRate != fSamplingFreq) {
- std::cerr << "Sampling rate not set by superclass!\n";
- std::exit(1);
- }
+ ProcessPlugin::init(samplingRate, bufferSize);
fs = float(fSamplingFreq);
for (int i = 0; i < mNumChannels; i++) {
{
if (not inited) {
std::cerr << "*** Tone " << this << ": init never called! Doing it now.\n";
- if (fSamplingFreq <= 0) {
- fSamplingFreq = 48000;
- std::cout << "Tone " << this
- << ": *** HAD TO GUESS the sampling rate (chose 48000 Hz) ***\n";
- }
- init(fSamplingFreq);
+ init(0, 0);
}
for (int i = 0; i < mNumChannels; i++) {
/// \brief The class destructor
virtual ~Tone();
- void init(int samplingRate) override;
+ void init(int samplingRate, int bufferSize) override;
int getNumInputs() override { return (mNumChannels); }
int getNumOutputs() override { return (mNumChannels); }
void compute(int nframes, float** inputs, float** outputs) override;
"client fan out/in, including server", "full mix, including server"})
, m_connectDefaultAudioPorts(false)
, mIOStatTimeout(0)
+ , mRegulatorThreadPtr(NULL)
{
// Register JackTripWorker with the hub listener
// mJTWorker = new JackTripWorker(this);
//*******************************************************************************
UdpHubListener::~UdpHubListener()
{
+ mStopCheckTimer.stop();
QMutexLocker lock(&mMutex);
+ if (mRegulatorThreadPtr != NULL) {
+ mRegulatorThreadPtr->quit();
+ mRegulatorThreadPtr->wait();
+ delete mRegulatorThreadPtr;
+ }
// delete mJTWorker;
for (int i = 0; i < gMaxThreads; i++) {
delete mJTWorkers->at(i);
mStopCheckTimer.setInterval(200);
connect(&mStopCheckTimer, &QTimer::timeout, this, &UdpHubListener::stopCheck);
mStopCheckTimer.start();
+
+#ifdef REGULATOR_SHARED_WORKER_THREAD
+ // Start regulator thread if bufferStrategy == 3
+ if (mBufferStrategy == 3) {
+ // create shared regulator thread
+ mRegulatorThreadPtr = new QThread();
+ mRegulatorThreadPtr->setObjectName("RegulatorThread");
+ mRegulatorThreadPtr->start();
+ }
+#endif
+
+ emit signalStarted();
}
void UdpHubListener::receivedNewConnection()
mJTWorkers->at(id)->setIOStatStream(mIOStatStream);
}
mJTWorkers->at(id)->setBufferStrategy(mBufferStrategy);
+ mJTWorkers->at(id)->setRegulatorThread(mRegulatorThreadPtr);
mJTWorkers->at(id)->setNetIssuesSimulation(mSimulatedLossRate, mSimulatedJitterRate,
mSimulatedDelayRel);
mJTWorkers->at(id)->setBroadcast(mBroadcastQueue);
iterator.next();
}
}
+ if (mRegulatorThreadPtr != nullptr) {
+ // Stop the Regulator thread
+ mRegulatorThreadPtr->quit();
+ mRegulatorThreadPtr->wait();
+ }
}
// TODO:
// USE bool QAbstractSocket::isValid () const to check if socket is connect. if not, exit
void stopCheck();
signals:
- void Listening();
- void ClientAddressSet();
+ void signalStarted();
void signalRemoveThread(int id);
void signalStopped();
void signalError(const QString& errorMessage);
int mIOStatTimeout;
QSharedPointer<std::ostream> mIOStatStream;
+ /// thread used to pull packets from Regulator (if mBufferStrategy==3)
+ QThread* mRegulatorThreadPtr;
+
int mBufferStrategy;
int mBroadcastQueue;
double mSimulatedLossRate;
}
//*******************************************************************************
-void Volume::init(int samplingRate)
+void Volume::init(int samplingRate, int bufferSize)
{
- ProcessPlugin::init(samplingRate);
- if (samplingRate != fSamplingFreq) {
- std::cerr << "Sampling rate not set by superclass!\n";
- std::exit(1);
- }
+ ProcessPlugin::init(samplingRate, bufferSize);
fs = float(fSamplingFreq);
for (int i = 0; i < mNumChannels; i++) {
{
if (not inited) {
std::cerr << "*** Volume " << this << ": init never called! Doing it now.\n";
- if (fSamplingFreq <= 0) {
- fSamplingFreq = 48000;
- std::cout << "Volume " << this
- << ": *** HAD TO GUESS the sampling rate (chose 48000 Hz) ***\n";
- }
- init(fSamplingFreq);
+ init(0, 0);
}
for (int i = 0; i < mNumChannels; i++) {
/// \brief The class destructor
virtual ~Volume();
- void init(int samplingRate) override;
+ void init(int samplingRate, int bufferSize) override;
int getNumInputs() override { return (mNumChannels); }
int getNumOutputs() override { return (mNumChannels); }
void compute(int nframes, float** inputs, float** outputs) override;
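The StereoToMono, Tone, and Volume changes above all follow the same pattern: init() now takes both the sampling rate and the buffer size, the values are forwarded to ProcessPlugin::init(), and the per-plugin sample-rate guessing is removed. A minimal sketch of calling the new signature (the channel-count constructor argument and the buffer contents are assumptions for illustration, not part of the patch):

```
static void volumeSketch()
{
    float inL[128] = {}, inR[128] = {};
    float outL[128] = {}, outR[128] = {};
    float* ins[2]  = {inL, inR};
    float* outs[2] = {outL, outR};

    Volume vol(2);             // assumed channel-count constructor argument
    vol.init(48000, 128);      // sampling rate in Hz and frames per buffer, both known up front
    vol.compute(128, ins, outs);
}
```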
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2008-2023 Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+ JackTrip Labs, Inc.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file WaitFreeFrameBuffer.h
+ * \author Mike Dickey
+ * \date May 2023
+ */
+
+#ifndef __WAITFREEFRAMEBUFFER_H__
+#define __WAITFREEFRAMEBUFFER_H__
+
+#include <cstring>
+
+#include "WaitFreeRingBuffer.h"
+
+// WaitFreeFrameBuffer is a wait-free FIFO data structure for audio frames
+// that only supports a single producer and a single consumer
+template<std::size_t Size = 64>
+class WaitFreeFrameBuffer : public WaitFreeRingBuffer<int8_t*, Size>
+{
+ public:
+ /// @brief constructor requires number of bytes per frame
+ /// @param bytesPerFrame number of bytes in each audio frame
+ WaitFreeFrameBuffer(std::size_t bytesPerFrame)
+ : WaitFreeRingBuffer<int8_t*, Size>(), mBytesPerFrame(bytesPerFrame)
+ {
+ for (std::size_t n = 0; n < Size; ++n) {
+ this->mRing[n] = new int8_t[mBytesPerFrame];
+ }
+ }
+
+ /// @brief virtual destructor
+ virtual ~WaitFreeFrameBuffer()
+ {
+ for (std::size_t n = 0; n < Size; ++n) {
+ delete[] this->mRing[n];
+ }
+ }
+
+ /// returns bytes stored in each frame
+ inline std::size_t getBytesPerFrame() const { return mBytesPerFrame; }
+
+ private:
+ virtual void setItem(int8_t*& item, int8_t* const& value)
+ {
+ ::memcpy(item, value, mBytesPerFrame);
+ }
+
+ virtual void getItem(int8_t* const& item, int8_t*& value)
+ {
+ ::memcpy(value, item, mBytesPerFrame);
+ }
+
+ std::size_t mBytesPerFrame;
+};
+
+#endif // __WAITFREEFRAMEBUFFER_H__
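WaitFreeFrameBuffer copies whole frames by value: setItem() and getItem() memcpy bytesPerFrame bytes into and out of slots allocated once in the constructor, so neither side ever holds a pointer into the ring. A minimal single-producer/single-consumer sketch (frame size and slot count are arbitrary example values, not from the patch):

```
#include <cstdint>
#include <vector>

void frameBufferSketch()
{
    const std::size_t bytesPerFrame = 128 * 2 * sizeof(float);  // e.g. 128 stereo float samples
    WaitFreeFrameBuffer<64> fifo(bytesPerFrame);                 // 64 pre-allocated slots

    std::vector<int8_t> inFrame(bytesPerFrame), outFrame(bytesPerFrame);
    int8_t* in  = inFrame.data();
    int8_t* out = outFrame.data();

    // Producer side (e.g. the audio callback): copies bytesPerFrame bytes into the ring.
    if (fifo.push(in) == 0) {
        // overrun: the ring was full and this frame was dropped
    }

    // Consumer side (e.g. a network thread): copies the oldest frame into outFrame.
    if (!fifo.pop(out)) {
        // underrun: no frame available yet
    }
}
```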
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2008-2023 Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+ JackTrip Labs, Inc.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file WaitFreeRingBuffer.h
+ * \author Mike Dickey
+ * \date May 2023
+ */
+
+// Adapted from https://www.boost.org/doc/libs/1_64_0/doc/html/atomic/usage_examples.html
+
+#ifndef __WAITFREERINGBUFFER_H__
+#define __WAITFREERINGBUFFER_H__
+
+#include <atomic>
+
+// WaitFreeRingBuffer is a simple wait-free FIFO data structure
+// that only supports a single producer and a single consumer
+// and loosely tracks xrun stats on a low-cost basis
+template<typename T = int8_t*, std::size_t Size = 64>
+class WaitFreeRingBuffer
+{
+ public:
+ /// @brief default constructor
+ WaitFreeRingBuffer() : mHeadPtr(0), mTailPtr(0), mUnderruns(0), mOverruns(0) {}
+
+ /// @brief virtual destructor
+ virtual ~WaitFreeRingBuffer() {}
+
+ /// @brief push a value into the buffer
+ /// @param value copied into the next free slot in the buffer
+ /// @return new number of items in the buffer if success, or 0 if overrun
+ std::size_t push(const T& value)
+ {
+ std::size_t tail = mTailPtr.load(std::memory_order_acquire);
+ std::size_t head = mHeadPtr.load(std::memory_order_relaxed);
+ std::size_t next_head = next(head);
+ if (next_head == tail) {
+ ++mOverruns;
+ return 0;
+ }
+ setItem(mRing[head], value);
+ mHeadPtr.store(next_head, std::memory_order_release);
+ return (next_head >= tail) ? (next_head - tail) : (next_head + (Size - tail));
+ }
+
+ /// @brief pop a value from the buffer
+ /// @param value assigned the oldest available item in the buffer
+ /// @return true on success, or false if the buffer was empty (underrun)
+ bool pop(T& value)
+ {
+ std::size_t tail = mTailPtr.load(std::memory_order_relaxed);
+ if (tail == mHeadPtr.load(std::memory_order_acquire)) {
+ ++mUnderruns;
+ return false;
+ }
+ getItem(mRing[tail], value);
+ mTailPtr.store(next(tail), std::memory_order_release);
+ return true;
+ }
+
+ /// @brief clear xrun stats only
+ void clearStats() { mUnderruns = mOverruns = 0; }
+
+ /// @brief clear all frames and xrun stats
+ void clear()
+ {
+ mHeadPtr.store(0, std::memory_order_release);
+ mTailPtr.store(0, std::memory_order_release);
+ clearStats();
+ }
+
+ /// returns the number of items in the buffer
+ std::size_t size() const
+ {
+ std::size_t head = mHeadPtr.load(std::memory_order_relaxed);
+ std::size_t tail = mTailPtr.load(std::memory_order_relaxed);
+ return (head >= tail) ? (head - tail) : (head + (Size - tail));
+ }
+
+ /// returns true if the buffer is empty
+ bool empty() const
+ {
+ std::size_t head = mHeadPtr.load(std::memory_order_relaxed);
+ std::size_t tail = mTailPtr.load(std::memory_order_relaxed);
+ return head == tail;
+ }
+
+ /// returns the allocated capacity of the ring; at most Size-1 items can be stored, since one slot is kept empty
+ inline std::size_t capacity() const { return Size; }
+
+ /// returns number of times that a pop failed due to it being empty
+ inline std::size_t getUnderruns() const { return mUnderruns; }
+
+ /// returns number of times that a push failed due to it being full
+ inline std::size_t getOverruns() const { return mOverruns; }
+
+ protected:
+ /// @brief assigns an item in the buffer to value
+ /// @param item
+ /// @param value
+ virtual void setItem(T& item, const T& value) { item = value; }
+
+ /// @brief assigns value to an item in the buffer
+ /// @param item
+ /// @param value
+ virtual void getItem(const T& item, T& value) { value = item; }
+
+ /// items stored in the buffer
+ T mRing[Size];
+
+ private:
+ /// returns the next position in the ring, wrapping around at Size
+ std::size_t next(std::size_t current) { return (current + 1) % Size; }
+
+ /// position of the head of the buffer
+ std::atomic<std::size_t> mHeadPtr;
+
+ /// position of the tail of the buffer
+ std::atomic<std::size_t> mTailPtr;
+
+ /// approximate number of underruns
+ std::size_t mUnderruns;
+
+ /// approximate number of overruns
+ std::size_t mOverruns;
+};
+
+#endif // __WAITFREERINGBUFFER_H__
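The same ring buffer works with plain values as well; the only contract is exactly one pushing thread and exactly one popping thread, since the acquire/release pairs on the head and tail indices are the only synchronization. A minimal sketch (not part of the patch) using a 16-slot queue, which can hold at most 15 items at a time:

```
#include <thread>

void ringBufferSketch()
{
    WaitFreeRingBuffer<int, 16> queue;

    std::thread producer([&queue]() {
        for (int i = 0; i < 1000; ++i) {
            while (queue.push(i) == 0) {
                // full: spin until the consumer frees a slot
            }
        }
    });

    std::thread consumer([&queue]() {
        int value    = 0;
        int received = 0;
        while (received < 1000) {
            if (queue.pop(value)) {
                ++received;  // items arrive in FIFO order
            }
        }
    });

    producer.join();
    consumer.join();
}
```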
property int buttonHeight: 25
property string backgroundColour: virtualstudio.darkMode ? "#272525" : "#FAFBFB"
+ property real imageLightnessValue: virtualstudio.darkMode ? 0.8 : 0.2
property string textColour: virtualstudio.darkMode ? "#FAFBFB" : "#0F0D0D"
property string buttonColour: virtualstudio.darkMode ? "#494646" : "#EAECEC"
property string buttonHoverColour: virtualstudio.darkMode ? "#5B5858" : "#D3D4D4"
property bool isUsingRtAudio: virtualstudio.audioBackend == "RtAudio"
property bool hasNoBackend: !isUsingJack && !isUsingRtAudio && !virtualstudio.backendAvailable;
- property int inputCurrIndex: getCurrentInputDeviceIndex()
- property int outputCurrIndex: getCurrentOutputDeviceIndex()
-
function getCurrentInputDeviceIndex () {
if (virtualstudio.inputDevice === "") {
- return inputComboModel.findIndex(elem => elem.type === "element");
+ return virtualstudio.inputComboModel.findIndex(elem => elem.type === "element");
}
- let idx = inputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.inputDevice);
+ let idx = virtualstudio.inputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.inputDevice);
if (idx < 0) {
- idx = inputComboModel.findIndex(elem => elem.type === "element");
+ idx = virtualstudio.inputComboModel.findIndex(elem => elem.type === "element");
}
return idx;
}
function getCurrentOutputDeviceIndex() {
if (virtualstudio.outputDevice === "") {
- return outputComboModel.findIndex(elem => elem.type === "element");
+ return virtualstudio.outputComboModel.findIndex(elem => elem.type === "element");
}
- let idx = outputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.outputDevice);
+ let idx = virtualstudio.outputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.outputDevice);
if (idx < 0) {
- idx = outputComboModel.findIndex(elem => elem.type === "element");
+ idx = virtualstudio.outputComboModel.findIndex(elem => elem.type === "element");
}
return idx;
}
- Item {
- id: usingRtAudio
- anchors.top: parent.top
- anchors.topMargin: 24 * virtualstudio.uiScale
- anchors.bottom: parent.bottom
- anchors.left: parent.left
- anchors.leftMargin: 24 * virtualstudio.uiScale
- anchors.right: parent.right
-
- visible: parent.isUsingRtAudio
-
- Rectangle {
- id: leftSpacer
- x: 0; y: 0
- width: 144 * virtualstudio.uiScale
- height: 0
- color: "transparent"
- }
-
- Text {
- id: outputLabel
- x: 0; y: 0
- text: "Output Device"
- font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- color: textColour
- }
+ Loader {
+ anchors.fill: parent
+ sourceComponent: isUsingRtAudio ? usingRtAudio : (isUsingJack ? usingJACK : noBackend)
+ }
- Image {
- id: outputHelpIcon
- anchors.left: outputLabel.right
- anchors.bottom: outputLabel.top
- anchors.bottomMargin: -8 * virtualstudio.uiScale
- source: "help.svg"
- sourceSize: Qt.size(12 * virtualstudio.uiScale, 12 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Component {
+ id: usingRtAudio
- property bool showToolTip: false
+ Item {
+ anchors.top: parent.top
+ anchors.topMargin: 24 * virtualstudio.uiScale
+ anchors.bottom: parent.bottom
+ anchors.left: parent.left
+ anchors.leftMargin: 24 * virtualstudio.uiScale
+ anchors.right: parent.right
- Colorize {
- anchors.fill: parent
- source: parent
- hue: 0
- saturation: 0
- lightness: virtualstudio.darkMode ? 0.8 : 0.2
+ Rectangle {
+ id: leftSpacer
+ x: 0; y: 0
+ width: 144 * virtualstudio.uiScale
+ height: 0
+ color: "transparent"
}
- MouseArea {
- id: outputMouseArea
- anchors.fill: parent
- hoverEnabled: true
- onEntered: outputHelpIcon.showToolTip = true
- onExited: outputHelpIcon.showToolTip = false
+ Text {
+ id: outputLabel
+ x: 0; y: 0
+ text: "Output Device"
+ font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ color: textColour
}
- ToolTip {
- visible: outputHelpIcon.showToolTip
- contentItem: Rectangle {
- color: toolTipBackgroundColour
- radius: 3
+ Image {
+ id: outputHelpIcon
+ anchors.left: outputLabel.right
+ anchors.bottom: outputLabel.top
+ anchors.bottomMargin: -8 * virtualstudio.uiScale
+ source: "help.svg"
+ sourceSize: Qt.size(12 * virtualstudio.uiScale, 12 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+
+ property bool showToolTip: false
+
+ MouseArea {
+ id: outputMouseArea
anchors.fill: parent
- anchors.bottomMargin: bottomToolTipMargin * virtualstudio.uiScale
- anchors.rightMargin: rightToolTipMargin * virtualstudio.uiScale
- layer.enabled: true
- border.width: 1
- border.color: buttonStroke
+ hoverEnabled: true
+ onEntered: outputHelpIcon.showToolTip = true
+ onExited: outputHelpIcon.showToolTip = false
+ }
- Text {
- anchors.centerIn: parent
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale}
- text: qsTr("How you'll hear the studio audio")
- color: toolTipTextColour
+ ToolTip {
+ visible: outputHelpIcon.showToolTip
+ contentItem: Rectangle {
+ color: toolTipBackgroundColour
+ radius: 3
+ anchors.fill: parent
+ anchors.bottomMargin: bottomToolTipMargin * virtualstudio.uiScale
+ anchors.rightMargin: rightToolTipMargin * virtualstudio.uiScale
+ layer.enabled: true
+ border.width: 1
+ border.color: buttonStroke
+
+ Text {
+ anchors.centerIn: parent
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale}
+ text: qsTr("How you'll hear the studio audio")
+ color: toolTipTextColour
+ }
+ }
+ background: Rectangle {
+ color: "transparent"
}
- }
- background: Rectangle {
- color: "transparent"
}
}
- }
- Image {
- id: headphonesIcon
- anchors.left: outputLabel.left
- anchors.verticalCenter: outputDeviceMeters.verticalCenter
- source: "headphones.svg"
- sourceSize: Qt.size(28 * virtualstudio.uiScale, 28 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Colorize {
+ anchors.fill: outputHelpIcon
+ source: outputHelpIcon
+ hue: 0
+ saturation: 0
+ lightness: imageLightnessValue
+ }
+
+ Image {
+ id: headphonesIcon
+ anchors.left: outputLabel.left
+ anchors.verticalCenter: outputDeviceMeters.verticalCenter
+ source: "headphones.svg"
+ sourceSize: Qt.size(28 * virtualstudio.uiScale, 28 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: headphonesIcon
+ source: headphonesIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- ComboBox {
- id: outputCombo
- anchors.left: leftSpacer.right
- anchors.verticalCenter: outputLabel.verticalCenter
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- width: parent.width - leftSpacer.width - rightMargin * virtualstudio.uiScale
- model: outputComboModel
- currentIndex: outputCurrIndex
- delegate: ItemDelegate {
- required property var modelData
- required property int index
-
- leftPadding: 0
-
- width: parent.width
- contentItem: Text {
- leftPadding: modelData.type === "element" && outputCombo.model.filter(it => it.type === "header").length > 0 ? 24 : 12
- text: modelData.text
- font.bold: modelData.type === "header"
- }
- highlighted: outputCombo.highlightedIndex === index
- MouseArea {
- anchors.fill: parent
- onClicked: {
- if (modelData.type == "element") {
- outputCombo.currentIndex = index
- outputCombo.popup.close()
- virtualstudio.outputDevice = modelData.text
- if (modelData.category === "Low-Latency (ASIO)") {
- let inputComboIdx = inputCombo.model.findIndex(it => it.category === "Low-Latency (ASIO)" && it.text === modelData.text);
- if (inputComboIdx !== null && inputComboIdx !== undefined) {
- inputCombo.currentIndex = inputComboIdx;
- virtualstudio.inputDevice = modelData.text
+ ComboBox {
+ id: outputCombo
+ anchors.left: leftSpacer.right
+ anchors.verticalCenter: outputLabel.verticalCenter
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ width: parent.width - leftSpacer.width - rightMargin * virtualstudio.uiScale
+ model: virtualstudio.outputComboModel
+ currentIndex: getCurrentOutputDeviceIndex()
+ delegate: ItemDelegate {
+ required property var modelData
+ required property int index
+
+ leftPadding: 0
+
+ width: parent.width
+ contentItem: Text {
+ leftPadding: modelData.type === "element" && outputCombo.model.filter(it => it.type === "header").length > 0 ? 24 : 12
+ text: modelData.text
+ font.bold: modelData.type === "header"
+ }
+ highlighted: outputCombo.highlightedIndex === index
+ MouseArea {
+ anchors.fill: parent
+ onClicked: {
+ if (modelData.type == "element") {
+ outputCombo.currentIndex = index
+ outputCombo.popup.close()
+ virtualstudio.outputDevice = modelData.text
+ if (modelData.category === "Low-Latency (ASIO)") {
+ let inputComboIdx = inputCombo.model.findIndex(it => it.category === "Low-Latency (ASIO)" && it.text === modelData.text);
+ if (inputComboIdx !== null && inputComboIdx !== undefined) {
+ inputCombo.currentIndex = inputComboIdx;
+ virtualstudio.inputDevice = modelData.text
+ }
}
+ virtualstudio.restartAudio()
+ virtualstudio.validateDevicesState()
}
- virtualstudio.restartAudio()
- virtualstudio.validateDevicesState()
}
}
}
+ contentItem: Text {
+ leftPadding: 12
+ font: outputCombo.font
+ horizontalAlignment: Text.AlignHLeft
+ verticalAlignment: Text.AlignVCenter
+ elide: Text.ElideRight
+ text: outputCombo.model[outputCombo.currentIndex] != undefined && outputCombo.model[outputCombo.currentIndex].text ? outputCombo.model[outputCombo.currentIndex].text : ""
+ }
}
- contentItem: Text {
- leftPadding: 12
- font: outputCombo.font
- horizontalAlignment: Text.AlignHLeft
- verticalAlignment: Text.AlignVCenter
- elide: Text.ElideRight
- text: outputCombo.model[outputCombo.currentIndex].text ? outputCombo.model[outputCombo.currentIndex].text : ""
+
+ Meter {
+ id: outputDeviceMeters
+ anchors.left: outputCombo.left
+ anchors.right: outputCombo.right
+ anchors.top: outputCombo.bottom
+ anchors.topMargin: 16 * virtualstudio.uiScale
+ height: 24 * virtualstudio.uiScale
+ model: virtualstudio.outputMeterLevels
+ clipped: virtualstudio.outputClipped
+ enabled: virtualstudio.audioReady && !Boolean(virtualstudio.devicesError)
}
- }
- Meter {
- id: outputDeviceMeters
- anchors.left: outputCombo.left
- anchors.right: outputCombo.right
- anchors.top: outputCombo.bottom
- anchors.topMargin: 16 * virtualstudio.uiScale
- height: 24 * virtualstudio.uiScale
- model: outputMeterModel
- clipped: outputClipped
- enabled: virtualstudio.audioReady && !Boolean(virtualstudio.devicesError)
- }
+ Slider {
+ id: outputSlider
+ from: 0.0
+ value: audioInterface ? audioInterface.outputVolume : 0.5
+ onMoved: { audioInterface.outputVolume = value }
+ to: 1.0
+ padding: 0
+ anchors.left: outputQuieterIcon.right
+ anchors.leftMargin: 8 * virtualstudio.uiScale
+ anchors.right: outputLouderIcon.left
+ anchors.rightMargin: 8 * virtualstudio.uiScale
+ anchors.top: outputDeviceMeters.bottom
+ anchors.topMargin: 16 * virtualstudio.uiScale
- Slider {
- id: outputSlider
- from: 0.0
- value: audioInterface ? audioInterface.outputVolume : 0.5
- onMoved: { audioInterface.outputVolume = value }
- to: 1.0
- padding: 0
- anchors.left: outputQuieterIcon.right
- anchors.leftMargin: 8 * virtualstudio.uiScale
- anchors.right: outputLouderIcon.left
- anchors.rightMargin: 8 * virtualstudio.uiScale
- anchors.top: outputDeviceMeters.bottom
- anchors.topMargin: 16 * virtualstudio.uiScale
-
- background: Rectangle {
- x: outputSlider.leftPadding
- y: outputSlider.topPadding + outputSlider.availableHeight / 2 - height / 2
- implicitWidth: parent.width
- implicitHeight: 6
- width: outputSlider.availableWidth
- height: implicitHeight
- radius: 4
- color: sliderTrackColour
-
- Rectangle {
- width: outputSlider.visualPosition * parent.width
- height: parent.height
- color: sliderActiveTrackColour
+ background: Rectangle {
+ x: outputSlider.leftPadding
+ y: outputSlider.topPadding + outputSlider.availableHeight / 2 - height / 2
+ implicitWidth: parent.width
+ implicitHeight: 6
+ width: outputSlider.availableWidth
+ height: implicitHeight
radius: 4
+ color: sliderTrackColour
+
+ Rectangle {
+ width: outputSlider.visualPosition * parent.width
+ height: parent.height
+ color: sliderActiveTrackColour
+ radius: 4
+ }
}
- }
- handle: Rectangle {
- x: outputSlider.leftPadding + outputSlider.visualPosition * (outputSlider.availableWidth - width)
- y: outputSlider.topPadding + outputSlider.availableHeight / 2 - height / 2
- implicitWidth: 26 * virtualstudio.uiScale
- implicitHeight: 26 * virtualstudio.uiScale
- radius: 13 * virtualstudio.uiScale
- color: outputSlider.pressed ? sliderPressedColour : sliderColour
- border.color: buttonStroke
+ handle: Rectangle {
+ x: outputSlider.leftPadding + outputSlider.visualPosition * (outputSlider.availableWidth - width)
+ y: outputSlider.topPadding + outputSlider.availableHeight / 2 - height / 2
+ implicitWidth: 26 * virtualstudio.uiScale
+ implicitHeight: 26 * virtualstudio.uiScale
+ radius: 13 * virtualstudio.uiScale
+ color: outputSlider.pressed ? sliderPressedColour : sliderColour
+ border.color: buttonStroke
+ }
}
- }
- Image {
- id: outputQuieterIcon
- anchors.left: outputCombo.left
- anchors.verticalCenter: outputSlider.verticalCenter
- source: "quiet.svg"
- sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Image {
+ id: outputQuieterIcon
+ anchors.left: outputCombo.left
+ anchors.verticalCenter: outputSlider.verticalCenter
+ source: "quiet.svg"
+ sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: outputQuieterIcon
+ source: outputQuieterIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- Image {
- id: outputLouderIcon
- anchors.right: parent.right
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: outputSlider.verticalCenter
- source: "loud.svg"
- sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Image {
+ id: outputLouderIcon
+ anchors.right: parent.right
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: outputSlider.verticalCenter
+ source: "loud.svg"
+ sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: outputLouderIcon
+ source: outputLouderIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- Text {
- id: outputChannelsLabel
- anchors.left: outputCombo.left
- anchors.right: outputCombo.horizontalCenter
- anchors.top: outputSlider.bottom
- anchors.topMargin: 12 * virtualstudio.uiScale
- textFormat: Text.RichText
- text: "Output Channel(s)"
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- color: textColour
- }
+ Text {
+ id: outputChannelsLabel
+ anchors.left: outputCombo.left
+ anchors.right: outputCombo.horizontalCenter
+ anchors.top: outputSlider.bottom
+ anchors.topMargin: 12 * virtualstudio.uiScale
+ textFormat: Text.RichText
+ text: "Output Channel(s)"
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ color: textColour
+ }
- ComboBox {
- id: outputChannelsCombo
- anchors.left: outputCombo.left
- anchors.right: outputCombo.horizontalCenter
- anchors.rightMargin: 8 * virtualstudio.uiScale
- anchors.top: outputChannelsLabel.bottom
- anchors.topMargin: 4 * virtualstudio.uiScale
- model: outputChannelsComboModel
- currentIndex: (() => {
- let idx = outputChannelsComboModel.findIndex(elem => elem.baseChannel === virtualstudio.baseOutputChannel
- && elem.numChannels === virtualstudio.numOutputChannels);
- if (idx < 0) {
- idx = 0;
+ ComboBox {
+ id: outputChannelsCombo
+ anchors.left: outputCombo.left
+ anchors.right: outputCombo.horizontalCenter
+ anchors.rightMargin: 8 * virtualstudio.uiScale
+ anchors.top: outputChannelsLabel.bottom
+ anchors.topMargin: 4 * virtualstudio.uiScale
+ model: virtualstudio.outputChannelsComboModel
+ currentIndex: (() => {
+ let idx = virtualstudio.outputChannelsComboModel.findIndex(elem => elem.baseChannel === virtualstudio.baseOutputChannel
+ && elem.numChannels === virtualstudio.numOutputChannels);
+ if (idx < 0) {
+ idx = 0;
+ }
+ return idx;
+ })()
+ delegate: ItemDelegate {
+ required property var modelData
+ required property int index
+ width: parent.width
+ contentItem: Text {
+ text: modelData.label
+ }
+ highlighted: outputChannelsCombo.highlightedIndex === index
+ MouseArea {
+ anchors.fill: parent
+ onClicked: {
+ outputChannelsCombo.currentIndex = index
+ outputChannelsCombo.popup.close()
+ virtualstudio.baseOutputChannel = modelData.baseChannel
+ virtualstudio.numOutputChannels = modelData.numChannels
+ virtualstudio.validateDevicesState()
+ }
+ }
}
- return idx;
- })()
- delegate: ItemDelegate {
- required property var modelData
- required property int index
- width: parent.width
contentItem: Text {
- text: modelData.label
- }
- highlighted: outputChannelsCombo.highlightedIndex === index
- MouseArea {
- anchors.fill: parent
- onClicked: {
- outputChannelsCombo.currentIndex = index
- outputChannelsCombo.popup.close()
- virtualstudio.baseOutputChannel = modelData.baseChannel
- virtualstudio.numOutputChannels = modelData.numChannels
- virtualstudio.validateDevicesState()
- }
+ leftPadding: 12
+ font: inputCombo.font
+ horizontalAlignment: Text.AlignHLeft
+ verticalAlignment: Text.AlignVCenter
+ elide: Text.ElideRight
+ text: outputChannelsCombo.model[outputChannelsCombo.currentIndex].label || ""
}
}
- contentItem: Text {
- leftPadding: 12
- font: inputCombo.font
- horizontalAlignment: Text.AlignHLeft
- verticalAlignment: Text.AlignVCenter
- elide: Text.ElideRight
- text: outputChannelsCombo.model[outputChannelsCombo.currentIndex].label || ""
+
+ Button {
+ id: testOutputAudioButton
+ background: Rectangle {
+ radius: 6 * virtualstudio.uiScale
+ color: testOutputAudioButton.down ? buttonPressedColour : (testOutputAudioButton.hovered ? buttonHoverColour : buttonColour)
+ border.width: 1
+ border.color: testOutputAudioButton.down || testOutputAudioButton.hovered ? buttonPressedStroke : (testOutputAudioButton.hovered ? buttonHoverStroke : buttonStroke)
+ }
+ onClicked: { virtualstudio.playOutputAudio() }
+ anchors.right: parent.right
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: outputChannelsCombo.verticalCenter
+ width: 144 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
+ Text {
+ text: "Play Test Tone"
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ anchors { horizontalCenter: parent.horizontalCenter; verticalCenter: parent.verticalCenter }
+ color: textColour
+ }
}
- }
- Button {
- id: testOutputAudioButton
- background: Rectangle {
- radius: 6 * virtualstudio.uiScale
- color: testOutputAudioButton.down ? buttonPressedColour : (testOutputAudioButton.hovered ? buttonHoverColour : buttonColour)
- border.width: 1
- border.color: testOutputAudioButton.down || testOutputAudioButton.hovered ? buttonPressedStroke : (testOutputAudioButton.hovered ? buttonHoverStroke : buttonStroke)
+ Rectangle {
+ id: divider1
+ anchors.top: testOutputAudioButton.bottom
+ anchors.topMargin: 24 * virtualstudio.uiScale
+ width: parent.width - x - (16 * virtualstudio.uiScale); height: 2 * virtualstudio.uiScale
+ color: "#E0E0E0"
}
- onClicked: { virtualstudio.playOutputAudio() }
- anchors.right: parent.right
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: outputChannelsCombo.verticalCenter
- width: 144 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
+
Text {
- text: "Play Test Tone"
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- anchors { horizontalCenter: parent.horizontalCenter; verticalCenter: parent.verticalCenter }
+ id: inputLabel
+ anchors.left: outputLabel.left
+ anchors.top: divider1.bottom
+ anchors.topMargin: 32 * virtualstudio.uiScale
+ text: "Input Device"
+ font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
color: textColour
}
- }
- Rectangle {
- id: divider1
- anchors.top: testOutputAudioButton.bottom
- anchors.topMargin: 24 * virtualstudio.uiScale
- width: parent.width - x - (16 * virtualstudio.uiScale); height: 2 * virtualstudio.uiScale
- color: "#E0E0E0"
- }
+ Image {
+ id: inputHelpIcon
+ anchors.left: inputLabel.right
+ anchors.bottom: inputLabel.top
+ anchors.bottomMargin: -8 * virtualstudio.uiScale
+ source: "help.svg"
+ sourceSize: Qt.size(12 * virtualstudio.uiScale, 12 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
- Text {
- id: inputLabel
- anchors.left: outputLabel.left
- anchors.top: divider1.bottom
- anchors.topMargin: 32 * virtualstudio.uiScale
- text: "Input Device"
- font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- color: textColour
- }
+ property bool showToolTip: false
- Image {
- id: inputHelpIcon
- anchors.left: inputLabel.right
- anchors.bottom: inputLabel.top
- anchors.bottomMargin: -8 * virtualstudio.uiScale
- source: "help.svg"
- sourceSize: Qt.size(12 * virtualstudio.uiScale, 12 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ MouseArea {
+ id: inputMouseArea
+ anchors.fill: parent
+ hoverEnabled: true
+ onEntered: inputHelpIcon.showToolTip = true
+ onExited: inputHelpIcon.showToolTip = false
+ }
- property bool showToolTip: false
+ ToolTip {
+ visible: inputHelpIcon.showToolTip
+ contentItem: Rectangle {
+ color: toolTipBackgroundColour
+ radius: 3
+ anchors.fill: parent
+ anchors.bottomMargin: bottomToolTipMargin * virtualstudio.uiScale
+ anchors.rightMargin: rightToolTipMargin * virtualstudio.uiScale
+ layer.enabled: true
+ border.width: 1
+ border.color: buttonStroke
+
+ Text {
+ anchors.centerIn: parent
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale}
+ text: qsTr("Audio sent to the studio (microphone, instrument, mixer, etc.)")
+ color: toolTipTextColour
+ }
+ }
+ background: Rectangle {
+ color: "transparent"
+ }
+ }
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: inputHelpIcon
+ source: inputHelpIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 0.8 : 0.2
+ lightness: imageLightnessValue
}
- MouseArea {
- id: inputMouseArea
- anchors.fill: parent
- hoverEnabled: true
- onEntered: inputHelpIcon.showToolTip = true
- onExited: inputHelpIcon.showToolTip = false
+ Image {
+ id: microphoneIcon
+ anchors.left: outputLabel.left
+ anchors.verticalCenter: inputDeviceMeters.verticalCenter
+ source: "mic.svg"
+ sourceSize: Qt.size(32 * virtualstudio.uiScale, 32 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
}
- ToolTip {
- visible: inputHelpIcon.showToolTip
- contentItem: Rectangle {
- color: toolTipBackgroundColour
- radius: 3
- anchors.fill: parent
- anchors.bottomMargin: bottomToolTipMargin * virtualstudio.uiScale
- anchors.rightMargin: rightToolTipMargin * virtualstudio.uiScale
- layer.enabled: true
- border.width: 1
- border.color: buttonStroke
-
- Text {
- anchors.centerIn: parent
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale}
- text: qsTr("Audio sent to the studio (microphone, instrument, mixer, etc.)")
- color: toolTipTextColour
- }
- }
- background: Rectangle {
- color: "transparent"
- }
- }
- }
-
- Image {
- id: microphoneIcon
- anchors.left: outputLabel.left
- anchors.verticalCenter: inputDeviceMeters.verticalCenter
- source: "mic.svg"
- sourceSize: Qt.size(32 * virtualstudio.uiScale, 32 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
-
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: microphoneIcon
+ source: microphoneIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
-
- ComboBox {
- id: inputCombo
- model: inputComboModel
- currentIndex: inputCurrIndex
- anchors.left: outputCombo.left
- anchors.right: outputCombo.right
- anchors.verticalCenter: inputLabel.verticalCenter
- delegate: ItemDelegate {
- required property var modelData
- required property int index
- leftPadding: 0
-
- width: parent.width
- contentItem: Text {
- leftPadding: modelData.type === "element" && inputCombo.model.filter(it => it.type === "header").length > 0 ? 24 : 12
- text: modelData.text
- font.bold: modelData.type === "header"
- }
- highlighted: inputCombo.highlightedIndex === index
- MouseArea {
- anchors.fill: parent
- onClicked: {
- if (modelData.type == "element") {
- inputCombo.currentIndex = index
- inputCombo.popup.close()
- virtualstudio.inputDevice = modelData.text
- if (modelData.category === "Low-Latency (ASIO)") {
- let outputComboIdx = outputCombo.model.findIndex(it => it.category === "Low-Latency (ASIO)" && it.text === modelData.text);
- if (outputComboIdx !== null && outputComboIdx !== undefined) {
- outputCombo.currentIndex = outputComboIdx;
- virtualstudio.outputDevice = modelData.text
+ ComboBox {
+ id: inputCombo
+ model: virtualstudio.inputComboModel
+ currentIndex: getCurrentInputDeviceIndex()
+ anchors.left: outputCombo.left
+ anchors.right: outputCombo.right
+ anchors.verticalCenter: inputLabel.verticalCenter
+ delegate: ItemDelegate {
+ required property var modelData
+ required property int index
+
+ leftPadding: 0
+
+ width: parent.width
+ contentItem: Text {
+ leftPadding: modelData.type === "element" && inputCombo.model.filter(it => it.type === "header").length > 0 ? 24 : 12
+ text: modelData.text
+ font.bold: modelData.type === "header"
+ }
+ highlighted: inputCombo.highlightedIndex === index
+ MouseArea {
+ anchors.fill: parent
+ onClicked: {
+ if (modelData.type == "element") {
+ inputCombo.currentIndex = index
+ inputCombo.popup.close()
+ virtualstudio.inputDevice = modelData.text
+ if (modelData.category === "Low-Latency (ASIO)") {
+ let outputComboIdx = outputCombo.model.findIndex(it => it.category === "Low-Latency (ASIO)" && it.text === modelData.text);
+ if (outputComboIdx !== null && outputComboIdx !== undefined) {
+ outputCombo.currentIndex = outputComboIdx;
+ virtualstudio.outputDevice = modelData.text
+ }
}
+ virtualstudio.restartAudio()
+ virtualstudio.validateDevicesState()
}
- virtualstudio.restartAudio()
- virtualstudio.validateDevicesState()
}
}
}
+ contentItem: Text {
+ leftPadding: 12
+ font: inputCombo.font
+ horizontalAlignment: Text.AlignHLeft
+ verticalAlignment: Text.AlignVCenter
+ elide: Text.ElideRight
+ text: inputCombo.model[inputCombo.currentIndex] != undefined && inputCombo.model[inputCombo.currentIndex].text ? inputCombo.model[inputCombo.currentIndex].text : ""
+ }
}
- contentItem: Text {
- leftPadding: 12
- font: inputCombo.font
- horizontalAlignment: Text.AlignHLeft
- verticalAlignment: Text.AlignVCenter
- elide: Text.ElideRight
- text: inputCombo.model[inputCombo.currentIndex].text ? inputCombo.model[inputCombo.currentIndex].text : ""
+
+ Meter {
+ id: inputDeviceMeters
+ anchors.left: inputCombo.left
+ anchors.right: inputCombo.right
+ anchors.top: inputCombo.bottom
+ anchors.topMargin: 16 * virtualstudio.uiScale
+ height: 24 * virtualstudio.uiScale
+ model: virtualstudio.inputMeterLevels
+ clipped: virtualstudio.inputClipped
+ enabled: virtualstudio.audioReady && !Boolean(virtualstudio.devicesError)
}
- }
- Meter {
- id: inputDeviceMeters
- anchors.left: inputCombo.left
- anchors.right: inputCombo.right
- anchors.top: inputCombo.bottom
- anchors.topMargin: 16 * virtualstudio.uiScale
- height: 24 * virtualstudio.uiScale
- model: inputMeterModel
- clipped: inputClipped
- enabled: virtualstudio.audioReady && !Boolean(virtualstudio.devicesError)
- }
+ Slider {
+ id: inputSlider
+ from: 0.0
+ value: audioInterface ? audioInterface.inputVolume : 0.5
+ onMoved: { audioInterface.inputVolume = value }
+ to: 1.0
+ padding: 0
+ anchors.left: inputQuieterIcon.right
+ anchors.leftMargin: 8 * virtualstudio.uiScale
+ anchors.right: inputLouderIcon.left
+ anchors.rightMargin: 8 * virtualstudio.uiScale
+ anchors.top: inputDeviceMeters.bottom
+ anchors.topMargin: 16 * virtualstudio.uiScale
- Slider {
- id: inputSlider
- from: 0.0
- value: audioInterface ? audioInterface.inputVolume : 0.5
- onMoved: { audioInterface.inputVolume = value }
- to: 1.0
- padding: 0
- anchors.left: inputQuieterIcon.right
- anchors.leftMargin: 8 * virtualstudio.uiScale
- anchors.right: inputLouderIcon.left
- anchors.rightMargin: 8 * virtualstudio.uiScale
- anchors.top: inputDeviceMeters.bottom
- anchors.topMargin: 16 * virtualstudio.uiScale
-
- background: Rectangle {
- x: inputSlider.leftPadding
- y: inputSlider.topPadding + inputSlider.availableHeight / 2 - height / 2
- implicitWidth: parent.width
- implicitHeight: 6
- width: inputSlider.availableWidth
- height: implicitHeight
- radius: 4
- color: sliderTrackColour
-
- Rectangle {
- width: inputSlider.visualPosition * parent.width
- height: parent.height
- color: sliderActiveTrackColour
+ background: Rectangle {
+ x: inputSlider.leftPadding
+ y: inputSlider.topPadding + inputSlider.availableHeight / 2 - height / 2
+ implicitWidth: parent.width
+ implicitHeight: 6
+ width: inputSlider.availableWidth
+ height: implicitHeight
radius: 4
+ color: sliderTrackColour
+
+ Rectangle {
+ width: inputSlider.visualPosition * parent.width
+ height: parent.height
+ color: sliderActiveTrackColour
+ radius: 4
+ }
}
- }
- handle: Rectangle {
- x: inputSlider.leftPadding + inputSlider.visualPosition * (inputSlider.availableWidth - width)
- y: inputSlider.topPadding + inputSlider.availableHeight / 2 - height / 2
- implicitWidth: 26 * virtualstudio.uiScale
- implicitHeight: 26 * virtualstudio.uiScale
- radius: 13 * virtualstudio.uiScale
- color: inputSlider.pressed ? sliderPressedColour : sliderColour
- border.color: buttonStroke
+ handle: Rectangle {
+ x: inputSlider.leftPadding + inputSlider.visualPosition * (inputSlider.availableWidth - width)
+ y: inputSlider.topPadding + inputSlider.availableHeight / 2 - height / 2
+ implicitWidth: 26 * virtualstudio.uiScale
+ implicitHeight: 26 * virtualstudio.uiScale
+ radius: 13 * virtualstudio.uiScale
+ color: inputSlider.pressed ? sliderPressedColour : sliderColour
+ border.color: buttonStroke
+ }
}
- }
- Image {
- id: inputQuieterIcon
- anchors.left: inputDeviceMeters.left
- anchors.verticalCenter: inputSlider.verticalCenter
- source: "quiet.svg"
- sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Image {
+ id: inputQuieterIcon
+ anchors.left: inputDeviceMeters.left
+ anchors.verticalCenter: inputSlider.verticalCenter
+ source: "quiet.svg"
+ sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: inputQuieterIcon
+ source: inputQuieterIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- Image {
- id: inputLouderIcon
- anchors.right: parent.right
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: inputSlider.verticalCenter
- source: "loud.svg"
- sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Image {
+ id: inputLouderIcon
+ anchors.right: parent.right
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: inputSlider.verticalCenter
+ source: "loud.svg"
+ sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: inputLouderIcon
+ source: inputLouderIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- Button {
- id: hiddenInputButton
- anchors.right: parent.right
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: inputSlider.verticalCenter
- width: 144 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
- visible: false
- }
+ Button {
+ id: hiddenInputButton
+ anchors.right: parent.right
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: inputSlider.verticalCenter
+ width: 144 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
+ visible: false
+ }
- Text {
- id: inputChannelsLabel
- anchors.left: inputCombo.left
- anchors.right: inputCombo.horizontalCenter
- anchors.top: inputSlider.bottom
- anchors.topMargin: 12 * virtualstudio.uiScale
- textFormat: Text.RichText
- text: "Input Channel(s)"
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- color: textColour
- }
+ Text {
+ id: inputChannelsLabel
+ anchors.left: inputCombo.left
+ anchors.right: inputCombo.horizontalCenter
+ anchors.top: inputSlider.bottom
+ anchors.topMargin: 12 * virtualstudio.uiScale
+ textFormat: Text.RichText
+ text: "Input Channel(s)"
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ color: textColour
+ }
- ComboBox {
- id: inputChannelsCombo
- anchors.left: inputCombo.left
- anchors.right: inputCombo.horizontalCenter
- anchors.rightMargin: 8 * virtualstudio.uiScale
- anchors.top: inputChannelsLabel.bottom
- anchors.topMargin: 4 * virtualstudio.uiScale
- model: inputChannelsComboModel
- currentIndex: (() => {
- let idx = inputChannelsComboModel.findIndex(elem => elem.baseChannel === virtualstudio.baseInputChannel
- && elem.numChannels === virtualstudio.numInputChannels);
- if (idx < 0) {
- idx = 0;
+ ComboBox {
+ id: inputChannelsCombo
+ anchors.left: inputCombo.left
+ anchors.right: inputCombo.horizontalCenter
+ anchors.rightMargin: 8 * virtualstudio.uiScale
+ anchors.top: inputChannelsLabel.bottom
+ anchors.topMargin: 4 * virtualstudio.uiScale
+ model: virtualstudio.inputChannelsComboModel
+ currentIndex: (() => {
+ let idx = virtualstudio.inputChannelsComboModel.findIndex(elem => elem.baseChannel === virtualstudio.baseInputChannel
+ && elem.numChannels === virtualstudio.numInputChannels);
+ if (idx < 0) {
+ idx = 0;
+ }
+ return idx;
+ })()
+ delegate: ItemDelegate {
+ required property var modelData
+ required property int index
+ width: parent.width
+ contentItem: Text {
+ text: modelData.label
+ }
+ highlighted: inputChannelsCombo.highlightedIndex === index
+ MouseArea {
+ anchors.fill: parent
+ onClicked: {
+ inputChannelsCombo.currentIndex = index
+ inputChannelsCombo.popup.close()
+ virtualstudio.baseInputChannel = modelData.baseChannel
+ virtualstudio.numInputChannels = modelData.numChannels
+ virtualstudio.validateDevicesState()
+ }
+ }
}
- return idx;
- })()
- delegate: ItemDelegate {
- required property var modelData
- required property int index
- width: parent.width
contentItem: Text {
- text: modelData.label
- }
- highlighted: inputChannelsCombo.highlightedIndex === index
- MouseArea {
- anchors.fill: parent
- onClicked: {
- inputChannelsCombo.currentIndex = index
- inputChannelsCombo.popup.close()
- virtualstudio.baseInputChannel = modelData.baseChannel
- virtualstudio.numInputChannels = modelData.numChannels
- virtualstudio.validateDevicesState()
- }
+ leftPadding: 12
+ font: inputCombo.font
+ horizontalAlignment: Text.AlignHLeft
+ verticalAlignment: Text.AlignVCenter
+ elide: Text.ElideRight
+ text: inputChannelsCombo.model[inputChannelsCombo.currentIndex].label || ""
}
}
- contentItem: Text {
- leftPadding: 12
- font: inputCombo.font
- horizontalAlignment: Text.AlignHLeft
- verticalAlignment: Text.AlignVCenter
- elide: Text.ElideRight
- text: inputChannelsCombo.model[inputChannelsCombo.currentIndex].label || ""
- }
- }
- Text {
- id: inputMixModeLabel
- anchors.left: inputCombo.horizontalCenter
- anchors.right: inputCombo.right
- anchors.rightMargin: 8 * virtualstudio.uiScale
- anchors.top: inputSlider.bottom
- anchors.topMargin: 12 * virtualstudio.uiScale
- textFormat: Text.RichText
- text: "Mono / Stereo"
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- color: textColour
- }
+ Text {
+ id: inputMixModeLabel
+ anchors.left: inputCombo.horizontalCenter
+ anchors.right: inputCombo.right
+ anchors.rightMargin: 8 * virtualstudio.uiScale
+ anchors.top: inputSlider.bottom
+ anchors.topMargin: 12 * virtualstudio.uiScale
+ textFormat: Text.RichText
+ text: "Mono / Stereo"
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ color: textColour
+ }
- ComboBox {
- id: inputMixModeCombo
- anchors.left: inputCombo.horizontalCenter
- anchors.right: inputCombo.right
- anchors.rightMargin: 8 * virtualstudio.uiScale
- anchors.top: inputMixModeLabel.bottom
- anchors.topMargin: 4 * virtualstudio.uiScale
- model: inputMixModeComboModel
- currentIndex: (() => {
- let idx = inputMixModeComboModel.findIndex(elem => elem.value === virtualstudio.inputMixMode);
- if (idx < 0) {
- idx = 0;
+ ComboBox {
+ id: inputMixModeCombo
+ anchors.left: inputCombo.horizontalCenter
+ anchors.right: inputCombo.right
+ anchors.rightMargin: 8 * virtualstudio.uiScale
+ anchors.top: inputMixModeLabel.bottom
+ anchors.topMargin: 4 * virtualstudio.uiScale
+ model: virtualstudio.inputMixModeComboModel
+ currentIndex: (() => {
+ let idx = virtualstudio.inputMixModeComboModel.findIndex(elem => elem.value === virtualstudio.inputMixMode);
+ if (idx < 0) {
+ idx = 0;
+ }
+ return idx;
+ })()
+ delegate: ItemDelegate {
+ required property var modelData
+ required property int index
+ width: parent.width
+ contentItem: Text {
+ text: modelData.label
+ }
+ highlighted: inputMixModeCombo.highlightedIndex === index
+ MouseArea {
+ anchors.fill: parent
+ onClicked: {
+ inputMixModeCombo.currentIndex = index
+ inputMixModeCombo.popup.close()
+ virtualstudio.inputMixMode = virtualstudio.inputMixModeComboModel[index].value
+ virtualstudio.validateDevicesState()
+ }
+ }
}
- return idx;
- })()
- delegate: ItemDelegate {
- required property var modelData
- required property int index
- width: parent.width
contentItem: Text {
- text: modelData.label
- }
- highlighted: inputMixModeCombo.highlightedIndex === index
- MouseArea {
- anchors.fill: parent
- onClicked: {
- inputMixModeCombo.currentIndex = index
- inputMixModeCombo.popup.close()
- virtualstudio.inputMixMode = inputMixModeComboModel[index].value
- virtualstudio.validateDevicesState()
- }
+ leftPadding: 12
+ font: inputCombo.font
+ horizontalAlignment: Text.AlignLeft
+ verticalAlignment: Text.AlignVCenter
+ elide: Text.ElideRight
+ text: inputMixModeCombo.model[inputMixModeCombo.currentIndex].label || ""
}
}
- contentItem: Text {
- leftPadding: 12
- font: inputCombo.font
- horizontalAlignment: Text.AlignHLeft
- verticalAlignment: Text.AlignVCenter
- elide: Text.ElideRight
- text: inputMixModeCombo.model[inputMixModeCombo.currentIndex].label || ""
+
+ Text {
+ id: inputChannelHelpMessage
+ anchors.left: inputChannelsCombo.left
+ anchors.leftMargin: 2 * virtualstudio.uiScale
+ anchors.right: inputChannelsCombo.right
+ anchors.top: inputChannelsCombo.bottom
+ anchors.topMargin: 8 * virtualstudio.uiScale
+ textFormat: Text.RichText
+ wrapMode: Text.WordWrap
+ text: "Choose up to 2 channels"
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ color: textColour
}
- }
- Text {
- id: inputChannelHelpMessage
- anchors.left: inputChannelsCombo.left
- anchors.leftMargin: 2 * virtualstudio.uiScale
- anchors.right: inputChannelsCombo.right
- anchors.top: inputChannelsCombo.bottom
- anchors.topMargin: 8 * virtualstudio.uiScale
- textFormat: Text.RichText
- wrapMode: Text.WordWrap
- text: "Choose up to 2 channels"
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- color: textColour
- }
+ Text {
+ id: inputMixModeHelpMessage
+ anchors.left: inputMixModeCombo.left
+ anchors.leftMargin: 2 * virtualstudio.uiScale
+ anchors.right: inputMixModeCombo.right
+ anchors.top: inputMixModeCombo.bottom
+ anchors.topMargin: 8 * virtualstudio.uiScale
+ textFormat: Text.RichText
+ wrapMode: Text.WordWrap
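+ // Describe the selected input mix mode (2 = stereo, 3 = mix to mono, 1 = single mono channel)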
+ text: (() => {
+ if (virtualstudio.inputMixMode === 2) {
+ return "Treat the channels as Left and Right signals, coming through each speaker separately.";
+ } else if (virtualstudio.inputMixMode === 3) {
+ return "Combine the channels into one central channel coming through both speakers.";
+ } else if (virtualstudio.inputMixMode === 1) {
+ return "Send a single channel of audio";
+ } else {
+ return "";
+ }
+ })()
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ color: textColour
+ }
- Text {
- id: inputMixModeHelpMessage
- anchors.left: inputMixModeCombo.left
- anchors.leftMargin: 2 * virtualstudio.uiScale
- anchors.right: inputMixModeCombo.right
- anchors.top: inputMixModeCombo.bottom
- anchors.topMargin: 8 * virtualstudio.uiScale
- textFormat: Text.RichText
- wrapMode: Text.WordWrap
- text: (() => {
- if (virtualstudio.inputMixMode === 2) {
- return "Treat the channels as Left and Right signals, coming through each speaker separately.";
- } else if (virtualstudio.inputMixMode === 3) {
- return "Combine the channels into one central channel coming through both speakers.";
- } else if (virtualstudio.inputMixMode === 1) {
- return "Send a single channel of audio";
- } else {
- return "";
+ Text {
+ id: warningOrErrorMessage
+ anchors.left: inputLabel.left
+ anchors.right: parent.right
+ anchors.rightMargin: 16 * virtualstudio.uiScale
+ anchors.top: inputMixModeHelpMessage.bottom
+ anchors.topMargin: 8 * virtualstudio.uiScale
+ anchors.bottomMargin: 8 * virtualstudio.uiScale
+ textFormat: Text.RichText
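+ // Show the device error (or warning) and append a "Learn More" link when a help URL is available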
+ text: (virtualstudio.devicesError || virtualstudio.devicesWarning)
+ + ((virtualstudio.devicesErrorHelpUrl || virtualstudio.devicesWarningHelpUrl)
+ ? ` <a style="color: ${linkText};" href=${virtualstudio.devicesErrorHelpUrl || virtualstudio.devicesWarningHelpUrl}>Learn More.</a>`
+ : ""
+ )
+ onLinkActivated: link => {
+ virtualstudio.openLink(link)
}
- })()
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- color: textColour
- }
-
- Text {
- id: warningOrErrorMessage
- anchors.left: inputLabel.left
- anchors.right: parent.right
- anchors.rightMargin: 16 * virtualstudio.uiScale
- anchors.top: inputMixModeHelpMessage.bottom
- anchors.topMargin: 8 * virtualstudio.uiScale
- anchors.bottomMargin: 8 * virtualstudio.uiScale
- textFormat: Text.RichText
- text: (virtualstudio.devicesError || virtualstudio.devicesWarning)
- + ((virtualstudio.devicesErrorHelpUrl || virtualstudio.devicesWarningHelpUrl)
- ? ` <a style="color: ${linkText};" href=${virtualstudio.devicesErrorHelpUrl || virtualstudio.devicesWarningHelpUrl}>Learn More.</a>`
- : ""
- )
- onLinkActivated: link => {
- virtualstudio.openLink(link)
+ horizontalAlignment: Text.AlignLeft
+ wrapMode: Text.WordWrap
+ color: warningTextColour
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ visible: Boolean(virtualstudio.devicesError) || Boolean(virtualstudio.devicesWarning);
}
- horizontalAlignment: Text.AlignHLeft
- wrapMode: Text.WordWrap
- color: warningTextColour
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- visible: Boolean(virtualstudio.devicesError) || Boolean(virtualstudio.devicesWarning);
}
}
- Item {
+ Component {
id: usingJACK
- anchors.top: parent.top
- anchors.topMargin: 24 * virtualstudio.uiScale
- anchors.bottom: parent.bottom
- anchors.left: parent.left
- anchors.leftMargin: leftMargin * virtualstudio.uiScale
- anchors.right: parent.right
-
- visible: parent.isUsingJack
-
- Text {
- id: jackLabel
- x: 0; y: 0
- width: parent.width - rightMargin * virtualstudio.uiScale
- text: "Using JACK for audio input and output. Use QjackCtl to adjust your sample rate, buffer, and device settings."
- font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- wrapMode: Text.WordWrap
- color: textColour
- }
- Text {
- id: jackOutputLabel
- anchors.left: jackLabel.left
- anchors.top: jackLabel.bottom
- anchors.topMargin: 48 * virtualstudio.uiScale
- width: 144 * virtualstudio.uiScale
- text: "Output Volume"
- font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- wrapMode: Text.WordWrap
- color: textColour
- }
+ Item {
+ anchors.top: parent.top
+ anchors.topMargin: 24 * virtualstudio.uiScale
+ anchors.bottom: parent.bottom
+ anchors.left: parent.left
+ anchors.leftMargin: leftMargin * virtualstudio.uiScale
+ anchors.right: parent.right
- Image {
- id: jackHeadphonesIcon
- anchors.left: jackOutputLabel.left
- anchors.verticalCenter: jackOutputVolumeSlider.verticalCenter
- source: "headphones.svg"
- sourceSize: Qt.size(28 * virtualstudio.uiScale, 28 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Text {
+ id: jackLabel
+ x: 0; y: 0
+ width: parent.width - rightMargin * virtualstudio.uiScale
+ text: "Using JACK for audio input and output. Use QjackCtl to adjust your sample rate, buffer, and device settings."
+ font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ wrapMode: Text.WordWrap
+ color: textColour
+ }
+
+ Text {
+ id: jackOutputLabel
+ anchors.left: jackLabel.left
+ anchors.top: jackLabel.bottom
+ anchors.topMargin: 48 * virtualstudio.uiScale
+ width: 144 * virtualstudio.uiScale
+ text: "Output Volume"
+ font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ wrapMode: Text.WordWrap
+ color: textColour
+ }
+
+ Image {
+ id: jackHeadphonesIcon
+ anchors.left: jackOutputLabel.left
+ anchors.verticalCenter: jackOutputVolumeSlider.verticalCenter
+ source: "headphones.svg"
+ sourceSize: Qt.size(28 * virtualstudio.uiScale, 28 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: jackHeadphonesIcon
+ source: jackHeadphonesIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
-
- Meter {
- id: jackOutputMeters
- anchors.left: jackOutputLabel.right
- anchors.right: parent.right
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: jackOutputLabel.verticalCenter
- height: 24 * virtualstudio.uiScale
- model: outputMeterModel
- clipped: outputClipped
- enabled: virtualstudio.audioReady && !Boolean(virtualstudio.devicesError)
- }
- Button {
- id: jackTestOutputAudioButton
- background: Rectangle {
- radius: 6 * virtualstudio.uiScale
- color: jackTestOutputAudioButton.down ? buttonPressedColour : (jackTestOutputAudioButton.hovered ? buttonHoverColour : buttonColour)
- border.width: 1
- border.color: jackTestOutputAudioButton.down ? buttonPressedStroke : (jackTestOutputAudioButton.hovered ? buttonHoverStroke : buttonStroke)
+ Meter {
+ id: jackOutputMeters
+ anchors.left: jackOutputLabel.right
+ anchors.right: parent.right
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: jackOutputLabel.verticalCenter
+ height: 24 * virtualstudio.uiScale
+ model: virtualstudio.outputMeterLevels
+ clipped: virtualstudio.outputClipped
+ enabled: virtualstudio.audioReady && !Boolean(virtualstudio.devicesError)
}
- onClicked: { virtualstudio.playOutputAudio() }
- anchors.right: parent.right
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: jackOutputVolumeSlider.verticalCenter
- width: 144 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
- Text {
- text: "Play Test Tone"
- font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- anchors { horizontalCenter: parent.horizontalCenter; verticalCenter: parent.verticalCenter }
- color: textColour
+
+ Button {
+ id: jackTestOutputAudioButton
+ background: Rectangle {
+ radius: 6 * virtualstudio.uiScale
+ color: jackTestOutputAudioButton.down ? buttonPressedColour : (jackTestOutputAudioButton.hovered ? buttonHoverColour : buttonColour)
+ border.width: 1
+ border.color: jackTestOutputAudioButton.down ? buttonPressedStroke : (jackTestOutputAudioButton.hovered ? buttonHoverStroke : buttonStroke)
+ }
+ onClicked: { virtualstudio.playOutputAudio() }
+ anchors.right: parent.right
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: jackOutputVolumeSlider.verticalCenter
+ width: 144 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
+ Text {
+ text: "Play Test Tone"
+ font { family: "Poppins"; pixelSize: fontExtraSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ anchors { horizontalCenter: parent.horizontalCenter; verticalCenter: parent.verticalCenter }
+ color: textColour
+ }
}
- }
- Slider {
- id: jackOutputVolumeSlider
- from: 0.0
- value: audioInterface ? audioInterface.outputVolume : 0.5
- onMoved: { audioInterface.outputVolume = value }
- to: 1.0
- padding: 0
- anchors.left: jackOutputQuieterButton.right
- anchors.leftMargin: 8 * virtualstudio.uiScale
- anchors.right: jackOutputLouderIcon.left
- anchors.rightMargin: 8 * virtualstudio.uiScale
- anchors.top: jackOutputMeters.bottom
- anchors.topMargin: 16 * virtualstudio.uiScale
-
- background: Rectangle {
- x: jackOutputVolumeSlider.leftPadding
- y: jackOutputVolumeSlider.topPadding + jackOutputVolumeSlider.availableHeight / 2 - height / 2
- implicitWidth: parent.width
- implicitHeight: 6
- width: jackOutputVolumeSlider.availableWidth
- height: implicitHeight
- radius: 4
- color: sliderTrackColour
-
- Rectangle {
- width: jackOutputVolumeSlider.visualPosition * parent.width
- height: parent.height
- color: sliderActiveTrackColour
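+ // Output volume slider; tracks audioInterface.outputVolume and defaults to 0.5 when no interface is active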
+ Slider {
+ id: jackOutputVolumeSlider
+ from: 0.0
+ value: audioInterface ? audioInterface.outputVolume : 0.5
+ onMoved: { audioInterface.outputVolume = value }
+ to: 1.0
+ padding: 0
+ anchors.left: jackOutputQuieterButton.right
+ anchors.leftMargin: 8 * virtualstudio.uiScale
+ anchors.right: jackOutputLouderIcon.left
+ anchors.rightMargin: 8 * virtualstudio.uiScale
+ anchors.top: jackOutputMeters.bottom
+ anchors.topMargin: 16 * virtualstudio.uiScale
+
+ background: Rectangle {
+ x: jackOutputVolumeSlider.leftPadding
+ y: jackOutputVolumeSlider.topPadding + jackOutputVolumeSlider.availableHeight / 2 - height / 2
+ implicitWidth: parent.width
+ implicitHeight: 6
+ width: jackOutputVolumeSlider.availableWidth
+ height: implicitHeight
radius: 4
+ color: sliderTrackColour
+
+ Rectangle {
+ width: jackOutputVolumeSlider.visualPosition * parent.width
+ height: parent.height
+ color: sliderActiveTrackColour
+ radius: 4
+ }
}
- }
- handle: Rectangle {
- x: jackOutputVolumeSlider.leftPadding + jackOutputVolumeSlider.visualPosition * (jackOutputVolumeSlider.availableWidth - width)
- y: jackOutputVolumeSlider.topPadding + jackOutputVolumeSlider.availableHeight / 2 - height / 2
- implicitWidth: 26 * virtualstudio.uiScale
- implicitHeight: 26 * virtualstudio.uiScale
- radius: 13 * virtualstudio.uiScale
- color: jackOutputVolumeSlider.pressed ? sliderPressedColour : sliderColour
- border.color: buttonStroke
+ handle: Rectangle {
+ x: jackOutputVolumeSlider.leftPadding + jackOutputVolumeSlider.visualPosition * (jackOutputVolumeSlider.availableWidth - width)
+ y: jackOutputVolumeSlider.topPadding + jackOutputVolumeSlider.availableHeight / 2 - height / 2
+ implicitWidth: 26 * virtualstudio.uiScale
+ implicitHeight: 26 * virtualstudio.uiScale
+ radius: 13 * virtualstudio.uiScale
+ color: jackOutputVolumeSlider.pressed ? sliderPressedColour : sliderColour
+ border.color: buttonStroke
+ }
}
- }
- Image {
- id: jackOutputQuieterButton
- anchors.left: jackOutputMeters.left
- anchors.verticalCenter: jackOutputVolumeSlider.verticalCenter
- source: "quiet.svg"
- sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Image {
+ id: jackOutputQuieterButton
+ anchors.left: jackOutputMeters.left
+ anchors.verticalCenter: jackOutputVolumeSlider.verticalCenter
+ source: "quiet.svg"
+ sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: jackOutputQuieterButton
+ source: jackOutputQuieterButton
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- Image {
- id: jackOutputLouderIcon
- anchors.right: jackTestOutputAudioButton.left
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: jackOutputVolumeSlider.verticalCenter
- source: "loud.svg"
- sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Image {
+ id: jackOutputLouderIcon
+ anchors.right: jackTestOutputAudioButton.left
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: jackOutputVolumeSlider.verticalCenter
+ source: "loud.svg"
+ sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: jackOutputLouderIcon
+ source: jackOutputLouderIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- Text {
- id: jackInputLabel
- anchors.left: jackLabel.left
- anchors.top: jackOutputVolumeSlider.bottom
- anchors.topMargin: 48 * virtualstudio.uiScale
- width: 144 * virtualstudio.uiScale
- text: "Input Volume"
- font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
- wrapMode: Text.WordWrap
- color: textColour
- }
+ Text {
+ id: jackInputLabel
+ anchors.left: jackLabel.left
+ anchors.top: jackOutputVolumeSlider.bottom
+ anchors.topMargin: 48 * virtualstudio.uiScale
+ width: 144 * virtualstudio.uiScale
+ text: "Input Volume"
+ font { family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ wrapMode: Text.WordWrap
+ color: textColour
+ }
- Image {
- id: jackMicrophoneIcon
- anchors.left: jackInputLabel.left
- anchors.verticalCenter: jackInputVolumeSlider.verticalCenter
- source: "mic.svg"
- sourceSize: Qt.size(32 * virtualstudio.uiScale, 32 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Image {
+ id: jackMicrophoneIcon
+ anchors.left: jackInputLabel.left
+ anchors.verticalCenter: jackInputVolumeSlider.verticalCenter
+ source: "mic.svg"
+ sourceSize: Qt.size(32 * virtualstudio.uiScale, 32 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: jackMicrophoneIcon
+ source: jackMicrophoneIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- Meter {
- id: jackInputMeters
- anchors.left: jackInputLabel.right
- anchors.right: parent.right
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: jackInputLabel.verticalCenter
- height: 24 * virtualstudio.uiScale
- model: inputMeterModel
- clipped: inputClipped
- enabled: virtualstudio.audioReady && !Boolean(virtualstudio.devicesError)
- }
+ Meter {
+ id: jackInputMeters
+ anchors.left: jackInputLabel.right
+ anchors.right: parent.right
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: jackInputLabel.verticalCenter
+ height: 24 * virtualstudio.uiScale
+ model: virtualstudio.inputMeterLevels
+ clipped: virtualstudio.inputClipped
+ enabled: virtualstudio.audioReady && !Boolean(virtualstudio.devicesError)
+ }
- Slider {
- id: jackInputVolumeSlider
- from: 0.0
- value: audioInterface ? audioInterface.inputVolume : 0.5
- onMoved: { audioInterface.inputVolume = value }
- to: 1.0
- padding: 0
- anchors.left: jackInputQuieterButton.right
- anchors.leftMargin: 8 * virtualstudio.uiScale
- anchors.right: jackInputLouderIcon.left
- anchors.rightMargin: 8 * virtualstudio.uiScale
- anchors.top: jackInputMeters.bottom
- anchors.topMargin: 16 * virtualstudio.uiScale
-
- background: Rectangle {
- x: jackInputVolumeSlider.leftPadding
- y: jackInputVolumeSlider.topPadding + jackInputVolumeSlider.availableHeight / 2 - height / 2
- implicitWidth: parent.width
- implicitHeight: 6
- width: jackInputVolumeSlider.availableWidth
- height: implicitHeight
- radius: 4
- color: sliderTrackColour
-
- Rectangle {
- width: jackInputVolumeSlider.visualPosition * parent.width
- height: parent.height
- color: sliderActiveTrackColour
+ Slider {
+ id: jackInputVolumeSlider
+ from: 0.0
+ value: audioInterface ? audioInterface.inputVolume : 0.5
+ onMoved: { audioInterface.inputVolume = value }
+ to: 1.0
+ padding: 0
+ anchors.left: jackInputQuieterButton.right
+ anchors.leftMargin: 8 * virtualstudio.uiScale
+ anchors.right: jackInputLouderIcon.left
+ anchors.rightMargin: 8 * virtualstudio.uiScale
+ anchors.top: jackInputMeters.bottom
+ anchors.topMargin: 16 * virtualstudio.uiScale
+
+ background: Rectangle {
+ x: jackInputVolumeSlider.leftPadding
+ y: jackInputVolumeSlider.topPadding + jackInputVolumeSlider.availableHeight / 2 - height / 2
+ implicitWidth: parent.width
+ implicitHeight: 6
+ width: jackInputVolumeSlider.availableWidth
+ height: implicitHeight
radius: 4
+ color: sliderTrackColour
+
+ Rectangle {
+ width: jackInputVolumeSlider.visualPosition * parent.width
+ height: parent.height
+ color: sliderActiveTrackColour
+ radius: 4
+ }
}
- }
- handle: Rectangle {
- x: jackInputVolumeSlider.leftPadding + jackInputVolumeSlider.visualPosition * (jackInputVolumeSlider.availableWidth - width)
- y: jackInputVolumeSlider.topPadding + jackInputVolumeSlider.availableHeight / 2 - height / 2
- implicitWidth: 26 * virtualstudio.uiScale
- implicitHeight: 26 * virtualstudio.uiScale
- radius: 13 * virtualstudio.uiScale
- color: jackInputVolumeSlider.pressed ? sliderPressedColour : sliderColour
- border.color: buttonStroke
+ handle: Rectangle {
+ x: jackInputVolumeSlider.leftPadding + jackInputVolumeSlider.visualPosition * (jackInputVolumeSlider.availableWidth - width)
+ y: jackInputVolumeSlider.topPadding + jackInputVolumeSlider.availableHeight / 2 - height / 2
+ implicitWidth: 26 * virtualstudio.uiScale
+ implicitHeight: 26 * virtualstudio.uiScale
+ radius: 13 * virtualstudio.uiScale
+ color: jackInputVolumeSlider.pressed ? sliderPressedColour : sliderColour
+ border.color: buttonStroke
+ }
}
- }
- Image {
- id: jackInputQuieterButton
- anchors.left: jackInputMeters.left
- anchors.verticalCenter: jackInputVolumeSlider.verticalCenter
- source: "quiet.svg"
- sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Image {
+ id: jackInputQuieterButton
+ anchors.left: jackInputMeters.left
+ anchors.verticalCenter: jackInputVolumeSlider.verticalCenter
+ source: "quiet.svg"
+ sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: jackInputQuieterButton
+ source: jackInputQuieterButton
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- Image {
- id: jackInputLouderIcon
- anchors.right: parent.right
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: jackInputVolumeSlider.verticalCenter
- source: "loud.svg"
- sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
- fillMode: Image.PreserveAspectFit
- smooth: true
+ Image {
+ id: jackInputLouderIcon
+ anchors.right: parent.right
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: jackInputVolumeSlider.verticalCenter
+ source: "loud.svg"
+ sourceSize: Qt.size(16 * virtualstudio.uiScale, 16 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
Colorize {
- anchors.fill: parent
- source: parent
+ anchors.fill: jackInputLouderIcon
+ source: jackInputLouderIcon
hue: 0
saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
+ lightness: imageLightnessValue
}
- }
- Button {
- id: jackHiddenInputButton
- anchors.right: parent.right
- anchors.rightMargin: rightMargin * virtualstudio.uiScale
- anchors.verticalCenter: jackInputVolumeSlider.verticalCenter
- width: 144 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
- visible: false
+ Button {
+ id: jackHiddenInputButton
+ anchors.right: parent.right
+ anchors.rightMargin: rightMargin * virtualstudio.uiScale
+ anchors.verticalCenter: jackInputVolumeSlider.verticalCenter
+ width: 144 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
+ visible: false
+ }
}
-
}
- Item {
+ Component {
id: noBackend
- anchors.top: parent.top
- anchors.topMargin: 24 * virtualstudio.uiScale
- anchors.bottom: parent.bottom
- anchors.left: parent.left
- anchors.leftMargin: leftMargin * virtualstudio.uiScale
- anchors.right: parent.right
-
- visible: parent.hasNoBackend
-
- Text {
- id: noBackendLabel
- x: 0; y: 0
- width: parent.width - (16 * virtualstudio.uiScale)
- text: "JackTrip has been compiled without an audio backend. Please rebuild with the rtaudio flag or without the nojack flag."
- font { family: "Poppins"; pixelSize: fontMedium * virtualstudio.fontScale * virtualstudio.uiScale }
- wrapMode: Text.WordWrap
- color: textColour
+
+ Item {
+ anchors.top: parent.top
+ anchors.topMargin: 24 * virtualstudio.uiScale
+ anchors.bottom: parent.bottom
+ anchors.left: parent.left
+ anchors.leftMargin: leftMargin * virtualstudio.uiScale
+ anchors.right: parent.right
+
+ Text {
+ id: noBackendLabel
+ x: 0; y: 0
+ width: parent.width - (16 * virtualstudio.uiScale)
+ text: "JackTrip has been compiled without an audio backend. Please rebuild with the rtaudio flag or without the nojack flag."
+ font { family: "Poppins"; pixelSize: fontMedium * virtualstudio.fontScale * virtualstudio.uiScale }
+ wrapMode: Text.WordWrap
+ color: textColour
+ }
}
}
-}
\ No newline at end of file
+}
width: parent.width; height: parent.height
color: backgroundColour
}
-
+
property bool refreshing: false
-
+
property int buttonHeight: 25
property int buttonWidth: 103
property int extraSettingsButtonWidth: 16
property int createButtonTopMargin: 24
property int fontBig: 28
property int fontMedium: 11
-
+
property int scrollY: 0
-
+
property string backgroundColour: virtualstudio.darkMode ? "#272525" : "#FAFBFB"
property string textColour: virtualstudio.darkMode ? "#FAFBFB" : "#0F0D0D"
property string buttonColour: virtualstudio.darkMode ? "#494646" : "#EAECEC"
property string buttonHoverStroke: virtualstudio.darkMode ? "#7B7777" : "#BABCBC"
property string buttonPressedStroke: virtualstudio.darkMode ? "#827D7D" : "#BABCBC"
property string createButtonStroke: virtualstudio.darkMode ? "#AB0F0F" : "#0F0D0D"
-
+
function refresh() {
scrollY = studioListView.contentY;
var currentIndex = studioListView.indexAt(16 * virtualstudio.uiScale, studioListView.contentY);
}
virtualstudio.refreshStudios(currentIndex, true)
}
-
+
Rectangle {
z: 1
width: parent.width; height: parent.height
preventStealing: true
}
}
-
+
Component {
id: footer
Rectangle {
studioId: id ? id : ""
inviteKeyString: inviteKey ? inviteKey : ""
}
-
+
section {property: "type"; criteria: ViewSection.FullString; delegate: SectionHeading {} }
// Show sectionHeading if there are no Studios in list
studioListView.returnToBounds();
}
}
-
+
Component.onCompleted: {
// Customize scroll properties on different platforms
if (Qt.platform.os == "linux" || Qt.platform.os == "osx" ||
}
}
}
-
+
Rectangle {
x: 0; y: parent.height - 36 * virtualstudio.uiScale; width: parent.width; height: 36 * virtualstudio.uiScale
border.color: "#33979797"
color: backgroundColour
-
+
Button {
id: refreshButton
background: Rectangle {
color: textColour
}
}
-
+
Button {
id: aboutButton
background: Rectangle {
color: textColour
}
}
-
+
Button {
id: settingsButton
text: "Settings"
onClicked: { virtualstudio.windowState = "settings"; restartAudioTimer.restart(); }
display: AbstractButton.TextBesideIcon
font {
- family: "Poppins";
+ family: "Poppins";
pixelSize: fontMedium * virtualstudio.fontScale * virtualstudio.uiScale;
}
leftPadding: 0
width: (buttonWidth + extraSettingsButtonWidth) * virtualstudio.uiScale; height: buttonHeight * virtualstudio.uiScale
}
}
-
+
Connections {
target: virtualstudio
// Need to do this to avoid layout issues with our section header.
- function onNewScale() {
+ function onNewScale() {
studioListView.positionViewAtEnd();
studioListView.positionViewAtBeginning();
scrollY = studioListView.contentY;
property string muteButtonMutedColor: "#FCB6B6"
property string textColour: virtualstudio.darkMode ? "#FAFBFB" : "#0F0D0D"
property string meterColor: virtualstudio.darkMode ? "gray" : "#E0E0E0"
- property real imageLightnessValue: virtualstudio.darkMode ? 1.0 : 0.0
+ property real imageLightnessValue: virtualstudio.darkMode ? 0.8 : 0.2
property real muteButtonLightnessValue: virtualstudio.darkMode ? 1.0 : 0.0
property real muteButtonMutedLightnessValue: 0.24
property real muteButtonMutedSaturationValue: 0.73
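+ // Fall back to the first selectable ("element") entry when no input device is saved or the saved one is missing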
function getCurrentInputDeviceIndex () {
if (virtualstudio.inputDevice === "") {
- return inputComboModel.findIndex(elem => elem.type === "element");
+ return virtualstudio.inputComboModel.findIndex(elem => elem.type === "element");
}
- let idx = inputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.inputDevice);
+ let idx = virtualstudio.inputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.inputDevice);
if (idx < 0) {
- idx = inputComboModel.findIndex(elem => elem.type === "element");
+ idx = virtualstudio.inputComboModel.findIndex(elem => elem.type === "element");
}
return idx;
function getCurrentOutputDeviceIndex() {
if (virtualstudio.outputDevice === "") {
- return outputComboModel.findIndex(elem => elem.type === "element");
+ return virtualstudio.outputComboModel.findIndex(elem => elem.type === "element");
}
- let idx = outputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.outputDevice);
+ let idx = virtualstudio.outputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.outputDevice);
if (idx < 0) {
- idx = outputComboModel.findIndex(elem => elem.type === "element");
+ idx = virtualstudio.outputComboModel.findIndex(elem => elem.type === "element");
}
return idx;
}
- function getNetworkStatsText (networkStats) {
- let minRtt = networkStats.minRtt;
- let maxRtt = networkStats.maxRtt;
- let avgRtt = networkStats.avgRtt;
+ function getNetworkStatsText () {
+ let minRtt = virtualstudio.networkStats.minRtt;
+ let maxRtt = virtualstudio.networkStats.maxRtt;
+ let avgRtt = virtualstudio.networkStats.avgRtt;
- let texts = ["Measuring stats ...", ""];
+ let texts = ["<b>Outage detected! Your connection is unstable.</b>", "Please plug into Ethernet & turn off WIFI."];
+
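+ // Surface the outage warning first; otherwise fall through to the usual RTT measurements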
+ if (virtualstudio.networkOutage) {
+ return texts;
+ }
+
+ texts = ["Measuring stats ...", ""];
if (!minRtt || !maxRtt) {
return texts;
property bool showToolTip: false
- Colorize {
- anchors.fill: parent
- source: parent
- hue: 0
- saturation: 0
- lightness: virtualstudio.darkMode ? 0.8 : 0.2
- }
-
MouseArea {
id: outputMouseArea
anchors.fill: parent
}
}
+ Colorize {
+ anchors.fill: outputHelpIcon
+ source: outputHelpIcon
+ hue: 0
+ saturation: 0
+ lightness: imageLightnessValue
+ }
+
Image {
id: headphonesIcon
anchors.left: outputLabel.left
sourceSize: Qt.size(28 * virtualstudio.uiScale, 28 * virtualstudio.uiScale)
fillMode: Image.PreserveAspectFit
smooth: true
+ }
- Colorize {
- anchors.fill: parent
- source: parent
- hue: 0
- saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
- }
+ Colorize {
+ anchors.fill: headphonesIcon
+ source: headphonesIcon
+ hue: 0
+ saturation: 0
+ lightness: imageLightnessValue
}
ComboBox {
anchors.rightMargin: rightMargin * virtualstudio.uiScale
width: parent.width - leftSpacer.width - rightMargin * virtualstudio.uiScale
enabled: virtualstudio.connectionState == "Connected"
- model: outputComboModel
+ model: virtualstudio.outputComboModel
currentIndex: getCurrentOutputDeviceIndex()
delegate: ItemDelegate {
required property var modelData
width: parent.width
contentItem: Text {
leftPadding: modelData.type === "element" && outputCombo.model.filter(it => it.type === "header").length > 0 ? 24 : 12
- text: modelData.text
+ text: modelData.text || ""
font.bold: modelData.type === "header"
}
highlighted: outputCombo.highlightedIndex === index
horizontalAlignment: Text.AlignHLeft
verticalAlignment: Text.AlignVCenter
elide: Text.ElideRight
- text: outputCombo.model[outputCombo.currentIndex].text ? outputCombo.model[outputCombo.currentIndex].text : ""
+ text: outputCombo.model[outputCombo.currentIndex] && outputCombo.model[outputCombo.currentIndex].text ? outputCombo.model[outputCombo.currentIndex].text : ""
}
}
anchors.top: outputChannelsLabel.bottom
anchors.topMargin: 4 * virtualstudio.uiScale
enabled: virtualstudio.connectionState == "Connected"
- model: outputChannelsComboModel
+ model: virtualstudio.outputChannelsComboModel
currentIndex: (() => {
- let idx = outputChannelsComboModel.findIndex(elem => elem.baseChannel === virtualstudio.baseOutputChannel
+ let idx = virtualstudio.outputChannelsComboModel.findIndex(elem => elem.baseChannel === virtualstudio.baseOutputChannel
&& elem.numChannels === virtualstudio.numOutputChannels);
if (idx < 0) {
idx = 0;
property bool showToolTip: false
- Colorize {
- anchors.fill: parent
- source: parent
- hue: 0
- saturation: 0
- lightness: virtualstudio.darkMode ? 0.8 : 0.2
- }
-
MouseArea {
id: inputMouseArea
anchors.fill: parent
}
}
+ Colorize {
+ anchors.fill: inputHelpIcon
+ source: inputHelpIcon
+ hue: 0
+ saturation: 0
+ lightness: imageLightnessValue
+ }
+
Image {
id: microphoneIcon
anchors.left: inputLabel.left
sourceSize: Qt.size(32 * virtualstudio.uiScale, 32 * virtualstudio.uiScale)
fillMode: Image.PreserveAspectFit
smooth: true
+ }
- Colorize {
- anchors.fill: parent
- source: parent
- hue: 0
- saturation: 0
- lightness: virtualstudio.darkMode ? 1 : 0
- }
+ Colorize {
+ anchors.fill: microphoneIcon
+ source: microphoneIcon
+ hue: 0
+ saturation: 0
+ lightness: imageLightnessValue
}
ComboBox {
id: inputCombo
- model: inputComboModel
+ model: virtualstudio.inputComboModel
currentIndex: getCurrentInputDeviceIndex()
anchors.left: outputCombo.left
anchors.right: outputCombo.right
width: parent.width
contentItem: Text {
leftPadding: modelData.type === "element" && inputCombo.model.filter(it => it.type === "header").length > 0 ? 24 : 12
- text: modelData.text
+ text: modelData.text || ""
font.bold: modelData.type === "header"
}
highlighted: inputCombo.highlightedIndex === index
horizontalAlignment: Text.AlignHLeft
verticalAlignment: Text.AlignVCenter
elide: Text.ElideRight
- text: inputCombo.model[inputCombo.currentIndex].text ? inputCombo.model[inputCombo.currentIndex].text : ""
+ text: inputCombo.model[inputCombo.currentIndex] && inputCombo.model[inputCombo.currentIndex].text ? inputCombo.model[inputCombo.currentIndex].text : ""
}
}
anchors.top: inputChannelsLabel.bottom
anchors.topMargin: 4 * virtualstudio.uiScale
enabled: virtualstudio.connectionState == "Connected"
- model: inputChannelsComboModel
+ model: virtualstudio.inputChannelsComboModel
currentIndex: (() => {
- let idx = inputChannelsComboModel.findIndex(elem => elem.baseChannel === virtualstudio.baseInputChannel
+ let idx = virtualstudio.inputChannelsComboModel.findIndex(elem => elem.baseChannel === virtualstudio.baseInputChannel
&& elem.numChannels === virtualstudio.numInputChannels);
if (idx < 0) {
idx = 0;
anchors.top: inputMixModeLabel.bottom
anchors.topMargin: 4 * virtualstudio.uiScale
enabled: virtualstudio.connectionState == "Connected"
- model: inputMixModeComboModel
+ model: virtualstudio.inputMixModeComboModel
currentIndex: (() => {
- let idx = inputMixModeComboModel.findIndex(elem => elem.value === virtualstudio.inputMixMode);
+ let idx = virtualstudio.inputMixModeComboModel.findIndex(elem => elem.value === virtualstudio.inputMixMode);
if (idx < 0) {
idx = 0;
}
onClicked: {
inputMixModeCombo.currentIndex = index
inputMixModeCombo.popup.close()
- virtualstudio.inputMixMode = inputMixModeComboModel[index].value
+ virtualstudio.inputMixMode = virtualstudio.inputMixModeComboModel[index].value
virtualstudio.validateDevicesState()
}
}
}
}
}
+
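+ // Modal shown when a feedback loop is detected; it can only be dismissed with the Ok button (Popup.NoAutoClose)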
+ Popup {
+ id: feedbackDetectedModal
+ padding: 1
+ width: parent.width
+ height: 232 * virtualstudio.uiScale
+ anchors.centerIn: parent
+ modal: true
+ focus: true
+ closePolicy: Popup.NoAutoClose
+
+ background: Rectangle {
+ anchors.fill: parent
+ color: "transparent"
+ radius: 6 * virtualstudio.uiScale
+ border.width: 1
+ border.color: buttonStroke
+ clip: true
+ }
+
+ contentItem: Rectangle {
+ width: parent.width
+ height: 232 * virtualstudio.uiScale
+ color: backgroundColour
+ radius: 6 * virtualstudio.uiScale
+
+ Item {
+ id: feedbackDetectedContent
+ anchors.top: parent.top
+ anchors.topMargin: 24 * virtualstudio.uiScale
+ anchors.bottom: parent.bottom
+ anchors.left: parent.left
+ anchors.leftMargin: 24 * virtualstudio.uiScale
+ anchors.right: parent.right
+
+ Text {
+ id: feedbackDetectedHeader
+ anchors.top: parent.top
+ anchors.topMargin: 16 * virtualstudio.uiScale
+ width: parent.width
+ text: "Audio feedback detected!"
+ font {family: "Poppins"; pixelSize: fontMedium * virtualstudio.fontScale * virtualstudio.uiScale; bold: true }
+ color: textColour
+ elide: Text.ElideRight
+ wrapMode: Text.WordWrap
+ }
+
+ Text {
+ id: feedbackDetectedText
+ anchors.top: feedbackDetectedHeader.bottom
+ anchors.topMargin: 16 * virtualstudio.uiScale
+ width: parent.width
+ text: "JackTrip detected a feedback loop. Your monitor and input volume have automatically been disabled."
+ font {family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ color: textColour
+ elide: Text.ElideRight
+ wrapMode: Text.WordWrap
+ }
+
+ Text {
+ id: feedbackDetectedText2
+ anchors.top: feedbackDetectedText.bottom
+ anchors.topMargin: 16 * virtualstudio.uiScale
+ width: parent.width
+ text: "You can disable this behavior under <b>Settings</b> > <b>Advanced</b>"
+ textFormat: Text.RichText
+ font {family: "Poppins"; pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale }
+ color: textColour
+ elide: Text.ElideRight
+ wrapMode: Text.WordWrap
+ }
+
+ Button {
+ id: closeFeedbackDetectedModalButton
+ anchors.horizontalCenter: parent.horizontalCenter
+ anchors.bottomMargin: rightMargin * virtualstudio.uiScale
+ anchors.bottom: parent.bottom
+ width: 150 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
+ onClicked: feedbackDetectedModal.close()
+
+ background: Rectangle {
+ radius: 6 * virtualstudio.uiScale
+ color: closeFeedbackDetectedModalButton.down ? browserButtonPressedColour : (closeFeedbackDetectedModalButton.hovered ? browserButtonHoverColour : browserButtonColour)
+ border.width: 1
+ border.color: closeFeedbackDetectedModalButton.down ? browserButtonPressedStroke : (closeFeedbackDetectedModalButton.hovered ? browserButtonHoverStroke : browserButtonStroke)
+ }
+
+ Text {
+ text: "Ok"
+ font.family: "Poppins"
+ font.pixelSize: fontSmall * virtualstudio.fontScale * virtualstudio.uiScale
+ font.weight: Font.Bold
+ color: !Boolean(virtualstudio.devicesError) && virtualstudio.backendAvailable ? saveButtonText : disabledButtonText
+ anchors.horizontalCenter: parent.horizontalCenter
+ anchors.verticalCenter: parent.verticalCenter
+ }
+ }
+
+ }
+ }
+ }
}
Item {
x: 0; y: 0
width: parent.width
height: 100 * virtualstudio.uiScale
- model: inputMeterModel
- clipped: inputClipped
+ model: virtualstudio.inputMeterLevels
+ clipped: virtualstudio.inputClipped
}
Slider {
property bool showToolTip: false
- Colorize {
- anchors.fill: parent
- source: parent
- hue: 0
- saturation: 0
- lightness: virtualstudio.darkMode ? 0.8 : 0.2
- }
-
MouseArea {
id: inputStudioMouseArea
anchors.fill: parent
}
}
}
+
+ Colorize {
+ anchors.fill: inputStudioHelpIcon
+ source: inputStudioHelpIcon
+ hue: 0
+ saturation: 0
+ lightness: imageLightnessValue
+ }
}
Item {
x: 0; y: 0
width: parent.width
height: 100 * virtualstudio.uiScale
- model: outputMeterModel
- clipped: outputClipped
+ model: virtualstudio.outputMeterLevels
+ clipped: virtualstudio.outputClipped
}
Slider {
property bool showToolTip: false
- Colorize {
- anchors.fill: parent
- source: parent
- hue: 0
- saturation: 0
- lightness: virtualstudio.darkMode ? 0.8 : 0.2
- }
-
MouseArea {
id: outputStudioMouseArea
anchors.fill: parent
}
}
+ Colorize {
+ anchors.fill: outputStudioHelpIcon
+ source: outputStudioHelpIcon
+ hue: 0
+ saturation: 0
+ lightness: imageLightnessValue
+ }
+
Text {
id: outputMonText
width: 40 * virtualstudio.uiScale
property bool showToolTip: false
- Colorize {
- anchors.fill: parent
- source: parent
- hue: 0
- saturation: 0
- lightness: virtualstudio.darkMode ? 0.8 : 0.2
- }
-
MouseArea {
id: outputMonMouseArea
anchors.fill: parent
}
}
}
+
+ Colorize {
+ anchors.fill: outputMonHelpIcon
+ source: outputMonHelpIcon
+ hue: 0
+ saturation: 0
+ lightness: imageLightnessValue
+ }
}
Item {
Text {
id: netstat0
x: 0; y: 0
- text: getNetworkStatsText(virtualstudio.networkStats)[0]
+ text: getNetworkStatsText()[0]
font {family: "Poppins"; pixelSize: fontTiny * virtualstudio.fontScale * virtualstudio.uiScale }
color: textColour
}
Text {
id: netstat1
x: 0
- text: getNetworkStatsText(virtualstudio.networkStats)[1]
+ text: getNetworkStatsText()[1]
font {family: "Poppins"; pixelSize: fontTiny * virtualstudio.fontScale * virtualstudio.uiScale }
topPadding: 8 * virtualstudio.uiScale
anchors.top: netstat0.bottom
wrapMode: Text.WordWrap
}
}
+
+ Connections {
+ target: virtualstudio
+
+ function onFeedbackDetected() {
+ feedbackDetectedModal.visible = true;
+ }
+ }
}
Item {
width: parent.width; height: parent.height
clip: true
-
+
property int leftMargin: 16
property int fontBig: 28
property int fontMedium: 18
property int fontSmall: 11
-
+
property string textColour: virtualstudio.darkMode ? "#FAFBFB" : "#0F0D0D"
property string buttonColour: virtualstudio.darkMode ? "#FAFBFB" : "#F0F1F1"
property string buttonHoverColour: virtualstudio.darkMode ? "#E9E9E9" : "#E4E5E5"
saturation: 0
lightness: imageLightnessValue
}
-
+
Text {
id: ohnoHeader
text: "Oh no!"
Item {
width: parent.width; height: parent.height
clip: true
-
+
property string textColour: virtualstudio.darkMode ? "#FAFBFB" : "#0F0D0D"
property string shadowColour: virtualstudio.darkMode ? "#40000000" : "#80A1A1A1"
property string buttonColour: virtualstudio.darkMode ? "#FAFBFB" : "#F0F1F1"
property string buttonStroke: virtualstudio.darkMode ? "#636060" : "#DEDFDF"
property string buttonHoverStroke: virtualstudio.darkMode ? "#6F6C6C" : "#B0B5B5"
property string buttonPressedStroke: virtualstudio.darkMode ? "#6F6C6C" : "#B0B5B5"
-
+
Image {
id: jtlogo
source: "logo.svg"
import QtQuick 2.12
import QtQuick.Controls 2.12
+import QtGraphicalEffects 1.12
+import VS 1.0
Item {
width: parent.width; height: parent.height
clip: true
-
+
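+ // The login screen mirrors the authentication stage reported by the auth object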
+ state: auth.authenticationStage
+ states: [
+ State {
+ name: "unauthenticated"
+ },
+ State {
+ name: "refreshing"
+ },
+ State {
+ name: "polling"
+ },
+ State {
+ name: "success"
+ },
+ State {
+ name: "failed"
+ }
+ ]
+
Rectangle {
width: parent.width; height: parent.height
color: backgroundColour
}
- property bool failTextVisible: false
- property bool showBackButton: true
-
+ property bool codeCopied: false
+ property int numFailures: 0;
+
property string backgroundColour: virtualstudio.darkMode ? "#272525" : "#FAFBFB"
property string textColour: virtualstudio.darkMode ? "#FAFBFB" : "#0F0D0D"
property string buttonColour: virtualstudio.darkMode ? "#FAFBFB" : "#F0F1F1"
property string buttonTextHover: virtualstudio.darkMode ? "#242222" : "#D00A0A"
property string buttonTextPressed: virtualstudio.darkMode ? "#323030" : "#D00A0A"
property string shadowColour: virtualstudio.darkMode ? "#40000000" : "#80A1A1A1"
-
- onFailTextVisibleChanged: {
- authFailedText.visible = failTextVisible;
- loginButton.visible = failTextVisible || !virtualstudio.hasRefreshToken;
- backButton.visible = failTextVisible || !virtualstudio.hasRefreshToken;
- loggingInText.visible = !failTextVisible && virtualstudio.hasRefreshToken;
- }
-
- Image {
- id: loginLogo
- source: "logo.svg"
- x: parent.width / 2 - (150 * virtualstudio.uiScale); y: 110 * virtualstudio.uiScale
- width: 42 * virtualstudio.uiScale; height: 76 * virtualstudio.uiScale
- sourceSize: Qt.size(loginLogo.width,loginLogo.height)
- fillMode: Image.PreserveAspectFit
- smooth: true
- }
+ property string linkTextColour: virtualstudio.darkMode ? "#8B8D8D" : "#272525"
+ property string toolTipTextColour: codeCopied ? "#FAFBFB" : textColour
+ property string toolTipBackgroundColour: codeCopied ? "#57B147" : (virtualstudio.darkMode ? "#323232" : "#F3F3F3")
+ property string tooltipStroke: virtualstudio.darkMode ? "#80827D7D" : "#34979797"
+ property string disabledButtonText: "#D3D4D4"
+ property string errorTextColour: "#DB0A0A"
- Image {
- source: virtualstudio.darkMode ? "jacktrip white.png" : "jacktrip.png"
- anchors.bottom: loginLogo.bottom
- x: parent.width / 2 - (88 * virtualstudio.uiScale)
- width: 238 * virtualstudio.uiScale; height: 56 * virtualstudio.uiScale
- }
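+ // Decide whether to show the device-code sign-in UI or the loading view, based on the auth stage and method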
+ property bool showCodeFlow: (loginScreen.state === "unauthenticated" && !auth.attemptingRefreshToken) || (loginScreen.state === "polling" || loginScreen.state === "failed" || (loginScreen.state === "success" && auth.authenticationMethod === "code flow"))
+ property bool showLoading: (loginScreen.state === "unauthenticated" && auth.attemptingRefreshToken) || loginScreen.state === "refreshing" || (loginScreen.state === "success" && auth.authenticationMethod === "refresh token")
- Text {
- text: "Virtual Studio"
- font.family: "Poppins"
- font.pixelSize: 28 * virtualstudio.fontScale * virtualstudio.uiScale
- anchors.horizontalCenter: parent.horizontalCenter
- y: 208 * virtualstudio.uiScale
- color: textColour
+ Clipboard {
+ id: clipboard
}
- Text {
- id: loggingInText
- text: "Logging in..."
- font.family: "Poppins"
- font.pixelSize: 18 * virtualstudio.fontScale * virtualstudio.uiScale
+ Item {
+ id: loginScreenHeader
anchors.horizontalCenter: parent.horizontalCenter
- y: 282 * virtualstudio.uiScale
- visible: virtualstudio.hasRefreshToken
- color: textColour
+ y: showCodeFlow ? 48 * virtualstudio.uiScale : 144 * virtualstudio.uiScale
+
+ Image {
+ id: loginLogo
+ source: "logo.svg"
+ x: parent.width / 2 - (150 * virtualstudio.uiScale);
+ width: 42 * virtualstudio.uiScale; height: 76 * virtualstudio.uiScale
+ sourceSize: Qt.size(loginLogo.width,loginLogo.height)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
+
+ Image {
+ source: virtualstudio.darkMode ? "jacktrip white.png" : "jacktrip.png"
+ anchors.bottom: loginLogo.bottom
+ x: parent.width / 2 - (88 * virtualstudio.uiScale)
+ width: 238 * virtualstudio.uiScale; height: 56 * virtualstudio.uiScale
+ }
+
+ Text {
+ text: "Virtual Studio"
+ font.family: "Poppins"
+ font.pixelSize: 24 * virtualstudio.fontScale * virtualstudio.uiScale
+ anchors.horizontalCenter: parent.horizontalCenter
+ y: 80 * virtualstudio.uiScale
+ color: textColour
+ }
}
- Text {
- id: authFailedText
- text: "Log in failed. Please try again."
- font.family: "Poppins"
- font.pixelSize: 16 * virtualstudio.fontScale * virtualstudio.uiScale
+ Item {
+ id: codeFlow
anchors.horizontalCenter: parent.horizontalCenter
- y: 272 * virtualstudio.uiScale
- visible: failTextVisible
- color: textColour
- }
+ y: 68 * virtualstudio.uiScale
+ height: parent.height - codeFlow.y
+ visible: showCodeFlow
+ width: parent.width
- Button {
- id: loginButton
- background: Rectangle {
- radius: 6 * virtualstudio.uiScale
- color: loginButton.down ? buttonPressedColour : (loginButton.hovered ? buttonHoverColour : buttonColour)
- border.width: 1
- border.color: loginButton.down ? buttonPressedStroke : (loginButton.hovered ? buttonHoverStroke : buttonStroke)
- layer.enabled: !loginButton.down
+ Text {
+ id: deviceVerificationExplanation
+ text: `Please sign in and confirm the following code using your web browser. Return here when you are done.`
+ font.family: "Poppins"
+ font.pixelSize: 11 * virtualstudio.fontScale * virtualstudio.uiScale
+ anchors.horizontalCenter: parent.horizontalCenter
+ y: 128 * virtualstudio.uiScale
+ width: 500 * virtualstudio.uiScale;
+ visible: true
+ color: textColour
+ wrapMode: Text.WordWrap
+ horizontalAlignment: Text.AlignHCenter
+ textFormat: Text.RichText
+ onLinkActivated: link => {
+ if (!Boolean(auth.verificationCode)) {
+ return;
+ }
+ virtualstudio.openLink(link)
+ }
}
- onClicked: { virtualstudio.showFirstRun = false; failTextVisible = false; virtualstudio.login() }
- anchors.horizontalCenter: parent.horizontalCenter
- y: showBackButton ? 321 * virtualstudio.uiScale : 371 * virtualstudio.uiScale
- width: 263 * virtualstudio.uiScale; height: 64 * virtualstudio.uiScale
+
+ Image {
+ id: successIcon
+ source: "check.svg"
+ y: 224 * virtualstudio.uiScale
+ anchors.horizontalCenter: parent.horizontalCenter
+ visible: loginScreen.state === "success"
+ sourceSize: Qt.size(96 * virtualstudio.uiScale, 96 * virtualstudio.uiScale)
+ fillMode: Image.PreserveAspectFit
+ smooth: true
+ }
+
+ Colorize {
+ anchors.fill: successIcon
+ source: successIcon
+ hue: .44
+ saturation: .55
+ lightness: .49
+ visible: loginScreen.state === "success"
+ }
+
Text {
- text: "Sign In"
+ id: deviceVerificationCode
+ text: auth.verificationCode || ((numFailures >= 5) ? "Error" : "Loading...");
font.family: "Poppins"
- font.pixelSize: 18 * virtualstudio.fontScale * virtualstudio.uiScale
- font.weight: Font.Bold
+ font.pixelSize: 20 * virtualstudio.fontScale * virtualstudio.uiScale
+ font.letterSpacing: Boolean(auth.verificationCode) ? 8 : 1
anchors.horizontalCenter: parent.horizontalCenter
- anchors.verticalCenter: parent.verticalCenter
- color: loginButton.down ? buttonTextPressed : (loginButton.hovered ? buttonTextHover : buttonTextColour)
+ y: 196 * virtualstudio.uiScale
+ width: 360 * virtualstudio.uiScale;
+ visible: !auth.isAuthenticated
+ color: Boolean(auth.verificationCode) ? textColour : disabledButtonText
+ wrapMode: Text.WordWrap
+ horizontalAlignment: Text.AlignHCenter
+
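+ // Clicking the code copies it to the clipboard and briefly flips the tooltip to a "copied" state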
+ Timer {
+ id: copiedResetTimer
+ interval: 2000; running: false; repeat: false
+ onTriggered: codeCopied = false;
+ }
+
+ MouseArea {
+ id: deviceVerificationCodeMouseArea
+ anchors.fill: parent
+ cursorShape: Qt.PointingHandCursor
+ enabled: Boolean(auth.verificationCode)
+ hoverEnabled: true
+ onClicked: () => {
+ codeCopied = true;
+ clipboard.setText(auth.verificationCode);
+ copiedResetTimer.restart()
+ }
+ }
+
+ ToolTip {
+ parent: deviceVerificationCode
+ visible: loginScreen.state === "polling" && deviceVerificationCodeMouseArea.containsMouse
+ delay: 100
+ contentItem: Rectangle {
+ color: toolTipBackgroundColour
+ radius: 3
+ anchors.fill: parent
+ layer.enabled: true
+ border.width: 1
+ border.color: tooltipStroke
+
+ Text {
+ anchors.centerIn: parent
+ font { family: "Poppins"; pixelSize: 8 * virtualstudio.fontScale * virtualstudio.uiScale}
+ text: codeCopied ? qsTr("📋 Copied code to clipboard") : qsTr("📋 Copy code to clipboard")
+ color: toolTipTextColour
+ }
+ }
+ background: Rectangle {
+ color: "transparent"
+ }
+ }
}
- visible: !virtualstudio.hasRefreshToken
- }
- Button {
- id: backButton
- visible: showBackButton
- background: Rectangle {
- radius: 6 * virtualstudio.uiScale
- color: backButton.down ? buttonPressedColour : (backButton.hovered ? buttonHoverColour : buttonColour)
- border.width: 1
- border.color: backButton.down ? buttonPressedStroke : (backButton.hovered ? buttonHoverStroke : buttonStroke)
- layer.enabled: !backButton.down
+ Button {
+ id: loginButton
+ background: Rectangle {
+ radius: 6 * virtualstudio.uiScale
+ color: loginButton.down ? buttonPressedColour : (loginButton.hovered ? buttonHoverColour : buttonColour)
+ border.width: 1
+ border.color: loginButton.down ? buttonPressedStroke : (loginButton.hovered ? buttonHoverStroke : buttonStroke)
+ layer.enabled: !loginButton.down
+ }
+ onClicked: {
+ if (auth.verificationCode && auth.verificationUrl) {
+ virtualstudio.openLink(auth.verificationUrl);
+ }
+ }
+ anchors.horizontalCenter: parent.horizontalCenter
+ y: 260 * virtualstudio.uiScale
+ width: 263 * virtualstudio.uiScale; height: 64 * virtualstudio.uiScale
+ Text {
+ text: "Sign In"
+ font.family: "Poppins"
+ font.pixelSize: 18 * virtualstudio.fontScale * virtualstudio.uiScale
+ font.weight: Font.Bold
+ anchors.horizontalCenter: parent.horizontalCenter
+ anchors.verticalCenter: parent.verticalCenter
+ color: loginButton.down ? buttonTextPressed : (loginButton.hovered ? buttonTextHover : buttonTextColour)
+ }
+ visible: !auth.isAuthenticated
}
- onClicked: { virtualstudio.windowState = "start" }
- anchors.horizontalCenter: parent.horizontalCenter
- y: 401 * virtualstudio.uiScale
- width: 263 * virtualstudio.uiScale; height: 64 * virtualstudio.uiScale
+
Text {
- text: "Back"
+ id: authFailedText
+ text: "There was an error trying to sign in. Please try again."
font.family: "Poppins"
- font.pixelSize: 18 * virtualstudio.fontScale * virtualstudio.uiScale
+ font.pixelSize: 10 * virtualstudio.fontScale * virtualstudio.uiScale
anchors.horizontalCenter: parent.horizontalCenter
- anchors.verticalCenter: parent.verticalCenter
- color: backButton.down ? buttonTextPressed : (backButton.hovered ? buttonTextHover : buttonTextColour)
+ anchors.bottom: loginScreenFooter.top
+ anchors.bottomMargin: 16 * virtualstudio.uiScale
+ visible: (loginScreen.state === "failed" || numFailures > 0) && loginScreen.state !== "success"
+ color: errorTextColour
}
- }
+ Item {
+ id: loginScreenFooter
+ anchors.horizontalCenter: parent.horizontalCenter
+ anchors.bottom: parent.bottom
+ anchors.bottomMargin: 24 * virtualstudio.uiScale
+ width: parent.width
+ height: 48 * virtualstudio.uiScale
+
+ property bool showBackButton: !virtualstudio.vsFtux
+ property bool showClassicModeButton: virtualstudio.vsFtux
- Button {
- id: classicModeButton
- visible: !showBackButton && virtualstudio.showFirstRun && virtualstudio.vsFtux
- background: Rectangle {
- radius: 6 * virtualstudio.uiScale
- color: classicModeButton.down ? buttonPressedColour : (classicModeButton.hovered ? buttonHoverColour : backgroundColour)
- border.width: 0
- layer.enabled: !classicModeButton.down
+ Item {
+ id: backButton
+ visible: parent.showBackButton
+ anchors.verticalCenter: parent.verticalCenter
+ x: (parent.x + parent.width / 2) - backButton.width - 8 * virtualstudio.uiScale
+ width: 144 * virtualstudio.uiScale; height: 32 * virtualstudio.uiScale
+ Text {
+ text: "Back"
+ font.family: "Poppins"
+ font.underline: true
+ font.pixelSize: 11 * virtualstudio.fontScale * virtualstudio.uiScale
+ anchors.horizontalCenter: parent.horizontalCenter
+ anchors.verticalCenter: parent.verticalCenter
+ color: textColour
+ }
+ MouseArea {
+ anchors.fill: parent
+ onClicked: () => { if (!auth.isAuthenticated) { virtualstudio.windowState = "start"; } }
+ cursorShape: Qt.PointingHandCursor
+ }
+ }
+
+ Item {
+ id: classicModeButton
+ visible: parent.showClassicModeButton
+ anchors.verticalCenter: parent.verticalCenter
+ x: (parent.x + parent.width / 2) - classicModeButton.width - 8 * virtualstudio.uiScale
+ width: 160 * virtualstudio.uiScale; height: 32 * virtualstudio.uiScale
+ Text {
+ text: "Use Classic Mode"
+ font.underline: true
+ font.family: "Poppins"
+ font.pixelSize: 11 * virtualstudio.fontScale * virtualstudio.uiScale
+ anchors.horizontalCenter: parent.horizontalCenter
+ anchors.verticalCenter: parent.verticalCenter
+ color: textColour
+ }
+ MouseArea {
+ anchors.fill: parent
+ onClicked: () => { virtualstudio.windowState = "login"; virtualstudio.toStandard(); }
+ cursorShape: Qt.PointingHandCursor
+ }
+ }
+
+ Item {
+ id: resetCodeButton
+ visible: true
+ x: (parent.showBackButton || parent.showClassicModeButton) ? (parent.x + parent.width / 2) + 8 * virtualstudio.uiScale : (parent.x + parent.width / 2) - resetCodeButton.width / 2
+ anchors.verticalCenter: parent.verticalCenter
+ width: 144 * virtualstudio.uiScale; height: 32 * virtualstudio.uiScale
+ Text {
+ text: "Reset Code"
+ font.family: "Poppins"
+ font.underline: true
+ font.pixelSize: 11 * virtualstudio.fontScale * virtualstudio.uiScale
+ anchors.horizontalCenter: parent.horizontalCenter
+ anchors.verticalCenter: parent.verticalCenter
+ color: textColour
+ }
+ MouseArea {
+ anchors.fill: parent
+ onClicked: () => {
+ if (auth.verificationCode && auth.verificationUrl) {
+ auth.resetCode();
+ }
+ }
+ cursorShape: Qt.PointingHandCursor
+ }
+ }
}
- onClicked: { virtualstudio.windowState = "login"; virtualstudio.toStandard(); }
+ }
+
+ Item {
+ id: refreshToken
anchors.horizontalCenter: parent.horizontalCenter
- y: 600 * virtualstudio.uiScale
- width: 160 * virtualstudio.uiScale; height: 32 * virtualstudio.uiScale
+ y: 108 * virtualstudio.uiScale
+ visible: showLoading
+
Text {
- text: "Use Classic Mode"
+ id: loadingAudioInterfaces
+ text: "Configuring Audio...";
font.family: "Poppins"
- font.pixelSize: 9 * virtualstudio.fontScale * virtualstudio.uiScale
+ font.pixelSize: 16 * virtualstudio.fontScale * virtualstudio.uiScale
anchors.horizontalCenter: parent.horizontalCenter
- anchors.verticalCenter: parent.verticalCenter
- color: classicModeButton.down ? buttonTextPressed : (classicModeButton.hovered ? buttonTextHover : textColour)
+ y: 214 * virtualstudio.uiScale
+ width: 360 * virtualstudio.uiScale;
+ color: textColour
+ wrapMode: Text.WordWrap
+ horizontalAlignment: Text.AlignHCenter
+ visible: loginScreen.state === "success"
+ }
+
+ Text {
+ id: loadingViaRefreshToken
+ text: "Logging In...";
+ font.family: "Poppins"
+ font.pixelSize: 20 * virtualstudio.fontScale * virtualstudio.uiScale
+ anchors.horizontalCenter: parent.horizontalCenter
+ y: 208 * virtualstudio.uiScale
+ width: 360 * virtualstudio.uiScale;
+ color: textColour
+ wrapMode: Text.WordWrap
+ horizontalAlignment: Text.AlignHCenter
+ visible: !loadingAudioInterfaces.visible
+ }
+ }
+
+ Connections {
+ target: auth
+ function onUpdatedAuthenticationStage (stage) {
+ loginScreen.state = stage;
+ if (stage === "failed") {
+ numFailures = numFailures + 1;
+ if (numFailures < 5 && !virtualstudio.hasRefreshToken) {
+ virtualstudio.login();
+ }
+ }
+ if (stage === "success") {
+ numFailures = 0;
+ }
}
}
}
Item {
required property var model
property int bins: 15
-
property int innerMargin: 2 * virtualstudio.uiScale
property int clipWidth: 10 * virtualstudio.uiScale
required property bool clipped
-
property bool enabled: true
property string meterColor: enabled ? (virtualstudio.darkMode ? "#5B5858" : "#D3D4D4") : "#EAECEC"
-
- property string meterGreen: "#61C554"
- property string meterYellow: "#F5BF4F"
property string meterRed: "#F21B1B"
- function getBoxColor (idx, level) {
-
- if (!enabled) {
- return meterColor;
- }
-
- // Case where the meter should be filled
- if (level > (idx / bins)) {
- let fillColor = meterGreen;
- if (idx > 8 && idx <= 11) {
- fillColor = meterYellow;
- } else if (idx > 11) {
- fillColor = meterRed;
- }
- return fillColor;
-
- // Case where the meter should not be filled
- } else {
- return meterColor
- }
- }
-
- ListView {
+ Item {
id: meters
x: 0; y: 0
width: parent.width - clipWidth
height: parent.height
- model: parent.model
- delegate: Item {
+ MeterBars {
+ id: leftchannel
x: 0;
- width: parent.width
+ y: 0;
+ width: parent.width - clipWidth
height: 14 * virtualstudio.uiScale
- required property var modelData
-
- property int boxHeight: 10 * virtualstudio.uiScale
- property int boxWidth: (width / bins) - innerMargin
- property int boxRadius: 4 * virtualstudio.uiScale
-
- Rectangle {
- id: box0
- x: 0;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(0, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box1
- x: boxWidth + innerMargin;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(1, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box2
- x: (boxWidth) * 2 + innerMargin * 2;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(2, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box3
- x: (boxWidth) * 3 + innerMargin * 3;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(3, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box4
- x: (boxWidth) * 4 + innerMargin * 4;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(4, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box5
- x: (boxWidth) * 5 + innerMargin * 5;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(5, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box6
- x: (boxWidth) * 6 + innerMargin * 6;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(6, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box7
- x: (boxWidth) * 7 + innerMargin * 7;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(7, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box8
- x: (boxWidth) * 8 + innerMargin * 8;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(8, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box9
- x: (boxWidth) * 9 + innerMargin * 9;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(9, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box10
- x: (boxWidth) * 10 + innerMargin * 10;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(10, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box11
- x: (boxWidth) * 11 + innerMargin * 11;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(11, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box12
- x: (boxWidth) * 12 + innerMargin * 12;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(12, parent.modelData.level)
- radius: boxRadius
- }
-
- Rectangle {
- id: box13
- x: (boxWidth) * 13 + innerMargin * 13;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(13, parent.modelData.level)
- radius: boxRadius
- }
+ level: parent.parent.model[0]
+ enabled: parent.parent.enabled
+ }
- Rectangle {
- id: box14
- x: (boxWidth) * 14 + innerMargin * 14;
- y: 0;
- width: boxWidth
- height: boxHeight
- color: getBoxColor(14, parent.modelData.level)
- radius: boxRadius
- }
+ MeterBars {
+ id: rightchannel
+ x: 0;
+ y: leftchannel.height
+ width: parent.width - clipWidth
+ height: 14 * virtualstudio.uiScale
+ level: parent.parent.model[1]
+ enabled: parent.parent.enabled
}
}
--- /dev/null
+import QtQuick 2.12
+import QtQuick.Controls 2.12
+import QtGraphicalEffects 1.12
+
+Item {
+ required property var level
+ required property var enabled
+ property int bins: 15
+ property int innerMargin: 2 * virtualstudio.uiScale
+ property int boxHeight: 10 * virtualstudio.uiScale
+ property int boxWidth: (width / bins) - innerMargin
+ property int boxRadius: 4 * virtualstudio.uiScale
+ property string meterColor: enabled ? (virtualstudio.darkMode ? "#5B5858" : "#D3D4D4") : "#EAECEC"
+ property string meterGreen: "#61C554"
+ property string meterYellow: "#F5BF4F"
+ property string meterRed: "#F21B1B"
+
+ function getBoxColor (idx) {
+ // Case where the meter should not be filled
+ if (!enabled || level <= (idx / bins)) {
+ return meterColor;
+ }
+ // Case where the meter should be filled
+ let fillColor = meterGreen;
+ if (idx > 8 && idx <= 11) {
+ fillColor = meterYellow;
+ } else if (idx > 11) {
+ fillColor = meterRed;
+ }
+ return fillColor;
+ }
+
+ Rectangle {
+ id: box0
+ x: 0;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(0)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box1
+ x: boxWidth + innerMargin;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(1)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box2
+ x: (boxWidth) * 2 + innerMargin * 2;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(2)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box3
+ x: (boxWidth) * 3 + innerMargin * 3;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(3)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box4
+ x: (boxWidth) * 4 + innerMargin * 4;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(4)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box5
+ x: (boxWidth) * 5 + innerMargin * 5;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(5)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box6
+ x: (boxWidth) * 6 + innerMargin * 6;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(6)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box7
+ x: (boxWidth) * 7 + innerMargin * 7;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(7)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box8
+ x: (boxWidth) * 8 + innerMargin * 8;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(8)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box9
+ x: (boxWidth) * 9 + innerMargin * 9;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(9)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box10
+ x: (boxWidth) * 10 + innerMargin * 10;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(10)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box11
+ x: (boxWidth) * 11 + innerMargin * 11;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(11)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box12
+ x: (boxWidth) * 12 + innerMargin * 12;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(12)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box13
+ x: (boxWidth) * 13 + innerMargin * 13;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(13)
+ radius: boxRadius
+ }
+
+ Rectangle {
+ id: box14
+ x: (boxWidth) * 14 + innerMargin * 14;
+ y: 0;
+ width: boxWidth
+ height: boxHeight
+ color: getBoxColor(14)
+ radius: boxRadius
+ }
+}
\ No newline at end of file
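The colour thresholds in `getBoxColor` above (boxes 0-8 green, 9-11 yellow, 12-14 red, filled while `level > idx / bins`) can be sanity-checked outside QML. A minimal sketch, assuming the light-mode unfilled colour and a hypothetical `boxColor` helper that mirrors the QML logic:

```cpp
#include <cassert>
#include <string>

// Mirrors MeterBars.getBoxColor: a box at index idx is filled when
// level > idx / bins; filled boxes 0-8 are green, 9-11 yellow, 12-14 red.
std::string boxColor(int idx, double level, int bins = 15, bool enabled = true)
{
    if (!enabled || level <= static_cast<double>(idx) / bins)
        return "#D3D4D4";  // unfilled (light-mode meter colour)
    if (idx > 11)
        return "#F21B1B";  // red
    if (idx > 8)
        return "#F5BF4F";  // yellow
    return "#61C554";      // green
}

int main()
{
    // With level = 0.8, box 11 is the highest filled box (0.8 > 11/15),
    // so the meter shows green through index 8, yellow for 9-11, and no red.
    assert(boxColor(8, 0.8) == "#61C554");
    assert(boxColor(11, 0.8) == "#F5BF4F");
    assert(boxColor(12, 0.8) == "#D3D4D4");
    return 0;
}
```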
function getCurrentInputDeviceIndex () {
if (virtualstudio.inputDevice === "") {
- return inputComboModel.findIndex(elem => elem.type === "element");
+ return virtualstudio.inputComboModel.findIndex(elem => elem.type === "element");
}
- let idx = inputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.inputDevice);
+ let idx = virtualstudio.inputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.inputDevice);
if (idx < 0) {
- idx = inputComboModel.findIndex(elem => elem.type === "element");
+ idx = virtualstudio.inputComboModel.findIndex(elem => elem.type === "element");
}
return idx;
function getCurrentOutputDeviceIndex() {
if (virtualstudio.outputDevice === "") {
- return outputComboModel.findIndex(elem => elem.type === "element");
+ return virtualstudio.outputComboModel.findIndex(elem => elem.type === "element");
}
- let idx = outputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.outputDevice);
+ let idx = virtualstudio.outputComboModel.findIndex(elem => elem.type === "element" && elem.text === virtualstudio.outputDevice);
if (idx < 0) {
- idx = outputComboModel.findIndex(elem => elem.type === "element");
+ idx = virtualstudio.outputComboModel.findIndex(elem => elem.type === "element");
}
return idx;
border.width: 1
border.color: modeButton.down ? buttonPressedStroke : (modeButton.hovered ? buttonHoverStroke : buttonStroke)
}
- onClicked: { virtualstudio.windowState = "login"; virtualstudio.toStandard(); }
+ onClicked: {
+ // essentially the same here as clicking the cancel button
+ virtualstudio.windowState = "browse";
+ inputCurrIndex = virtualstudio.previousInput;
+ outputCurrIndex = virtualstudio.previousOutput;
+ virtualstudio.revertSettings();
+
+ // switch mode
+ virtualstudio.toStandard();
+ }
x: 234 * virtualstudio.uiScale; y: 100 * virtualstudio.uiScale
width: 216 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
Text {
currentIndex: virtualstudio.bufferStrategy
onActivated: { virtualstudio.bufferStrategy = currentIndex }
font.family: "Poppins"
- visible: virtualstudio.audioBackend != "JACK"
}
Text {
x: 48 * virtualstudio.uiScale
text: "Buffer Strategy"
font { family: "Poppins"; pixelSize: fontMedium * virtualstudio.fontScale * virtualstudio.uiScale }
- visible: virtualstudio.audioBackend != "JACK"
color: textColour
}
+
+ ComboBox {
+ id: feedbackDetectionCombo
+ x: updateChannelCombo.x; y: bufferStrategyCombo.y + (48 * virtualstudio.uiScale)
+ width: updateChannelCombo.width; height: updateChannelCombo.height
+ model: feedbackDetectionComboModel
+ currentIndex: virtualstudio.feedbackDetectionEnabled ? 0 : 1
+ onActivated: {
+ if (currentIndex === 1) {
+ virtualstudio.feedbackDetectionEnabled = false;
+ } else {
+ virtualstudio.feedbackDetectionEnabled = true;
+ }
+ }
+ font.family: "Poppins"
+ }
+
+ Text {
+ anchors.verticalCenter: feedbackDetectionCombo.verticalCenter
+ x: 48 * virtualstudio.uiScale
+ text: "Feedback Detection"
+ font { family: "Poppins"; pixelSize: fontMedium * virtualstudio.fontScale * virtualstudio.uiScale }
+ color: textColour
+ }
+
}
Rectangle {
border.width: 1
border.color: logoutButton.down ? buttonPressedStroke : (logoutButton.hovered ? buttonHoverStroke : buttonStroke)
}
- onClicked: { virtualstudio.windowState = "login"; virtualstudio.logout() }
+ onClicked: { virtualstudio.showFirstRun = false; virtualstudio.logout(); }
anchors.horizontalCenter: parent.horizontalCenter
y: editButton.y + (48 * virtualstudio.uiScale)
width: 260 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
border.width: 1
border.color: testModeButton.down ? buttonPressedStroke : (testModeButton.hovered ? buttonHoverStroke : buttonStroke)
}
- onClicked: { virtualstudio.testMode = !virtualstudio.testMode; virtualstudio.windowState = "login"; virtualstudio.logout() }
+ onClicked: { virtualstudio.testMode = !virtualstudio.testMode; }
anchors.horizontalCenter: parent.horizontalCenter
y: logoutButton.y + (48 * virtualstudio.uiScale)
width: 260 * virtualstudio.uiScale; height: 30 * virtualstudio.uiScale
width: 664; height: 83 * virtualstudio.uiScale
radius: 6 * virtualstudio.uiScale
color: backgroundColour
-
+
property string serverLocation: "Germany - Berlin"
property string flagImage: "flags/DE.svg"
property string hostname: "app.jacktrip.org"
property bool available: true
property bool connected: false
property bool inviteCopied: false
-
+
property int leftMargin: 81
property int topMargin: 13
property int bottomToolTipMargin: 8
property int rightToolTipMargin: 4
-
+
property real fontBig: 18
property real fontMedium: 11
property real fontSmall: 8
-
+
property string backgroundColour: virtualstudio.darkMode ? "#494646" : "#F4F6F6"
property string textColour: virtualstudio.darkMode ? "#FAFBFB" : "#0F0D0D"
property string shadowColour: virtualstudio.darkMode ? "#40000000" : "#80A1A1A1"
property string joinAvailableHoverColour: virtualstudio.darkMode ? "#BAC7B8" : "#B0DCAB"
property string joinAvailablePressedColour: virtualstudio.darkMode ? "#D8E2D6" : "#BAE8B5"
property string joinAvailableStroke: virtualstudio.darkMode ? "#748F70" : "#5DB752"
-
+
property string joinUnavailableColour: baseButtonColour
property string joinUnavailableHoverColour: baseButtonHoverColour
property string joinUnavailablePressedColour: baseButtonPressedColour
fillMode: Image.PreserveAspectFit
smooth: true
}
-
+
Rectangle {
x: 33 * virtualstudio.uiScale; y: 8 * virtualstudio.uiScale
width: 32 * virtualstudio.uiScale; height: width
radius: width / 2
color: available ? "#0C1424" : "#B3B3B3"
}
-
+
Image {
id: flag
source: flagImage
radius: width / 2
}
}
-
+
Text {
x: leftMargin * virtualstudio.uiScale; y: 11 * virtualstudio.uiScale;
width: (admin || connected) ? parent.width - (310 * virtualstudio.uiScale) : parent.width - (233 * virtualstudio.uiScale)
verticalAlignment: Text.AlignVCenter
color: textColour
}
-
+
Rectangle {
id: publicRect
x: leftMargin * virtualstudio.uiScale; y: 52 * virtualstudio.uiScale
smooth: true
}
}
-
+
Text {
anchors.verticalCenter: publicRect.verticalCenter
x: (leftMargin + 22) * virtualstudio.uiScale
elide: Text.ElideRight
color: textColour
}
-
+
Button {
id: joinButton
x: (admin || connected) ? parent.width - (219 * virtualstudio.uiScale) : parent.width - (142 * virtualstudio.uiScale)
smooth: true
}
}
-
+
Text {
anchors.horizontalCenter: joinButton.horizontalCenter
y: 56 * virtualstudio.uiScale
interval: 2000; running: false; repeat: false
onTriggered: inviteCopied = false;
}
- onClicked: {
+ onClicked: {
inviteCopied = true;
if (virtualstudio.testMode) {
hostname = "test.jacktrip.org";
}
}
}
-
+
Text {
anchors.horizontalCenter: inviteButton.horizontalCenter
y: 56 * virtualstudio.uiScale
visible: true
color: textColour
}
-
+
Button {
id: manageOrVideoButton
x: parent.width - (65 * virtualstudio.uiScale); y: topMargin * virtualstudio.uiScale
border.width: manageOrVideoButton.down ? 1 : 0
border.color: manageStroke
}
- onClicked: {
+ onClicked: {
if (connected) {
virtualstudio.launchVideo(-1)
} else {
smooth: true
}
}
-
+
Text {
anchors.horizontalCenter: manageOrVideoButton.horizontalCenter
y: 56 * virtualstudio.uiScale
--- /dev/null
+<svg xmlns="http://www.w3.org/2000/svg" height="240" viewBox="0 96 960 960" width="240"><path d="M421 676.692 320.077 574q-7.154-5.385-16.615-5.769-9.462-.385-15.847 6-7.154 7.154-7.154 16.615 0 9.462 7.154 15.616l109.923 110.154q9.049 11 23.371 11t24.553-11l227.153-226.385q5.616-6.385 6-15.846.385-9.462-6-16.847-7.384-6.153-16.961-6.038-9.577.115-15.731 6.269L421 676.692ZM480.134 952q-78.082 0-146.274-29.859-68.193-29.86-119.141-80.762-50.947-50.902-80.833-119.033Q104 654.215 104 576.134q0-77.569 29.918-146.371 29.919-68.803 80.922-119.917 51.003-51.114 119.032-80.48Q401.901 200 479.866 200q77.559 0 146.353 29.339 68.794 29.34 119.922 80.422 51.127 51.082 80.493 119.841Q856 498.361 856 575.95q0 78.358-29.339 146.21-29.34 67.853-80.408 118.902-51.069 51.048-119.81 80.993Q557.702 952 480.134 952ZM480 908.231q137.897 0 235.064-97.282Q812.231 713.666 812.231 576q0-137.897-97.167-235.064T480 243.769q-137.666 0-234.949 97.167Q147.769 438.103 147.769 576q0 137.666 97.282 234.949Q342.334 908.231 480 908.231ZM480 576Z"/></svg>
\ No newline at end of file
#include "messageDialog.h"
+#include <QDateTime>
#include <QMenu>
#include <QScrollBar>
#include <QSettings>
, m_outStreams(streamCount)
, m_outBufs(streamCount)
, m_windowFunction(windowFunction)
+ , m_addTimeStamp(true)
+ , m_timeStampFormat(QStringLiteral("hh:mm:ss: "))
+ , m_startOfLine(true)
{
m_ui->setupUi(this);
for (quint32 i = 0; i < streamCount; i++) {
if (!m_windowFunction.isEmpty()) {
setWindowTitle(m_windowFunction);
+ if (m_windowFunction == QLatin1String("Stats")) {
+ m_addTimeStamp = false;
+ } else {
+ // Create an indent for wrapped lines if we're adding a timestamp.
+ // Because we're using a fixed width font we can just multiply our
+ // timeStamp length by the average character width.
+ QTextBlockFormat indent;
+ QFontMetrics metrics(m_ui->messagesTextEdit->font());
+ int marginWidth = metrics.averageCharWidth() * m_timeStampFormat.length();
+ indent.setLeftMargin(marginWidth);
+ indent.setTextIndent(-marginWidth);
+ m_ui->messagesTextEdit->textCursor().setBlockFormat(indent);
+ }
}
}
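The indent set up above relies on a standard QTextBlockFormat trick: a positive left margin combined with an equal negative text indent leaves the first line of each block where it was while pushing wrapped continuation lines under the message text instead of under the timestamp. A minimal standalone sketch of the idea (illustrative only; the font choice and sample text are assumptions):

```cpp
#include <QApplication>
#include <QFont>
#include <QFontMetrics>
#include <QTextBlockFormat>
#include <QTextCursor>
#include <QTextEdit>

int main(int argc, char* argv[])
{
    QApplication app(argc, argv);
    QTextEdit edit;
    edit.setFont(QFont(QStringLiteral("Courier New")));  // fixed-width, as in the dialog

    // Size the hanging indent from the timestamp width: with a fixed-width
    // font, average character width * timestamp length is close enough.
    const QString timeStampFormat = QStringLiteral("hh:mm:ss: ");
    QFontMetrics metrics(edit.font());
    int marginWidth = metrics.averageCharWidth() * timeStampFormat.length();

    QTextBlockFormat indent;
    indent.setLeftMargin(marginWidth);   // shift the whole block right
    indent.setTextIndent(-marginWidth);  // pull only the first line back left
    edit.textCursor().setBlockFormat(indent);

    edit.insertPlainText(QStringLiteral(
        "12:00:00: a long message that wraps; continuation lines align under "
        "the message text rather than under the timestamp"));
    edit.show();
    return app.exec();
}
```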
void MessageDialog::receiveOutput(const QString& output)
{
+ if (output.isEmpty()) {
+ return;
+ }
+
// Automatically scroll if we're at the bottom of the text box.
- bool autoScroll = (m_ui->messagesTextEdit->verticalScrollBar()->value()
- == m_ui->messagesTextEdit->verticalScrollBar()->maximum());
+ int scrollLocation = (m_ui->messagesTextEdit->verticalScrollBar()->value());
+ bool autoScroll =
+ (scrollLocation == m_ui->messagesTextEdit->verticalScrollBar()->maximum());
+
// Make sure our cursor is at the end.
m_ui->messagesTextEdit->moveCursor(QTextCursor::End);
- m_ui->messagesTextEdit->insertPlainText(output);
+
+ if (m_addTimeStamp) {
+ QString timeStamp = QDateTime::currentDateTime().toString(m_timeStampFormat);
+ if (m_startOfLine) {
+ m_ui->messagesTextEdit->insertPlainText(timeStamp);
+ }
+ if (output.indexOf(QChar('\n')) == -1) {
+ m_ui->messagesTextEdit->insertPlainText(output);
+ } else {
+ QStringList lines = output.split(QChar('\n'));
+ m_ui->messagesTextEdit->insertPlainText(
+ QStringLiteral("%1\n").arg(lines.at(0)));
+ int length = lines.length();
+ if (output.endsWith(QChar('\n'))) {
+ length--;
+ }
+ for (int i = 1; i < length; i++) {
+ m_ui->messagesTextEdit->insertPlainText(
+ QStringLiteral("%1%2\n").arg(timeStamp, lines.at(i)));
+ }
+ }
+ m_startOfLine = output.endsWith(QChar('\n'));
+ } else {
+ m_ui->messagesTextEdit->insertPlainText(output);
+ }
if (autoScroll) {
m_ui->messagesTextEdit->verticalScrollBar()->setValue(
m_ui->messagesTextEdit->verticalScrollBar()->maximum());
+ } else {
+ m_ui->messagesTextEdit->verticalScrollBar()->setValue(scrollLocation);
}
}
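Behaviourally, the new receiveOutput amounts to: remember whether the previous chunk ended mid-line, and prefix a timestamp at every line start. A Qt-free sketch of that behaviour (the chunk contents and the currentTimeStamp helper are illustrative, not from the patch):

```cpp
#include <ctime>
#include <iostream>
#include <string>

static std::string currentTimeStamp()
{
    char buf[16];
    std::time_t now = std::time(nullptr);
    std::strftime(buf, sizeof(buf), "%H:%M:%S: ", std::localtime(&now));
    return buf;
}

int main()
{
    bool startOfLine = true;  // mirrors m_startOfLine
    const std::string chunks[] = {"starting", " up\nconnected\npartial ", "line\n"};

    for (const std::string& chunk : chunks) {
        std::string out;
        for (char c : chunk) {
            if (startOfLine) {
                out += currentTimeStamp();  // stamp only at the start of a line
                startOfLine = false;
            }
            out += c;
            if (c == '\n') {
                startOfLine = true;  // next character begins a new line
            }
        }
        std::cout << out;
    }
    return 0;
}
```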
QVector<QSharedPointer<std::ostream>> m_outStreams;
QVector<QSharedPointer<textbuf>> m_outBufs;
QString m_windowFunction;
+ bool m_addTimeStamp;
+ QString m_timeStampFormat;
+ bool m_startOfLine;
};
#endif // MESSAGEDIALOG_H
</property>
<layout class="QGridLayout" name="gridLayout">
<item row="0" column="0">
- <widget class="QPlainTextEdit" name="messagesTextEdit">
+ <widget class="QTextEdit" name="messagesTextEdit">
<property name="readOnly">
<bool>true</bool>
</property>
#include "../Meter.h"
#include "../Reverb.h"
-QJackTrip::QJackTrip(Settings* settings, bool suppressCommandlineWarning, QWidget* parent)
+QJackTrip::QJackTrip(QSharedPointer<Settings> settings, bool suppressCommandlineWarning,
+ QWidget* parent)
: QMainWindow(parent)
, m_ui(new Ui::QJackTrip)
, m_netManager(new QNetworkAccessManager(this))
, m_jackTripRunning(false)
, m_isExiting(false)
, m_exitSent(false)
+ , m_suppressCommandlineWarning(suppressCommandlineWarning)
, m_hideWarning(false)
{
m_ui->setupUi(this);
+ m_cliSettings = settings;
// Set up our debug window, and relay everything to our real cout.
std::cout.rdbuf(m_debugDialog->getOutputStream()->rdbuf());
#endif
migrateSettings();
- loadSettings(settings);
-
- // Display a warning about any ignored command line options.
- if (settings->guiIgnoresArguments() && !suppressCommandlineWarning) {
- QMessageBox msgBox;
- msgBox.setText(
- "You have supplied command line options that the GUI version of JackTrip "
- "currently ignores. (Everything else will run as expected.)\n\nRun "
- "\"jacktrip -h\" for more details.");
- msgBox.setWindowTitle(QStringLiteral("Command line options"));
- msgBox.exec();
- }
+ m_ui->optionsTabWidget->setCurrentIndex(0);
QVector<QLabel*> labels;
labels << m_ui->inFreeverbLabel << m_ui->inZitarevLabel << m_ui->outFreeverbLabel;
labels.at(index)->setToolTip(m_ui->outZitarevLabel->toolTip());
m_ui->outZitarevLabel->setToolTip(QLatin1String(""));
}
-
- // Add an autoqueue indicator to the status bar.
- m_ui->statusBar->addPermanentWidget(&m_autoQueueIndicator);
- if (m_ui->jitterCheckBox->isChecked() && m_ui->autoQueueCheckBox->isChecked()) {
- m_autoQueueIndicator.setText(QStringLiteral("Auto queue: enabled"));
- } else {
- m_autoQueueIndicator.setText(QStringLiteral("Auto queue: disabled"));
- }
-
-#ifdef USE_WEAK_JACK
- // Check if Jack is actually available
- if (have_libjack() != 0) {
-#ifdef RT_AUDIO
- m_audioFallback = true;
- m_usingRtAudioAlready = m_ui->backendComboBox->currentIndex() == 1;
- m_ui->backendComboBox->setCurrentIndex(1);
- m_ui->backendComboBox->setEnabled(false);
- m_ui->backendLabel->setEnabled(false);
-
- // If we're in Hub Server mode, switch us back to P2P server mode.
- if (m_ui->typeComboBox->currentIndex() == HUB_SERVER) {
- m_ui->typeComboBox->setCurrentIndex(P2P_SERVER);
- }
- m_ui->typeComboBox->removeItem(HUB_SERVER);
- m_ui->backendWarningLabel->setText(
- "JACK was not found. This means that only the RtAudio backend is available "
- "and that JackTrip cannot be run in hub server mode.");
- } else {
- // If we've fallen back to RtAudio before and JACK is now installed, use JACK.
- QSettings settings;
- settings.beginGroup(QStringLiteral("Audio"));
- if (settings.value(QStringLiteral("UsingFallback"), false).toBool()) {
- m_ui->backendComboBox->setCurrentIndex(0);
- settings.setValue(QStringLiteral("UsingFallback"), false);
- }
- settings.endGroup();
-#else // RT_AUDIO
- QMessageBox msgBox;
- msgBox.setText(
- "An installation of JACK was not found, and no other audio backends are "
- "available. JackTrip will not be able to start. (Please install JACK to fix "
- "this.)");
- msgBox.setWindowTitle("JACK Not Available");
- msgBox.exec();
-#endif // RT_AUDIO
- }
-#endif // USE_WEAK_JACK
-
- m_ui->optionsTabWidget->setCurrentIndex(0);
}
void QJackTrip::closeEvent(QCloseEvent* event)
rect = metrics.boundingRect(0, 0, width, 0, Qt::TextWordWrap,
m_ui->authNotVSLabel->text());
m_ui->authNotVSLabel->setMinimumHeight(rect.height());
+
+ // The previous minimum heights should protect any further word-wrapped labels,
+ // but it's worth including any additional ones here for future-proofing.
+ width = m_ui->scriptingTab->contentsRect().width()
+ - m_ui->scriptingTab->contentsMargins().left()
+ - m_ui->scriptingTab->contentsMargins().right()
+ - m_ui->scriptingTab->layout()->contentsMargins().left()
+ - m_ui->scriptingTab->layout()->contentsMargins().right();
+ rect = metrics.boundingRect(0, 0, width, 0, Qt::TextWordWrap,
+ m_ui->environmentVariableLabel->text());
+ m_ui->environmentVariableLabel->setMinimumHeight(rect.height());
}
void QJackTrip::showEvent(QShowEvent* event)
QMainWindow::showEvent(event);
if (m_firstShow) {
QSettings settings;
- settings.beginGroup(QStringLiteral("Window"));
- QByteArray geometry = settings.value(QStringLiteral("Geometry")).toByteArray();
- if (geometry.size() > 0) {
- restoreGeometry(geometry);
- } else {
- // Because of hidden elements in our dialog window, it's vertical size in the
- // creator is getting rediculous. Set it to something sensible by default if
- // this is our first load.
- this->resize(QSize(this->size().height(), 600));
+ loadSettings(m_cliSettings.data());
+
+ // Display a warning about any ignored command line options.
+ if (m_cliSettings->guiIgnoresArguments() && !m_suppressCommandlineWarning) {
+ QMessageBox msgBox;
+ msgBox.setText(
+ "You have supplied command line options that the GUI version of JackTrip "
+ "currently ignores. (Everything else will run as expected.)\n\nRun "
+ "\"jacktrip -h\" for more details.");
+ msgBox.setWindowTitle(QStringLiteral("Command line options"));
+ msgBox.exec();
}
- settings.endGroup();
- // Use the ipify API to find our external IP address.
- connect(m_netManager.data(), &QNetworkAccessManager::finished, this,
- &QJackTrip::receivedIP);
- m_netManager->get(QNetworkRequest(QUrl(QStringLiteral("https://api.ipify.org"))));
- m_netManager->get(
- QNetworkRequest(QUrl(QStringLiteral("https://api6.ipify.org"))));
+ // Add an autoqueue indicator to the status bar.
+ m_ui->statusBar->addPermanentWidget(&m_autoQueueIndicator);
+ if (m_ui->jitterCheckBox->isChecked() && m_ui->autoQueueCheckBox->isChecked()) {
+ m_autoQueueIndicator.setText(QStringLiteral("Auto queue: enabled"));
+ } else {
+ m_autoQueueIndicator.setText(QStringLiteral("Auto queue: disabled"));
+ }
- // Also show our JACK not found warning if needed.
+#ifdef USE_WEAK_JACK
+ // Check if Jack is actually available
+ if (have_libjack() != 0) {
#ifdef RT_AUDIO
- if (m_audioFallback) {
- QSettings settings;
+ bool usingRtAudioAlready = m_ui->backendComboBox->currentIndex() == 1;
+ m_ui->backendComboBox->setCurrentIndex(1);
+ m_ui->backendComboBox->setEnabled(false);
+ m_ui->backendLabel->setEnabled(false);
+
+ // If we're in Hub Server mode, switch us back to P2P server mode.
+ if (m_ui->typeComboBox->currentIndex() == HUB_SERVER) {
+ m_ui->typeComboBox->setCurrentIndex(P2P_SERVER);
+ }
+ m_ui->typeComboBox->removeItem(HUB_SERVER);
+ m_ui->backendWarningLabel->setText(
+ "JACK was not found. This means that only the RtAudio backend is "
+ "available and that JackTrip cannot be run in hub server mode.");
+
settings.beginGroup(QStringLiteral("Audio"));
if (!settings.value(QStringLiteral("HideJackWarning"), false).toBool()) {
QCheckBox* dontBugMe =
if (m_hideWarning) {
settings.setValue(QStringLiteral("HideJackWarning"), true);
}
- if (!m_usingRtAudioAlready) {
+ if (!usingRtAudioAlready) {
settings.setValue(QStringLiteral("UsingFallback"), true);
}
}
settings.endGroup();
- }
+ } else {
+ // If we've fallen back to RtAudio before and JACK is now installed, use JACK.
+ settings.beginGroup(QStringLiteral("Audio"));
+ if (settings.value(QStringLiteral("UsingFallback"), false).toBool()) {
+ m_ui->backendComboBox->setCurrentIndex(0);
+ settings.setValue(QStringLiteral("UsingFallback"), false);
+ }
+ settings.endGroup();
+#else // RT_AUDIO
+ QMessageBox msgBox;
+ msgBox.setText(
+ "An installation of JACK was not found, and no other audio backends are "
+ "available. JackTrip will not be able to start. (Please install JACK to "
+ "fix this.)");
+ msgBox.setWindowTitle("JACK Not Available");
+ msgBox.exec();
#endif // RT_AUDIO
+ }
+#endif // USE_WEAK_JACK
+
+ settings.beginGroup(QStringLiteral("Window"));
+ QByteArray geometry = settings.value(QStringLiteral("Geometry")).toByteArray();
+ if (geometry.size() > 0) {
+ restoreGeometry(geometry);
+ } else {
+ // Because of hidden elements in our dialog window, its vertical size in the
+ // creator is getting ridiculous. Set it to something sensible by default if
+ // this is our first load.
+ this->resize(QSize(this->size().height(), 600));
+ }
+ settings.endGroup();
+
+ // Use the ipify API to find our external IP address.
+ connect(m_netManager.data(), &QNetworkAccessManager::finished, this,
+ &QJackTrip::receivedIP);
+ m_netManager->get(QNetworkRequest(QUrl(QStringLiteral("https://api.ipify.org"))));
+ m_netManager->get(
+ QNetworkRequest(QUrl(QStringLiteral("https://api6.ipify.org"))));
m_firstShow = false;
}
}
disconnectScript.setArguments(arguments);
disconnectScript.setStandardOutputFile(QProcess::nullDevice());
disconnectScript.setStandardErrorFile(QProcess::nullDevice());
+ QProcessEnvironment env = QProcessEnvironment::systemEnvironment();
+ env.insert(QStringLiteral("JT_CLIENT_NAME"), m_assignedClientName);
+ env.insert(QStringLiteral("JT_SEND_CHANNELS"),
+ QString::number(m_ui->channelSendSpinBox->value()));
+ env.insert(QStringLiteral("JT_RECV_CHANNELS"),
+ QString::number(m_ui->channelRecvSpinBox->value()));
+ disconnectScript.setProcessEnvironment(env);
disconnectScript.startDetached();
}
}
void QJackTrip::receivedConnectionFromPeer()
{
m_ui->statusBar->showMessage(QStringLiteral("Received Connection from Peer!"));
+ m_assignedClientName = m_jackTrip->getAssignedClientName();
if (m_ui->connectScriptCheckBox->isChecked()) {
QStringList arguments = m_ui->connectScriptEdit->text().split(QStringLiteral(" "),
Qt::SkipEmptyParts);
connectScript.setArguments(arguments);
connectScript.setStandardOutputFile(QProcess::nullDevice());
connectScript.setStandardErrorFile(QProcess::nullDevice());
+ QProcessEnvironment env = QProcessEnvironment::systemEnvironment();
+ env.insert(QStringLiteral("JT_CLIENT_NAME"), m_assignedClientName);
+ env.insert(QStringLiteral("JT_SEND_CHANNELS"),
+ QString::number(m_ui->channelSendSpinBox->value()));
+ env.insert(QStringLiteral("JT_RECV_CHANNELS"),
+ QString::number(m_ui->channelRecvSpinBox->value()));
+ connectScript.setProcessEnvironment(env);
connectScript.startDetached();
}
}
Q_OBJECT
public:
- explicit QJackTrip(Settings* settings, bool suppressCommandlineWarning = false,
- QWidget* parent = nullptr);
+ explicit QJackTrip(QSharedPointer<Settings> settings,
+ bool suppressCommandlineWarning = false,
+ QWidget* parent = nullptr);
~QJackTrip() override;
void closeEvent(QCloseEvent* event) override;
QScopedPointer<QGridLayout> m_outputLayout;
std::ostream m_realCout;
std::ostream m_realCerr;
+ QString m_assignedClientName;
bool m_jackTripRunning;
bool m_isExiting;
bool m_exitSent;
+ QSharedPointer<Settings> m_cliSettings;
+ bool m_suppressCommandlineWarning;
+
float m_meterMax = 0.0;
float m_meterMin = -64.0;
QLabel m_autoQueueIndicator;
bool m_hideWarning;
- bool m_audioFallback = false;
- bool m_usingRtAudioAlready = false;
- bool m_firstShow = true;
+ bool m_firstShow = true;
#ifndef NO_VS
QSharedPointer<VirtualStudio> m_vs;
<file>AudioSettings.qml</file>
<file>Settings.qml</file>
<file>Meter.qml</file>
+ <file>MeterBars.qml</file>
<file>Connected.qml</file>
<file>Failed.qml</file>
<file>Setup.qml</file>
<file>loud.svg</file>
<file>refresh.svg</file>
<file>ethernet.png</file>
+ <file>check.svg</file>
<file>ohno.png</file>
<file>headphones.svg</file>
<file>Prompt.svg</file>
<string>Scripting</string>
</attribute>
<layout class="QGridLayout" name="gridLayout_12">
- <item row="1" column="0">
- <widget class="QLineEdit" name="connectScriptEdit">
+ <item row="3" column="0">
+ <widget class="QLineEdit" name="disconnectScriptEdit">
<property name="enabled">
<bool>false</bool>
</property>
</widget>
</item>
- <item row="3" column="0">
- <widget class="QLineEdit" name="disconnectScriptEdit">
+ <item row="1" column="0">
+ <widget class="QLineEdit" name="connectScriptEdit">
<property name="enabled">
<bool>false</bool>
</property>
</widget>
</item>
- <item row="4" column="0">
- <spacer name="scriptingVerticalSpacer">
- <property name="orientation">
- <enum>Qt::Vertical</enum>
- </property>
- <property name="sizeHint" stdset="0">
- <size>
- <width>20</width>
- <height>40</height>
- </size>
- </property>
- </spacer>
- </item>
<item row="1" column="1">
<widget class="QPushButton" name="connectScriptBrowse">
<property name="enabled">
</property>
</widget>
</item>
+ <item row="5" column="0">
+ <spacer name="scriptingVerticalSpacer">
+ <property name="orientation">
+ <enum>Qt::Vertical</enum>
+ </property>
+ <property name="sizeHint" stdset="0">
+ <size>
+ <width>20</width>
+ <height>40</height>
+ </size>
+ </property>
+ </spacer>
+ </item>
<item row="2" column="0" colspan="2">
<widget class="QCheckBox" name="disconnectScriptCheckBox">
<property name="text">
</property>
</widget>
</item>
+ <item row="4" column="0" colspan="2">
+ <widget class="QLabel" name="environmentVariableLabel">
+ <property name="text">
+ <string>Any scripts that you write will have access to the following environment variables:
+
+JT_CLIENT_NAME - The name of the created JACK client
+JT_SEND_CHANNELS - The number of audio channels sent to the network
+JT_RECV_CHANNELS - The number of audio channels received from the network</string>
+ </property>
+ <property name="wordWrap">
+ <bool>true</bool>
+ </property>
+ </widget>
+ </item>
</layout>
</widget>
</widget>
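The JT_* variables documented in the label above are the same ones populated via QProcessEnvironment before the connect and disconnect scripts are started. As a minimal illustration, a hypothetical helper invoked by such a script could read them like this (the helper, its defaults, and the envOr function are not part of JackTrip):

```cpp
#include <cstdlib>
#include <iostream>
#include <string>

// Return the value of an environment variable, or a fallback if it is unset.
static std::string envOr(const char* name, const std::string& fallback)
{
    const char* value = std::getenv(name);
    return value != nullptr ? std::string(value) : fallback;
}

int main()
{
    std::string clientName   = envOr("JT_CLIENT_NAME", "JackTrip");
    std::string sendChannels = envOr("JT_SEND_CHANNELS", "2");
    std::string recvChannels = envOr("JT_RECV_CHANNELS", "2");

    // A real connect script would typically wire up connections here;
    // this sketch just reports what the GUI handed it.
    std::cout << "client=" << clientName << " send=" << sendChannels
              << " recv=" << recvChannels << std::endl;
    return 0;
}
```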
#include <algorithm>
#include <iostream>
+#include "../Settings.h"
#include "../jacktrip_globals.h"
#include "about.h"
#include "qjacktrip.h"
#endif
VirtualStudio::VirtualStudio(bool firstRun, QObject* parent)
- : QObject(parent), m_showFirstRun(firstRun)
+ : QObject(parent)
+ , m_showFirstRun(firstRun)
+ , m_inputMeterLevels(2, 0)
+ , m_outputMeterLevels(2, 0)
+ , m_inputComboModel(QJsonArray::fromStringList(QStringList(QLatin1String(""))))
+ , m_outputComboModel(QJsonArray::fromStringList(QStringList(QLatin1String(""))))
+ , m_inputChannelsComboModel(
+ QJsonArray::fromStringList(QStringList(QLatin1String(""))))
+ , m_outputChannelsComboModel(
+ QJsonArray::fromStringList(QStringList(QLatin1String(""))))
+ , m_inputMixModeComboModel(QJsonArray::fromStringList(QStringList(QLatin1String(""))))
{
QSettings settings;
m_updateChannel =
settings.endGroup();
m_previousUiScale = m_uiScale;
+ // use a singleton QNetworkAccessManager
+ m_networkAccessManager.reset(new QNetworkAccessManager);
+
+ // instantiate API
+ m_api.reset(new VsApi(m_networkAccessManager.data()));
+ m_api->setApiHost(PROD_API_HOST);
+ if (m_testMode) {
+ m_api->setApiHost(TEST_API_HOST);
+ }
+
+ // instantiate auth
+ m_auth.reset(new VsAuth(&m_view, m_networkAccessManager.data(), m_api.data()));
+ connect(m_auth.data(), &VsAuth::authSucceeded, this,
+ &VirtualStudio::slotAuthSucceeded);
+ connect(m_auth.data(), &VsAuth::authFailed, this, &VirtualStudio::slotAuthFailed);
+ connect(m_auth.data(), &VsAuth::refreshTokenFailed, this, [=]() {
+ m_auth->authenticate(QStringLiteral("")); // retry without using refresh token
+ });
+ connect(m_auth.data(), &VsAuth::fetchUserInfoFailed, this, [=]() {
+ m_auth->authenticate(QStringLiteral("")); // retry without using refresh token
+ });
+ connect(m_auth.data(), &VsAuth::deviceCodeExpired, this, [=]() {
+ m_auth->authenticate(QStringLiteral("")); // retry without using refresh token
+ });
+
// Load our font for our qml interface
QFontDatabase::addApplicationFont(QStringLiteral(":/vs/Poppins-Regular.ttf"));
QFontDatabase::addApplicationFont(QStringLiteral(":/vs/Poppins-Bold.ttf"));
m_fontScale = 4.0 / 3.0;
// Initialize timers needed for clip indicators
- m_inputClipTimer.setTimerType(Qt::PreciseTimer);
+ m_inputClipTimer.setTimerType(Qt::CoarseTimer);
m_inputClipTimer.setSingleShot(true);
m_inputClipTimer.setInterval(3000);
- m_outputClipTimer.setTimerType(Qt::PreciseTimer);
+ m_outputClipTimer.setTimerType(Qt::CoarseTimer);
m_outputClipTimer.setSingleShot(true);
m_outputClipTimer.setInterval(3000);
-
m_inputClipTimer.callOnTimeout([&]() {
- m_view.engine()->rootContext()->setContextProperty(QStringLiteral("inputClipped"),
- QVariant::fromValue(false));
+ m_inputClipped = false;
+ emit updatedInputClipped(m_inputClipped);
});
-
m_outputClipTimer.callOnTimeout([&]() {
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputClipped"), QVariant::fromValue(false));
+ m_outputClipped = false;
+ emit updatedOutputClipped(m_outputClipped);
+ });
+
+ m_inputMeterLevels[0] = m_inputMeterLevels[1] = 0;
+ m_outputMeterLevels[0] = m_outputMeterLevels[1] = 0;
+
+ // Initialize timer needed for network outage indicator
+ m_networkOutageTimer.setTimerType(Qt::CoarseTimer);
+ m_networkOutageTimer.setSingleShot(true);
+ m_networkOutageTimer.setInterval(5000);
+ m_networkOutageTimer.callOnTimeout([&]() {
+ m_networkOutage = false;
+ emit updatedNetworkOutage(m_networkOutage);
});
settings.beginGroup(QStringLiteral("Audio"));
m_outMultiplier = settings.value(QStringLiteral("OutMultiplier"), 1).toFloat();
m_inMuted = settings.value(QStringLiteral("InMuted"), false).toBool();
m_outMuted = settings.value(QStringLiteral("OutMuted"), false).toBool();
+ m_feedbackDetectionEnabled =
+ settings.value(QStringLiteral("FeedbackDetectionEnabled"), true).toBool();
+
#ifdef RT_AUDIO
m_useRtAudio = settings.value(QStringLiteral("Backend"), 1).toInt() == 1;
m_inputDevice = settings.value(QStringLiteral("InputDevice"), "").toString();
m_bufferSize = settings.value(QStringLiteral("BufferSize"), 128).toInt();
m_previousBuffer = m_bufferSize;
- refreshDevices();
m_previousInput = m_inputDevice;
m_previousOutput = m_outputDevice;
}
#else
m_selectableBackend = false;
- m_vsAudioInterface.reset(new VsAudioInterface());
-
- // Set our combo box models to an empty list to avoid a reference error
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputComboModel"),
- QVariant::fromValue(QStringList(QLatin1String(""))));
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputComboModel"),
- QVariant::fromValue(QStringList(QLatin1String(""))));
-
- QJsonObject inputMixModeComboElement = QJsonObject();
- inputMixModeComboElement.insert(QString::fromStdString("label"),
- QString::fromStdString("Mono"));
- inputMixModeComboElement.insert(QString::fromStdString("value"),
- static_cast<int>(AudioInterface::MONO));
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputMixModeComboModel"),
- QVariant::fromValue(
- QVariant(QVariantList() << QVariant(QJsonValue(inputMixModeComboElement)))));
-
- QJsonObject inputChannelsComboElement = QJsonObject();
- inputChannelsComboElement.insert(QString::fromStdString("label"),
- QString::fromStdString("1"));
- inputChannelsComboElement.insert(QString::fromStdString("baseChannel"),
- QVariant(0).toInt());
- inputChannelsComboElement.insert(QString::fromStdString("numChannels"),
- QVariant(1).toInt());
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputChannelsComboModel"),
- QVariant::fromValue(
- QVariant(QVariantList() << QVariant(QJsonValue(inputChannelsComboElement)))));
-
- QJsonObject outputChannelsComboElement = QJsonObject();
- outputChannelsComboElement.insert(QString::fromStdString("label"),
- QString::fromStdString("1 & 2"));
- outputChannelsComboElement.insert(QString::fromStdString("baseChannel"),
- QVariant(0).toInt());
- outputChannelsComboElement.insert(QString::fromStdString("numChannels"),
- QVariant(2).toInt());
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputChannelsComboModel"),
- QVariant::fromValue(QVariant(
- QVariantList() << QVariant(QJsonValue(outputChannelsComboElement)))));
+ QJsonObject element;
+
+ element.insert(QString::fromStdString("label"), QString::fromStdString("Mono"));
+ element.insert(QString::fromStdString("value"),
+ static_cast<int>(AudioInterface::MONO));
+ m_inputMixModeComboModel = QJsonArray();
+ m_inputMixModeComboModel.push_back(element);
+
+ element = QJsonObject();
+ element.insert(QString::fromStdString("label"), QString::fromStdString("1"));
+ element.insert(QString::fromStdString("baseChannel"), QVariant(0).toInt());
+ element.insert(QString::fromStdString("numChannels"), QVariant(1).toInt());
+ m_inputChannelsComboModel = QJsonArray();
+ m_inputChannelsComboModel.push_back(element);
+
+ element = QJsonObject();
+ element.insert(QString::fromStdString("label"), QString::fromStdString("1 & 2"));
+ element.insert(QString::fromStdString("baseChannel"), QVariant(0).toInt());
+ element.insert(QString::fromStdString("numChannels"), QVariant(2).toInt());
+ m_outputChannelsComboModel = QJsonArray();
+ m_outputChannelsComboModel.push_back(element);
#endif
m_bufferStrategy = settings.value(QStringLiteral("BufferStrategy"), 2).toInt();
m_view.engine()->rootContext()->setContextProperty(
QStringLiteral("updateChannelComboModel"),
QVariant::fromValue(m_updateChannelOptions));
+ m_view.engine()->rootContext()->setContextProperty(
+ QStringLiteral("feedbackDetectionComboModel"),
+ QVariant::fromValue(m_feedbackDetectionOptions));
m_view.engine()->rootContext()->setContextProperty(QStringLiteral("virtualstudio"),
this);
m_view.engine()->rootContext()->setContextProperty(QStringLiteral("serverModel"),
QStringLiteral("permissions"), QVariant::fromValue(m_permissions.data()));
#endif
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputMeterModel"), QVariant::fromValue(QVector<float>()));
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputMeterModel"), QVariant::fromValue(QVector<float>()));
- m_view.engine()->rootContext()->setContextProperty(QStringLiteral("inputClipped"),
- QVariant::fromValue(false));
- m_view.engine()->rootContext()->setContextProperty(QStringLiteral("outputClipped"),
- QVariant::fromValue(false));
+ resetMeters();
m_view.engine()->rootContext()->setContextProperty(
QStringLiteral("backendComboModel"),
}
m_checkSsl = false;
}
-
- if (!m_showFirstRun) {
- toVirtualStudio();
+ if (m_windowState == "login") {
+ login();
}
m_view.show();
}
return m_audioReady;
}
+bool VirtualStudio::inputClipped()
+{
+ return m_inputClipped;
+}
+
+bool VirtualStudio::outputClipped()
+{
+ return m_outputClipped;
+}
+
+bool VirtualStudio::networkOutage()
+{
+ return m_networkOutage;
+}
+
bool VirtualStudio::backendAvailable()
{
if constexpr ((isBackendAvailable<AudioInterfaceMode::JACK>()
settings.endGroup();
}
+bool VirtualStudio::feedbackDetectionEnabled()
+{
+ return m_feedbackDetectionEnabled;
+}
+
+void VirtualStudio::setFeedbackDetectionEnabled(bool enabled)
+{
+ m_feedbackDetectionEnabled = enabled;
+ QSettings settings;
+ settings.beginGroup(QStringLiteral("Audio"));
+ settings.setValue(QStringLiteral("FeedbackDetectionEnabled"),
+ m_feedbackDetectionEnabled);
+ settings.endGroup();
+ emit feedbackDetectionEnabledChanged();
+}
+
void VirtualStudio::setAudioActivated(bool activated)
{
m_audioActivated = activated;
return m_networkStats;
}
+const QVector<float>& VirtualStudio::inputMeterLevels() const
+{
+ return m_inputMeterLevels;
+}
+
+const QVector<float>& VirtualStudio::outputMeterLevels() const
+{
+ return m_outputMeterLevels;
+}
+
+const QJsonArray& VirtualStudio::inputComboModel() const
+{
+ return m_inputComboModel;
+}
+
+const QJsonArray& VirtualStudio::outputComboModel() const
+{
+ return m_outputComboModel;
+}
+
+const QJsonArray& VirtualStudio::inputChannelsComboModel() const
+{
+ return m_inputChannelsComboModel;
+}
+
+const QJsonArray& VirtualStudio::outputChannelsComboModel() const
+{
+ return m_outputChannelsComboModel;
+}
+
+const QJsonArray& VirtualStudio::inputMixModeComboModel() const
+{
+ return m_inputMixModeComboModel;
+}
+
QString VirtualStudio::updateChannel()
{
return m_updateChannel;
qDebug() << "Not allowed";
return;
}
+
m_testMode = test;
+
+ // clear existing auth state
+ m_auth->logout();
+
+ // Clear existing registrations - any existing instance data will be overwritten
+ // when m_auth->authenticate finishes and slotAuthSucceeded() is called again
QSettings settings;
settings.beginGroup(QStringLiteral("VirtualStudio"));
settings.setValue(QStringLiteral("TestMode"), m_testMode);
+ settings.remove(QStringLiteral("RefreshToken"));
+ settings.remove(QStringLiteral("UserId"));
+ settings.remove(QStringLiteral("ShowInactive"));
+ settings.remove(QStringLiteral("ShowSelfHosted"));
+ settings.remove(QStringLiteral("ShowDeviceSetup"));
+ settings.remove(QStringLiteral("ShowWarnings"));
settings.endGroup();
+
+ // deregister app
+ if (m_device != nullptr) {
+ m_device->removeApp();
+ }
+
+ // stop timers, clear data, etc.
+ m_refreshTimer.stop();
+ m_heartbeatTimer.stop();
+ m_userMetadata = QJsonObject();
+ m_userId.clear();
+
+ // re-run authentication. This should not require another browser flow since
+ // we're starting with the existing refresh token
+ m_auth->authenticate(m_refreshToken);
emit testModeChanged();
}
void VirtualStudio::joinStudio()
{
- if (!m_authenticated || m_studioToJoin.isEmpty() || m_servers.isEmpty()) {
+ bool authenticated = m_auth->isAuthenticated();
+ if (!authenticated || m_studioToJoin.isEmpty() || m_servers.isEmpty()) {
// No servers yet. Making sure we have them.
// getServerList emits refreshFinished which
// will come back to this function.
- if (m_authenticated && !m_studioToJoin.isEmpty() && m_servers.isEmpty()) {
+ if (authenticated && !m_studioToJoin.isEmpty() && m_servers.isEmpty()) {
getServerList(true, true);
}
return;
void VirtualStudio::toVirtualStudio()
{
- if (!m_refreshToken.isEmpty()) {
- // Attempt to refresh our virtual studio auth token
- setupAuthenticator();
-
- // Something about this is required for refreshing auth tokens:
- // https://bugreports.qt.io/browse/QTBUG-84866
- m_authenticator->setModifyParametersFunction([](QAbstractOAuth2::Stage stage,
- QVariantMap* parameters) {
- if (stage == QAbstractOAuth2::Stage::RequestingAccessToken) {
- QByteArray code = parameters->value(QStringLiteral("code")).toByteArray();
- (*parameters)[QStringLiteral("code")] = QUrl::fromPercentEncoding(code);
- } else if (stage == QAbstractOAuth2::Stage::RequestingAuthorization) {
- parameters->insert(QStringLiteral("audience"), AUTH_AUDIENCE);
- }
- if (!parameters->contains("client_id")) {
- parameters->insert("client_id", AUTH_CLIENT_ID);
- }
- });
-
- m_authenticator->setRefreshToken(m_refreshToken);
- m_authenticator->refreshAccessToken();
+ if (m_windowState == "login") {
+ login();
}
}
void VirtualStudio::login()
{
- setupAuthenticator();
- m_authenticator->grant();
+ if (m_refreshToken.isEmpty()) {
+ m_auth->authenticate(QStringLiteral(""));
+ } else {
+ m_auth->authenticate(m_refreshToken);
+ }
}
void VirtualStudio::logout()
logoutURL.setQuery(query);
launchBrowser(logoutURL);
- m_authenticator->setToken(QLatin1String(""));
- m_authenticator->setRefreshToken(QLatin1String(""));
+ m_auth->logout();
QSettings settings;
settings.beginGroup(QStringLiteral("VirtualStudio"));
m_userMetadata = QJsonObject();
m_userId.clear();
emit hasRefreshTokenChanged();
+
+ // reset window state
+ setWindowState(QStringLiteral("login"));
}
void VirtualStudio::refreshStudios(int index, bool signalRefresh)
void VirtualStudio::refreshDevices()
{
#ifdef RT_AUDIO
- if (!m_vsAudioInterface.isNull()) {
- m_vsAudioInterface->closeAudio();
- setAudioReady(false);
- }
-
- refreshRtAudioDevices();
- validateDevicesState();
- if (!m_vsAudioInterface.isNull()) {
- restartAudio();
- }
+ if (m_vsAudioInterface.isNull())
+ return;
+ m_vsAudioInterface->closeAudio();
+ setAudioReady(false);
+ restartAudio();
#endif
}
void VirtualStudio::refreshRtAudioDevices()
{
- if (!m_useRtAudio) {
+ if (!m_useRtAudio || m_vsAudioInterface.isNull())
return;
- }
#ifdef RT_AUDIO
- RtAudioInterface::getDeviceList(&m_inputDeviceList, &m_inputDeviceCategories,
- &m_inputDeviceChannels, true);
- RtAudioInterface::getDeviceList(&m_outputDeviceList, &m_outputDeviceCategories,
- &m_outputDeviceChannels, false);
-
- QVariant inputComboModel = formatDeviceList(
- m_inputDeviceList, m_inputDeviceCategories, m_inputDeviceChannels);
- QVariant outputComboModel = formatDeviceList(
- m_outputDeviceList, m_outputDeviceCategories, m_outputDeviceChannels);
- m_view.engine()->rootContext()->setContextProperty(QStringLiteral("inputComboModel"),
- inputComboModel);
- m_view.engine()->rootContext()->setContextProperty(QStringLiteral("outputComboModel"),
- outputComboModel);
+ m_vsAudioInterface->refreshRtAudioDevices();
+ m_vsAudioInterface->getDeviceList(&m_inputDeviceList, &m_inputDeviceCategories,
+ &m_inputDeviceChannels, true);
+ m_vsAudioInterface->getDeviceList(&m_outputDeviceList, &m_outputDeviceCategories,
+ &m_outputDeviceChannels, false);
+ m_inputComboModel = formatDeviceList(m_inputDeviceList, m_inputDeviceCategories,
+ m_inputDeviceChannels);
+ m_outputComboModel = formatDeviceList(m_outputDeviceList, m_outputDeviceCategories,
+ m_outputDeviceChannels);
+ emit inputComboModelChanged();
+ emit outputComboModelChanged();
#endif
}
QString::fromStdString("Mono"));
inputMixModeComboElement.insert(QString::fromStdString("value"),
static_cast<int>(AudioInterface::MONO));
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputMixModeComboModel"),
- QVariant::fromValue(QVariant(
- QVariantList() << QVariant(QJsonValue(inputMixModeComboElement)))));
+ m_inputMixModeComboModel = QJsonArray();
+ m_inputMixModeComboModel.push_back(inputMixModeComboElement);
+ emit inputMixModeComboModelChanged();
// Set the input channels combo to only have channel 1 as an option
- QJsonObject inputChannelsComboElement = QJsonObject();
+ QJsonObject inputChannelsComboElement;
inputChannelsComboElement.insert(QString::fromStdString("label"),
QString::fromStdString("1"));
inputChannelsComboElement.insert(QString::fromStdString("baseChannel"),
QVariant(0).toInt());
inputChannelsComboElement.insert(QString::fromStdString("numChannels"),
QVariant(1).toInt());
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputChannelsComboModel"),
- QVariant::fromValue(QVariant(
- QVariantList() << QVariant(QJsonValue(inputChannelsComboElement)))));
+ m_inputChannelsComboModel = QJsonArray();
+ m_inputChannelsComboModel.push_back(inputChannelsComboElement);
+ emit inputChannelsComboModelChanged();
// Set the only allowed options for these variables automatically
m_baseInputChannel = 0;
} else {
// set the input channels selector to have the options based on the currently
// selected device
- QVariantList items = QVariantList();
+ m_inputChannelsComboModel = QJsonArray();
for (int i = 0; i < numDevicesChannelsAvailable; i++) {
QJsonObject element = QJsonObject();
element.insert(QString::fromStdString("label"), QVariant(i + 1).toString());
element.insert(QString::fromStdString("baseChannel"), QVariant(i).toInt());
element.insert(QString::fromStdString("numChannels"), QVariant(1).toInt());
- items.push_back(QVariant(QJsonValue(element)));
+ m_inputChannelsComboModel.push_back(element);
}
for (int i = 0; i < numDevicesChannelsAvailable; i++) {
if (i % 2 == 0) {
QVariant(i).toInt());
element.insert(QString::fromStdString("numChannels"),
QVariant(2).toInt());
- items.push_back(QVariant(QJsonValue(element)));
+ m_inputChannelsComboModel.push_back(element);
}
}
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputChannelsComboModel"), QVariant(items));
+ emit inputChannelsComboModelChanged();
// if the current m_baseInputChannel or m_numInputChannels is invalid based on
// this device's option, use the first two channels by default
QString::fromStdString("Mix to Mono"));
inputMixModeComboElement2.insert(QString::fromStdString("value"),
static_cast<int>(AudioInterface::MIXTOMONO));
-
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputMixModeComboModel"),
- QVariant::fromValue(QVariant(
- QVariantList() << QVariant(QJsonValue(inputMixModeComboElement1))
- << QVariant(QJsonValue(inputMixModeComboElement2)))));
+ m_inputMixModeComboModel = QJsonArray();
+ m_inputMixModeComboModel.push_back(inputMixModeComboElement1);
+ m_inputMixModeComboModel.push_back(inputMixModeComboElement2);
+ emit inputMixModeComboModelChanged();
// if m_inputMixMode is an invalid value, set it to "stereo" by default
// given that we are using 2 channels
QString::fromStdString("Mono"));
inputMixModeComboElement.insert(QString::fromStdString("value"),
static_cast<int>(AudioInterface::MONO));
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputMixModeComboModel"),
- QVariant::fromValue(QVariant(
- QVariantList() << QVariant(QJsonValue(inputMixModeComboElement)))));
+ m_inputMixModeComboModel = QJsonArray();
+ m_inputMixModeComboModel.push_back(inputMixModeComboElement);
+ emit inputMixModeComboModelChanged();
// if m_inputMixMode is an invalid value, set it to AudioInterface::MONO
if (m_inputMixMode != static_cast<int>(AudioInterface::MONO)) {
QVariant(0).toInt());
outputChannelsComboElement.insert(QString::fromStdString("numChannels"),
QVariant(1).toInt());
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputChannelsComboModel"),
- QVariant::fromValue(QVariant(
- QVariantList() << QVariant(QJsonValue(outputChannelsComboElement)))));
+ m_outputChannelsComboModel = QJsonArray();
+ m_outputChannelsComboModel.push_back(outputChannelsComboElement);
+ emit outputChannelsComboModelChanged();
// Set the only allowed options for these variables automatically
m_baseOutputChannel = 0;
} else {
// set the output channels selector to have the options based on the currently
// selected device
- QVariantList items = QVariantList();
+ m_outputChannelsComboModel = QJsonArray();
for (int i = 0; i < numDevicesChannelsAvailable; i++) {
if (i % 2 == 0) {
QJsonObject element = QJsonObject();
QVariant(i).toInt());
element.insert(QString::fromStdString("numChannels"),
QVariant(2).toInt());
- items.push_back(QVariant(QJsonValue(element)));
+ m_outputChannelsComboModel.push_back(element);
}
}
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputChannelsComboModel"), QVariant(items));
+ emit outputChannelsComboModelChanged();
// if the current m_baseOutputChannel or m_numOutputChannels is invalid based on
// this device's option, use the first two channels by default
settings.setValue(QStringLiteral("InputMixMode"), m_inputMixMode);
settings.setValue(QStringLiteral("BaseOutputChannel"), m_baseOutputChannel);
settings.setValue(QStringLiteral("NumOutputChannels"), m_numOutputChannels);
+ settings.setValue(QStringLiteral("FeedbackDetectionEnabled"),
+ m_feedbackDetectionEnabled);
settings.endGroup();
m_previousUseRtAudio = m_useRtAudio;
m_studioSocket = new VsWebSocket(
QUrl(QStringLiteral("wss://%1/api/servers/%2?auth_code=%3")
- .arg(m_apiHost, studioInfo->id(), m_authenticator->token())),
- m_authenticator->token(), QString(), QString());
+ .arg(m_api->getApiHost(), studioInfo->id(), m_auth->accessToken())),
+ m_auth->accessToken(), QString(), QString());
connect(m_studioSocket, &VsWebSocket::textMessageReceived, this,
[&](QString message) {
handleWebsocketMessage(message);
numOutputChannels = m_numOutputChannels;
}
#endif
- int bufferStrategy = m_bufferStrategy;
- if (bufferStrategy == 2) {
- bufferStrategy = 3;
- }
- JackTrip* jackTrip =
- m_device->initJackTrip(m_useRtAudio, input, output, baseInputChannel,
- numInputChannels, baseOutputChannel, numOutputChannels,
- inputMixMode, buffer_size, bufferStrategy, studioInfo);
+ JackTrip* jackTrip = m_device->initJackTrip(
+ m_useRtAudio, input, output, baseInputChannel, numInputChannels,
+ baseOutputChannel, numOutputChannels, inputMixMode, buffer_size,
+ m_bufferStrategy, studioInfo);
if (jackTrip == 0) {
processError("Could not bind port");
return;
QObject::connect(jackTrip, &JackTrip::signalReceivedConnectionFromPeer, this,
&VirtualStudio::receivedConnectionFromPeer,
Qt::QueuedConnection);
+ QObject::connect(jackTrip, &JackTrip::signalUdpWaitingTooLong, this,
+ &VirtualStudio::udpWaitingTooLong, Qt::QueuedConnection);
setAudioActivated(false);
connect(this, &VirtualStudio::updatedMonitorVolume, m_monitor,
&Monitor::volumeUpdated);
+ // Setup output analyzer
+ if (m_feedbackDetectionEnabled) {
+ m_outputAnalyzerPlugin = new Analyzer(jackTrip->getNumOutputChannels());
+ m_outputAnalyzerPlugin->setIsMonitoringAnalyzer(true);
+ jackTrip->appendProcessPluginToMonitor(m_outputAnalyzerPlugin);
+ connect(m_outputAnalyzerPlugin, &Analyzer::signalFeedbackDetected, this,
+ &VirtualStudio::detectedFeedbackLoop);
+ }
+
// Setup output meter
// Note: Add this to monitor process to include self-volume
m_outputMeter = new Meter(jackTrip->getNumOutputChannels());
}
#endif
m_device->startJackTrip();
-
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputMeterModel"),
- QVariant::fromValue(QVector<float>(jackTrip->getNumInputChannels())));
-
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputMeterModel"),
- QVariant::fromValue(QVector<float>(jackTrip->getNumOutputChannels())));
-
+ resetMeters();
m_device->startPinger(studioInfo);
} catch (const std::exception& e) {
// Let the user know what our exception was.
QUrl url;
if (!start) {
url = QUrl(QStringLiteral("https://%1/studios/%2")
- .arg(m_apiHost,
+ .arg(m_api->getApiHost(),
static_cast<VsServerInfo*>(m_servers.at(studioIndex))->id()));
} else {
QString expiration =
{QLatin1String("expiresAt"), expiration}};
QJsonDocument request = QJsonDocument(json);
- QNetworkReply* reply = m_authenticator->put(
- QStringLiteral("https://%1/api/servers/%2")
- .arg(m_apiHost,
- static_cast<VsServerInfo*>(m_servers.at(studioIndex))->id()),
+ QNetworkReply* reply = m_api->updateServer(
+ (static_cast<VsServerInfo*>(m_servers.at(studioIndex)))->id(),
request.toJson());
connect(reply, &QNetworkReply::finished, this, [&, reply]() {
if (reply->error() != QNetworkReply::NoError) {
// We're here from a connected screen. Use our current studio.
studioIndex = m_currentStudio;
}
- QUrl url = QUrl(
- QStringLiteral("https://%1/studios/%2/live")
- .arg(m_apiHost, static_cast<VsServerInfo*>(m_servers.at(studioIndex))->id()));
+ QUrl url =
+ QUrl(QStringLiteral("https://%1/studios/%2/live")
+ .arg(m_api->getApiHost(),
+ static_cast<VsServerInfo*>(m_servers.at(studioIndex))->id()));
QDesktopServices::openUrl(url);
}
void VirtualStudio::createStudio()
{
- QUrl url = QUrl(QStringLiteral("https://%1/studios/create").arg(m_apiHost));
+ QUrl url = QUrl(QStringLiteral("https://%1/studios/create").arg(m_api->getApiHost()));
QDesktopServices::openUrl(url);
}
void VirtualStudio::editProfile()
{
- QUrl url = QUrl(QStringLiteral("https://%1/profile").arg(m_apiHost));
+ QUrl url = QUrl(QStringLiteral("https://%1/profile").arg(m_api->getApiHost()));
QDesktopServices::openUrl(url);
}
void VirtualStudio::exit()
{
+ m_startTimer.stop();
+ m_retryPeriodTimer.stop();
m_refreshTimer.stop();
m_heartbeatTimer.stop();
+ m_inputClipTimer.stop();
+ m_outputClipTimer.stop();
+ m_networkOutageTimer.stop();
if (m_onConnectedScreen) {
m_isExiting = true;
}
}
-void VirtualStudio::slotAuthSucceded()
+void VirtualStudio::slotAuthSucceeded()
{
// Determine which API host to use
m_apiHost = PROD_API_HOST;
if (m_testMode) {
m_apiHost = TEST_API_HOST;
}
+ m_api->setApiHost(m_apiHost);
- m_authenticated = true;
- m_refreshToken = m_authenticator->refreshToken();
+ m_refreshToken = m_auth->refreshToken();
emit hasRefreshTokenChanged();
QSettings settings;
settings.setValue(QStringLiteral("UiMode"), QJackTrip::VIRTUAL_STUDIO);
settings.endGroup();
m_vsModeActive = true;
- m_device = new VsDevice(m_authenticator.data(), m_testMode);
+ m_device = new VsDevice(m_auth.data(), m_api.data());
m_device->registerApp();
- if (m_showDeviceSetup) {
- if constexpr (isBackendAvailable<AudioInterfaceMode::JACK>()
- || isBackendAvailable<AudioInterfaceMode::RTAUDIO>()) {
- setAudioActivated(true);
- }
- }
-
- if (m_userId.isEmpty()) {
- getUserId();
- } else {
- getSubscriptions();
- getServerList(true, false);
+ // always activate audio at startup for now.
+ // otherwise, IF someone has the device setup disabled/unchecked,
+ // AND IF they don't manually navigate to audio settings before connecting,
+ // the "Change Device Settings" dialog will have all empty dropdown lists
+ // TODO: rework so it can be deferred properly
+ // if (m_showDeviceSetup) {
+ if constexpr (isBackendAvailable<AudioInterfaceMode::JACK>()
+ || isBackendAvailable<AudioInterfaceMode::RTAUDIO>()) {
+ setAudioActivated(true);
}
- if (m_regions.isEmpty()) {
- getRegions();
- }
- if (m_userMetadata.isEmpty() && !m_userId.isEmpty()) {
- getUserMetadata();
- }
+ getUserId();
+ getSubscriptions();
+ getServerList(true, false);
+ getRegions();
+ getUserMetadata();
// attempt to join studio if requested
if (!m_studioToJoin.isEmpty()) {
void VirtualStudio::slotAuthFailed()
{
- m_authenticated = false;
emit authFailed();
}
void VirtualStudio::updatedInputVuMeasurements(const float* valuesInDecibels,
int numChannels)
{
- QJsonArray uiValues;
bool detectedClip = false;
// Always output 2 meter readings to the UI
}
// Produce a normalized value from 0 to 1
- float meter = (dB - m_meterMin) / (m_meterMax - m_meterMin);
-
- QJsonObject object{{QStringLiteral("dB"), dB}, {QStringLiteral("level"), meter}};
- uiValues.push_back(object);
+ m_inputMeterLevels[i] = (dB - m_meterMin) / (m_meterMax - m_meterMin);
// Signal a clip if we haven't done so already
if (dB >= -0.05 && !detectedClip) {
m_inputClipTimer.start();
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputClipped"), QVariant::fromValue(true));
+ m_inputClipped = true;
+ emit updatedInputClipped(m_inputClipped);
detectedClip = true;
}
}
&& m_numInputChannels == 1)
|| (m_inputMixMode == static_cast<int>(AudioInterface::MIXTOMONO)
&& m_numInputChannels == 2)) {
- uiValues[1] = uiValues[0];
+ m_inputMeterLevels[1] = m_inputMeterLevels[0];
}
#endif
- m_view.engine()->rootContext()->setContextProperty(QStringLiteral("inputMeterModel"),
- QVariant::fromValue(uiValues));
+ emit updatedInputMeterLevels(m_inputMeterLevels);
}
void VirtualStudio::updatedOutputVuMeasurements(const float* valuesInDecibels,
int numChannels)
{
- QJsonArray uiValues;
bool detectedClip = false;
// Always output 2 meter readings to the UI
}
// Produce a normalized value from 0 to 1
- float meter = (dB - m_meterMin) / (m_meterMax - m_meterMin);
-
- QJsonObject object{{QStringLiteral("dB"), dB}, {QStringLiteral("level"), meter}};
- uiValues.push_back(object);
+ m_outputMeterLevels[i] = (dB - m_meterMin) / (m_meterMax - m_meterMin);
// Signal a clip if we haven't done so already
if (dB >= -0.05 && !detectedClip) {
m_outputClipTimer.start();
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputClipped"), QVariant::fromValue(true));
+ m_outputClipped = true;
+ emit updatedOutputClipped(m_outputClipped);
detectedClip = true;
}
}
#ifdef RT_AUDIO
if (m_numOutputChannels == 1) {
- uiValues[1] = uiValues[0];
+ m_outputMeterLevels[1] = m_outputMeterLevels[0];
}
#endif
- m_view.engine()->rootContext()->setContextProperty(QStringLiteral("outputMeterModel"),
- QVariant::fromValue(uiValues));
-}
-
-void VirtualStudio::setupAuthenticator()
-{
- if (m_authenticator.isNull()) {
- // Set up our authorization flow
- m_authenticator.reset(new QOAuth2AuthorizationCodeFlow);
- m_authenticator->setScope(
- QStringLiteral("openid profile email offline_access read:servers"));
- connect(m_authenticator.data(),
- &QOAuth2AuthorizationCodeFlow::authorizeWithBrowser, this,
- &VirtualStudio::launchBrowser);
-
- const quint16 port = 52424;
-
- m_authenticator->setAuthorizationUrl(AUTH_AUTHORIZE_URI);
- m_authenticator->setClientIdentifier(AUTH_CLIENT_ID);
- m_authenticator->setAccessTokenUrl(AUTH_TOKEN_URI);
-
- m_authenticator->setModifyParametersFunction([](QAbstractOAuth2::Stage stage,
- QVariantMap* parameters) {
- if (stage == QAbstractOAuth2::Stage::RequestingAccessToken) {
- QByteArray code = parameters->value(QStringLiteral("code")).toByteArray();
- (*parameters)[QStringLiteral("code")] = QUrl::fromPercentEncoding(code);
- } else if (stage == QAbstractOAuth2::Stage::RequestingAuthorization) {
- parameters->insert(QStringLiteral("audience"),
- QStringLiteral("https://api.jacktrip.org"));
- }
- });
+ emit updatedOutputMeterLevels(m_outputMeterLevels);
+}
- QOAuthHttpServerReplyHandler* replyHandler =
- new QOAuthHttpServerReplyHandler(port, this);
- replyHandler->setCallbackText(QStringLiteral(
- "<div id=\"container\" style=\"width:100%; max-width:1200px; height: auto; "
- "margin: 100px auto; text-align:center;\">\n"
- "<img src=\"https://files.jacktrip.org/logos/jacktrip_icon.svg\" "
- "alt=\"JackTrip\">\n"
- "<h1 style=\"font-size: 30px; font-weight: 600; padding-top:20px;\">Virtual "
- "Studio Login Successful</h1>\n"
- "<p style=\"font-size: 21px; font-weight:300;\">You may close this window "
- "and return to the JackTrip application.</p>\n"
- "<p style=\"font-size: 21px; font-weight:300;\">Alternatively, "
- " <a href=\"https://app.jacktrip.org/studios/create\">click "
- "here</a> to create your first studio.</p>\n"
- "</div>\n"));
- m_authenticator->setReplyHandler(replyHandler);
- connect(m_authenticator.data(), &QOAuth2AuthorizationCodeFlow::granted, this,
- &VirtualStudio::slotAuthSucceded);
- connect(m_authenticator.data(), &QOAuth2AuthorizationCodeFlow::requestFailed,
- this, &VirtualStudio::slotAuthFailed);
- }
+void VirtualStudio::detectedFeedbackLoop()
+{
+ setInputMuted(true);
+ setMonitorVolume(0);
+ emit feedbackDetected();
+}
+
+void VirtualStudio::udpWaitingTooLong()
+{
+ m_networkOutageTimer.start();
+ m_networkOutage = true;
+ emit updatedNetworkOutage(m_networkOutage);
}
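
The udpWaitingTooLong handler above only raises the outage flag; the code that clears it again is not part of this hunk. A minimal sketch of one plausible wiring, assuming m_networkOutageTimer is configured as a single-shot timer in the constructor (the 5-second window is an assumption, not taken from the patch):

```cpp
// Hypothetical constructor wiring (not in this patch): clear the outage flag
// once the single-shot timer elapses without another UDP stall restarting it.
m_networkOutageTimer.setSingleShot(true);
m_networkOutageTimer.setInterval(5000);  // assumed window
connect(&m_networkOutageTimer, &QTimer::timeout, this, [this]() {
    m_networkOutage = false;
    emit updatedNetworkOutage(m_networkOutage);
});
```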
void VirtualStudio::sendHeartbeat()
topServerId = static_cast<VsServerInfo*>(m_servers.at(index))->id();
}
- QNetworkReply* reply =
- m_authenticator->get(QStringLiteral("https://%1/api/servers").arg(m_apiHost));
+ QNetworkReply* reply = m_api->getServers();
connect(
reply, &QNetworkReply::finished, this,
[&, reply, topServerId, firstLoad, signalRefresh]() {
void VirtualStudio::getUserId()
{
- QNetworkReply* reply =
- m_authenticator->get(QStringLiteral("https://auth.jacktrip.org/userinfo"));
- connect(reply, &QNetworkReply::finished, this, [=]() {
- if (reply->error() != QNetworkReply::NoError) {
- std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
- emit authFailed();
- reply->deleteLater();
- return;
- }
-
- QByteArray response = reply->readAll();
- QJsonDocument userInfo = QJsonDocument::fromJson(response);
- m_userId = userInfo.object()[QStringLiteral("sub")].toString();
-
- QSettings settings;
- settings.beginGroup(QStringLiteral("VirtualStudio"));
- settings.setValue(QStringLiteral("UserId"), m_userId);
- settings.endGroup();
- getSubscriptions();
- getServerList(true, false);
-
- if (m_userMetadata.isEmpty() && !m_userId.isEmpty()) {
- getUserMetadata();
- }
+ m_userId = m_auth->userId();
+ if (m_userId.isEmpty()) {
+ emit authFailed();
+ return;
+ }
- reply->deleteLater();
- });
+ QSettings settings;
+ settings.beginGroup(QStringLiteral("VirtualStudio"));
+ settings.setValue(QStringLiteral("UserId"), m_userId);
+ settings.endGroup();
}
void VirtualStudio::getSubscriptions()
{
- QNetworkReply* reply = m_authenticator->get(
- QStringLiteral("https://%1/api/users/%2/subscriptions").arg(m_apiHost, m_userId));
+ QNetworkReply* reply = m_api->getSubscriptions(m_userId);
connect(reply, &QNetworkReply::finished, this, [&, reply]() {
if (reply->error() != QNetworkReply::NoError) {
std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
void VirtualStudio::getRegions()
{
- QNetworkReply* reply = m_authenticator->get(
- QStringLiteral("https://%1/api/users/%2/regions").arg(m_apiHost, m_userId));
+ QNetworkReply* reply = m_api->getRegions(m_userId);
connect(reply, &QNetworkReply::finished, this, [&, reply]() {
if (reply->error() != QNetworkReply::NoError) {
std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
void VirtualStudio::getUserMetadata()
{
- QNetworkReply* reply = m_authenticator->get(
- QStringLiteral("https://%1/api/users/%2").arg(m_apiHost, m_userId));
+ QNetworkReply* reply = m_api->getUser(m_userId);
connect(reply, &QNetworkReply::finished, this, [&, reply]() {
if (reply->error() != QNetworkReply::NoError) {
std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
void VirtualStudio::startAudio()
{
+ std::cout << "Starting Audio" << std::endl;
#ifdef __APPLE__
if (m_permissions->micPermission() != "granted") {
return;
QStringLiteral("audioInterface"), m_vsAudioInterface.data());
}
#ifdef RT_AUDIO
+ refreshRtAudioDevices();
validateDevicesState();
m_vsAudioInterface->setInputDevice(m_inputDevice, false);
m_vsAudioInterface->setOutputDevice(m_outputDevice, false);
m_audioReady = true;
emit audioReadyChanged();
-
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputMeterModel"),
- QVariant::fromValue(QVector<float>(m_vsAudioInterface->getNumInputChannels())));
-
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputMeterModel"),
- QVariant::fromValue(QVector<float>(m_vsAudioInterface->getNumOutputChannels())));
+ resetMeters();
m_vsAudioInterface->startProcess();
}
void VirtualStudio::restartAudio()
{
+ std::cout << "Restarting Audio" << std::endl;
#ifdef __APPLE__
if (m_permissions->micPermission() != "granted") {
return;
// Start VsAudioInterface again
if (!m_vsAudioInterface.isNull()) {
#ifdef RT_AUDIO
+ refreshRtAudioDevices();
validateDevicesState();
m_vsAudioInterface->setInputDevice(m_inputDevice, false);
m_vsAudioInterface->setOutputDevice(m_outputDevice, false);
m_audioReady = true;
emit audioReadyChanged();
-
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("inputMeterModel"),
- QVariant::fromValue(
- QVector<float>(m_vsAudioInterface->getNumInputChannels())));
-
- m_view.engine()->rootContext()->setContextProperty(
- QStringLiteral("outputMeterModel"),
- QVariant::fromValue(
- QVector<float>(m_vsAudioInterface->getNumOutputChannels())));
+ resetMeters();
m_vsAudioInterface->startProcess();
} else {
}
}
+void VirtualStudio::resetMeters()
+{
+ m_inputMeterLevels[0] = m_inputMeterLevels[1] = 0;
+ m_outputMeterLevels[0] = m_outputMeterLevels[1] = 0;
+ m_inputClipped = m_outputClipped = false;
+ emit updatedInputMeterLevels(m_inputMeterLevels);
+ emit updatedOutputMeterLevels(m_outputMeterLevels);
+ emit updatedInputClipped(m_inputClipped);
+ emit updatedOutputClipped(m_outputClipped);
+}
+
void VirtualStudio::stopAudio()
{
// Stop VsAudioInterface
QJsonObject json = {{QLatin1String("enabled"), false}};
QJsonDocument request = QJsonDocument(json);
studioInfo->setHost(QLatin1String(""));
- QNetworkReply* reply = m_authenticator->put(
- QStringLiteral("https://%1/api/servers/%2").arg(m_apiHost, studioInfo->id()),
- request.toJson());
+ QNetworkReply* reply = m_api->updateServer(studioInfo->id(), request.toJson());
connect(reply, &QNetworkReply::finished, this, [=]() {
if (m_isExiting && !m_jackTripRunning) {
emit signalExit();
}
#ifdef RT_AUDIO
-QVariant VirtualStudio::formatDeviceList(const QStringList& devices,
- const QStringList& categories,
- const QList<int>& channels)
+QJsonArray VirtualStudio::formatDeviceList(const QStringList& devices,
+ const QStringList& categories,
+ const QList<int>& channels)
{
QStringList uniqueCategories = QStringList(categories);
uniqueCategories.removeDuplicates();
containsCategories = false;
}
- QVariantList items = QVariantList();
+ QJsonArray items;
for (int i = 0; i < uniqueCategories.size(); i++) {
QString category = uniqueCategories.at(i);
header.insert(QString::fromStdString("type"),
QString::fromStdString("header"));
header.insert(QString::fromStdString("category"), category);
- items.push_back(QVariant(QJsonValue(header)));
+ items.push_back(header);
}
for (int j = 0; j < devices.size(); j++) {
QString::fromStdString("element"));
element.insert(QString::fromStdString("channels"), channels.at(j));
element.insert(QString::fromStdString("category"), category);
- items.push_back(QVariant(QJsonValue(element)));
+ items.push_back(element);
}
}
}
- return QVariant(items);
+ return items;
}
#endif
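
Taken together, the virtualstudio.cpp hunks above replace the old pattern of pushing meter, clip, and combo-box data into QML via rootContext()->setContextProperty() with plain member state plus NOTIFY signals, so QML bindings re-evaluate automatically. A reduced sketch of that property pattern (class and member names simplified for illustration; the real class exposes inputMeterLevels/outputMeterLevels the same way):

```cpp
#include <QObject>
#include <QVector>

// Minimal illustration of the Q_PROPERTY/NOTIFY pattern adopted above.
class MeterModel : public QObject
{
    Q_OBJECT
    Q_PROPERTY(QVector<float> levels READ levels NOTIFY levelsChanged)

   public:
    const QVector<float>& levels() const { return m_levels; }

    void update(const QVector<float>& newLevels)
    {
        m_levels = newLevels;
        emit levelsChanged(m_levels);  // bound QML items update on this signal
    }

   signals:
    void levelsChanged(const QVector<float>& levels);

   private:
    QVector<float> m_levels{0.0f, 0.0f};
};
```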
#include <QEventLoop>
#include <QList>
#include <QMutex>
+#include <QNetworkAccessManager>
#include <QScopedPointer>
#include <QSharedPointer>
#include <QTimer>
#include <QVector>
#include <QtNetworkAuth>
+#include "../Analyzer.h"
#include "../JackTrip.h"
#include "../Meter.h"
#include "../Monitor.h"
#include "../Volume.h"
+#include "vsApi.h"
#include "vsAudioInterface.h"
+#include "vsAuth.h"
#include "vsConstants.h"
#include "vsDevice.h"
#include "vsQuickView.h"
int bufferSize READ bufferSize WRITE setBufferSize NOTIFY bufferSizeChanged)
Q_PROPERTY(int bufferStrategy READ bufferStrategy WRITE setBufferStrategy NOTIFY
bufferStrategyChanged)
+ Q_PROPERTY(bool feedbackDetectionEnabled READ feedbackDetectionEnabled WRITE
+ setFeedbackDetectionEnabled NOTIFY feedbackDetectionEnabledChanged)
Q_PROPERTY(int currentStudio READ currentStudio NOTIFY currentStudioChanged)
Q_PROPERTY(QUrl studioToJoin READ studioToJoin WRITE setStudioToJoin NOTIFY
studioToJoinChanged)
NOTIFY showCreateStudioChanged)
Q_PROPERTY(QString connectionState READ connectionState NOTIFY connectionStateChanged)
Q_PROPERTY(QJsonObject networkStats READ networkStats NOTIFY networkStatsChanged)
+ Q_PROPERTY(bool networkOutage READ networkOutage NOTIFY updatedNetworkOutage)
Q_PROPERTY(QString updateChannel READ updateChannel WRITE setUpdateChannel NOTIFY
updateChannelChanged)
updatedMonitorVolume)
Q_PROPERTY(
bool inputMuted READ inputMuted WRITE setInputMuted NOTIFY updatedInputMuted)
+ Q_PROPERTY(QVector<float> outputMeterLevels READ outputMeterLevels NOTIFY
+ updatedOutputMeterLevels)
+ Q_PROPERTY(QVector<float> inputMeterLevels READ inputMeterLevels NOTIFY
+ updatedInputMeterLevels)
+ Q_PROPERTY(
+ QJsonArray inputComboModel READ inputComboModel NOTIFY inputComboModelChanged)
+ Q_PROPERTY(
+ QJsonArray outputComboModel READ outputComboModel NOTIFY outputComboModelChanged)
+ Q_PROPERTY(QJsonArray inputChannelsComboModel READ inputChannelsComboModel NOTIFY
+ inputChannelsComboModelChanged)
+ Q_PROPERTY(QJsonArray outputChannelsComboModel READ outputChannelsComboModel NOTIFY
+ outputChannelsComboModelChanged)
+ Q_PROPERTY(QJsonArray inputMixModeComboModel READ inputMixModeComboModel NOTIFY
+ inputMixModeComboModelChanged)
+ Q_PROPERTY(bool inputClipped READ inputClipped NOTIFY updatedInputClipped)
+ Q_PROPERTY(bool outputClipped READ outputClipped NOTIFY updatedOutputClipped)
Q_PROPERTY(bool audioActivated READ audioActivated WRITE setAudioActivated NOTIFY
audioActivatedChanged)
Q_PROPERTY(
void setBufferSize(int index);
int bufferStrategy();
void setBufferStrategy(int index);
+ bool feedbackDetectionEnabled();
+ void setFeedbackDetectionEnabled(bool enabled);
int currentStudio();
QJsonObject regions();
QJsonObject userMetadata();
QString connectionState();
QJsonObject networkStats();
- QVector<float> inputMeterLevels();
- QVector<float> outputMeterLevels();
+ const QVector<float>& inputMeterLevels() const;
+ const QVector<float>& outputMeterLevels() const;
+ const QJsonArray& inputComboModel() const;
+ const QJsonArray& outputComboModel() const;
+ const QJsonArray& inputChannelsComboModel() const;
+ const QJsonArray& outputChannelsComboModel() const;
+ const QJsonArray& inputMixModeComboModel() const;
QString updateChannel();
void setUpdateChannel(const QString& channel);
bool showInactive();
Q_INVOKABLE void restartAudio();
bool audioActivated();
bool audioReady();
+ bool inputClipped();
+ bool outputClipped();
+ bool networkOutage();
bool backendAvailable();
QString windowState();
QString apiHost();
void openLink(const QString& url);
void updatedInputVuMeasurements(const float* valuesInDecibels, int numChannels);
void updatedOutputVuMeasurements(const float* valuesInDecibels, int numChannels);
+ void udpWaitingTooLong();
void setInputVolume(float multiplier);
void setOutputVolume(float multiplier);
void setMonitorVolume(float multiplier);
void setMonitorMuted(bool muted);
void setAudioActivated(bool activated);
void setAudioReady(bool ready);
+ void detectedFeedbackLoop();
void setWindowState(QString state);
void exit();
void triggerPlayOutputAudio();
void bufferSizeChanged();
void bufferStrategyChanged();
+ void feedbackDetectionEnabledChanged();
void currentStudioChanged();
void regionsChanged();
void userMetadataChanged();
void newScale();
void darkModeChanged();
void testModeChanged();
+ void feedbackDetected();
void signalExit();
void periodicRefresh();
void failedMessageChanged();
void updatedInputMuted(bool muted);
void updatedOutputMuted(bool muted);
void updatedMonitorMuted(bool muted);
+ void updatedInputMeterLevels(const QVector<float>& levels);
+ void updatedOutputMeterLevels(const QVector<float>& levels);
+ void inputComboModelChanged();
+ void outputComboModelChanged();
+ void inputChannelsComboModelChanged();
+ void outputChannelsComboModelChanged();
+ void inputMixModeComboModelChanged();
+ void updatedInputClipped(bool clip);
+ void updatedOutputClipped(bool clip);
+ void updatedNetworkOutage(bool outage);
void audioActivatedChanged();
void audioReadyChanged();
void windowStateUpdated();
void apiHostChanged();
private slots:
- void slotAuthSucceded();
+ void slotAuthSucceeded();
void slotAuthFailed();
void processFinished();
void processError(const QString& errorMessage);
void updatedDevicesWarningHelpUrl(const QString& url);
private:
- void setupAuthenticator();
-
void sendHeartbeat();
void getServerList(bool firstLoad = false, bool signalRefresh = false,
int index = -1);
void getUserMetadata();
void stopStudio();
void toggleAudio();
+ void resetMeters();
void stopAudio();
bool readyToJoin();
#ifdef RT_AUDIO
- QVariant formatDeviceList(const QStringList& devices, const QStringList& categories,
- const QList<int>& channels);
+ QJsonArray formatDeviceList(const QStringList& devices, const QStringList& categories,
+ const QList<int>& channels);
#endif
bool m_showFirstRun = false;
QString m_userId;
VsQuickView m_view;
QSharedPointer<QJackTrip> m_standardWindow;
- QScopedPointer<QOAuth2AuthorizationCodeFlow> m_authenticator;
+ QScopedPointer<VsAuth> m_auth;
+ QScopedPointer<VsApi> m_api;
+ QScopedPointer<QNetworkAccessManager> m_networkAccessManager;
QList<QObject*> m_servers;
QStringList m_subscribedServers;
float m_fontScale = 1;
float m_uiScale;
float m_previousUiScale;
- int m_bufferStrategy = 0;
- QString m_apiHost = PROD_API_HOST;
- bool m_darkMode = false;
- bool m_testMode = false;
- QString m_failedMessage = "";
+ int m_bufferStrategy = 0;
+ bool m_feedbackDetectionEnabled = true;
+ QString m_apiHost = PROD_API_HOST;
+ bool m_darkMode = false;
+ bool m_testMode = false;
+ QString m_failedMessage = "";
QUrl m_studioToJoin;
bool m_authenticated = false;
bool m_audioActivated = false;
bool m_audioReady = false;
-
+ bool m_inputClipped = false;
+ bool m_outputClipped = false;
+ bool m_networkOutage = false;
+
+ Analyzer* m_inputAnalyzerPlugin;
+ Analyzer* m_outputAnalyzerPlugin;
+ QVector<float> m_inputMeterLevels;
+ QVector<float> m_outputMeterLevels;
+ QJsonArray m_inputComboModel;
+ QJsonArray m_outputComboModel;
+ QJsonArray m_inputChannelsComboModel;
+ QJsonArray m_outputChannelsComboModel;
+ QJsonArray m_inputMixModeComboModel;
Meter* m_inputMeter;
Meter* m_outputMeter;
Meter* m_inputTestMeter;
Monitor* m_monitor;
QTimer m_inputClipTimer;
QTimer m_outputClipTimer;
+ QTimer m_networkOutageTimer;
QString m_devicesWarningMsg = QStringLiteral("");
QString m_devicesErrorMsg = QStringLiteral("");
#endif
QStringList m_bufferOptions = {"16", "32", "64", "128", "256", "512", "1024"};
QStringList m_bufferStrategyOptions = {"Minimal Latency", "Stable Latency",
- "Loss Concealment"};
+ "Loss Concealment (3)",
+ "Loss Concealment (4)"};
QStringList m_updateChannelOptions = {"Stable", "Edge"};
+ QStringList m_feedbackDetectionOptions = {"Enabled", "Disabled"};
#ifdef __APPLE__
NoNap m_noNap;
Rectangle {
property string backgroundColour: virtualstudio.darkMode ? "#272525" : "#FAFBFB"
property string textColour: virtualstudio.darkMode ? "#FAFBFB" : "#0F0D0D"
-
+
width: 696
height: 577
color: backgroundColour
State {
name: "start"
PropertyChanges { target: startScreen; x: 0 }
- PropertyChanges { target: loginScreen; x: window.width; failTextVisible: loginScreen.failTextVisible }
+ PropertyChanges { target: loginScreen; x: window.width; }
PropertyChanges { target: setupScreen; x: window.width }
PropertyChanges { target: browseScreen; x: window.width }
PropertyChanges { target: settingsScreen; x: window.width }
State {
name: "login"
PropertyChanges { target: startScreen; x: -startScreen.width }
- PropertyChanges { target: loginScreen; x: 0; failTextVisible: false }
+ PropertyChanges { target: loginScreen; x: 0; }
PropertyChanges { target: setupScreen; x: window.width }
PropertyChanges { target: browseScreen; x: window.width }
PropertyChanges { target: settingsScreen; x: window.width }
FirstLaunch {
id: startScreen
}
-
+
Setup {
id: setupScreen
}
Browse {
id: browseScreen
}
-
+
Login {
id: loginScreen
}
Connections {
target: virtualstudio
function onAuthSucceeded() {
+ if (virtualstudio.windowState !== "login") {
+ // can happen on settings screen when switching between prod and test
+ return;
+ }
if (virtualstudio.showDeviceSetup) {
virtualstudio.windowState = "setup";
} else {
virtualstudio.windowState = "browse";
}
}
- function onAuthFailed() {
- loginScreen.failTextVisible = true;
- }
function onConnected() {
virtualstudio.windowState = "connected";
}
function onDisconnected() {
virtualstudio.windowState = "browse";
}
+ function onWindowStateUpdated() {
+ if (virtualstudio.windowState === "login") {
+ virtualstudio.login();
+ }
+ }
}
}
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2008-2022 Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file vsApi.cpp
+ * \author Dominick Hing
+ * \date May 2023
+ */
+
+#include "vsApi.h"
+
+VsApi::VsApi(QNetworkAccessManager* networkAccessManager)
+{
+ m_networkAccessManager = networkAccessManager;
+}
+
+QNetworkReply* VsApi::getAuth0UserInfo()
+{
+ return get(QUrl("https://auth.jacktrip.org/userinfo"));
+}
+
+QNetworkReply* VsApi::getUser(const QString& userId)
+{
+ return get(QUrl(QString("https://%1/api/users/%2").arg(m_apiHost, userId)));
+}
+
+QNetworkReply* VsApi::getServers()
+{
+ return get(QUrl(QString("https://%1/api/servers").arg(m_apiHost)));
+}
+
+QNetworkReply* VsApi::getSubscriptions(const QString& userId)
+{
+ return get(
+ QUrl(QString("https://%1/api/users/%2/subscriptions").arg(m_apiHost, userId)));
+}
+
+QNetworkReply* VsApi::getRegions(const QString& userId)
+{
+ return get(QUrl(QString("https://%1/api/users/%2/regions").arg(m_apiHost, userId)));
+}
+
+QNetworkReply* VsApi::getDevice(const QString& deviceId)
+{
+ return get(QUrl(QString("https://%1/api/devices/%2").arg(m_apiHost, deviceId)));
+}
+
+QNetworkReply* VsApi::postDevice(const QByteArray& data)
+{
+ return post(QUrl(QString("https://%1/api/devices").arg(m_apiHost)), data);
+}
+
+QNetworkReply* VsApi::postDeviceHeartbeat(const QString& deviceId, const QByteArray& data)
+{
+ return post(
+ QUrl(QString("https://%1/api/devices/%2/heartbeat").arg(m_apiHost, deviceId)),
+ data);
+}
+
+QNetworkReply* VsApi::updateServer(const QString& serverId, const QByteArray& data)
+{
+ return put(QUrl(QString("https://%1/api/servers/%2").arg(m_apiHost, serverId)), data);
+}
+
+QNetworkReply* VsApi::updateDevice(const QString& deviceId, const QByteArray& data)
+{
+ return put(QUrl(QString("https://%1/api/devices/%2").arg(m_apiHost, deviceId)), data);
+}
+
+QNetworkReply* VsApi::deleteDevice(const QString& deviceId)
+{
+ return deleteResource(
+ QUrl(QString("https://%1/api/devices/%2").arg(m_apiHost, deviceId)));
+}
+
+QNetworkReply* VsApi::get(const QUrl& url)
+{
+ QNetworkRequest request = QNetworkRequest(url);
+ request.setRawHeader(QByteArray("Authorization"),
+ QString("Bearer %1").arg(m_accessToken).toUtf8());
+
+ QNetworkReply* reply = m_networkAccessManager->get(request);
+ return reply;
+}
+
+QNetworkReply* VsApi::post(const QUrl& url, const QByteArray& data)
+{
+ QNetworkRequest request = QNetworkRequest(url);
+ request.setRawHeader(QByteArray("Authorization"),
+ QString("Bearer %1").arg(m_accessToken).toUtf8());
+ request.setRawHeader(QByteArray("Content-Type"),
+ QString("application/json").toUtf8());
+
+ QNetworkReply* reply = m_networkAccessManager->post(request, data);
+ return reply;
+}
+
+QNetworkReply* VsApi::put(const QUrl& url, const QByteArray& data)
+{
+ QNetworkRequest request = QNetworkRequest(url);
+ request.setRawHeader(QByteArray("Authorization"),
+ QString("Bearer %1").arg(m_accessToken).toUtf8());
+ request.setRawHeader(QByteArray("Content-Type"),
+ QString("application/json").toUtf8());
+ QNetworkReply* reply = m_networkAccessManager->put(request, data);
+ return reply;
+}
+
+QNetworkReply* VsApi::deleteResource(const QUrl& url)
+{
+ QNetworkRequest request = QNetworkRequest(url);
+ request.setRawHeader(QByteArray("Authorization"),
+ QString("Bearer %1").arg(m_accessToken).toUtf8());
+
+ QNetworkReply* reply = m_networkAccessManager->deleteResource(request);
+ return reply;
+}
\ No newline at end of file
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2008-2022 Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file vsApi.h
+ * \author Dominick Hing
+ * \date May 2023
+ */
+
+#ifndef VSAPI_H
+#define VSAPI_H
+
+#include <QEventLoop>
+#include <QJsonParseError>
+#include <QMap>
+#include <QNetworkAccessManager>
+#include <QNetworkReply>
+#include <QNetworkRequest>
+#include <QString>
+#include <QUrl>
+#include <QVariant>
+#include <iostream>
+
+class VsApi : public QObject
+{
+ Q_OBJECT
+
+ public:
+ VsApi(QNetworkAccessManager* networkAccessManager);
+ void setAccessToken(QString token) { m_accessToken = token; };
+ void setApiHost(QString host) { m_apiHost = host; }
+ QString getApiHost() { return m_apiHost; }
+
+ QNetworkReply* getAuth0UserInfo();
+ QNetworkReply* getUser(const QString& userId);
+ QNetworkReply* getServers();
+ QNetworkReply* getSubscriptions(const QString& userId);
+ QNetworkReply* getRegions(const QString& userId);
+ QNetworkReply* getDevice(const QString& deviceId);
+
+ QNetworkReply* postDevice(const QByteArray& data);
+ QNetworkReply* postDeviceHeartbeat(const QString& deviceId, const QByteArray& data);
+
+ QNetworkReply* updateServer(const QString& serverId, const QByteArray& data);
+ QNetworkReply* updateDevice(const QString& deviceId, const QByteArray& data);
+
+ QNetworkReply* deleteDevice(const QString& deviceId);
+
+ private:
+ QNetworkReply* get(const QUrl& url);
+ QNetworkReply* put(const QUrl& url, const QByteArray& data);
+ QNetworkReply* post(const QUrl& url, const QByteArray& data);
+ QNetworkReply* deleteResource(const QUrl& url);
+
+ QString m_accessToken;
+ QString m_apiHost;
+ QNetworkAccessManager* m_networkAccessManager;
+};
+
+#endif // VSAPI_H
\ No newline at end of file
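
For reference, a minimal usage sketch of the new VsApi wrapper, mirroring how the virtualstudio.cpp and vsDevice.cpp hunks call it. The standalone setup around it is illustrative only and not taken from the patch:

```cpp
#include <QCoreApplication>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <iostream>

#include "vsApi.h"

int main(int argc, char* argv[])
{
    QCoreApplication app(argc, argv);

    QNetworkAccessManager nam;
    VsApi api(&nam);
    api.setApiHost(QStringLiteral("app.jacktrip.org"));  // PROD_API_HOST
    api.setAccessToken(QStringLiteral("<access token obtained via VsAuth>"));

    // Every VsApi call returns a QNetworkReply*; the caller handles
    // completion and is responsible for deleteLater().
    QNetworkReply* reply = api.getServers();
    QObject::connect(reply, &QNetworkReply::finished, [&app, reply]() {
        if (reply->error() != QNetworkReply::NoError) {
            std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
        } else {
            std::cout << reply->readAll().toStdString() << std::endl;
        }
        reply->deleteLater();
        app.quit();
    });

    return app.exec();
}
```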
m_audioInterface->setInputDevice(m_inputDeviceName);
m_audioInterface->setOutputDevice(m_outputDeviceName);
m_audioInterface->setBufferSizeInSamples(m_audioBufferSize);
+ static_cast<RtAudioInterface*>(m_audioInterface.get())->setDevices(m_devices);
// Note: setup might change the number of channels and/or buffer size
m_audioInterface->setup(true);
}
}
+void VsAudioInterface::setOutputDevice(QString deviceName, bool shouldRestart)
+{
+ m_outputDeviceName = deviceName.toStdString();
+ if (!m_audioInterface.isNull()) {
+ if (m_audioActive && shouldRestart) {
+ emit settingsUpdated();
+ }
+ }
+}
+
#ifdef RT_AUDIO
void VsAudioInterface::setBaseInputChannel(int baseChannel, bool shouldRestart)
{
}
return;
}
-#endif
-void VsAudioInterface::setOutputDevice(QString deviceName, bool shouldRestart)
-{
- m_outputDeviceName = deviceName.toStdString();
- if (!m_audioInterface.isNull()) {
- if (m_audioActive && shouldRestart) {
- emit settingsUpdated();
- }
- }
-}
-#ifdef RT_AUDIO
void VsAudioInterface::setBaseOutputChannel(int baseChannel, bool shouldRestart)
{
if (m_audioInterfaceMode != VsAudioInterface::RTAUDIO) {
}
}
}
+
+void VsAudioInterface::refreshRtAudioDevices()
+{
+ RtAudioInterface::scanDevices(m_devices);
+}
+
+void VsAudioInterface::getDeviceList(QStringList* list, QStringList* categories,
+ QList<int>* channels, bool isInput)
+{
+ RtAudio baseRtAudio;
+ RtAudio::Api baseRtAudioApi = baseRtAudio.getCurrentApi();
+ if (categories != NULL) {
+ categories->clear();
+ }
+ if (channels != NULL) {
+ channels->clear();
+ }
+ list->clear();
+
+ // do not include blacklisted audio interfaces
+ // these are known to be unstable and cause JackTrip to crash
+ QVector<QString> blacklisted_devices = {
+#ifdef _WIN32
+ // Realtek ASIO: seems to crash any computer that tries to use it
+ QString::fromUtf8("Realtek ASIO"),
+#endif
+ // JackRouter: crashes if not running; use Jack backend instead
+ QString::fromUtf8("JackRouter"),
+ };
+
+ // Explicitly add default device
+ QString defaultDeviceName = "";
+ uint32_t defaultDeviceIdx;
+ RtAudio::DeviceInfo defaultDeviceInfo;
+ if (isInput) {
+ defaultDeviceIdx = baseRtAudio.getDefaultInputDevice();
+ } else {
+ defaultDeviceIdx = baseRtAudio.getDefaultOutputDevice();
+ }
+
+ if (defaultDeviceIdx != 0) {
+ defaultDeviceInfo = baseRtAudio.getDeviceInfo(defaultDeviceIdx);
+ defaultDeviceName = QString::fromStdString(defaultDeviceInfo.name);
+ }
+
+ if (blacklisted_devices.contains(defaultDeviceName)) {
+ std::cout << "RTAudio: blacklisted default " << (isInput ? "input" : "output")
+ << " device: " << defaultDeviceName.toStdString() << std::endl;
+ } else if (defaultDeviceName != "") {
+ list->append(defaultDeviceName);
+ if (categories != NULL) {
+#ifdef _WIN32
+ switch (baseRtAudioApi) {
+ case RtAudio::WINDOWS_ASIO:
+ categories->append(QStringLiteral("Low-Latency (ASIO)"));
+ break;
+ case RtAudio::WINDOWS_WASAPI:
+ categories->append(QStringLiteral("High-Latency (Non-ASIO)"));
+ break;
+ case RtAudio::WINDOWS_DS:
+ categories->append(QStringLiteral("High-Latency (Non-ASIO)"));
+ break;
+ default:
+ categories->append(QStringLiteral(""));
+ break;
+ }
+#else
+ categories->append(QStringLiteral(""));
+#endif
+ }
+ if (channels != NULL) {
+ if (isInput) {
+ channels->append(defaultDeviceInfo.inputChannels);
+ } else {
+ channels->append(defaultDeviceInfo.outputChannels);
+ }
+ }
+ }
+
+ for (int n = 0; n < m_devices.size(); ++n) {
+#ifdef _WIN32
+ if (m_devices[n].api == RtAudio::UNIX_JACK) {
+ continue;
+ }
+#endif
+ // Don't include duplicate entries
+ if (list->contains(m_devices[n].name)) {
+ continue;
+ }
+
+ // Skip the default device, since we already added it
+ if (m_devices[n].name == defaultDeviceName
+ && m_devices[n].api == baseRtAudioApi) {
+ continue;
+ }
+
+ // Skip if no channels available
+ if ((isInput && m_devices[n].inputChannels == 0)
+ || (!isInput && m_devices[n].outputChannels == 0)) {
+ continue;
+ }
+
+ // Skip blacklisted devices
+ if (blacklisted_devices.contains(m_devices[n].name)) {
+ std::cout << "RTAudio: blacklisted " << (isInput ? "input" : "output")
+ << " device: " << m_devices[n].name.toStdString() << std::endl;
+ continue;
+ }
+
+ // Good to go!
+ if (isInput) {
+ list->append(m_devices[n].name);
+ if (channels != NULL) {
+ channels->append(m_devices[n].inputChannels);
+ }
+ } else {
+ list->append(m_devices[n].name);
+ if (channels != NULL) {
+ channels->append(m_devices[n].outputChannels);
+ }
+ }
+
+ if (categories == NULL) {
+ continue;
+ }
+
+#ifdef _WIN32
+ switch (m_devices[n].api) {
+ case RtAudio::WINDOWS_ASIO:
+ categories->append("Low-Latency (ASIO)");
+ break;
+ case RtAudio::WINDOWS_WASAPI:
+ categories->append("High-Latency (Non-ASIO)");
+ break;
+ case RtAudio::WINDOWS_DS:
+ categories->append("High-Latency (Non-ASIO)");
+ break;
+ default:
+ categories->append("");
+ break;
+ }
+#else
+ categories->append("");
+#endif
+ }
+}
#endif
void VsAudioInterface::setAudioInterfaceMode(bool useRtAudio, bool shouldRestart)
#define VSDAUDIOINTERFACE_H
#include <QDebug>
+#include <QList>
#include <QObject>
#include <QSharedPointer>
#include <QString>
+#include <QStringList>
#ifndef NO_JACK
#include "../JackAudioInterface.h"
public slots:
void setInputDevice(QString deviceName, bool shouldRestart = true);
+ void setOutputDevice(QString deviceName, bool shouldRestart = true);
#ifdef RT_AUDIO
void setBaseInputChannel(int baseChannel, bool shouldRestart = true);
void setNumInputChannels(int numChannels, bool shouldRestart = true);
void setInputMixMode(const int mode, bool shouldRestart = true);
-#endif
- void setOutputDevice(QString deviceName, bool shouldRestart = true);
-#ifdef RT_AUDIO
void setBaseOutputChannel(int baseChannel, bool shouldRestart = true);
void setNumOutputChannels(int numChannels, bool shouldRestart = true);
+ void refreshRtAudioDevices();
+ void getDeviceList(QStringList* list, QStringList* categories, QList<int>* channels,
+ bool isInput);
#endif
void setAudioInterfaceMode(bool useRtAudio, bool shouldRestart = true);
void setInputVolume(float multiplier);
Volume* m_outputVolumePlugin;
Tone* m_outputTonePlugin;
+#ifdef RT_AUDIO
+ QVector<RtAudioDevice> m_devices;
+#endif
+
void updateDevicesErrorMsg(const QString& msg);
void updateDevicesWarningMsg(const QString& msg);
void updateDevicesErrorHelpUrl(const QString& url);
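
The new refreshRtAudioDevices()/getDeviceList() helpers feed VirtualStudio::formatDeviceList(), which now returns a QJsonArray ready to expose through the combo-box Q_PROPERTYs declared earlier in this patch. A hedged sketch of that flow from the VirtualStudio side (member and function names are the ones declared above; the exact call site is not shown in these hunks):

```cpp
#ifdef RT_AUDIO
// Illustrative only: rebuild the input-device combo model from a fresh scan.
m_vsAudioInterface->refreshRtAudioDevices();

QStringList devices;
QStringList categories;
QList<int> channels;
m_vsAudioInterface->getDeviceList(&devices, &categories, &channels, /*isInput=*/true);

m_inputComboModel = formatDeviceList(devices, categories, channels);
emit inputComboModelChanged();  // QML re-reads the QJsonArray property
#endif
```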
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2008-2022 Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file vsAuth.cpp
+ * \author Dominick Hing
+ * \date May 2023
+ */
+
+#include "vsAuth.h"
+
+#include "./vsConstants.h"
+
+VsAuth::VsAuth(VsQuickView* view, QNetworkAccessManager* networkAccessManager, VsApi* api)
+ : m_clientId(AUTH_CLIENT_ID), m_authorizationServerHost(AUTH_SERVER_HOST)
+{
+ m_view = view;
+ m_networkAccessManager = networkAccessManager;
+ m_api = api;
+ m_deviceCodeFlow.reset(new VsDeviceCodeFlow(networkAccessManager));
+
+ connect(m_deviceCodeFlow.data(), &VsDeviceCodeFlow::deviceCodeFlowInitialized, this,
+ &VsAuth::initializedCodeFlow);
+ connect(m_deviceCodeFlow.data(), &VsDeviceCodeFlow::deviceCodeFlowError, this,
+ &VsAuth::handleAuthFailed);
+ connect(m_deviceCodeFlow.data(), &VsDeviceCodeFlow::onCompletedCodeFlow, this,
+ &VsAuth::codeFlowCompleted);
+ connect(m_deviceCodeFlow.data(), &VsDeviceCodeFlow::deviceCodeFlowTimedOut, this,
+ &VsAuth::codeExpired);
+
+ m_view->engine()->rootContext()->setContextProperty(QStringLiteral("auth"), this);
+
+ m_verificationUrl = QStringLiteral("https://auth.jacktrip.org/activate");
+}
+
+void VsAuth::authenticate(QString currentRefreshToken)
+{
+ if (currentRefreshToken.isEmpty()) {
+ // if no refresh token, initialize device flow
+ m_deviceCodeFlow->grant();
+ } else {
+ m_attemptingRefreshToken = true;
+ emit updatedAttemptingRefreshToken(m_attemptingRefreshToken);
+
+ // otherwise, use refresh token to gain a new access token
+ m_refreshToken = currentRefreshToken;
+ refreshAccessToken(m_refreshToken);
+ }
+}
+
+void VsAuth::initializedCodeFlow(QString code, QString verificationUrl)
+{
+ m_verificationCode = code;
+ m_verificationUrl = verificationUrl;
+ m_authenticationStage = QStringLiteral("polling");
+
+ emit updatedAuthenticationStage(m_authenticationStage);
+ emit updatedVerificationCode(m_verificationCode);
+ emit updatedVerificationUrl(m_verificationUrl);
+}
+
+void VsAuth::fetchUserInfo(QString accessToken)
+{
+ QNetworkReply* reply = m_api->getAuth0UserInfo();
+ connect(reply, &QNetworkReply::finished, this, [=]() {
+ if (reply->error() != QNetworkReply::NoError) {
+ std::cout << "VsAuth::fetchUserInfo Error: "
+ << reply->errorString().toStdString() << std::endl;
+ handleAuthFailed(); // handle failure
+ emit fetchUserInfoFailed();
+ reply->deleteLater();
+ return;
+ }
+
+ QByteArray response = reply->readAll();
+ QJsonDocument userInfo = QJsonDocument::fromJson(response);
+ QString userId = userInfo.object()[QStringLiteral("sub")].toString();
+
+        handleAuthSucceeded(userId, accessToken);
+        reply->deleteLater();
+    });
+}
+
+void VsAuth::refreshAccessToken(QString refreshToken)
+{
+ m_authenticationStage = QStringLiteral("refreshing");
+ emit updatedAuthenticationStage(m_authenticationStage);
+
+ QNetworkRequest request = QNetworkRequest(
+ QUrl(QString("https://%1/oauth/token").arg(m_authorizationServerHost)));
+
+ request.setRawHeader(QByteArray("Content-Type"),
+ QByteArray("application/x-www-form-urlencoded"));
+
+ QString data = QString("grant_type=refresh_token&client_id=%1&refresh_token=%2")
+ .arg(m_clientId, refreshToken);
+
+ // send request
+ QNetworkReply* reply = m_networkAccessManager->post(request, data.toUtf8());
+
+ connect(reply, &QNetworkReply::finished, this, [=]() {
+ QByteArray buffer = reply->readAll();
+
+ // Error: failed to get device code
+ if (reply->error()) {
+ std::cout << "Failed to get new access token: " << buffer.toStdString()
+ << std::endl;
+ handleAuthFailed(); // handle failure
+ emit refreshTokenFailed();
+ reply->deleteLater();
+ return;
+ }
+
+ // parse JSON from string response
+ QJsonParseError parseError;
+ QJsonDocument data = QJsonDocument::fromJson(buffer, &parseError);
+ if (parseError.error) {
+ std::cout << "Error parsing JSON for Access Token: "
+ << parseError.errorString().toStdString() << std::endl;
+ handleAuthFailed(); // handle failure
+ emit refreshTokenFailed();
+ reply->deleteLater();
+ return;
+ }
+
+ // received access token
+ QJsonObject object = data.object();
+ QString accessToken = object.value(QLatin1String("access_token")).toString();
+ m_api->setAccessToken(accessToken); // set access token
+ fetchUserInfo(accessToken); // get user ID from Auth0
+ reply->deleteLater();
+ });
+}
+
+void VsAuth::resetCode()
+{
+ if (!m_verificationCode.isEmpty()) {
+ m_deviceCodeFlow->cancelCodeFlow();
+ m_deviceCodeFlow->grant();
+ }
+}
+
+void VsAuth::codeFlowCompleted(QString accessToken, QString refreshToken)
+{
+ m_refreshToken = refreshToken;
+ m_api->setAccessToken(accessToken);
+ fetchUserInfo(accessToken);
+}
+
+void VsAuth::codeExpired()
+{
+ emit deviceCodeExpired();
+}
+
+void VsAuth::handleAuthSucceeded(QString userId, QString accessToken)
+{
+ // Success case: we got our access token (either through the refresh token or device
+ // code flow), and fetched the user ID
+ std::cout << "Successfully authenticated Virtual Studio user" << std::endl;
+ std::cout << "User ID: " << userId.toStdString() << std::endl;
+
+ if (m_authenticationStage == QStringLiteral("polling")) {
+ m_authenticationMethod = QStringLiteral("code flow");
+ } else {
+ m_authenticationMethod = QStringLiteral("refresh token");
+ }
+
+ m_userId = userId;
+ m_verificationCode = QStringLiteral("");
+ m_accessToken = accessToken;
+ m_authenticationStage = QStringLiteral("success");
+ m_attemptingRefreshToken = false;
+ m_isAuthenticated = true;
+
+ emit updatedUserId(m_userId);
+ emit updatedAuthenticationStage(m_authenticationStage);
+ emit updatedVerificationCode(m_verificationCode);
+ emit updatedIsAuthenticated(m_isAuthenticated);
+ emit updatedAttemptingRefreshToken(m_attemptingRefreshToken);
+ emit updatedAuthenticationMethod(m_authenticationMethod);
+
+ // notify UI and virtual studio class of success
+ emit authSucceeded();
+}
+
+void VsAuth::handleAuthFailed()
+{
+ // this might get called because there was an error getting the access token,
+ // or there was an issue fetching the user ID. We need both to say
+ // that authentication succeeded
+ std::cout << "Failed to authenticate user" << std::endl;
+
+ m_userId = QStringLiteral("");
+ m_verificationCode = QStringLiteral("");
+ m_accessToken = QStringLiteral("");
+ m_authenticationStage = QStringLiteral("failed");
+ m_authenticationMethod = QStringLiteral("");
+ m_attemptingRefreshToken = false;
+ m_isAuthenticated = false;
+
+ emit updatedUserId(m_userId);
+ emit updatedAuthenticationStage(m_authenticationStage);
+ emit updatedVerificationCode(m_verificationCode);
+ emit updatedIsAuthenticated(m_isAuthenticated);
+ emit updatedAttemptingRefreshToken(m_attemptingRefreshToken);
+ emit updatedAuthenticationMethod(m_authenticationMethod);
+
+ // notify UI and virtual studio class of failure
+ emit authFailed();
+}
+
+void VsAuth::cancelAuthenticationFlow()
+{
+ m_deviceCodeFlow->cancelCodeFlow();
+
+ m_userId = QStringLiteral("");
+ m_verificationCode = QStringLiteral("");
+ m_accessToken = QStringLiteral("");
+ m_authenticationStage = QStringLiteral("unauthenticated");
+ m_isAuthenticated = false;
+
+ emit updatedUserId(m_userId);
+ emit updatedAuthenticationStage(m_authenticationStage);
+ emit updatedVerificationCode(m_verificationCode);
+ emit updatedIsAuthenticated(m_isAuthenticated);
+}
+
+void VsAuth::logout()
+{
+ if (!m_isAuthenticated) {
+        std::cout << "Warning: attempting to log out while not authenticated" << std::endl;
+ }
+
+ // reset auth state
+ m_userId = QStringLiteral("");
+ m_verificationCode = QStringLiteral("");
+ m_accessToken = QStringLiteral("");
+ m_authenticationStage = QStringLiteral("unauthenticated");
+ m_isAuthenticated = false;
+
+ emit updatedUserId(m_userId);
+ emit updatedAuthenticationStage(m_authenticationStage);
+ emit updatedVerificationCode(m_verificationCode);
+ emit updatedIsAuthenticated(m_isAuthenticated);
+}
\ No newline at end of file
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2008-2022 Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file vsAuth.h
+ * \author Dominick Hing
+ * \date May 2023
+ */
+
+#ifndef VSAUTH_H
+#define VSAUTH_H
+
+#include <QNetworkAccessManager>
+#include <QQmlContext>
+#include <QQmlEngine>
+#include <QString>
+#include <iostream>
+
+#include "vsApi.h"
+#include "vsDeviceCodeFlow.h"
+#include "vsQuickView.h"
+
+class VsAuth : public QObject
+{
+ Q_OBJECT
+
+ Q_PROPERTY(QString authenticationStage READ authenticationStage NOTIFY
+ updatedAuthenticationStage);
+ Q_PROPERTY(QString verificationCode READ deviceCode NOTIFY updatedVerificationCode);
+ Q_PROPERTY(
+ QString verificationUrl READ deviceVerificationUrl NOTIFY updatedVerificationUrl);
+ Q_PROPERTY(bool isAuthenticated READ isAuthenticated NOTIFY updatedIsAuthenticated);
+ Q_PROPERTY(QString authenticationMethod READ authenticationMethod NOTIFY
+ updatedAuthenticationMethod);
+ Q_PROPERTY(bool attemptingRefreshToken READ attemptingRefreshToken NOTIFY
+ updatedAttemptingRefreshToken);
+ Q_PROPERTY(QString userId READ userId NOTIFY updatedUserId);
+
+ public:
+ VsAuth(VsQuickView* view, QNetworkAccessManager* networkAccessManager, VsApi* api);
+
+ void authenticate(QString currentRefreshToken);
+ void refreshAccessToken(QString refreshToken);
+ Q_INVOKABLE void resetCode();
+ void logout();
+
+ public slots:
+ void cancelAuthenticationFlow();
+
+ // getter methods
+ QString authenticationStage() { return m_authenticationStage; };
+ QString deviceCode() { return m_verificationCode; };
+ QString deviceVerificationUrl() { return m_verificationUrl; };
+ bool isAuthenticated() { return m_isAuthenticated; };
+ QString userId() { return m_userId; };
+ QString accessToken() { return m_accessToken; };
+ QString refreshToken() { return m_refreshToken; };
+ QString authenticationMethod() { return m_authenticationMethod; }
+ bool attemptingRefreshToken() { return m_attemptingRefreshToken; }
+
+ signals:
+ void updatedAuthenticationStage(QString authenticationStage);
+ void updatedVerificationCode(QString deviceCode);
+ void updatedVerificationUrl(QUrl verificationUrl);
+ void updatedIsAuthenticated(bool isAuthenticated);
+ void updatedUserId(QString userId);
+ void updatedAuthenticationMethod(QString grant);
+ void updatedAttemptingRefreshToken(bool attemptingRefreshToken);
+ void authSucceeded();
+ void authFailed();
+ void refreshTokenFailed();
+ void fetchUserInfoFailed();
+ void deviceCodeExpired();
+
+ private slots:
+ void handleAuthSucceeded(QString userId, QString accessToken);
+ void handleAuthFailed();
+ void initializedCodeFlow(QString code, QString verificationUrl);
+ void codeFlowCompleted(QString accessToken, QString refreshToken);
+ void codeExpired();
+
+ private:
+ void fetchUserInfo(QString accessToken);
+
+ QString m_clientId;
+ QString m_authorizationServerHost;
+
+ QString m_authenticationStage = QStringLiteral("unauthenticated");
+ QString m_verificationCode = QStringLiteral("");
+ QString m_verificationUrl;
+ QString m_authenticationMethod = QStringLiteral("");
+
+ bool m_attemptingRefreshToken = false;
+ bool m_isAuthenticated = false;
+ QString m_userId;
+ QString m_accessToken;
+ QString m_refreshToken;
+
+ VsQuickView* m_view;
+ QNetworkAccessManager* m_networkAccessManager;
+ VsApi* m_api;
+ QScopedPointer<VsDeviceCodeFlow> m_deviceCodeFlow;
+};
+
+#endif
\ No newline at end of file
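
A minimal sketch of how VsAuth is meant to be driven, based on the header above and the VirtualStudio changes earlier in this patch. The constructor-side wiring is not shown in these hunks, so treat the member setup below as an assumption:

```cpp
// Illustrative wiring, e.g. inside the VirtualStudio constructor (assumed context).
m_networkAccessManager.reset(new QNetworkAccessManager);
m_api.reset(new VsApi(m_networkAccessManager.data()));
m_auth.reset(new VsAuth(&m_view, m_networkAccessManager.data(), m_api.data()));

connect(m_auth.data(), &VsAuth::authSucceeded, this, &VirtualStudio::slotAuthSucceeded);
connect(m_auth.data(), &VsAuth::authFailed, this, &VirtualStudio::slotAuthFailed);

// Either refresh an existing session or start the device code flow,
// depending on whether a refresh token was previously stored.
m_auth->authenticate(m_refreshToken);
```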
#include <QString>
const QString AUTH_AUTHORIZE_URI = QStringLiteral("https://auth.jacktrip.org/authorize");
-const QString AUTH_TOKEN_URI = QStringLiteral("https://auth.jacktrip.org/oauth/token");
-const QString AUTH_AUDIENCE = QStringLiteral("https://api.jacktrip.org");
-const QString AUTH_CLIENT_ID = QStringLiteral("cROUJag0UVKDaJ6jRAKRzlVjKVFNU39I");
-const QString PROD_API_HOST = QStringLiteral("app.jacktrip.org");
-const QString TEST_API_HOST = QStringLiteral("test.jacktrip.org");
+const QString AUTH_TOKEN_URI = QStringLiteral("https://auth.jacktrip.org/oauth/token");
+const QString AUTH_AUDIENCE = QStringLiteral("https://api.jacktrip.org");
+const QString AUTH_CLIENT_ID = QStringLiteral("cROUJag0UVKDaJ6jRAKRzlVjKVFNU39I");
+const QString PROD_API_HOST = QStringLiteral("app.jacktrip.org");
+const QString TEST_API_HOST = QStringLiteral("test.jacktrip.org");
+const QString AUTH_SERVER_HOST = QStringLiteral("auth.jacktrip.org");
#endif // VSCONSTANTS_H
#include <QDebug>
// Constructor
-VsDevice::VsDevice(QOAuth2AuthorizationCodeFlow* authenticator, bool testMode,
- QObject* parent)
- : QObject(parent), m_authenticator(authenticator)
+VsDevice::VsDevice(VsAuth* auth, VsApi* api, QObject* parent)
+ : QObject(parent), m_auth(auth), m_api(api), m_sendVolumeTimer(this)
{
QSettings settings;
settings.beginGroup(QStringLiteral("VirtualStudio"));
(float)settings.value(QStringLiteral("MonMultiplier"), 0).toDouble();
settings.endGroup();
- m_sendVolumeTimer = new QTimer(this);
- m_sendVolumeTimer->setSingleShot(true);
- connect(m_sendVolumeTimer, &QTimer::timeout, this, &VsDevice::sendLevels);
-
- // Determine which API host to use
- m_apiHost = PROD_API_HOST;
- if (testMode) {
- m_apiHost = TEST_API_HOST;
- }
+ m_sendVolumeTimer.setSingleShot(true);
+ connect(&m_sendVolumeTimer, &QTimer::timeout, this, &VsDevice::sendLevels);
// Set server levels to stored versions
QJsonObject json = {
{QLatin1String("monitorVolume"), (int)(m_monitorVolume * 100.0)}};
QJsonDocument request = QJsonDocument(json);
- QNetworkReply* reply = m_authenticator->put(
- QStringLiteral("https://%1/api/devices/%2").arg(m_apiHost, m_appID),
- request.toJson());
+ QNetworkReply* reply = m_api->updateDevice(m_appID, request.toJson());
connect(reply, &QNetworkReply::finished, this, [=]() {
// Got error
if (reply->error() != QNetworkReply::NoError) {
});
}
+VsDevice::~VsDevice()
+{
+ m_sendVolumeTimer.stop();
+ stopJackTrip();
+ stopPinger();
+}
+
// registerApp idempotently registers an emulated device belonging to the current user
void VsDevice::registerApp()
{
}
// check if device exists
- QNetworkReply* reply = m_authenticator->get(
- QStringLiteral("https://%1/api/devices/%2").arg(m_apiHost, m_appID));
+    QNetworkReply* reply = m_api->getDevice(m_appID);
connect(reply, &QNetworkReply::finished, this, [=]() {
// Got error
if (reply->error() != QNetworkReply::NoError) {
return;
}
- QNetworkReply* reply = m_authenticator->deleteResource(
- QStringLiteral("https://%1/api/devices/%2").arg(m_apiHost, m_appID));
+ QNetworkReply* reply = m_api->deleteDevice(m_appID);
connect(reply, &QNetworkReply::finished, this, [=]() {
if (reply->error() != QNetworkReply::NoError) {
std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
if (m_webSocket == nullptr) {
m_webSocket =
new VsWebSocket(QUrl(QStringLiteral("wss://%1/api/devices/%2/heartbeat")
- .arg(m_apiHost, m_appID)),
- m_authenticator->token(), m_apiPrefix, m_apiSecret);
+ .arg(m_api->getApiHost(), m_appID)),
+ m_auth->accessToken(), m_apiPrefix, m_apiSecret);
connect(m_webSocket, &VsWebSocket::textMessageReceived, this,
&VsDevice::onTextMessageReceived);
}
m_webSocket->sendMessage(request.toJson());
} else {
// Send heartbeat via POST API
- QNetworkReply* reply = m_authenticator->post(
- QStringLiteral("https://%1/api/devices/%2/heartbeat").arg(m_apiHost, m_appID),
- request.toJson());
+ QNetworkReply* reply = m_api->postDeviceHeartbeat(m_appID, request.toJson());
connect(reply, &QNetworkReply::finished, this, [=]() {
if (reply->error() != QNetworkReply::NoError) {
std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
{QLatin1String("serverId"), serverId},
};
QJsonDocument request = QJsonDocument(json);
- QNetworkReply* reply = m_authenticator->put(
- QStringLiteral("https://%1/api/devices/%2").arg(m_apiHost, m_appID),
- request.toJson());
+ QNetworkReply* reply = m_api->updateDevice(m_appID, request.toJson());
connect(reply, &QNetworkReply::finished, this, [=]() {
if (reply->error() != QNetworkReply::NoError) {
std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
{QLatin1String("playbackMute"), m_playbackMute},
{QLatin1String("monitorVolume"), (int)(m_monitorVolume * 100.0)}};
QJsonDocument request = QJsonDocument(json);
- QNetworkReply* reply = m_authenticator->put(
- QStringLiteral("https://%1/api/devices/%2").arg(m_apiHost, m_appID),
- request.toJson());
+ QNetworkReply* reply = m_api->updateDevice(m_appID, request.toJson());
connect(reply, &QNetworkReply::finished, this, [=]() {
if (reply->error() != QNetworkReply::NoError) {
std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
m_jackTrip->setBufferStrategy(bufferStrategy + 1);
if (bufferStrategy == 2 || bufferStrategy == 3) {
// use -q auto3 for loss concealment
- m_jackTrip->setBufferQueueLength(-5);
+ m_jackTrip->setBufferQueueLength(-3);
} else {
// use -q auto
m_jackTrip->setBufferQueueLength(-500);
return;
}
m_captureVolume = multiplier;
-
- if (m_sendVolumeTimer) {
- m_sendVolumeTimer->start(200);
- }
+ m_sendVolumeTimer.start(100);
}
// updateCaptureMute sets VsDevice's capture (input) mute to the provided boolean
return;
}
m_captureMute = muted;
-
- if (m_sendVolumeTimer) {
- m_sendVolumeTimer->start(200);
- }
+ m_sendVolumeTimer.start(100);
}
// updatePlaybackVolume sets VsDevice's playback (output) volume to the provided float
return;
}
m_playbackVolume = multiplier;
-
- if (m_sendVolumeTimer) {
- m_sendVolumeTimer->start(200);
- }
+ m_sendVolumeTimer.start(100);
}
// updatePlaybackMute sets VsDevice's playback (output) mute to the provided boolean
return;
}
m_playbackMute = muted;
-
- if (m_sendVolumeTimer) {
- m_sendVolumeTimer->start(200);
- }
+ m_sendVolumeTimer.start(100);
}
// updateMonitorVolume sets VsDevice's monitor to the provided float
if (multiplier == m_monitorVolume) {
return;
}
-
m_monitorVolume = multiplier;
-
- if (m_sendVolumeTimer) {
- m_sendVolumeTimer->start(200);
- }
+ m_sendVolumeTimer.start(100);
}
// terminateJackTrip is a slot intended to be triggered on jacktrip process signals
};
QJsonDocument request = QJsonDocument(json);
- QNetworkReply* reply = m_authenticator->post(
- QStringLiteral("https://%1/api/devices").arg(m_apiHost), request.toJson());
+ QNetworkReply* reply = m_api->postDevice(request.toJson());
connect(reply, &QNetworkReply::finished, this, [=]() {
if (reply->error() != QNetworkReply::NoError) {
std::cout << "Error: " << reply->errorString().toStdString() << std::endl;
#include "../JackTrip.h"
#include "../jacktrip_globals.h"
+#include "vsApi.h"
+#include "vsAuth.h"
#include "vsConstants.h"
#include "vsPinger.h"
#include "vsServerInfo.h"
public:
// Constructor
- explicit VsDevice(QOAuth2AuthorizationCodeFlow* authenticator, bool testMode,
- QObject* parent = nullptr);
+ explicit VsDevice(VsAuth* auth, VsApi* api, QObject* parent = nullptr);
+ virtual ~VsDevice();
// Public functions
void registerApp();
int selectBindPort();
QString randomString(int stringLength);
+ VsAuth* m_auth = nullptr;
+ VsApi* m_api = nullptr;
VsPinger* m_pinger = NULL;
QString m_appID;
QString m_token;
QString m_apiPrefix;
QString m_apiSecret;
- QString m_apiHost = PROD_API_HOST;
QJsonObject m_deviceAgentConfig;
VsWebSocket* m_webSocket = NULL;
QScopedPointer<JackTrip> m_jackTrip;
- QOAuth2AuthorizationCodeFlow* m_authenticator;
QRandomGenerator m_randomizer;
float m_captureVolume = 1.0;
bool m_captureMute = false;
float m_playbackVolume = 1.0;
bool m_playbackMute = false;
float m_monitorVolume = 0;
- QTimer* m_sendVolumeTimer;
+ QTimer m_sendVolumeTimer;
bool m_reconnect = false;
};
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2008-2022 Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file vsDeviceCodeFlow.cpp
+ * \author Dominick Hing
+ * \date May 2023
+ */
+
+#include "./vsDeviceCodeFlow.h"
+
+#include "./vsConstants.h"
+
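+// VsDeviceCodeFlow implements the OAuth 2.0 Device Authorization Grant (RFC 8628):
+//   1. request a device code and user code from the authorization server
+//   2. surface the user code and verification URL so the user can approve in a browser
+//   3. poll the token endpoint until tokens are issued or the device code expires
+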
+VsDeviceCodeFlow::VsDeviceCodeFlow(QNetworkAccessManager* networkAccessManager)
+ : m_clientId(AUTH_CLIENT_ID)
+ , m_audience(AUTH_AUDIENCE)
+ , m_authorizationServerHost(AUTH_SERVER_HOST)
+ , m_authenticationError(false)
+ , m_netManager(networkAccessManager)
+{
+ // start polling when the device flow has been initialized
+ connect(this, &VsDeviceCodeFlow::deviceCodeFlowInitialized, this,
+ &VsDeviceCodeFlow::startPolling);
+ connect(&m_tokenPollingTimer, &QTimer::timeout, this,
+ &VsDeviceCodeFlow::onPollingTimerTick);
+ connect(&m_deviceFlowExpirationTimer, &QTimer::timeout, this,
+ &VsDeviceCodeFlow::onDeviceCodeExpired);
+
+ m_tokenPollingTimer.setSingleShot(false);
+ m_deviceFlowExpirationTimer.setSingleShot(true);
+}
+
+void VsDeviceCodeFlow::grant()
+{
+ initDeviceAuthorizationCodeFlow();
+}
+
+void VsDeviceCodeFlow::initDeviceAuthorizationCodeFlow()
+{
+ // form initial request for device authorization code
+ QNetworkRequest request = QNetworkRequest(
+ QUrl(QString("https://%1/oauth/device/code").arg(m_authorizationServerHost)));
+
+ request.setRawHeader(QByteArray("Content-Type"),
+ QByteArray("application/x-www-form-urlencoded"));
+
+ QString data =
+ QString("client_id=%1&scope=%2&audience=%3")
+ .arg(m_clientId,
+ QLatin1String("openid profile email offline_access read:servers"),
+ m_audience);
+
+ // send request
+ QNetworkReply* reply = m_netManager->post(request, data.toUtf8());
+ connect(reply, &QNetworkReply::finished, this, [=]() {
+ bool success = processDeviceCodeNetworkReply(reply);
+ if (success) {
+ // notify success along with user code and verification URL
+ emit deviceCodeFlowInitialized(m_userCode, m_verificationUriComplete);
+ } else if (m_authenticationError) {
+ // notify failure
+ emit deviceCodeFlowError();
+ }
+ reply->deleteLater();
+ });
+}
+
+void VsDeviceCodeFlow::startPolling()
+{
+ if (m_pollingInterval <= 0 || m_deviceCodeValidityDuration <= 0) {
+        std::cout << "Could not start polling: missing polling interval or expiration. "
+                  << "This indicates a bug in the device code flow." << std::endl;
+ return;
+ }
+
+ // poll on a regular interval, up until the expiration of the code
+ m_tokenPollingTimer.setInterval(m_pollingInterval * 1000);
+ m_deviceFlowExpirationTimer.setInterval(m_deviceCodeValidityDuration * 1000);
+
+ m_tokenPollingTimer.start();
+ m_deviceFlowExpirationTimer.start();
+}
+
+void VsDeviceCodeFlow::stopPolling()
+{
+ if (m_tokenPollingTimer.isActive()) {
+ m_tokenPollingTimer.stop();
+ }
+ if (m_deviceFlowExpirationTimer.isActive()) {
+ m_deviceFlowExpirationTimer.stop();
+ }
+}
+
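+// onPollingTimerTick exchanges the device code for tokens; error responses are
+// expected until the user approves, so polling continues until tokens arrive or
+// the expiration timer fires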
+void VsDeviceCodeFlow::onPollingTimerTick()
+{
+ // form request to /oauth/token
+ QNetworkRequest request = QNetworkRequest(
+ QUrl(QString("https://%1/oauth/token").arg(m_authorizationServerHost)));
+
+ request.setRawHeader(QByteArray("Content-Type"),
+ QByteArray("application/x-www-form-urlencoded"));
+
+ QString data =
+ QString("client_id=%1&device_code=%2&grant_type=%3")
+ .arg(m_clientId, m_deviceCode,
+ QLatin1String("urn:ietf:params:oauth:grant-type:device_code"));
+
+    // send request for the token
+ QNetworkReply* reply = m_netManager->post(request, data.toUtf8());
+ connect(reply, &QNetworkReply::finished, this, [=]() {
+ bool success = processPollingOAuthTokenNetworkReply(reply);
+ if (m_authenticationError) {
+            // should not happen: an initialization error would have prevented polling from starting
+ emit deviceCodeFlowError();
+ } else if (success) {
+ // flow successfully completed
+ emit onCompletedCodeFlow(m_accessToken, m_refreshToken);
+ // cleanup
+ stopPolling();
+ cleanupDeviceCodeFlow();
+ }
+ reply->deleteLater();
+ });
+}
+
+void VsDeviceCodeFlow::onDeviceCodeExpired()
+{
+ emit deviceCodeFlowTimedOut();
+
+ std::cout << "Device Code has expired." << std::endl;
+ stopPolling();
+ cleanupDeviceCodeFlow();
+}
+
+void VsDeviceCodeFlow::cancelCodeFlow()
+{
+ stopPolling();
+ cleanupDeviceCodeFlow();
+}
+
+bool VsDeviceCodeFlow::processDeviceCodeNetworkReply(QNetworkReply* reply)
+{
+ QByteArray buffer = reply->readAll();
+
+ // Error: failed to get device code
+ if (reply->error()) {
+ std::cout << "Failed to get device code: " << buffer.toStdString() << std::endl;
+ m_authenticationError = true;
+ return false;
+ }
+
+ // parse JSON from string response
+ QJsonParseError parseError;
+ QJsonDocument data = QJsonDocument::fromJson(buffer, &parseError);
+ if (parseError.error) {
+ std::cout << "Error parsing JSON for Device Code: "
+ << parseError.errorString().toStdString() << std::endl;
+ m_authenticationError = true;
+ return false;
+ }
+
+ // get fields
+ QJsonObject object = data.object();
+ m_deviceCode = object.value(QLatin1String("device_code")).toString();
+ m_userCode = object.value(QLatin1String("user_code")).toString();
+ m_verificationUri = object.value(QLatin1String("verification_uri")).toString();
+ m_verificationUriComplete =
+ object.value(QLatin1String("verification_uri_complete")).toString();
+ m_pollingInterval =
+ object.value(QLatin1String("interval")).toInt(5); // default to 5s
+ m_deviceCodeValidityDuration =
+ object.value(QLatin1String("expires_in")).toInt(900); // default to 900s
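+    // per RFC 8628, "interval" is the minimum polling period and "expires_in"
+    // bounds how long this device code (and therefore polling) stays valid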
+
+ // return true if success
+ return true;
+}
+
+bool VsDeviceCodeFlow::processPollingOAuthTokenNetworkReply(QNetworkReply* reply)
+{
+ QByteArray buffer = reply->readAll();
+
+    // Error responses are expected here (e.g. authorization_pending) until the user approves
+ if (reply->error()) {
+ return false;
+ }
+
+ // parse JSON from string response
+ QJsonParseError parseError;
+ QJsonDocument data = QJsonDocument::fromJson(buffer, &parseError);
+ if (parseError.error) {
+ std::cout << "Error parsing JSON for access token: "
+ << parseError.errorString().toStdString() << std::endl;
+ return false;
+ }
+
+ // get fields
+ QJsonObject object = data.object();
+ m_idToken = object.value(QLatin1String("id_token")).toString();
+ m_accessToken = object.value(QLatin1String("access_token")).toString();
+ m_refreshToken = object.value(QLatin1String("refresh_token")).toString();
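+    // a refresh_token is returned because the initial request included the offline_access scope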
+ m_authenticationError = false;
+
+ // return true if success
+ return true;
+}
+
+void VsDeviceCodeFlow::cleanupDeviceCodeFlow()
+{
+ m_deviceCode = QStringLiteral("");
+ m_userCode = QStringLiteral("");
+ m_verificationUri = QStringLiteral("https://auth.jacktrip.org/activate");
+ m_verificationUriComplete = QStringLiteral("");
+
+ m_pollingInterval = -1;
+ m_deviceCodeValidityDuration = -1;
+}
+
+QString VsDeviceCodeFlow::accessToken()
+{
+ return m_accessToken;
+}
\ No newline at end of file
--- /dev/null
+//*****************************************************************
+/*
+ JackTrip: A System for High-Quality Audio Network Performance
+ over the Internet
+
+ Copyright (c) 2008-2022 Juan-Pablo Caceres, Chris Chafe.
+ SoundWIRE group at CCRMA, Stanford University.
+
+ Permission is hereby granted, free of charge, to any person
+ obtaining a copy of this software and associated documentation
+ files (the "Software"), to deal in the Software without
+ restriction, including without limitation the rights to use,
+ copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the
+ Software is furnished to do so, subject to the following
+ conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+ OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+ HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+ WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+ OTHER DEALINGS IN THE SOFTWARE.
+*/
+//*****************************************************************
+
+/**
+ * \file vsDeviceCodeFlow.h
+ * \author Dominick Hing
+ * \date May 2023
+ */
+
+#ifndef VSDEVICECODEFLOW_H
+#define VSDEVICECODEFLOW_H
+
+#include <QEventLoop>
+#include <QJsonDocument>
+#include <QJsonObject>
+#include <QJsonParseError>
+#include <QNetworkAccessManager>
+#include <QNetworkReply>
+#include <QSettings>
+#include <QString>
+#include <QTimer>
+#include <iostream>
+
+#include <QObject>
+
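+/** \brief Authenticates with the Virtual Studio authorization server using the
+ *         OAuth 2.0 Device Authorization Grant (RFC 8628), a.k.a. device code flow.
+ */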
+class VsDeviceCodeFlow : public QObject
+{
+ Q_OBJECT
+
+ public:
+ explicit VsDeviceCodeFlow(QNetworkAccessManager* networkAccessManager);
+ virtual ~VsDeviceCodeFlow() { stopPolling(); }
+
+ void grant();
+    void refreshAccessToken() {}
+ void initDeviceAuthorizationCodeFlow();
+
+ bool processDeviceCodeNetworkReply(QNetworkReply* reply);
+ bool processPollingOAuthTokenNetworkReply(QNetworkReply* reply);
+ void startPolling();
+ void stopPolling();
+ void onPollingTimerTick();
+ void onDeviceCodeExpired();
+ void cancelCodeFlow();
+ void cleanupDeviceCodeFlow();
+
+ bool authenticated();
+ QString accessToken();
+
+ signals:
+ void deviceCodeFlowInitialized(QString code, QString verificationUrl);
+ void deviceCodeFlowError();
+ void deviceCodeFlowTimedOut();
+ void onCompletedCodeFlow(QString accessToken, QString refreshToken);
+
+ private:
+ QString m_clientId;
+ QString m_audience;
+ QString m_authorizationServerHost;
+
+ // state used specifically in the device code flow
+ QString m_deviceCode;
+ QString m_userCode;
+ QString m_verificationUri;
+ QString m_verificationUriComplete;
+ int m_pollingInterval = -1; // seconds
+ int m_deviceCodeValidityDuration = -1; // seconds
+
+ QTimer m_tokenPollingTimer;
+ QTimer m_deviceFlowExpirationTimer;
+
+ // authentication state variables
+ bool m_authenticationError;
+ QString m_refreshToken;
+ QString m_accessToken;
+ QString m_idToken;
+
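+    // note: the QScopedPointer takes ownership of the injected QNetworkAccessManager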
+ QScopedPointer<QNetworkAccessManager> m_netManager;
+};
+
+#endif // VSDEVICECODEFLOW_H
\ No newline at end of file
public:
explicit VsPing(uint32_t pingNum, uint32_t timeout_msec);
+ virtual ~VsPing() { mTimer.stop(); }
uint32_t pingNumber() { return mPingNumber; }
QDateTime sentTimestamp() { return mSent; }
* \param path The path to ping the server on
*/
explicit VsPinger(QString scheme, QString host, QString path);
+ virtual ~VsPinger() { stop(); }
void start();
void stop();
bool active() { return mStarted; };
Rectangle {
property string backgroundColour: virtualstudio.darkMode ? "#272525" : "#FAFBFB"
property string textColour: virtualstudio.darkMode ? "#FAFBFB" : "#0F0D0D"
-
+
width: 696
height: 577
color: backgroundColour
states: [
State {
name: "login"
- PropertyChanges { target: loginScreen; x: 0; failTextVisible: false }
+ PropertyChanges { target: loginScreen; x: 0 }
PropertyChanges { target: setupScreen; x: window.width }
PropertyChanges { target: browseScreen; x: window.width }
PropertyChanges { target: settingsScreen; x: window.width }
transitions: Transition {
NumberAnimation { properties: "x"; duration: 800; easing.type: Easing.InOutQuad }
}
-
+
Setup {
id: setupScreen
}
Browse {
id: browseScreen
}
-
+
Login {
id: loginScreen
- showBackButton: false
}
Settings {
Connections {
target: virtualstudio
function onAuthSucceeded() {
+ if (virtualstudio.windowState !== "login") {
+ // can happen on settings screen when switching between prod and test
+ return;
+ }
if (virtualstudio.showDeviceSetup) {
virtualstudio.windowState = "setup";
} else {
virtualstudio.windowState = "browse";
}
}
- function onAuthFailed() {
- loginScreen.failTextVisible = true;
- }
function onConnected() {
virtualstudio.windowState = "connected";
}
function onDisconnected() {
virtualstudio.windowState = "browse";
}
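+        // trigger authentication whenever the window returns to the login screen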
+ function onWindowStateUpdated() {
+ if (virtualstudio.windowState === "login") {
+ virtualstudio.login();
+ }
+ }
}
}
#include "AudioInterface.h"
-constexpr const char* const gVersion = "1.9.0"; ///< JackTrip version
+constexpr const char* const gVersion = "1.10.0"; ///< JackTrip version
//*******************************************************************************
/// \name Default Values
// Remove the console that appears if we're on windows and not running from a
// command line.
if (!isRunFromCmd()) {
+ std::cout << "This extra window is caused by a bug in Microsoft Windows. "
+ << "It can safely be ignored or closed." << std::endl
+ << std::endl
+ << "To fix this bug, please upgrade to the latest version of "
+                  << "Windows Terminal available in the Microsoft Store:"
+ << std::endl
+ << "https://aka.ms/terminal" << std::endl;
+
FreeConsole();
}
#endif // _WIN32
app->setApplicationName(QStringLiteral("JackTrip"));
app->setApplicationVersion(gVersion);
- Settings cliSettings(true);
- cliSettings.parseInput(argc, argv);
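+    // Settings now lives behind a QSharedPointer so ownership can be shared with
+    // the QJackTrip window created below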
+ QSharedPointer<Settings> cliSettings;
+ cliSettings.reset(new Settings(true));
+ cliSettings->parseInput(argc, argv);
#ifndef NO_VS
// Register clipboard Qml type
vsInit.reset(new VsInit());
vsInit->checkForInstance(deeplink);
#endif // _WIN32
- window.reset(new QJackTrip(&cliSettings, !deeplink.isEmpty()));
+ window.reset(new QJackTrip(cliSettings, !deeplink.isEmpty()));
#else
- window.reset(new QJackTrip(&cliSettings));
+ window.reset(new QJackTrip(cliSettings));
#endif // NO_VS
QObject::connect(window.data(), &QJackTrip::signalExit, app.data(),
&QCoreApplication::quit, Qt::QueuedConnection);