Teaspeak-Server/server/src/client/web/VoiceBridge.cpp

#include <cstdarg>
#include <cstdio>
#include <cstring>
#include <misc/std_unique_ptr.h>
#include <log/LogUtils.h>
#include <pipes/rtc/PeerConnection.h>
#include <pipes/rtc/AudioStream.h>
#include <misc/endianness.h>
#include "WebClient.h"
#include "VoiceBridge.h"
using namespace std;
using namespace ts;
using namespace ts::server;
using namespace ts::web;

void log(pipes::Logger::LogLevel level, const std::string& name, const std::string& message, ...) {
    constexpr size_t max_length = 1024 * 8;
    char buffer[max_length];

    va_list args;
    va_start(args, message);
    vsnprintf(buffer, max_length, message.c_str(), args);
    va_end(args);

    debugMessage(LOG_GENERAL, "[WebRTC][{}][{}] {}", level, name, string(buffer));
}
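
/*
 * The constructor below assembles the DataPipes peer connection configuration:
 * the ICE port range and UPnP flag come from the web config, every "host:port"
 * entry of config::web::ice_servers is parsed into a STUN server entry, and the
 * library logger is routed through the printf-style log() shim above.
 */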

VoiceBridge::VoiceBridge(const shared_ptr<WebClient>& owner) : _owner(owner) {
    auto config = make_shared<rtc::PeerConnection::Config>();
    config->nice_config = make_shared<rtc::NiceWrapper::Config>();
    config->nice_config->ice_port_range = {config::web::webrtc_port_min, config::web::webrtc_port_max};

    for(const auto& entry : config::web::ice_servers) {
        auto dp = entry.find(':');
        if(dp == string::npos) continue;

        auto host = entry.substr(0, dp);
        auto port = entry.substr(dp + 1);
        if(port.find_last_not_of("0123456789") != string::npos) continue;
        if(host == "stun.l.google.com" && port == "9302")
            port = "19302"; /* fix for the invalid config value until 1.3.14beta1 :) */

        try {
            config->nice_config->ice_servers.push_back({host, (uint16_t) stoi(port)});
        } catch(std::exception& ex) {
            (void) ex; /* malformed port, skip this entry */
        }
    }
    config->nice_config->ice_servers.push_back({"stun.l.google.com", 19302});

    config->nice_config->allow_ice_udp = true;
    config->nice_config->allow_ice_tcp = false;
    config->nice_config->use_upnp = config::web::enable_upnp;

    //FIXME Use the internal thread or a shared worker
    /* Not creating the thread here because DataPipes has a better impl with a join
    config->nice_config->main_loop = std::shared_ptr<GMainLoop>(g_main_loop_new(nullptr, false), g_main_loop_unref);
    std::thread(g_main_loop_run, config->nice_config->main_loop.get()).detach();
    */

    config->logger = make_shared<pipes::Logger>();
    config->logger->callback_log = log;
    //config->sctp.local_port = 5202; //Firefox doesn't support a different port :D

    this->connection = make_unique<rtc::PeerConnection>(config);
}

VoiceBridge::~VoiceBridge() {
    __asm__("nop");
}

int VoiceBridge::server_id() {
    auto locked = this->_owner.lock();
    return locked ? locked->getServerId() : 0;
}

std::shared_ptr<server::WebClient> VoiceBridge::owner() {
    return this->_owner.lock();
}

bool VoiceBridge::initialize(std::string &error) {
    if(!this->connection->initialize(error)) return false;

    this->connection->callback_ice_candidate = [&](const rtc::IceCandidate& candidate) {
        if(candidate.is_finished_candidate()) {
            /* end-of-gathering marker: signal completion instead of forwarding a candidate */
            if(auto callback{this->callback_ice_candidate_finished}; callback)
                callback();
        } else {
            if(auto callback{this->callback_ice_candidate}; callback)
                callback(candidate);
        }
    };

    this->connection->callback_new_stream = [&](const std::shared_ptr<rtc::Stream> &channel) { this->handle_media_stream(channel); }; //bind(&VoiceBridge::handle_media_stream, this, placeholders::_1); => crash
    this->connection->callback_setup_fail = [&](rtc::PeerConnection::ConnectionComponent comp, const std::string& reason) {
        debugMessage(this->server_id(), "{} WebRTC setup failed! Component {} ({})", CLIENT_STR_LOG_PREFIX_(this->owner()), comp, reason);
        if(this->callback_failed)
            this->callback_failed();
    };

    return true;
}
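
/*
 * Signalling helpers: the web client sends its SDP offer (parse_offer), the
 * bridge replies with generate_answer(), and trickled ICE candidates are fed
 * in via apply_ice() until remote_ice_finished() marks the end of gathering.
 */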

bool VoiceBridge::parse_offer(const std::string &sdp) {
    this->offer_timestamp = chrono::system_clock::now();

    string error;
    return this->connection->apply_offer(error, sdp);
}

int VoiceBridge::apply_ice(const std::deque<std::shared_ptr<rtc::IceCandidate>>& candidates) {
    return this->connection->apply_ice_candidates(candidates);
}

void VoiceBridge::remote_ice_finished() {
    this->connection->remote_candidates_finished();
}

std::string VoiceBridge::generate_answer() {
    return this->connection->generate_answer(true);
}

void VoiceBridge::execute_tick() {
    if(!this->_voice_channel) {
        if(this->offer_timestamp.time_since_epoch().count() > 0 && this->offer_timestamp + chrono::seconds(10) < chrono::system_clock::now()) {
            this->offer_timestamp = chrono::system_clock::time_point();
            this->connection->callback_setup_fail(rtc::PeerConnection::ConnectionComponent::BASE, "setup timeout");
        }
    }
}
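
/*
 * Streams negotiated by the browser are dispatched by type: the application
 * stream carries the SCTP data channels (see handle_data_channel), while the
 * audio stream delivers RTP Opus frames together with the ssrc-audio-level
 * header extension that handle_audio_data uses to drop silent frames.
 */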

void VoiceBridge::handle_media_stream(const std::shared_ptr<rtc::Stream> &undefined_stream) {
    if(undefined_stream->type() == rtc::CHANTYPE_APPLICATION) {
        auto stream = dynamic_pointer_cast<rtc::ApplicationStream>(undefined_stream);
        if(!stream) return;

        stream->callback_datachannel_new = [&](const std::shared_ptr<rtc::DataChannel> &channel) { this->handle_data_channel(channel); }; //bind(&VoiceBridge::handle_data_channel, this, placeholders::_1); => may crash?
    } else if(undefined_stream->type() == rtc::CHANTYPE_AUDIO) {
        auto stream = dynamic_pointer_cast<rtc::AudioStream>(undefined_stream);
        if(!stream) return;

        this->_audio_channel = stream;
        stream->register_local_extension("urn:ietf:params:rtp-hdrext:ssrc-audio-level");

        //bind(&VoiceBridge::handle_audio_data, this, placeholders::_1, placeholders::_2, placeholders::_3); => may crash?
        stream->incoming_data_handler = [&](const std::shared_ptr<rtc::AudioChannel> &channel, const pipes::buffer_view &data, size_t payload_offset) { this->handle_audio_data(channel, data, payload_offset); };
    } else {
        logError(this->server_id(), "Got offer for unknown channel of type {}", undefined_stream->type());
    }
}
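
/*
 * Voice frames arriving on the "main" data channel are, judging by the parsing
 * below, laid out as [flag byte][flag byte][voice payload]: the two leading
 * bytes become the boolean arguments of callback_voice_data and the remainder
 * is forwarded unchanged.
 */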

void VoiceBridge::handle_data_channel(const std::shared_ptr<rtc::DataChannel> &channel) {
    if(channel->lable() == "main") {
        this->_voice_channel = channel;
        debugMessage(this->server_id(), "{} Got voice channel!", CLIENT_STR_LOG_PREFIX_(this->owner()));
        this->callback_initialized();
    }

    weak_ptr<rtc::DataChannel> weak_channel = channel;
    channel->callback_binary = [&, weak_channel](const pipes::buffer_view& buffer) {
        if(buffer.length() < 2) return; /* at least the two flag bytes are required */
        this->callback_voice_data(buffer.view(2), buffer[0] == 1, buffer[1] == 1); /* buffer.substr(2), buffer[0] == 1, buffer[1] == 1 */
    };
    channel->callback_close = [&, channel] {
        if(channel == this->_voice_channel) {
            this->_voice_channel = nullptr;
            //TODO may callback?
            debugMessage(this->server_id(), "{} Voice channel disconnected!", CLIENT_STR_LOG_PREFIX_(this->owner()));
        }
    };
}
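
/*
 * Incoming RTP Opus data is re-framed for the voice pipeline as
 * [16-bit big-endian packet id][codec byte][opus payload]. The codec byte 5
 * presumably corresponds to Opus Music in the TeamSpeak codec enumeration;
 * frames whose ssrc-audio-level extension reports 127 (silence) are dropped.
 */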

void VoiceBridge::handle_audio_data(const std::shared_ptr<rtc::AudioChannel> &channel, const pipes::buffer_view &data, size_t payload_offset) {
    if(channel->codec->type != rtc::codec::TypedAudio::OPUS) {
        debugMessage(this->server_id(), "{} Got unknown codec ({})!", CLIENT_STR_LOG_PREFIX_(this->owner()), channel->codec->type);
        return;
    }

    auto ac = _audio_channel.lock();
    if(!ac) return;

    for(const auto& ext : ac->list_extensions(0x02)) {
        if(ext->name == "urn:ietf:params:rtp-hdrext:ssrc-audio-level") {
            int level;
            if(rtc::protocol::rtp_header_extension_parse_audio_level(data, ext->id, &level) == 0) {
                //debugMessage(this->server_id(), "Audio level: {}", level);
                if(level == 127) return; //Silence
            }
            break;
        }
    }
    //int level;
    //rtc::protocol::rtp_header_extension_parse_audio_level((char*) data.data(), data.length(), 1, &level);

    auto target_buffer = buffer::allocate_buffer(data.length() - payload_offset + 3);
    le2be16(this->voice.packet_id++, (char*) target_buffer.data_ptr());
    target_buffer[2] = 5;
    memcpy(&target_buffer[3], &data[payload_offset], data.length() - payload_offset);

    this->callback_voice_data(target_buffer, this->voice.packet_id < 7, false);
}