birb 2025-04-11 21:38:56 -05:00
commit fa7935dad6
No known key found for this signature in database
GPG Key ID: BFB779869FEE99D2
25 changed files with 40174 additions and 0 deletions

23
.gitignore vendored Normal file

@@ -0,0 +1,23 @@
node_modules
# Output
.output
.vercel
.netlify
.wrangler
/.svelte-kit
/build
# OS
.DS_Store
Thumbs.db
# Env
.env
.env.*
!.env.example
!.env.test
# Vite
vite.config.js.timestamp-*
vite.config.ts.timestamp-*

1
.npmrc Normal file

@@ -0,0 +1 @@
engine-strict=true

10
LICENSE Normal file

@@ -0,0 +1,10 @@
Permission to use, copy, modify, and/or distribute this software for
any purpose with or without fee is hereby granted.
THE SOFTWARE IS PROVIDED “AS IS” AND THE AUTHOR DISCLAIMS ALL
WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE
FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

38
README.md Normal file

@@ -0,0 +1,38 @@
# sv
Everything you need to build a Svelte project, powered by [`sv`](https://github.com/sveltejs/cli).
## Creating a project
If you're seeing this, you've probably already done this step. Congrats!
```bash
# create a new project in the current directory
npx sv create
# create a new project in my-app
npx sv create my-app
```
## Developing
Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server:
```bash
npm run dev
# or start the server and open the app in a new browser tab
npm run dev -- --open
```
## Building
To create a production version of your app:
```bash
npm run build
```
You can preview the production build with `npm run preview`.
> To deploy your app, you may need to install an [adapter](https://svelte.dev/docs/kit/adapters) for your target environment.
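For reference, this project already uses [`@sveltejs/adapter-static`](https://github.com/sveltejs/kit/tree/main/packages/adapter-static) via `svelte.config.js`, so the production build is a fully static site:
```bash
# already present in this repo's devDependencies; for a fresh project:
npm i -D @sveltejs/adapter-static
```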

13
jsconfig.json Normal file

@@ -0,0 +1,13 @@
{
"extends": "./.svelte-kit/tsconfig.json",
"compilerOptions": {
"allowJs": true,
"checkJs": false,
"moduleResolution": "bundler"
}
// Path aliases are handled by https://svelte.dev/docs/kit/configuration#alias
// except $lib which is handled by https://svelte.dev/docs/kit/configuration#files
//
// If you want to overwrite includes/excludes, make sure to copy over the relevant includes/excludes
// from the referenced tsconfig.json - TypeScript does not merge them in
}

2167
package-lock.json generated Normal file

File diff suppressed because it is too large

27
package.json Normal file

@@ -0,0 +1,27 @@
{
"name": "freeremote-client",
"private": true,
"version": "0.0.1",
"type": "module",
"scripts": {
"dev": "vite dev",
"build": "vite build",
"preview": "vite preview",
"prepare": "svelte-kit sync || echo ''"
},
"devDependencies": {
"@sveltejs/adapter-static": "^3.0.8",
"@sveltejs/kit": "^2.16.0",
"@sveltejs/vite-plugin-svelte": "^5.0.0",
"@tailwindcss/vite": "^4.1.3",
"daisyui": "^5.0.17",
"svelte": "^5.0.0",
"tailwindcss": "^4.1.3",
"vite": "^6.2.5"
},
"dependencies": {
"extend": "^3.0.2",
"opus-decoder": "^0.7.7",
"socket.io-client": "^4.8.1"
}
}

7
src/app.css Normal file

@@ -0,0 +1,7 @@
@import "tailwindcss";
@plugin "daisyui";
@theme {
--font-sans: "Inter", system-ui, "Roboto", sans-serif;
--font-mono: "IBM Plex Mono", SFMono-Regular, monospace;
}

29
src/app.html Normal file

@@ -0,0 +1,29 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="preconnect" href="https://fonts.googleapis.com" />
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
<link
href="https://fonts.googleapis.com/css2?family=Material+Symbols+Outlined"
rel="stylesheet"
/>
<link
href="https://fonts.googleapis.com/css2?family=Fira+Mono:wght@400;500;700&display=swap"
rel="stylesheet"
/>
<link
href="https://fonts.googleapis.com/css2?family=IBM+Plex+Mono:ital,wght@0,100;0,200;0,300;0,400;0,500;0,600;0,700;1,100;1,200;1,300;1,400;1,500;1,600;1,700&family=Inter:ital,opsz,wght@0,14..32,100..900;1,14..32,100..900&display=swap"
rel="stylesheet"
/>
<meta name="viewport" content="width=device-width, initial-scale=1" />
%sveltekit.head%
</head>
<body data-sveltekit-preload-data="hover">
<div style="display: contents">%sveltekit.body%</div>
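<!-- recorder.js (served from static/) defines the global Recorder class used by src/routes/+page.svelte -->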
<script
src="/recorder.js"
type="text/javascript"
></script>
</body>
</html>

294
src/lib/queue.js Normal file

@@ -0,0 +1,294 @@
// https://github.com/Johni0702/web-audio-buffer-queue/tree/master
// ISC license
import extend from 'extend'
import { Buffer } from 'buffer';
import { Writable } from 'readable-stream'
/**
* A source node that plays queued PCM buffers.
*
* When no more data is queued, this node emits silence.
*
* The queued buffers are played at the frequency of the audio context.
*
* Multiple channels are supported, both interleaved and
non-interleaved layouts. Every single buffer queued is expected
to contain the same number of samples for every channel. Therefore a single
frame may not be split across multiple buffers.
*
* When in object mode, the input format is determined automatically.
* Supported formats are Float32Array, Int16Array and AudioBuffer.
* When not in object mode, the input format has to be specified manually by
* passing {@link BufferQueueNode#Float32Array} or {@link BufferQueueNode#Int16Array}
* to the constructor.
*
* Note that this does only implement a small part of the AudioNode interface.
* This node will disconnect automatically when its stream is closed.
*
* @extends Writable
*/
class BufferQueueNode extends Writable {
/**
* Create a BufferQueueNode.
* @param {Object} [options] - Options passed to the Writable constructor.
* @param {AudioBufferFormat} [options.dataType=BufferQueueNode.Float32Array] -
* Format of input data when not in objectMode.
* @param {boolean} [options.interleaved=true] - Whether the input data is interleaved
* @param {number} [options.channels=1] - Number of channels
* @param {number} [options.bufferSize=0] - Buffer size, must be a power of two
* between 256 and 16384. May also be 0, in which case the implementation will
* pick a good value (recommended).
* @param {AudioContext} [options.audioContext=require('audio-context')()] - The audio context
*/
constructor (options) {
super(options)
options = extend({
dataType: Float32ArrayBuffer,
objectMode: false,
interleaved: true,
channels: 1,
bufferSize: 0
}, options)
this._dataType = options.dataType
this._objectMode = options.objectMode
this._interleaved = options.interleaved
const channels = this._channels = options.channels
const bufferSize = options.bufferSize
const audioContext = options.audioContext
// const sampleRate = audioContext.sampleRate
// Queue that holds all future audio buffers
this._queue = []
// Create a script processor node that will inject our samples
var processorNode = audioContext.createScriptProcessor(bufferSize, 0, channels)
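// Note: ScriptProcessorNode is deprecated in favour of AudioWorklet, but it is
// what this library was written against and is still supported by browsers.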
// Create a buffer source that will power the script processor
// Note: This isn't strictly required, however some browsers are buggy
var inputNode = audioContext.createBufferSource()
// That source should be looping over a short, silent buffer
inputNode.loop = true
var shuttingDown = false
var shutDown = false
// The buffer which holds the current audio data
var currentBuffer = null
// Offset into the current buffer
var currentBufferOffset
processorNode.addEventListener('audioprocess', (e) => {
if (shutDown) {
// Already shut down
return
}
const out = e.outputBuffer
// Offset into the output buffer
let outOffset = 0
// Try to fill the whole output buffer
while (outOffset < out.length) {
// If we don't have a current buffer but there are some in the queue
if (!currentBuffer && this._queue.length > 0) {
// Then get the next queued buffer from the queue
currentBuffer = this._queue.shift()
currentBufferOffset = 0
}
// If we still don't have any data,
if (!currentBuffer) {
// then fill the rest of the output with silence
for (let channel = 0; channel < channels; channel++) {
out.getChannelData(channel).fill(0, outOffset)
}
// and shut down if requested
if (shuttingDown) {
shutDown = true
process.nextTick(() => this.emit('close'))
}
break
}
// Otherwise (we have data), copy as much as possible
const remainingOutput = out.length - outOffset
const remainingInput = currentBuffer.length - currentBufferOffset
const remaining = Math.min(remainingOutput, remainingInput)
// Do the actual copying
currentBuffer.copyTo(out, outOffset, currentBufferOffset, remaining)
// Increase offsets
currentBufferOffset += remaining
outOffset += remaining
// Check if there is still data remaining in the current buffer
if (currentBufferOffset >= currentBuffer.length) {
currentBuffer = null
}
}
})
// Connect the input node to the script processor
// inputNode.connect(processorNode)
// inputNode.start()
// Store node for later connecting
this._node = processorNode
this.on('finish', () => {
shuttingDown = true
})
this.on('close', () => {
processorNode.disconnect()
})
}
/**
* Connect this node to another node.
* @see https://developer.mozilla.org/en-US/docs/Web/API/AudioNode/connect(AudioNode)
*/
connect () {
return this._node.connect.apply(this._node, arguments)
}
/**
* Disconnect this node from another node.
* @see https://developer.mozilla.org/en-US/docs/Web/API/AudioNode/disconnect
*/
disconnect () {
return this._node.disconnect.apply(this._node, arguments)
}
_write (chunk, encoding, callback) {
if (this._objectMode) {
if (chunk instanceof Float32Array) {
chunk = new Float32ArrayBuffer(this._channels, this._interleaved, chunk)
} else if (chunk instanceof Int16Array) {
chunk = new Int16ArrayBuffer(this._channels, this._interleaved, chunk)
} else {
chunk = new AudioBufferBuffer(chunk)
}
} else {
chunk = new (this._dataType)(this._channels, this._interleaved, chunk)
}
this._queue.push(chunk)
callback(null)
}
}
/**
* @interface AudioBufferFormat
*/
/**
* Copy samples from this buffer to the target AudioBuffer.
*
* @function
* @name AudioBufferFormat#copyTo
* @param {AudioBuffer} to - The target audio buffer
* @param {number} toOffset - Offset into the target audio buffer
* @param {number} fromOffset - Offset into this buffer
* @param {number} length - Amount of sample-frames to copy
*/
/** @implements AudioBufferFormat */
class AudioBufferBuffer {
constructor (it) {
this._it = it
}
get length () {
return this._it.length
}
copyTo (to, toOffset, fromOffset, length) {
for (let channel = 0; channel < this._it.numberOfChannels; channel++) {
const source = this._it.getChannelData(channel)
to.copyToChannel(source.subarray(fromOffset, fromOffset + length), channel, toOffset)
}
}
}
class TypedArrayBuffer {
constructor (channels, interleaved, it) {
this._channels = channels
this._interleaved = interleaved
this._it = it
}
get length () {
return this._it.length / this._channels
}
/**
* Return the sample at the specified offset
* @param {number} i - The offset
* @returns {number} The sample
*/
_get (i) {
return this._it[i]
}
/**
* Copy some samples to the specified array.
* @param {Float32Array} to - The target array
* @param {number} toOffset - Offset into the target array
* @param {number} fromOffset - Offset into this array
* @param {number} length - Amount of samples to copy
*/
_bulkCopy (to, toOffset, fromOffset, length) {
to.set(this._it.subarray(fromOffset, fromOffset + length), toOffset)
}
copyTo (to, toOffset, fromOffset, length) {
for (let channel = 0; channel < this._channels; channel++) {
const channelData = to.getChannelData(channel)
if (this._interleaved && this._channels > 1) {
// For interleaved data we have to copy every sample on its own
for (let i = 0; i < length; i++) {
const actualFromOffset = (fromOffset + i) * this._channels + channel
channelData[toOffset + i] = this._get(actualFromOffset)
}
} else {
// Otherwise we can do a bulk copy
const actualFromOffset = this.length * channel + fromOffset
this._bulkCopy(channelData, toOffset, actualFromOffset, length)
}
}
}
}
/** @implements AudioBufferFormat */
class Float32ArrayBuffer extends TypedArrayBuffer {
constructor (channels, interleaved, it) {
if (it instanceof Buffer) {
it = new Float32Array(it.buffer, it.byteOffset, it.byteLength / 4)
} else if (!(it instanceof Float32Array)) {
throw new Error('Unsupported buffer type: ' + it)
}
super(channels, interleaved, it)
}
}
/** @implements AudioBufferFormat */
class Int16ArrayBuffer extends TypedArrayBuffer {
constructor (channels, interleaved, it) {
if (it instanceof Buffer) {
it = new Int16Array(it.buffer, it.byteOffset, it.byteLength / 2)
} else if (!(it instanceof Int16Array)) {
throw new Error('Unsupported buffer type: ' + it)
}
super(channels, interleaved, it)
}
/** @see TypedArrayBuffer#_get */
_get (i) {
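// Map a signed 16-bit sample into [-1, 1]: positive values divide by 32767, zero/negative by 32768.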
const val = this._it[i]
return val / ((1 << 15) - (val > 0 ? 1 : 0))
}
/** @see TypedArrayBuffer#_bulkCopy */
_bulkCopy (to, toOffset, fromOffset, length) {
for (let i = 0; i < length; i++) {
to[toOffset + i] = this._get(fromOffset + i)
}
}
}
BufferQueueNode.AudioBuffer = AudioBufferBuffer
BufferQueueNode.Float32Array = Float32ArrayBuffer
BufferQueueNode.Int16Array = Int16ArrayBuffer
export default BufferQueueNode
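// Example usage (illustrative sketch, not part of the original library; assumes
// the AudioContext is already running, i.e. created after a user gesture):
//
//   import BufferQueueNode from '$lib/queue';
//
//   const ctx = new AudioContext({ sampleRate: 48000 });
//   const node = new BufferQueueNode({ objectMode: true, audioContext: ctx });
//   node.connect(ctx.destination);
//   node.write(new Float32Array(48000)); // queue one second of mono silence
//   node.end(); // emits 'close' once the queue drains, which disconnects the node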

7
src/routes/+layout.svelte Normal file

@@ -0,0 +1,7 @@
<script>
import '../app.css';
let { children } = $props();
</script>
{@render children()}

1
src/routes/+page.js Normal file

@@ -0,0 +1 @@
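// Required by @sveltejs/adapter-static: this marks the route for prerendering to static HTML at build time.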
export const prerender = true;

156
src/routes/+page.svelte Normal file

@@ -0,0 +1,156 @@
<script>
// deps
import BufferQueueNode from "$lib/queue";
import io from "socket.io-client";
import { fade } from "svelte/transition";
import { OpusDecoder } from "opus-decoder";
function clamp(val, min, max) {
return Math.min(Math.max(val, min), max);
}
function avg(arr) {
return arr.reduce((prev, cur) => prev + cur, 0) / arr.length;
}
// state vars
let status = $state("disconnected");
let remoteState = $state();
let files = $state();
let errors = $state([]);
let dbm = $state(0);
let dbmList = $state([0]);
let pwr = $state(0);
let swr = $state(0);
let context = $state();
let recorder;
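// S-meter convention: S9 = -73 dBm and one S-unit = 6 dB, so S = (dBm + 127) / 6;
// signals stronger than S9 are reported as "S9+<dB over S9>".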
let sunits = $derived.by(() => {
let average = avg(dbmList);
if (average > -73) {
return `S9+${Math.round(average + 73)}`;
} else {
return `S${Math.round(clamp((average + 127) / 6, 1, 9))}`;
}
});
let socket;
async function enableMic() {
recorder = new Recorder({
encoderApplication: 2049,
encoderFrameSize: 40,
streamPages: true,
rawOpus: true,
});
recorder.ondataavailable = (data) => {
if (remoteState?.transmitting) socket.emit("audio", data);
};
recorder.start();
context = new AudioContext({ sampleRate: 48000 });
}
// when the file is uploaded we do the thing
$effect(async () => {
if (files && files.length > 0) {
status = "connecting";
let rawKey = await files[0].text();
try {
let json = JSON.parse(
rawKey.match(
/{"callsign":"[A-Z0-9]{1,15}","license":"[a-z]+","id":"[0-9]+","url":"[a-z0-9\-\:/.]+","expiration":[0-9]+}/
)[0]
);
const decoder = new OpusDecoder();
await decoder.ready;
let queueNode = new BufferQueueNode({
audioContext: context,
});
queueNode.connect(context.destination);
socket = io(json.url);
window.socket = socket;
socket.on("connect", () => {
status = "connected";
socket.emit("auth", rawKey);
});
socket.on("disconnect", () => {
status = "disconnected";
});
socket.on("state", (state) => {
remoteState = state;
});
socket.on("dbm", (d) => {
dbm = Math.min(d, -33);
dbmList.push(dbm);
if (dbmList.length > 25) dbmList.shift();
});
socket.on("pwr", (p) => {
pwr = p;
});
socket.on("swr", (s) => {
swr = s;
});
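// Each "audio" event carries one raw Opus packet; decode it to Float32 PCM and queue it for playback.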
socket.on("audio", (chunk) => {
queueNode._write(
decoder.decodeFrame(new Uint8Array(chunk))
.channelData[0],
null,
() => {}
);
});
} catch (e) {
console.log(e);
errors.unshift("Invalid key!");
status = "disconnected";
return;
}
}
});
</script>
{#if status === "disconnected"}
<div
class="w-screen min-h-screen flex flex-col items-center justify-center gap-2"
>
<h1 class="text-3xl font-semibold">freeremote</h1>
{#if context}
<p>Please upload your key below to access your remote station.</p>
<input
accept="text/plain"
bind:files
type="file"
class="file-input"
/>
{:else}
<p>Please click the button below to continue.</p>
<button class="btn btn-soft" onclick={enableMic}>
Enable microphone
</button>
{/if}
</div>
{:else if status === "connecting"}
<div
class="w-screen min-h-screen flex flex-col items-center justify-center gap-2"
>
<span class="loading loading-spinner loading-xl"></span>
</div>
{:else}
<p>{remoteState?.frequency}</p>
<p>{dbm}</p>
<p>{sunits}</p>
{/if}
<div class="toast toast-top toast-end">
{#each errors as error, i}
<div role="alert" class="alert alert-error" transition:fade>
<span>{error}</span>
<button
class="material-symbols-outlined cursor-pointer"
onclick={() => {
errors.splice(i, 1);
}}
>
close
</button>
</div>
{/each}
</div>

143
static/encoderWorker.js Normal file

@@ -0,0 +1,143 @@
"use strict";
// Code courtesy of Symbl, see https://github.com/symblai/opus-encdec for license
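// This file runs in three contexts: Node via require() (unit tests),
// an AudioWorkletGlobalScope (registerProcessor), or a classic Worker (importScripts + onmessage).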
var OggOpusEncoder, OpusEncoderLib;
if(typeof require === 'function'){
OpusEncoderLib = require('./libopus-encoder.js');
OggOpusEncoder = require('./oggOpusEncoder.js').OggOpusEncoder;
} else if (typeof importScripts === "function") {
importScripts('./libopus-encoder.js');
importScripts('./oggOpusEncoder.js');
}
// Run in AudioWorkletGlobal scope
if (typeof registerProcessor === 'function') {
class EncoderWorklet extends AudioWorkletProcessor {
constructor(){
super();
this.continueProcess = true;
this.port.onmessage = ({ data }) => {
if (this.encoder) {
switch( data['command'] ){
case 'getHeaderPages':
this.postPage(this.encoder.generateIdPage());
this.postPage(this.encoder.generateCommentPage());
break;
case 'done':
this.encoder.encodeFinalFrame().forEach(pageData => this.postPage(pageData));
this.encoder.destroy();
delete this.encoder;
this.port.postMessage( {message: 'done'} );
break;
case 'flush':
this.postPage(this.encoder.flush());
this.port.postMessage( {message: 'flushed'} );
break;
default:
// Ignore any unknown commands and continue receiving commands
}
}
switch( data['command'] ){
case 'close':
this.continueProcess = false;
break;
case 'init':
this.encoder = new OggOpusEncoder( data, OpusEncoderLib );
this.port.postMessage( {message: 'ready'} );
break;
default:
// Ignore any unknown commands and continue receiving commands
}
}
}
process(inputs) {
if (this.encoder && inputs[0] && inputs[0].length && inputs[0][0] && inputs[0][0].length){
this.encoder.encode( inputs[0] ).forEach(pageData => this.postPage(pageData));
}
return this.continueProcess;
}
postPage(pageData) {
if (pageData) {
this.port.postMessage( pageData, [pageData.page.buffer] );
}
}
}
registerProcessor('encoder-worklet', EncoderWorklet);
}
// run in scriptProcessor worker scope
else {
var encoder;
var postPageGlobal = (pageData) => {
if (pageData) {
postMessage( pageData, [pageData.page.buffer] );
}
}
onmessage = ({ data }) => {
if (encoder) {
switch( data['command'] ){
case 'encode':
encoder.encode( data['buffers'] ).forEach(pageData => postPageGlobal(pageData));
break;
case 'getHeaderPages':
postPageGlobal(encoder.generateIdPage());
postPageGlobal(encoder.generateCommentPage());
break;
case 'done':
encoder.encodeFinalFrame().forEach(pageData => postPageGlobal(pageData));
encoder.destroy();
encoder = null;
postMessage( {message: 'done'} );
break;
case 'flush':
postPageGlobal(encoder.flush());
postMessage( {message: 'flushed'} );
break;
default:
// Ignore any unknown commands and continue receiving commands
}
}
switch( data['command'] ){
case 'close':
close();
break;
case 'init':
encoder = new OggOpusEncoder( data, OpusEncoderLib );
postMessage( {message: 'ready'} );
break;
default:
// Ignore any unknown commands and continue receiving commands
}
};
}
// Exports for unit testing.
var module = module || {};
module.exports = {
OpusEncoderLib: OpusEncoderLib,
OggOpusEncoder: OggOpusEncoder
};

34698
static/libopus-encoder.js Normal file

File diff suppressed because one or more lines are too long

27
static/libopus-encoder.min.js vendored Normal file

File diff suppressed because one or more lines are too long

Binary file not shown.

File diff suppressed because it is too large

1
static/libopus-encoder.wasm.min.js vendored Normal file

File diff suppressed because one or more lines are too long

Binary file not shown.

Binary file not shown.

378
static/oggOpusEncoder.js Normal file

@@ -0,0 +1,378 @@
// Code courtesy of Symbl, see https://github.com/symblai/opus-encdec for license
var OggOpusEncoder = function( config, Module ){
if ( !Module ) {
throw new Error('Module with exports required to initialize an encoder instance');
}
this.config = Object.assign({
encoderApplication: 2049, // 2048 = Voice (Lower fidelity)
// 2049 = Full Band Audio (Highest fidelity)
// 2051 = Restricted Low Delay (Lowest latency)
encoderFrameSize: 20, // Specified in ms.
encoderSampleRate: 48000, // Desired encoding sample rate. Audio will be resampled
maxFramesPerPage: 40, // Tradeoff latency with overhead
numberOfChannels: 1,
originalSampleRate: 44100,
resampleQuality: 3, // Value between 0 and 10 inclusive. 10 being highest quality.
serial: Math.floor(Math.random() * 4294967296)
}, config );
// encode "raw" opus stream?
// -> either config.rawOpus = true/false,
// or config.mimeType = 'audio/opus'
// (instead of 'audio/ogg; codecs=opus')
this.rawOpus = typeof this.config.rawOpus === 'boolean'?
this.config.rawOpus :
/^audio\/opus\b/i.test(this.config.mimeType);
var useOgg = !this.rawOpus;
this._opus_encoder_create = Module._opus_encoder_create;
this._opus_encoder_destroy = Module._opus_encoder_destroy;
this._opus_encoder_ctl = Module._opus_encoder_ctl;
this._speex_resampler_process_interleaved_float = Module._speex_resampler_process_interleaved_float;
this._speex_resampler_init = Module._speex_resampler_init;
this._speex_resampler_destroy = Module._speex_resampler_destroy;
this._opus_encode_float = Module._opus_encode_float;
this._free = Module._free;
this._malloc = Module._malloc;
this.HEAPU8 = Module.HEAPU8;
this.HEAP32 = Module.HEAP32;
this.HEAPF32 = Module.HEAPF32;
this.pageIndex = 0;
this.granulePosition = 0;
this.segmentData = useOgg? new Uint8Array( 65025 ) : new Uint8Array( 255 ); // Maximum length of oggOpus data
this.segmentDataIndex = 0;
this.segmentTable = useOgg? new Uint8Array( 255 ) : null; // Maximum data segments
this.segmentTableIndex = 0;
this.framesInPage = 0;
this.encodedData = !useOgg? [] : undefined;
this.encodedDataLength = 0;
this.isReady = Module.isReady;
if(!this.isReady){
Module.onready = () => { // arrow function so `this` stays bound to the encoder instance
this.isReady = true;
this.onready && this.onready();
}
}
if(useOgg){
this.initChecksumTable();
}
this.initCodec();
this.initResampler();
if ( this.config.numberOfChannels === 1 ) {
this.interleave = function( buffers ) { return buffers[0]; };
}
};
OggOpusEncoder.prototype.encode = function( buffers ) {
// Determine bufferLength dynamically
if ( !this.bufferLength ) {
this.bufferLength = buffers[0].length;
this.interleavedBuffers = new Float32Array( this.bufferLength * this.config.numberOfChannels );
}
var useOgg = !this.rawOpus;
var samples = this.interleave( buffers );
var sampleIndex = 0;
var exportPages = useOgg? [] : null;
var encodedData = useOgg? null : [];
var bufferLength = this.resampler? this.resampleBufferLength : this.encoderBufferLength;
var buffer = this.resampler? this.resampleBuffer : this.encoderBuffer;
while ( sampleIndex < samples.length ) {
var lengthToCopy = Math.min(bufferLength - this.sampleBufferIndex, samples.length - sampleIndex );
buffer.set( samples.subarray( sampleIndex, sampleIndex+lengthToCopy ), this.sampleBufferIndex );
sampleIndex += lengthToCopy;
this.sampleBufferIndex += lengthToCopy;
if ( this.sampleBufferIndex === bufferLength ) {
if (this.resampler) {
this._speex_resampler_process_interleaved_float( this.resampler, this.resampleBufferPointer, this.resampleSamplesPerChannelPointer, this.encoderBufferPointer, this.encoderSamplesPerChannelPointer );
}
var packetLength = this._opus_encode_float( this.encoder, this.encoderBufferPointer, this.encoderSamplesPerChannel, this.encoderOutputPointer, this.encoderOutputMaxLength );
if(useOgg){
exportPages = exportPages.concat(this.segmentPacket( packetLength )); // concat returns a new array; the result must be kept
this.framesInPage++;
if ( this.framesInPage >= this.config.maxFramesPerPage ) {
exportPages.push( this.generatePage() );
}
} else {
encodedData.push({page: new Uint8Array(this.encoderOutputBuffer.subarray(0, packetLength))});
this.encodedDataLength += packetLength;
}
this.sampleBufferIndex = 0;
}
}
return useOgg ? exportPages : encodedData;
};
OggOpusEncoder.prototype.destroy = function() {
if ( this.encoder ) {
this._free(this.encoderSamplesPerChannelPointer);
delete this.encoderSamplesPerChannelPointer;
this._free(this.encoderBufferPointer);
delete this.encoderBufferPointer;
this._free(this.encoderOutputPointer);
delete this.encoderOutputPointer;
this._opus_encoder_destroy(this.encoder);
delete this.encoder;
if(this.resampler){
this._free(this.resampleSamplesPerChannelPointer);
delete this.resampleSamplesPerChannelPointer;
this._free(this.resampleBufferPointer);
delete this.resampleBufferPointer;
this._speex_resampler_destroy(this.resampler);
delete this.resampler;
}
if(this.encodedData){
this.encodedData = null;
}
}
};
OggOpusEncoder.prototype.flush = function() {
var exportPage;
if ( this.framesInPage ) {
exportPage = this.generatePage();
}
// discard any pending data in resample buffer (only a few ms worth)
this.sampleBufferIndex = 0;
return exportPage;
};
OggOpusEncoder.prototype.encodeFinalFrame = function() {
var useOgg = !this.rawOpus;
var exportPages = useOgg? [] : null;
// Encode the data remaining in the resample buffer.
if ( this.sampleBufferIndex > 0 ) {
var dataToFill = ((this.resampler ? this.resampleBufferLength : this.encoderBufferLength) - this.sampleBufferIndex) / this.config.numberOfChannels;
var numBuffers = Math.ceil(dataToFill / this.bufferLength);
for ( var i = 0; i < numBuffers; i++ ) {
var finalFrameBuffers = [];
for ( var j = 0; j < this.config.numberOfChannels; j++ ) {
finalFrameBuffers.push( new Float32Array( this.bufferLength ));
}
if(useOgg){
exportPages = exportPages.concat(this.encode( finalFrameBuffers ));
} else {
return this.encode( finalFrameBuffers );
}
}
}
if(useOgg){
this.headerType += 4;
exportPages.push(this.generatePage());
return exportPages;
}
// In rawOpus mode there may be nothing left to emit; return an empty array so callers can safely .forEach() the result.
return [];
};
OggOpusEncoder.prototype.getChecksum = function( data ){
var checksum = 0;
for ( var i = 0; i < data.length; i++ ) {
checksum = (checksum << 8) ^ this.checksumTable[ ((checksum>>>24) & 0xff) ^ data[i] ];
}
return checksum >>> 0;
};
OggOpusEncoder.prototype.generateCommentPage = function(){
var segmentDataView = new DataView( this.segmentData.buffer );
segmentDataView.setUint32( 0, 1937076303, true ) // Magic Signature 'Opus'
segmentDataView.setUint32( 4, 1936154964, true ) // Magic Signature 'Tags'
segmentDataView.setUint32( 8, 10, true ); // Vendor Length
segmentDataView.setUint32( 12, 1868784978, true ); // Vendor name 'Reco'
segmentDataView.setUint32( 16, 1919247474, true ); // Vendor name 'rder'
segmentDataView.setUint16( 20, 21322, true ); // Vendor name 'JS'
segmentDataView.setUint32( 22, 0, true ); // User Comment List Length
if(!this.rawOpus){
this.segmentTableIndex = 1;
this.segmentDataIndex = this.segmentTable[0] = 26;
this.headerType = 0;
return this.generatePage();
} else {
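// rawOpus mode (here and in generateIdPage below): the header bytes are computed
// but no page object is returned, so 'getHeaderPages' callers receive undefined,
// which postPage()/postPageGlobal() silently ignore.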
const encodedData = new Uint8Array(this.segmentData.subarray(0, 26));
this.encodedDataLength += 26;
}
};
OggOpusEncoder.prototype.generateIdPage = function(){
var segmentDataView = new DataView( this.segmentData.buffer );
segmentDataView.setUint32( 0, 1937076303, true ) // Magic Signature 'Opus'
segmentDataView.setUint32( 4, 1684104520, true ) // Magic Signature 'Head'
segmentDataView.setUint8( 8, 1, true ); // Version
segmentDataView.setUint8( 9, this.config.numberOfChannels, true ); // Channel count
segmentDataView.setUint16( 10, 3840, true ); // pre-skip (80ms)
segmentDataView.setUint32( 12, this.config.originalSampleRateOverride || this.config.originalSampleRate, true ); // original sample rate
segmentDataView.setUint16( 16, 0, true ); // output gain
segmentDataView.setUint8( 18, 0, true ); // channel map 0 = mono or stereo
if(!this.rawOpus){
this.segmentTableIndex = 1;
this.segmentDataIndex = this.segmentTable[0] = 19;
this.headerType = 2;
return this.generatePage();
} else {
const encodedData = new Uint8Array(this.segmentData.subarray(0, 19));
this.encodedDataLength += 19;
}
};
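// Ogg page header layout written by generatePage() below (little-endian):
//   bytes  0-3   capture pattern 'OggS'
//   byte   4     stream structure version (0)
//   byte   5     header type (1 = continuation, 2 = beginning of stream, 4 = end of stream)
//   bytes  6-13  granule position (signed 64-bit sample count at 48 kHz)
//   bytes 14-17  bitstream serial number
//   bytes 18-21  page sequence number
//   bytes 22-25  CRC-32 checksum over the whole page
//   byte  26     segment count, followed by the segment table and segment data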
OggOpusEncoder.prototype.generatePage = function(){
var granulePosition = ( this.lastPositiveGranulePosition === this.granulePosition) ? -1 : this.granulePosition;
var pageBuffer = new ArrayBuffer( 27 + this.segmentTableIndex + this.segmentDataIndex );
var pageBufferView = new DataView( pageBuffer );
var page = new Uint8Array( pageBuffer );
pageBufferView.setUint32( 0, 1399285583, true); // Capture Pattern starts all page headers 'OggS'
pageBufferView.setUint8( 4, 0, true ); // Version
pageBufferView.setUint8( 5, this.headerType, true ); // 1 = continuation, 2 = beginning of stream, 4 = end of stream
// Number of samples upto and including this page at 48000Hz, into signed 64 bit Little Endian integer
// Javascript Number maximum value is 53 bits or 2^53 - 1
pageBufferView.setUint32( 6, granulePosition, true );
if (granulePosition < 0) {
pageBufferView.setInt32( 10, Math.ceil(granulePosition/4294967297) - 1, true );
}
else {
pageBufferView.setInt32( 10, Math.floor(granulePosition/4294967296), true );
}
pageBufferView.setUint32( 14, this.config.serial, true ); // Bitstream serial number
pageBufferView.setUint32( 18, this.pageIndex++, true ); // Page sequence number
pageBufferView.setUint8( 26, this.segmentTableIndex, true ); // Number of segments in page.
page.set( this.segmentTable.subarray(0, this.segmentTableIndex), 27 ); // Segment Table
page.set( this.segmentData.subarray(0, this.segmentDataIndex), 27 + this.segmentTableIndex ); // Segment Data
pageBufferView.setUint32( 22, this.getChecksum( page ), true ); // Checksum
var exportPage = { message: 'page', page: page, samplePosition: this.granulePosition };
this.segmentTableIndex = 0;
this.segmentDataIndex = 0;
this.framesInPage = 0;
if ( granulePosition > 0 ) {
this.lastPositiveGranulePosition = granulePosition;
}
return exportPage;
};
OggOpusEncoder.prototype.initChecksumTable = function(){
this.checksumTable = [];
for ( var i = 0; i < 256; i++ ) {
var r = i << 24;
for ( var j = 0; j < 8; j++ ) {
r = ((r & 0x80000000) != 0) ? ((r << 1) ^ 0x04c11db7) : (r << 1);
}
this.checksumTable[i] = (r & 0xffffffff);
}
};
OggOpusEncoder.prototype.setOpusControl = function( control, value ){
var location = this._malloc( 4 );
this.HEAP32[ location >> 2 ] = value;
this._opus_encoder_ctl( this.encoder, control, location );
this._free( location );
};
OggOpusEncoder.prototype.initCodec = function() {
var errLocation = this._malloc( 4 );
this.encoder = this._opus_encoder_create( this.config.encoderSampleRate, this.config.numberOfChannels, this.config.encoderApplication, errLocation );
this._free( errLocation );
if ( this.config.encoderBitRate ) {
this.setOpusControl( 4002, this.config.encoderBitRate );
}
if ( this.config.encoderComplexity ) {
this.setOpusControl( 4010, this.config.encoderComplexity );
}
this.encoderSamplesPerChannel = this.config.encoderSampleRate * this.config.encoderFrameSize / 1000;
this.encoderSamplesPerChannelPointer = this._malloc( 4 );
this.HEAP32[ this.encoderSamplesPerChannelPointer >> 2 ] = this.encoderSamplesPerChannel;
this.sampleBufferIndex = 0;
this.encoderBufferLength = this.encoderSamplesPerChannel * this.config.numberOfChannels;
this.encoderBufferPointer = this._malloc( this.encoderBufferLength * 4 ); // 4 bytes per sample
this.encoderBuffer = this.HEAPF32.subarray( this.encoderBufferPointer >> 2, (this.encoderBufferPointer >> 2) + this.encoderBufferLength );
this.encoderOutputMaxLength = 4000;
this.encoderOutputPointer = this._malloc( this.encoderOutputMaxLength );
this.encoderOutputBuffer = this.HEAPU8.subarray( this.encoderOutputPointer, this.encoderOutputPointer + this.encoderOutputMaxLength );
};
OggOpusEncoder.prototype.initResampler = function() {
if ( this.config.originalSampleRate === this.config.encoderSampleRate ) {
this.resampler = null;
return;
}
var errLocation = this._malloc( 4 );
this.resampler = this._speex_resampler_init( this.config.numberOfChannels, this.config.originalSampleRate, this.config.encoderSampleRate, this.config.resampleQuality, errLocation );
this._free( errLocation );
this.resampleSamplesPerChannel = this.config.originalSampleRate * this.config.encoderFrameSize / 1000;
this.resampleSamplesPerChannelPointer = this._malloc( 4 );
this.HEAP32[ this.resampleSamplesPerChannelPointer >> 2 ] = this.resampleSamplesPerChannel;
this.resampleBufferLength = this.resampleSamplesPerChannel * this.config.numberOfChannels;
this.resampleBufferPointer = this._malloc( this.resampleBufferLength * 4 ); // 4 bytes per sample
this.resampleBuffer = this.HEAPF32.subarray( this.resampleBufferPointer >> 2, (this.resampleBufferPointer >> 2) + this.resampleBufferLength );
};
OggOpusEncoder.prototype.interleave = function( buffers ) {
for ( var i = 0; i < this.bufferLength; i++ ) {
for ( var channel = 0; channel < this.config.numberOfChannels; channel++ ) {
this.interleavedBuffers[ i * this.config.numberOfChannels + channel ] = buffers[ channel ][ i ];
}
}
return this.interleavedBuffers;
};
OggOpusEncoder.prototype.segmentPacket = function( packetLength ) {
var packetIndex = 0;
var exportPages = [];
while ( packetLength >= 0 ) {
if ( this.segmentTableIndex === 255 ) {
exportPages.push( this.generatePage() );
this.headerType = 1;
}
var segmentLength = Math.min( packetLength, 255 );
this.segmentTable[ this.segmentTableIndex++ ] = segmentLength;
this.segmentData.set( this.encoderOutputBuffer.subarray( packetIndex, packetIndex + segmentLength ), this.segmentDataIndex );
this.segmentDataIndex += segmentLength;
packetIndex += segmentLength;
packetLength -= 255;
}
this.granulePosition += ( 48 * this.config.encoderFrameSize );
if ( this.segmentTableIndex === 255 ) {
exportPages.push( this.generatePage() );
this.headerType = 0;
}
return exportPages;
};
if(typeof exports !== 'undefined'){
exports.OggOpusEncoder = OggOpusEncoder;
} else if(typeof module === 'object' && module && module.exports){
module.exports.OggOpusEncoder = OggOpusEncoder;
}

327
static/recorder.js Normal file

@@ -0,0 +1,327 @@
"use strict";
// Code courtesy of Symbl, see https://github.com/symblai/opus-encdec for license
var AudioContext = globalThis.AudioContext || globalThis.webkitAudioContext;
// Constructor
var Recorder = function( config = {} ){
if ( !Recorder.isRecordingSupported() ) {
throw new Error("Recording is not supported in this browser");
}
this.state = "inactive";
this.config = Object.assign({
bufferLength: 4096,
encoderApplication: 2049,
encoderFrameSize: 20,
encoderPath: 'encoderWorker.js',
encoderSampleRate: 48000,
maxFramesPerPage: 40,
mediaTrackConstraints: true,
monitorGain: 0,
numberOfChannels: 1,
recordingGain: 1,
resampleQuality: 3,
streamPages: false,
wavBitDepth: 16,
sourceNode: { context: null },
}, config );
this.encodedSamplePosition = 0;
this.initAudioContext();
this.initialize = this.initWorklet().then(() => this.initEncoder());
};
// Static Methods
Recorder.isRecordingSupported = function(){
const getUserMediaSupported = globalThis.navigator && globalThis.navigator.mediaDevices && globalThis.navigator.mediaDevices.getUserMedia;
return AudioContext && getUserMediaSupported && globalThis.WebAssembly;
};
Recorder.version = '0.1.1';
// Instance Methods
Recorder.prototype.clearStream = function(){
if ( this.stream ){
if ( this.stream.getTracks ) {
this.stream.getTracks().forEach(track => track.stop());
}
else {
this.stream.stop();
}
}
};
Recorder.prototype.close = function() {
this.monitorGainNode.disconnect();
this.recordingGainNode.disconnect();
if (this.sourceNode) {
this.sourceNode.disconnect();
}
this.clearStream();
if (this.encoder) {
this.encoderNode.disconnect();
this.encoder.postMessage({ command: "close" });
}
if ( !this.config.sourceNode.context ){
return this.audioContext.close();
}
return Promise.resolve();
}
Recorder.prototype.encodeBuffers = function( inputBuffer ){
if ( this.state === "recording" ) {
var buffers = [];
for ( var i = 0; i < inputBuffer.numberOfChannels; i++ ) {
buffers[i] = inputBuffer.getChannelData(i);
}
this.encoder.postMessage({
command: "encode",
buffers: buffers
});
}
};
Recorder.prototype.initAudioContext = function(){
this.audioContext = this.config.sourceNode.context ? this.config.sourceNode.context : new AudioContext();
this.monitorGainNode = this.audioContext.createGain();
this.setMonitorGain( this.config.monitorGain );
this.recordingGainNode = this.audioContext.createGain();
this.setRecordingGain( this.config.recordingGain );
};
Recorder.prototype.initEncoder = function() {
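// Note: this build always takes the ScriptProcessor + dedicated Worker path;
// initWorklet() (called first from the constructor) still registers the module,
// so the 'encoder-worklet' branch in encoderWorker.js remains available.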
console.log('Encoding via ScriptProcessorNode feeding a dedicated Worker');
// Skip the first buffer
this.encodeBuffers = () => delete this.encodeBuffers;
this.encoderNode = this.audioContext.createScriptProcessor( this.config.bufferLength, this.config.numberOfChannels, this.config.numberOfChannels );
this.encoderNode.onaudioprocess = ({ inputBuffer }) => this.encodeBuffers( inputBuffer );
this.encoderNode.connect( this.audioContext.destination ); // Requires connection to destination to process audio
this.encoder = new globalThis.Worker(this.config.encoderPath);
};
Recorder.prototype.initSourceNode = function(){
if ( this.config.sourceNode.context ) {
this.sourceNode = this.config.sourceNode;
return Promise.resolve();
}
return globalThis.navigator.mediaDevices.getUserMedia({ audio : this.config.mediaTrackConstraints }).then( stream => {
this.stream = stream;
this.sourceNode = this.audioContext.createMediaStreamSource( stream );
});
};
Recorder.prototype.initWorker = function(){
var onPage = (this.config.streamPages ? this.streamPage : this.storePage).bind(this);
this.recordedPages = [];
this.totalLength = 0;
return new Promise(resolve => {
var callback = ({ data }) => {
switch( data['message'] ){
case 'ready':
resolve();
break;
case 'page':
this.encodedSamplePosition = data['samplePosition'];
onPage(data['page']);
break;
case 'done':
this.encoder.removeEventListener( "message", callback );
this.finish();
break;
default:
if (data["page"]) {
onPage(data["page"]);
}
}
};
this.encoder.addEventListener( "message", callback );
// must call start for messagePort messages
if( this.encoder.start ) {
this.encoder.start()
}
// exclude sourceNode
const {sourceNode, ...config} = this.config;
this.encoder.postMessage( Object.assign({
command: 'init',
originalSampleRate: this.audioContext.sampleRate,
wavSampleRate: this.audioContext.sampleRate
}, config));
});
};
Recorder.prototype.initWorklet = function() {
if (this.audioContext.audioWorklet) {
return this.audioContext.audioWorklet.addModule(this.config.encoderPath);
}
return Promise.resolve();
}
Recorder.prototype.pause = function( flush ) {
if ( this.state === "recording" ) {
this.state = "paused";
this.recordingGainNode.disconnect();
if ( flush && this.config.streamPages ) {
return new Promise(resolve => {
var callback = ({ data }) => {
if ( data["message"] === 'flushed' ) {
this.encoder.removeEventListener( "message", callback );
this.onpause();
resolve();
}
};
this.encoder.addEventListener( "message", callback );
// must call start for messagePort messages
if ( this.encoder.start ) {
this.encoder.start()
}
this.encoder.postMessage( { command: "flush" } );
});
}
this.onpause();
return Promise.resolve();
}
};
Recorder.prototype.resume = function() {
if ( this.state === "paused" ) {
this.state = "recording";
this.recordingGainNode.connect(this.encoderNode);
this.onresume();
}
};
Recorder.prototype.setRecordingGain = function( gain ){
this.config.recordingGain = gain;
if ( this.recordingGainNode && this.audioContext ) {
this.recordingGainNode.gain.setTargetAtTime(gain, this.audioContext.currentTime, 0.01);
}
};
Recorder.prototype.setMonitorGain = function( gain ){
this.config.monitorGain = gain;
if ( this.monitorGainNode && this.audioContext ) {
this.monitorGainNode.gain.setTargetAtTime(gain, this.audioContext.currentTime, 0.01);
}
};
Recorder.prototype.start = function(){
if ( this.state === "inactive" ) {
this.state = 'loading';
this.encodedSamplePosition = 0;
return this.audioContext.resume()
.then(() => this.initialize)
.then(() => Promise.all([this.initSourceNode(), this.initWorker()]))
.then(() => {
this.state = "recording";
this.encoder.postMessage({ command: 'getHeaderPages' });
this.sourceNode.connect( this.monitorGainNode );
this.sourceNode.connect( this.recordingGainNode );
this.monitorGainNode.connect( this.audioContext.destination );
this.recordingGainNode.connect( this.encoderNode );
this.onstart();
})
.catch(error => {
this.state = 'inactive';
throw error;
});
}
return Promise.resolve();
};
Recorder.prototype.stop = function(){
if ( this.state === "paused" || this.state === "recording" ) {
this.state = "inactive";
// macOS and iOS requires the source to remain connected (in case stopped while paused)
this.recordingGainNode.connect( this.encoderNode );
this.monitorGainNode.disconnect();
this.clearStream();
return new Promise(resolve => {
var callback = ({ data }) => {
if ( data["message"] === 'done' ) {
this.encoder.removeEventListener( "message", callback );
resolve();
}
};
this.encoder.addEventListener( "message", callback );
// must call start for messagePort messages
if( this.encoder.start ) {
this.encoder.start()
}
this.encoder.postMessage({ command: "done" });
});
}
return Promise.resolve();
};
Recorder.prototype.storePage = function( page ) {
this.recordedPages.push( page );
this.totalLength += page.length;
};
Recorder.prototype.streamPage = function( page ) {
this.ondataavailable( page );
};
Recorder.prototype.finish = function() {
if( !this.config.streamPages ) {
var outputData = new Uint8Array( this.totalLength );
this.recordedPages.reduce( function( offset, page ){
outputData.set( page, offset );
return offset + page.length;
}, 0);
this.ondataavailable( outputData );
}
this.onstop();
};
// Callback Handlers
Recorder.prototype.ondataavailable = function(){};
Recorder.prototype.onpause = function(){};
Recorder.prototype.onresume = function(){};
Recorder.prototype.onstart = function(){};
Recorder.prototype.onstop = function(){};
window.Recorder = Recorder;
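// Example usage (sketch mirroring src/routes/+page.svelte):
//
//   const recorder = new Recorder({
//     encoderApplication: 2049, // full-band audio
//     encoderFrameSize: 40,     // milliseconds per Opus frame
//     streamPages: true,        // deliver data incrementally via ondataavailable
//     rawOpus: true             // raw Opus packets instead of Ogg pages
//   });
//   recorder.ondataavailable = (packet) => { /* e.g. socket.emit("audio", packet) */ };
//   recorder.start();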

5
svelte.config.js Normal file

@@ -0,0 +1,5 @@
import adapter from '@sveltejs/adapter-static';
const config = { kit: { adapter: adapter() } };
export default config;

7
vite.config.js Normal file

@@ -0,0 +1,7 @@
import tailwindcss from '@tailwindcss/vite';
import { sveltekit } from '@sveltejs/kit/vite';
import { defineConfig } from 'vite';
export default defineConfig({
plugins: [tailwindcss(), sveltekit()]
});