test gc

parent 44eaa5ee8d
commit dc50d732ad

4 changed files with 276 additions and 31 deletions
@@ -21,6 +21,8 @@ export class BrowserWindow {
     private devMode: boolean = !app.isPackaged;

     private audioStream: any = new Stream.PassThrough();
+    private headerSent: any = false;
+    private chromecastIP : any = [];
     private clientPort: number = 0;
     private remotePort: number = 6942;
     private EnvironmentVariables: object = {
@@ -317,7 +319,7 @@ export class BrowserWindow {
             console.error('Req not defined')
             return
         }
-        if (req.url.includes("audio.webm") || (req.headers.host.includes("localhost") && (this.devMode || req.headers["user-agent"].includes("Electron")))) {
+        if (req.url.includes("audio.wav") || (req.headers.host.includes("localhost") && (this.devMode || req.headers["user-agent"].includes("Electron")))) {
             next();
         } else {
             res.redirect("https://discord.gg/applemusic");
@@ -402,16 +404,22 @@ export class BrowserWindow {
             }
         });

-        app.get("/audio.webm", (req, res) => {
+        app.get("/audio.wav", (req, res) => {
             try {
+                const ip = req.headers['x-forwarded-for'] || req.connection.remoteAddress;
+                if (!this.chromecastIP.includes(ip)) {
+                    this.headerSent = false;
+                    this.chromecastIP.push(ip)
+                }
                 req.socket.setTimeout(Number.MAX_SAFE_INTEGER);
                 // CiderBase.requests.push({req: req, res: res});
                 // var pos = CiderBase.requests.length - 1;
-                // req.on("close", () => {
-                //     console.info("CLOSED", CiderBase.requests.length);
-                //     requests.splice(pos, 1);
-                //     console.info("CLOSED", CiderBase.requests.length);
-                // });
+                req.on("close", () => {
+                    console.log('disconnected')
+                    this.headerSent = false
+                    this.chromecastIP = this.chromecastIP.filter((item: any) => item !== ip);
+                });

                 this.audioStream.on("data", (data: any) => {
                     try {
                         res.write(data);
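A note on the route above: every request to /audio.wav subscribes its response to the shared PassThrough stream, and the "close" handler only resets this.headerSent and drops the client IP from this.chromecastIP; within this hunk nothing detaches the "data" listener again. A minimal sketch of the detach pattern, reusing the app and this.audioStream names from the hunk (the onData helper is illustrative, not part of the commit):

// Sketch only: unsubscribe the per-request listener on disconnect so the shared
// PassThrough does not accumulate "data" handlers across Chromecast reconnects.
app.get("/audio.wav", (req, res) => {
    const onData = (chunk: Buffer) => res.write(chunk);
    this.audioStream.on("data", onData);
    req.on("close", () => {
        this.audioStream.removeListener("data", onData);
        res.end();
    });
});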
@@ -762,9 +770,119 @@ export class BrowserWindow {
             `)
         });

-        ipcMain.on('writeAudio', (event, buffer) => {
-            this.audioStream.write(Buffer.from(buffer));
-        })
+        ipcMain.on('writeWAV', (event, leftpcm, rightpcm, bufferlength) => {
+            function interleave16(leftChannel: any, rightChannel: any) {
+                var length = leftChannel.length + rightChannel.length;
+                var result = new Int16Array(length);
+
+                var inputIndex = 0;
+
+                for (var index = 0; index < length;) {
+                    result[index++] = leftChannel[inputIndex];
+                    result[index++] = rightChannel[inputIndex];
+                    inputIndex++;
+                }
+                return result;
+            }
+
+            //https://github.com/HSU-ANT/jsdafx
+
+            function quantization(audiobufferleft: any, audiobufferright: any) {
+
+                let h = Float32Array.from([1]);
+                let nsState = new Array(0);
+                let ditherstate = new Float32Array(0);
+                let qt = Math.pow(2, 1 - 16);
+
+                //noise shifting order 3
+                h = Float32Array.from([1.623, -0.982, 0.109]);
+                for (let i = 0; i < nsState.length; i++) {
+                    nsState[i] = new Float32Array(h.length);
+                }
+
+
+                function setChannelCount(nc: any) {
+                    if (ditherstate.length !== nc) {
+                        ditherstate = new Float32Array(nc);
+                    }
+                    if (nsState.length !== nc) {
+                        nsState = new Array(nc);
+                        for (let i = 0; i < nsState.length; i++) {
+                            nsState[i] = new Float32Array(h.length);
+                        }
+                    }
+                }
+
+                function hpDither(channel: any) {
+                    const rnd = Math.random() - 0.5;
+                    const d = rnd - ditherstate[channel];
+                    ditherstate[channel] = rnd;
+                    return d;
+                }
+
+
+                setChannelCount(2);
+                const inputs = [audiobufferleft, audiobufferright];
+                const outputs = [audiobufferleft, audiobufferright];
+
+                for (let channel = 0; channel < inputs.length; channel++) {
+                    const inputData = inputs[channel];
+                    const outputData = outputs[channel];
+                    for (let sample = 0; sample < bufferlength; sample++) {
+                        let input = inputData[sample];
+                        // console.log('a2',inputData.length);
+                        for (let i = 0; i < h.length; i++) {
+                            input -= h[i] * nsState[channel][i];
+                        }
+                        // console.log('a3',input);
+                        let d_rand = 0.0;
+                        // ditherstate = new Float32Array(h.length);
+                        d_rand = hpDither(channel);
+                        const tmpOutput = qt * Math.round(input / qt + d_rand);
+                        for (let i = h.length - 1; i >= 0; i--) {
+                            nsState[channel][i] = nsState[channel][i - 1];
+                        }
+                        nsState[channel][0] = tmpOutput - input;
+                        outputData[sample] = tmpOutput;
+                    }
+                }
+                return outputs;
+            }
+
+
+            function convert(n: any) {
+                var v = n < 0 ? n * 32768 : n * 32767; // convert in range [-32768, 32767]
+                return Math.max(-32768, Math.min(32768, v)); // clamp
+            }
+
+            let newaudio = quantization(leftpcm, rightpcm);
+            // console.log(newaudio.length);
+            let pcmData = Buffer.from(new Int8Array(interleave16(Int16Array.from(newaudio[0], x => convert(x)), Int16Array.from(newaudio[1], x => convert(x))).buffer));
+
+            if (!this.headerSent) {
+                console.log('new header')
+                const header = Buffer.alloc(44)
+                header.write('RIFF', 0)
+                header.writeUInt32LE(2147483600, 4)
+                header.write('WAVE', 8)
+                header.write('fmt ', 12)
+                header.writeUInt8(16, 16)
+                header.writeUInt8(1, 20)
+                header.writeUInt8(2, 22)
+                header.writeUInt32LE(48000, 24)
+                header.writeUInt32LE(16, 28)
+                header.writeUInt8(4, 32)
+                header.writeUInt8(16, 34)
+                header.write('data', 36)
+                header.writeUInt32LE(2147483600 + 44 - 8, 40)
+                this.audioStream.write(Buffer.concat([header, pcmData]));
+                this.headerSent = true;
+            } else {
+                this.audioStream.write(pcmData);
+            }
+
+        });

         //QR Code
         ipcMain.handle('showQR', async (_event, _) => {
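The writeWAV handler above converts the incoming float samples to 16-bit PCM with high-pass dither and third-order noise shaping (qt = 2^(1-16) is the quantization step for full-scale ±1 audio), interleaves the two channels, and prepends a 44-byte RIFF/WAVE header on the first write, with the size fields pinned near 2^31 so the receiver treats the stream as effectively endless. For reference, the canonical PCM header uses 16- and 32-bit little-endian fields; because Buffer.alloc zero-fills, the writeUInt8 calls above land on the same bytes for every field except the byte-rate at offset 28, which would normally be sampleRate × channels × bytesPerSample (192000 here) rather than 16. A sketch of a header builder at the standard field widths (a hypothetical helper, not code from this commit):

// Hypothetical helper: canonical 44-byte PCM WAV header for a live 48 kHz stereo
// 16-bit stream, with the size fields set near UINT32 max for an open-ended stream.
function buildWavHeader(sampleRate: number = 48000, channels: number = 2, bitsPerSample: number = 16): Buffer {
    const blockAlign = channels * bitsPerSample / 8;   // bytes per sample frame (4)
    const byteRate = sampleRate * blockAlign;          // 192000 bytes per second
    const dataSize = 2147483600;                       // "unknown" length, as in the handler above
    const header = Buffer.alloc(44);
    header.write('RIFF', 0);
    header.writeUInt32LE(dataSize + 36, 4);            // RIFF chunk size = data size + 36
    header.write('WAVE', 8);
    header.write('fmt ', 12);
    header.writeUInt32LE(16, 16);                      // fmt chunk size for PCM
    header.writeUInt16LE(1, 20);                       // audio format 1 = linear PCM
    header.writeUInt16LE(channels, 22);
    header.writeUInt32LE(sampleRate, 24);
    header.writeUInt32LE(byteRate, 28);
    header.writeUInt16LE(blockAlign, 32);
    header.writeUInt16LE(bitsPerSample, 34);
    header.write('data', 36);
    header.writeUInt32LE(dataSize, 40);
    return header;
}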
@@ -142,8 +142,8 @@ export default class ChromecastPlugin {
         }
         let media = {
             // Here you can plug an URL to any mp4, webm, mp3 or jpg file with the proper contentType.
-            contentId: 'http://' + this.getIp() + ':9000/audio.webm',
-            contentType: 'audio/webm',
+            contentId: 'http://' + this.getIp() + ':9000/audio.wav',
+            contentType: 'audio/wav',
             streamType: 'LIVE', // or LIVE

             // Title and cover displayed while buffering
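The comment and field names in this media object follow the castv2-client DefaultMediaReceiver example, so the object is presumably handed to that library's load call; a typical invocation would look like the sketch below (the player handle and the autoplay option are assumptions, not taken from this commit):

// Assumed usage, mirroring the castv2-client README; `player` would be the
// DefaultMediaReceiver session this plugin launches elsewhere.
player.load(media, { autoplay: true }, (err: any, status: any) => {
    if (err) {
        console.error(err);
        return;
    }
    console.log('media loaded, playerState:', status.playerState);
});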
@@ -9,6 +9,7 @@ var CiderAudio = {
         vibrantbassNode: null,
         llpw: null,
         analogWarmth: null,
+        recorderNode: null,
     },
     ccON: false,
     mediaRecorder: null,
@@ -140,33 +141,159 @@ var CiderAudio = {
     sendAudio: function (){
         if (!CiderAudio.ccON) {
             CiderAudio.ccON = true
-            let searchInt = setInterval(function () {
+            let searchInt = setInterval(async function () {
                 if (CiderAudio.context != null && CiderAudio.audioNodes.gainNode != null) {
-                    var options = {
-                        mimeType: 'audio/webm; codecs=opus'
-                    };
-                    var destnode = CiderAudio.context.createMediaStreamDestination();
-                    CiderAudio.audioNodes.gainNode.connect(destnode)
-                    CiderAudio.mediaRecorder = new MediaRecorder(destnode.stream, options);
-                    CiderAudio.mediaRecorder.start(1);
-                    CiderAudio.mediaRecorder.ondataavailable = function (e) {
-                        e.data.arrayBuffer().then(buffer => {
-                            ipcRenderer.send('writeAudio', buffer)
+                    // var options = {
+                    //     mimeType: 'audio/webm; codecs=opus'
+                    // };
+                    // var destnode = CiderAudio.context.createMediaStreamDestination();
+                    // CiderAudio.audioNodes.gainNode.connect(destnode)
+                    // CiderAudio.mediaRecorder = new MediaRecorder(destnode.stream, options);
+                    // CiderAudio.mediaRecorder.start(1);
+                    // CiderAudio.mediaRecorder.ondataavailable = function (e) {
+                    //     e.data.arrayBuffer().then(buffer => {
+                    //         ipcRenderer.send('writeAudio', buffer)
+                    //     }
+                    //     );
+                    // }
+                    const worklet = `class RecorderWorkletProcessor extends AudioWorkletProcessor {
+                        static get parameterDescriptors() {
+                            return [{
+                                name: 'isRecording',
+                                defaultValue: 0
+                            },
+                            {
+                                name: 'numberOfChannels',
+                                defaultValue: 2
                             }
-                        );
+                            ];
                         }

+                        constructor() {
+                            super();
+                            this._bufferSize = 4096;
+                            this._buffers = null;
+                            this._initBuffer();
+                        }
+                        _initBuffers(numberOfChannels) {
+                            this._buffers = [];
+                            for (let channel=0; channel < numberOfChannels; channel++) {
+                                this._buffers.push(new Float32Array(this._bufferSize));
+                            }
+                        }
+
+                        _initBuffer() {
+                            this._bytesWritten = 0;
+                        }
+
+                        _isBufferEmpty() {
+                            return this._bytesWritten === 0;
+                        }
+
+                        _isBufferFull() {
+                            return this._bytesWritten === this._bufferSize;
+                        }
+                        _pushToBuffers(audioRawData, numberOfChannels) {
+                            if (this._isBufferFull()) {
+                                this._flush();
+                            }
+                            let dataLength = audioRawData[0].length;
+                            for (let idx=0; idx<dataLength; idx++) {
+                                for (let channel=0; channel < numberOfChannels; channel++) {
+                                    let value = audioRawData[channel][idx];
+                                    this._buffers[channel][this._bytesWritten] = value;
+                                }
+                                this._bytesWritten += 1;
+                            }
+                        }
+
+                        _flush() {
+                            let buffers = [];
+                            this._buffers.forEach((buffer, channel) => {
+                                if (this._bytesWritten < this._bufferSize) {
+                                    buffer = buffer.slice(0, this._bytesWritten);
+                                }
+                                buffers[channel] = buffer;
+                            });
+                            this.port.postMessage({
+                                eventType: 'data',
+                                audioBuffer: buffers,
+                                bufferSize: this._bufferSize
+                            });
+                            this._initBuffer();
+                        }
+
+                        _recordingStopped() {
+                            this.port.postMessage({
+                                eventType: 'stop'
+                            });
+                        }
+
+                        process(inputs, outputs, parameters) {
+                            const isRecordingValues = parameters.isRecording;
+                            const numberOfChannels = parameters.numberOfChannels[0];
+                            if (this._buffers === null) {
+                                this._initBuffers(numberOfChannels);
+                            }
+
+                            for (let dataIndex = 0; dataIndex < isRecordingValues.length; dataIndex++)
+                            {
+                                const shouldRecord = isRecordingValues[dataIndex] === 1;
+                                if (!shouldRecord && !this._isBufferEmpty()) {
+                                    this._flush();
+                                    this._recordingStopped();
+                                }
+
+                                if (shouldRecord) {
+                                    let audioRawData = inputs[0];
+                                    this._pushToBuffers(audioRawData, numberOfChannels);
+                                }
+                            }
+                            return true;
+                        }
+
+                    }
+
+                    registerProcessor('recorder-worklet', RecorderWorkletProcessor);`
+                    let blob = new Blob([worklet], { type: 'application/javascript' });
+                    await CiderAudio.context.audioWorklet.addModule(URL.createObjectURL(blob))
+                        .then(() => {
+
+                            const channels = 2;
+                            CiderAudio.audioNodes.recorderNode = new window.AudioWorkletNode(CiderAudio.context,
+                                'recorder-worklet',
+                                { parameterData: { numberOfChannels: channels } });
+                            CiderAudio.audioNodes.recorderNode.port.onmessage = (e) => {
+                                const data = e.data;
+                                switch (data.eventType) {
+                                    case "data":
+                                        const audioData = data.audioBuffer;
+                                        const bufferSize = data.bufferSize;
+                                        ipcRenderer.send('writeWAV', audioData[0], audioData[1], bufferSize);
+                                        break;
+                                    case "stop":
+                                        break;
+                                }
+                            }
+                            CiderAudio.audioNodes.recorderNode.parameters.get('isRecording').setValueAtTime(1, CiderAudio.context.currentTime);
+                            CiderAudio.audioNodes.gainNode.connect(CiderAudio.audioNodes.recorderNode);
+
+                        });
                     clearInterval(searchInt);
                 }
             }, 1000);
-        }
+        } else {if (CiderAudio.audioNodes.recorderNode != null && CiderAudio.context != null) {
+            CiderAudio.audioNodes.recorderNode.parameters.get('isRecording').setValueAtTime(1, CiderAudio.context.currentTime);
+            // CiderAudio.audioNodes.recorderNode = null;
+            // CiderAudio.ccON = false;
+        }}

     },
     stopAudio(){
-        if (CiderAudio.mediaRecorder != null){
-            CiderAudio.mediaRecorder.stop();
-            CiderAudio.mediaRecorder = null;
-            CiderAudio.ccON = false;
+        if (CiderAudio.audioNodes.recorderNode != null && CiderAudio.context != null) {
+            CiderAudio.audioNodes.recorderNode.parameters.get('isRecording').setValueAtTime(0, CiderAudio.context.currentTime);
+            // CiderAudio.audioNodes.recorderNode = null;
+            // CiderAudio.ccON = false;
         }
     },
     analogWarmth_h2_3: function (status, hierarchy){
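The inline RecorderWorkletProcessor above buffers each channel into 4096-sample Float32Arrays and posts them to the renderer thread over its MessagePort; since process() runs on 128-frame render quanta, a block is flushed about every 32 calls (roughly 85 ms at 48 kHz), and the isRecording AudioParam gates capture and triggers a final flush plus a 'stop' message. The worklet source is wrapped in a Blob URL because audioWorklet.addModule() only accepts a script URL. The messages that the port.onmessage handler and the 'writeWAV' IPC call rely on have the shape below (an illustrative TypeScript type, not code from the commit):

// Illustrative message contract for the recorder worklet above.
type RecorderWorkletMessage =
    | { eventType: 'data'; audioBuffer: Float32Array[]; bufferSize: number } // one Float32Array per channel
    | { eventType: 'stop' };                                                 // posted after the final flush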
@@ -96,7 +96,7 @@
                     <span class="usermenu-item-icon"><%- include("../svg/smartphone.svg") %></span>
                     <span class="usermenu-item-name">{{$root.getLz('action.showWebRemoteQR')}}</span>
                 </button>
-                <button class="usermenu-item" v-if="cfg.advanced.AudioContext && isDev" @click="modals.castMenu = true">
+                <button class="usermenu-item" v-if="cfg.advanced.AudioContext" @click="modals.castMenu = true">
                     <span class="usermenu-item-icon"><%- include("../svg/cast.svg") %></span>
                     <span class="usermenu-item-name">Cast</span>
                 </button>