Fix SDL audio playback with 16-bit stereo sound

My commit last month, "Fix SDL audio playback with surround sound", broke
16-bit stereo sound: S_TransferStereo16() still assumed that dma.samples
was a power of two. I also cleaned up code related to that commit.
Zack Middleton 2018-10-01 21:28:15 -05:00
parent 93dd14c9fb
commit 58b0fb07cd
4 changed files with 17 additions and 20 deletions
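
For context on the breakage: ring-buffer positions in the mixer are wrapped back into the DMA buffer, and wrapping with a bitmask is only valid when the buffer length is a power of two. A minimal sketch of that assumption (illustrative names only, not the actual ioquake3 code):

    /* Sketch only: the power-of-two assumption behind the bug. */
    int wrap_mask(int pos, int samples)   { return pos & (samples - 1); }  /* valid only if samples is 2^n */
    int wrap_modulo(int pos, int samples) { return pos % samples; }        /* valid for any buffer size */

    /* With samples = 1366 (not a power of two):
       wrap_mask(1400, 1366)   == 1360   -- wrong
       wrap_modulo(1400, 1366) ==   34   -- right */

Code that still masked positions against (dma.samples - 1), like S_TransferStereo16(), went wrong once dma.samples was no longer guaranteed to be a power of two after the surround-sound changes.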

@@ -1242,9 +1242,6 @@ void S_GetSoundtime(void)
 	int		samplepos;
 	static	int		buffers;
 	static	int		oldsamplepos;
-	int		fullsamples;
-
-	fullsamples = dma.samples / dma.channels;
 
 	if( CL_VideoRecording( ) )
 	{
@@ -1268,13 +1265,13 @@ void S_GetSoundtime(void)
 		if (s_paintedtime > 0x40000000)
 		{	// time to chop things off to avoid 32 bit limits
 			buffers = 0;
-			s_paintedtime = fullsamples;
+			s_paintedtime = dma.fullsamples;
 			S_Base_StopAllSounds ();
 		}
 	}
 	oldsamplepos = samplepos;
 
-	s_soundtime = buffers*fullsamples + samplepos/dma.channels;
+	s_soundtime = buffers*dma.fullsamples + samplepos/dma.channels;
 
 #if 0
 // check to make sure that we haven't overshot
@@ -1295,7 +1292,6 @@ void S_GetSoundtime(void)
 
 void S_Update_(void) {
 	unsigned	endtime;
-	int		samps;
 	static float	lastTime = 0.0f;
 	float		ma, op;
 	float		thisTime, sane;
@@ -1339,9 +1335,8 @@ void S_Update_(void) {
 		& ~(dma.submission_chunk-1);
 
 	// never mix more than the complete buffer
-	samps = dma.samples / dma.channels;
-	if (endtime - s_soundtime > samps)
-		endtime = s_soundtime + samps;
+	if (endtime - s_soundtime > dma.fullsamples)
+		endtime = s_soundtime + dma.fullsamples;
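
The hunks above drop the per-function fullsamples/samps locals in favor of a shared dma.fullsamples field. That field is presumably initialized once where the DMA buffer is set up (in one of the other changed files, not shown here), mirroring the computation the removed lines performed:

    /* Assumed initialization, mirroring the removed local computation:
       one "full sample" (frame) = one sample per channel. */
    dma.fullsamples = dma.samples / dma.channels;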