fix rounding bug in buffer size calculation (MAX_SAMPLES_PER_WINDOW)
This commit is contained in:
parent bef021a181
commit 1ba525b929
@@ -110,7 +110,13 @@ typedef signed int Int32_t;
 #define MAX_dB 120. /* Table entries for 0...MAX_dB (normal max. values are 70...80 dB) */
 
 #define MAX_ORDER (BUTTER_ORDER > YULE_ORDER ? BUTTER_ORDER : YULE_ORDER)
-#define MAX_SAMPLES_PER_WINDOW (size_t) (MAX_SAMP_FREQ * RMS_WINDOW_TIME) /* max. Samples per Time slice */
+/* [JEC] the following was originally #defined as:
+ *   (size_t) (MAX_SAMP_FREQ * RMS_WINDOW_TIME)
+ * but that seemed to fail to take into account the ceil() part of the
+ * sampleWindow calculation in ResetSampleFrequency(), and was causing
+ * buffer overflows for 48kHz analysis, hence the +1.
+ */
+#define MAX_SAMPLES_PER_WINDOW (size_t) (MAX_SAMP_FREQ * RMS_WINDOW_TIME + 1.) /* max. Samples per Time slice */
 #define PINK_REF 64.82 /* 298640883795 */ /* calibration value */
 
 static Float_t linprebuf [MAX_ORDER * 2];
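The mismatch being guarded against: ResetSampleFrequency() sizes the analysis window with ceil(), while the old macro truncated the same product when sizing the static buffers, so the buffers could come out one sample short whenever the product landed fractionally above the integer. Below is a minimal sketch of that failure mode, assuming the usual ReplayGain reference values MAX_SAMP_FREQ = 48000. and RMS_WINDOW_TIME = 0.050 (neither constant appears in this diff); nextafter() is used to model the product landing just above 2400, as can happen with excess intermediate precision (e.g. x87 extended).

#include <math.h>
#include <stdio.h>

#define MAX_SAMP_FREQ   48000.   /* assumed reference value */
#define RMS_WINDOW_TIME 0.050    /* assumed reference value */

int main(void)
{
    /* 0.050 has no exact binary representation, so the product can land
     * a hair above 2400 depending on how intermediates are evaluated;
     * nextafter() forces that case deterministically for this sketch. */
    double product = nextafter(MAX_SAMP_FREQ * RMS_WINDOW_TIME, 1e9);

    /* The window length as ResetSampleFrequency() computes it: ceil()
     * rounds the slightly-too-large product up to 2401. */
    size_t window = (size_t) ceil(product);

    /* The old macro truncated instead, declaring 2400-sample buffers:
     * one sample smaller than the window used at 48 kHz. */
    size_t old_macro = (size_t) (MAX_SAMP_FREQ * RMS_WINDOW_TIME);

    /* The fixed macro adds 1. before truncating, so the static buffers
     * are never smaller than the ceil()'d window. */
    size_t new_macro = (size_t) (MAX_SAMP_FREQ * RMS_WINDOW_TIME + 1.);

    printf("window=%zu old=%zu new=%zu\n", window, old_macro, new_macro);
    return 0;
}

With the values assumed above this prints window=2401 old=2400 new=2401, i.e. the old buffer size is one sample short of the window, and the +1 in the new macro closes the gap.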