Go to the documentation of this file.
30 for (
i = 0;
i < length;
i++) {
41 for (
i = 0;
i < length;
i++) {
52 for (
i = 0;
i < length;
i++) {
65 for (
i = 0;
i < length;
i++) {
68 b = (unsigned)((
int)(dfactor * (unsigned)(
b >> dshift) + 128) >> 8) << dshift;
static void decorrelate_sm(int32_t *p1, int32_t *p2, int length)
av_cold void ff_takdsp_init(TAKDSPContext *c)
static void decorrelate_ls(const int32_t *p1, int32_t *p2, int length)
static void decorrelate_sf(int32_t *p1, const int32_t *p2, int length, int dshift, int dfactor)
Undefined Behavior. In the C language, some operations are undefined — like signed integer overflow, dereferencing freed pointers, or accessing outside allocated space. Undefined Behavior must not occur in a C program; it is not safe even if the output of the undefined operations is unused. The unsafety may seem like nit-picking, but optimizing compilers have in fact optimized code on the assumption that no Undefined Behavior occurs. Optimizing code based on wrong assumptions can, and in some cases has, led to effects beyond the output of the computations. The signed integer overflow problem in speed-critical code: code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not care about overflow, but the computation itself does.
The reader does not expect b to be semantically signed here, and if the code is later changed — by maybe adding a cast, a division, or other operations — the signedness will almost certainly be mistaken. To avoid this confusion a new type was introduced: SUINT is the C unsigned type, but it holds a signed int semantically. To use the same example: SUINT a
#define i(width, name, range_min, range_max)
static void decorrelate_sr(int32_t *p1, const int32_t *p2, int length)
av_cold void ff_takdsp_init_riscv(TAKDSPContext *dsp)
void ff_takdsp_init_x86(TAKDSPContext *c)