Go to the documentation of this file.
int snr_offset, int floor,
const uint8_t *bap_tab, uint8_t *bap);
static int nb_coefs(int length, int level, uint64_t sn)
void ff_ac3_bit_alloc_calc_bap_armv6(int16_t *mask, int16_t *psd, int start, int end, int snr_offset, int floor, const uint8_t *bap_tab, uint8_t *bap)
int av_get_cpu_flags(void)
Return the flags which specify extensions supported by the CPU.
void ff_ac3_update_bap_counts_arm(uint16_t mant_cnt[16], uint8_t *bap, int len)
static atomic_int cpu_flags
void ff_float_to_fixed24_neon(int32_t *dst, const float *src, size_t len)
static __device__ float floor(float a)
static const uint8_t bap_tab[64]
void ff_ac3_sum_square_butterfly_int32_neon(int64_t sum[4], const int32_t *coef0, const int32_t *coef1, int len)
Undefined Behavior: In the C language, some operations are undefined — for example, signed integer overflow, dereferencing freed pointers, or accessing outside allocated space. Undefined behavior must not occur in a C program; it is not safe even if the output of the undefined operations is unused. The unsafety may seem like nit-picking, but optimizing compilers have in fact optimized code on the assumption that no undefined behavior occurs. Optimizing code based on wrong assumptions can and has in some cases led to effects beyond the output of computations. The signed integer overflow problem in speed-critical code: code which is highly optimized and works with signed integers sometimes has the problem that often the output of the computation does not c… [text truncated in source]
uint8_t, ptrdiff_t, const uint8_t, ptrdiff_t, int, intptr_t, intptr_t, int, int16_t *dst
av_cold void ff_ac3dsp_init_arm(AC3DSPContext *c)
void ff_ac3_sum_square_butterfly_float_neon(float sum[4], const float *coef0, const float *coef1, int len)
void ff_ac3_exponent_min_neon(uint8_t *exp, int num_reuse_blocks, int nb_coefs)
void ff_ac3_extract_exponents_neon(uint8_t *exp, int32_t *coef, int nb_coefs)
#define have_armv6(flags)