33 1, 2, 6, 7, 15, 16, 28, 29,
34 3, 5, 8, 14, 17, 27, 30, 43,
35 4, 9, 13, 18, 26, 31, 42, 44,
36 10, 12, 19, 25, 32, 41, 45, 54,
37 11, 20, 24, 33, 40, 46, 53, 55,
38 21, 23, 34, 39, 47, 52, 56, 61,
39 22, 35, 38, 48, 51, 57, 60, 62,
40 36, 37, 49, 50, 58, 59, 63, 64,
46 #define COMPILE_TEMPLATE_MMXEXT 0 47 #define COMPILE_TEMPLATE_SSE2 0 48 #define COMPILE_TEMPLATE_SSSE3 0 49 #define RENAME(a) a ## _mmx 50 #define RENAME_FDCT(a) a ## _mmx 54 #if HAVE_MMXEXT_INLINE 55 #undef COMPILE_TEMPLATE_SSSE3 56 #undef COMPILE_TEMPLATE_SSE2 57 #undef COMPILE_TEMPLATE_MMXEXT 58 #define COMPILE_TEMPLATE_MMXEXT 1 59 #define COMPILE_TEMPLATE_SSE2 0 60 #define COMPILE_TEMPLATE_SSSE3 0 63 #define RENAME(a) a ## _mmxext 64 #define RENAME_FDCT(a) a ## _mmxext 69 #undef COMPILE_TEMPLATE_MMXEXT 70 #undef COMPILE_TEMPLATE_SSE2 71 #undef COMPILE_TEMPLATE_SSSE3 72 #define COMPILE_TEMPLATE_MMXEXT 0 73 #define COMPILE_TEMPLATE_SSE2 1 74 #define COMPILE_TEMPLATE_SSSE3 0 77 #define RENAME(a) a ## _sse2 78 #define RENAME_FDCT(a) a ## _sse2 83 #undef COMPILE_TEMPLATE_MMXEXT 84 #undef COMPILE_TEMPLATE_SSE2 85 #undef COMPILE_TEMPLATE_SSSE3 86 #define COMPILE_TEMPLATE_MMXEXT 0 87 #define COMPILE_TEMPLATE_SSE2 1 88 #define COMPILE_TEMPLATE_SSSE3 1 91 #define RENAME(a) a ## _ssse3 92 #define RENAME_FDCT(a) a ## _sse2 108 "pxor %%mm7, %%mm7 \n\t" 110 "pxor %%mm0, %%mm0 \n\t" 111 "pxor %%mm1, %%mm1 \n\t" 112 "movq (%0), %%mm2 \n\t" 113 "movq 8(%0), %%mm3 \n\t" 114 "pcmpgtw %%mm2, %%mm0 \n\t" 115 "pcmpgtw %%mm3, %%mm1 \n\t" 116 "pxor %%mm0, %%mm2 \n\t" 117 "pxor %%mm1, %%mm3 \n\t" 118 "psubw %%mm0, %%mm2 \n\t" 119 "psubw %%mm1, %%mm3 \n\t" 120 "movq %%mm2, %%mm4 \n\t" 121 "movq %%mm3, %%mm5 \n\t" 122 "psubusw (%2), %%mm2 \n\t" 123 "psubusw 8(%2), %%mm3 \n\t" 124 "pxor %%mm0, %%mm2 \n\t" 125 "pxor %%mm1, %%mm3 \n\t" 126 "psubw %%mm0, %%mm2 \n\t" 127 "psubw %%mm1, %%mm3 \n\t" 128 "movq %%mm2, (%0) \n\t" 129 "movq %%mm3, 8(%0) \n\t" 130 "movq %%mm4, %%mm2 \n\t" 131 "movq %%mm5, %%mm3 \n\t" 132 "punpcklwd %%mm7, %%mm4 \n\t" 133 "punpckhwd %%mm7, %%mm2 \n\t" 134 "punpcklwd %%mm7, %%mm5 \n\t" 135 "punpckhwd %%mm7, %%mm3 \n\t" 136 "paddd (%1), %%mm4 \n\t" 137 "paddd 8(%1), %%mm2 \n\t" 138 "paddd 16(%1), %%mm5 \n\t" 139 "paddd 24(%1), %%mm3 \n\t" 140 "movq %%mm4, (%1) \n\t" 
141 "movq %%mm2, 8(%1) \n\t" 142 "movq %%mm5, 16(%1) \n\t" 143 "movq %%mm3, 24(%1) \n\t" 164 "pxor %%xmm7, %%xmm7 \n\t" 166 "pxor %%xmm0, %%xmm0 \n\t" 167 "pxor %%xmm1, %%xmm1 \n\t" 168 "movdqa (%0), %%xmm2 \n\t" 169 "movdqa 16(%0), %%xmm3 \n\t" 170 "pcmpgtw %%xmm2, %%xmm0 \n\t" 171 "pcmpgtw %%xmm3, %%xmm1 \n\t" 172 "pxor %%xmm0, %%xmm2 \n\t" 173 "pxor %%xmm1, %%xmm3 \n\t" 174 "psubw %%xmm0, %%xmm2 \n\t" 175 "psubw %%xmm1, %%xmm3 \n\t" 176 "movdqa %%xmm2, %%xmm4 \n\t" 177 "movdqa %%xmm3, %%xmm5 \n\t" 178 "psubusw (%2), %%xmm2 \n\t" 179 "psubusw 16(%2), %%xmm3 \n\t" 180 "pxor %%xmm0, %%xmm2 \n\t" 181 "pxor %%xmm1, %%xmm3 \n\t" 182 "psubw %%xmm0, %%xmm2 \n\t" 183 "psubw %%xmm1, %%xmm3 \n\t" 184 "movdqa %%xmm2, (%0) \n\t" 185 "movdqa %%xmm3, 16(%0) \n\t" 186 "movdqa %%xmm4, %%xmm6 \n\t" 187 "movdqa %%xmm5, %%xmm0 \n\t" 188 "punpcklwd %%xmm7, %%xmm4 \n\t" 189 "punpckhwd %%xmm7, %%xmm6 \n\t" 190 "punpcklwd %%xmm7, %%xmm5 \n\t" 191 "punpckhwd %%xmm7, %%xmm0 \n\t" 192 "paddd (%1), %%xmm4 \n\t" 193 "paddd 16(%1), %%xmm6 \n\t" 194 "paddd 32(%1), %%xmm5 \n\t" 195 "paddd 48(%1), %%xmm0 \n\t" 196 "movdqa %%xmm4, (%1) \n\t" 197 "movdqa %%xmm6, 16(%1) \n\t" 198 "movdqa %%xmm5, 32(%1) \n\t" 199 "movdqa %%xmm0, 48(%1) \n\t" 208 "%xmm4",
"%xmm5",
"%xmm6",
"%xmm7")
228 #if HAVE_6REGS && HAVE_MMXEXT_INLINE 240 #if HAVE_6REGS && HAVE_SSSE3_INLINE int dct_algo
DCT algorithm, see FF_DCT_* below.
#define INLINE_SSE2(flags)
static atomic_int cpu_flags
Macro definitions for various function/variable attributes.
The exact code depends on how similar the blocks are and how related they are to the block
it's the only field you need to keep, assuming you have a context. There is some magic you don't need to care about around this — just let it. vf offset
uint16_t(* dct_offset)[64]
#define DECLARE_ALIGNED(n, t, v)
Declare a variable that is aligned in memory.
#define INLINE_MMX(flags)
#define INLINE_SSSE3(flags)
static const uint16_t inv_zigzag_direct16[64]
av_cold void ff_dct_encode_init_x86(MpegEncContext *s)
Libavcodec external API header.
int av_get_cpu_flags(void)
Return the flags which specify extensions supported by the CPU.
int(* dct_quantize)(struct MpegEncContext *s, int16_t *block, int n, int qscale, int *overflow)
void(* denoise_dct)(struct MpegEncContext *s, int16_t *block)
#define XMM_CLOBBERS_ONLY(...)
struct AVCodecContext * avctx
#define INLINE_MMXEXT(flags)
/*
 * NOTE(review): extraction fragment — this defines a GNU assembler (GAS)
 * macro named "parse_r" that maps a register name \r to a numeric index
 * stored in the symbol \var.  \var is initialized to -1; each _IFC_REG(n)
 * expansion (defined elsewhere in the file — presumably an .ifc string
 * comparison of \r against register n's name, setting \var = n on a
 * match; TODO confirm against the _IFC_REG definition) covers registers
 * 0..31.  If no expansion matched, \var is still negative (.iflt \var)
 * and assembly aborts with ".error Unable to parse register name".
 */
__asm__(".macro parse_r var r\n\t""\\var = -1\n\t"_IFC_REG(0) _IFC_REG(1) _IFC_REG(2) _IFC_REG(3) _IFC_REG(4) _IFC_REG(5) _IFC_REG(6) _IFC_REG(7) _IFC_REG(8) _IFC_REG(9) _IFC_REG(10) _IFC_REG(11) _IFC_REG(12) _IFC_REG(13) _IFC_REG(14) _IFC_REG(15) _IFC_REG(16) _IFC_REG(17) _IFC_REG(18) _IFC_REG(19) _IFC_REG(20) _IFC_REG(21) _IFC_REG(22) _IFC_REG(23) _IFC_REG(24) _IFC_REG(25) _IFC_REG(26) _IFC_REG(27) _IFC_REG(28) _IFC_REG(29) _IFC_REG(30) _IFC_REG(31)".iflt \\var\n\t"".error \"Unable to parse register name \\r\"\n\t"".endif\n\t"".endm")